From f98cea3bcfb4730b238ab8605ce6d48c2998aa44 Mon Sep 17 00:00:00 2001 From: master <> Date: Sun, 2 Nov 2025 13:40:38 +0200 Subject: [PATCH] Add Authority Advisory AI and API Lifecycle Configuration - Introduced AuthorityAdvisoryAiOptions and related classes for managing advisory AI configurations, including remote inference options and tenant-specific settings. - Added AuthorityApiLifecycleOptions to control API lifecycle settings, including legacy OAuth endpoint configurations. - Implemented validation and normalization methods for both advisory AI and API lifecycle options to ensure proper configuration. - Created AuthorityNotificationsOptions and its related classes for managing notification settings, including ack tokens, webhooks, and escalation options. - Developed IssuerDirectoryClient and related models for interacting with the issuer directory service, including caching mechanisms and HTTP client configurations. - Added support for dependency injection through ServiceCollectionExtensions for the Issuer Directory Client. - Updated project file to include necessary package references for the new Issuer Directory Client library. 
--- deploy/helm/stellaops/values.yaml | 110 +- docs/11_AUTHORITY.md | 792 +- docs/24_OFFLINE_KIT.md | 52 +- docs/TASKS.md | 2 +- docs/airgap/airgap-mode.md | 143 +- docs/api/authority-legacy-auth-endpoints.md | 32 + docs/benchmarks/scanner-rust-analyzer.md | 47 + docs/dev/32_AUTH_CLIENT_GUIDE.md | 43 +- docs/implplan/SPRINTS.md | 43 + docs/implplan/SPRINTS_PRIOR_20251031.md | 20 +- docs/implplan/SPRINT_100_identity_signing.md | 105 +- .../implplan/SPRINT_110_ingestion_evidence.md | 280 +- docs/implplan/SPRINT_120_policy_reasoning.md | 304 +- docs/implplan/SPRINT_130_scanner_surface.md | 192 +- docs/implplan/SPRINT_140_runtime_signals.md | 48 +- .../SPRINT_150_scheduling_automation.md | 138 +- docs/implplan/SPRINT_160_export_evidence.md | 92 +- .../SPRINT_170_notifications_telemetry.md | 58 +- docs/implplan/SPRINT_180_experience_sdks.md | 324 +- docs/implplan/SPRINT_190_ops_offline.md | 176 +- .../SPRINT_200_documentation_process.md | 262 +- docs/modules/attestor/README.md | 26 +- docs/modules/attestor/architecture.md | 245 +- docs/modules/attestor/payloads.md | 48 + docs/modules/attestor/ttl-validation.md | 41 + docs/modules/attestor/workflows.md | 247 + docs/modules/authority/architecture.md | 883 +- docs/modules/cli/architecture.md | 16 +- docs/modules/cli/guides/cli-reference.md | 628 +- docs/modules/excititor/architecture.md | 4 +- docs/modules/issuer-directory/architecture.md | 95 + docs/modules/notify/architecture.md | 33 +- docs/modules/orchestrator/architecture.md | 7 +- docs/modules/scanner/design/surface-fs.md | 7 +- .../modules/scanner/design/surface-secrets.md | 13 +- .../scanner/design/surface-validation.md | 37 +- .../operations/entrypoint-static-analysis.md | 149 +- docs/notifications/architecture.md | 2 +- docs/notifications/digests.md | 2 +- docs/notifications/overview.md | 151 +- .../pack-approvals-integration.md | 124 +- docs/observability/observability.md | 277 +- docs/quickstart.md | 7 +- docs/security/authority-scopes.md | 579 +- 
docs/security/console-security.md | 10 +- docs/security/pack-signing-and-rbac.md | 330 +- docs/task-packs/authoring-guide.md | 416 +- docs/task-packs/registry.md | 348 +- docs/task-packs/runbook.md | 324 +- docs/task-packs/spec.md | 498 +- docs/updates/2025-11-01-orch-admin-scope.md | 18 + etc/authority.yaml | 72 +- etc/authority.yaml.sample | 830 +- etc/issuer-directory.yaml.sample | 22 + etc/notify.dev.yaml | 9 +- etc/notify.prod.yaml | 9 +- etc/notify.stage.yaml | 9 +- etc/notify.yaml.sample | 9 +- etc/packs-registry.yaml.sample | 62 + etc/task-runner.yaml.sample | 69 + ops/offline-kit/build_offline_kit.py | 24 +- ops/offline-kit/run-python-analyzer-smoke.sh | 72 +- ops/offline-kit/run-rust-analyzer-smoke.sh | 37 + .../python/StellaOps.Auth.Abstractions.xml | 767 ++ .../python/StellaOps.Auth.Client.xml | 233 + ...ps.Scanner.Analyzers.Lang.Python.deps.json | 858 ++ .../rust/StellaOps.Auth.Abstractions.xml | 767 ++ out/analyzers/rust/StellaOps.Auth.Client.xml | 233 + ...aOps.Scanner.Analyzers.Lang.Rust.deps.json | 858 ++ package-lock.json | 92 +- package.json | 15 +- scripts/render_docs.py | 62 +- scripts/run-attestor-ttl-validation.sh | 81 + scripts/validate-attestation-schemas.mjs | 145 + .../authority/openapi.yaml | 98 +- .../DsseCompressionAlgorithm.cs | 8 + .../DsseDetachedPayloadReference.cs | 32 + .../DsseEnvelope.cs | 48 + .../DsseEnvelopeSerializationOptions.cs | 14 + .../DsseEnvelopeSerializationResult.cs | 38 + .../DsseEnvelopeSerializer.cs | 331 + .../DsseSignature.cs | 31 + .../EnvelopeKey.cs | 301 + .../EnvelopeKeyIdCalculator.cs | 54 + .../EnvelopeSignature.cs | 48 + .../EnvelopeSignatureResult.cs | 56 + .../EnvelopeSignatureService.cs | 164 + .../DsseEnvelopeSerializerTests.cs | 57 + .../EnvelopeSignatureServiceTests.cs | 149 + .../StellaOps.Attestor.Envelope.Tests.csproj | 22 + .../StellaOps.Attestor.Envelope.csproj | 24 + .../StellaOps.Attestor.Envelope/TASKS.md | 26 +- .../DsseEnvelopeSerializerTests.cs | 139 + 
.../StellaOps.Attestor.Envelope.Tests.csproj | 30 + .../StellaOps.Attestor.Types/AGENTS.md | 1 + .../StellaOps.Attestor.Types/TASKS.md | 8 +- .../Program.cs | 1189 ++ .../StellaOps.Attestor.Types.Generator.csproj | 9 + .../fixtures/v1/build-provenance.sample.json | 107 + .../fixtures/v1/custom-evidence.sample.json | 39 + .../fixtures/v1/policy-evaluation.sample.json | 77 + .../v1/risk-profile-evidence.sample.json | 68 + .../fixtures/v1/sbom-attestation.sample.json | 80 + .../fixtures/v1/scan-results.sample.json | 126 + .../fixtures/v1/vex-attestation.sample.json | 75 + .../generated/go/go.mod | 3 + .../generated/go/types.go | 628 + .../generated/go/types_test.go | 239 + .../generated/ts/index.test.ts | 195 + .../generated/ts/index.ts | 945 ++ .../generated/ts/package-lock.json | 237 + .../generated/ts/package.json | 16 + .../generated/ts/tsconfig.json | 19 + .../samples/README.md | 23 + .../samples/build-provenance.v1.json | 71 + .../samples/custom-evidence.v1.json | 48 + .../samples/policy-evaluation.v1.json | 69 + .../samples/risk-profile-evidence.v1.json | 50 + .../samples/sbom-attestation.v1.json | 80 + .../samples/scan-results.v1.json | 102 + .../samples/vex-attestation.v1.json | 67 + .../schemas/attestation-common.v1.schema.json | 372 + .../stellaops-build-provenance.v1.schema.json | 160 + .../stellaops-custom-evidence.v1.schema.json | 63 + ...stellaops-policy-evaluation.v1.schema.json | 100 + .../stellaops-risk-profile.v1.schema.json | 88 + .../stellaops-sbom-attestation.v1.schema.json | 107 + .../stellaops-scan-results.v1.schema.json | 122 + .../stellaops-vex-attestation.v1.schema.json | 95 + .../AttestorVerificationEngine.cs | 960 ++ .../IAttestorVerificationEngine.cs | 14 + .../StellaOps.Attestor.Verify.csproj | 12 + .../StellaOps.Attestor.Verify/TASKS.md | 8 +- .../Bulk/BulkVerificationModels.cs | 94 + .../Bulk/IBulkVerificationJobStore.cs | 18 + .../Observability/AttestorActivitySource.cs | 59 + .../Observability/AttestorMetrics.cs | 120 +- 
.../Observability/AttestorTelemetryTags.cs | 10 + .../Offline/AttestorOfflineBundle.cs | 74 + .../Offline/IAttestorBundleService.cs | 11 + .../Options/AttestorOptions.cs | 449 +- .../Signing/AttestationSignRequest.cs | 50 + .../Signing/AttestationSignResult.cs | 24 + .../Signing/AttestorSigningException.cs | 20 + .../Signing/DssePreAuthenticationEncoding.cs | 36 + .../Signing/IAttestationSigningService.cs | 13 + .../StellaOps.Attestor.Core.csproj | 22 +- .../Storage/AttestorEntry.cs | 233 +- .../Storage/AttestorEntryContinuationToken.cs | 83 + .../Storage/AttestorEntryQuery.cs | 36 + .../Storage/IAttestorArchiveStore.cs | 10 +- .../Storage/IAttestorEntryRepository.cs | 14 +- .../Submission/AttestorSubmissionResult.cs | 199 +- .../Submission/AttestorSubmissionValidator.cs | 144 +- .../ITransparencyWitnessClient.cs | 9 + .../TransparencyWitnessObservation.cs | 22 + .../TransparencyWitnessRequest.cs | 9 + .../AttestorVerificationRequest.cs | 28 +- .../AttestorVerificationResult.cs | 18 +- .../IAttestorVerificationCache.cs | 13 + .../Verification/VerificationReport.cs | 185 + .../Verification/VerificationSectionStatus.cs | 12 + .../Bulk/BulkVerificationWorker.cs | 240 + .../Bulk/MongoBulkVerificationJobStore.cs | 343 + .../Offline/AttestorBundleService.cs | 269 + .../ServiceCollectionExtensions.cs | 74 +- .../Signing/AttestorSigningKeyRegistry.cs | 347 + .../Signing/AttestorSigningService.cs | 260 + .../StellaOps.Attestor.Infrastructure.csproj | 49 +- .../Storage/CachingAttestorDedupeStore.cs | 56 + .../Storage/MongoAttestorAuditSink.cs | 44 +- .../Storage/MongoAttestorDedupeStore.cs | 111 + .../Storage/MongoAttestorEntryRepository.cs | 951 +- .../Storage/NullAttestorArchiveStore.cs | 18 +- .../Storage/S3AttestorArchiveStore.cs | 254 +- .../Submission/AttestorSubmissionService.cs | 1389 +- .../HttpTransparencyWitnessClient.cs | 223 + .../NullTransparencyWitnessClient.cs | 13 + .../AttestorVerificationService.cs | 1110 +- .../CachedAttestorVerificationService.cs | 96 + 
.../InMemoryAttestorVerificationCache.cs | 115 + .../NoOpAttestorVerificationCache.cs | 17 + .../AttestationBundleEndpointsTests.cs | 271 + .../AttestationQueryTests.cs | 109 + .../AttestorEntryRepositoryTests.cs | 117 + .../AttestorSigningServiceTests.cs | 239 + .../AttestorStorageTests.cs | 105 + .../AttestorSubmissionServiceTests.cs | 217 +- ...testorSubmissionValidatorHardeningTests.cs | 169 + .../AttestorVerificationServiceTests.cs | 877 +- .../BulkVerificationContractsTests.cs | 76 + .../BulkVerificationWorkerTests.cs | 243 + .../CachedAttestorVerificationServiceTests.cs | 122 + .../HttpTransparencyWitnessClientTests.cs | 187 + .../LiveDedupeStoreTests.cs | 110 + .../StellaOps.Attestor.Tests.csproj | 5 +- .../StellaOps.Attestor.Tests/TestDoubles.cs | 266 +- .../TestSupport/TestAttestorDoubles.cs | 110 + .../Contracts/AttestationBundleContracts.cs | 88 + .../Contracts/AttestationListContracts.cs | 145 + .../Contracts/AttestationSignContracts.cs | 56 + .../Contracts/BulkVerificationContracts.cs | 216 + .../StellaOps.Attestor.WebService/Program.cs | 1214 +- .../Properties/AssemblyInfo.cs | 3 + .../StellaOps.Attestor/StellaOps.Attestor.sln | 40 +- src/Attestor/StellaOps.Attestor/TASKS.md | 21 +- .../AttestationGoldenSamplesTests.cs | 123 + .../StellaOps.Attestor.Types.Tests.csproj | 15 + .../authority/openapi.yaml | 1573 ++- .../StellaOpsScopesTests.cs | 89 +- .../StellaOpsClaimTypes.cs | 25 + .../StellaOpsHttpHeaderNames.cs | 12 + .../StellaOpsScopes.cs | 270 +- .../ServiceCollectionExtensionsTests.cs | 205 + .../StellaOps.Auth.Client/README.NuGet.md | 2 + .../ServiceCollectionExtensions.cs | 23 + .../StellaOpsApiAuthMode.cs | 22 + .../StellaOpsApiAuthenticationOptions.cs | 97 + .../StellaOpsBearerTokenHandler.cs | 123 + .../StellaOpsTokenResult.cs | 5 + .../StellaOpsResourceServerPoliciesTests.cs | 39 + ...StellaOpsScopeAuthorizationHandlerTests.cs | 476 +- .../README.NuGet.md | 8 +- .../StellaOps.Auth.ServerIntegration.csproj | 4 +- 
.../StellaOpsResourceServerPolicies.cs | 86 + .../StellaOpsScopeAuthorizationHandler.cs | 959 +- .../AuthorityClientMetadataKeys.cs | 17 +- .../AuthorityMongoDefaults.cs | 1 + .../Documents/AuthorityAirgapAuditDocument.cs | 70 + .../Documents/AuthorityTokenDocument.cs | 18 +- .../Extensions/ServiceCollectionExtensions.cs | 36 +- ...thorityAirgapAuditCollectionInitializer.cs | 38 + .../EnsureAuthorityCollectionsMigration.cs | 3 +- .../Stores/AuthorityAirgapAuditStore.cs | 103 + .../Stores/AuthorityTokenStore.cs | 54 + .../Stores/IAuthorityAirgapAuditStore.cs | 51 + .../Stores/IAuthorityTokenStore.cs | 8 + .../AdvisoryAiRemoteInferenceEndpointTests.cs | 282 + ...uthorityAdvisoryAiConsentEvaluatorTests.cs | 143 + .../Airgap/AirgapAuditEndpointsTests.cs | 256 + .../Console/ConsoleEndpointsTests.cs | 46 +- .../AuthorityWebApplicationFactory.cs | 100 +- .../Infrastructure/TestAuthHandler.cs | 58 + .../AuthorityAckTokenIssuerTests.cs | 195 + .../AuthorityAckTokenKeyManagerTests.cs | 149 + ...AuthorityWebhookAllowlistEvaluatorTests.cs | 58 + .../NotifyAckTokenRotationEndpointTests.cs | 220 + .../OpenApi/OpenApiDiscoveryEndpointTests.cs | 6 +- .../ClientCredentialsAndTokenHandlersTests.cs | 5868 ++++---- .../OpenIddict/DiscoveryMetadataTests.cs | 63 + .../OpenIddict/LegacyAuthDeprecationTests.cs | 112 + .../OpenIddict/PasswordGrantHandlersTests.cs | 1108 +- .../Permalinks/VulnPermalinkServiceTests.cs | 28 +- .../Signing/AuthorityJwksServiceTests.cs | 19 +- .../AdvisoryAiRemoteInferenceLogRequest.cs | 22 + .../AuthorityAdvisoryAiConsentEvaluator.cs | 151 + .../Airgap/AirgapAuditEndpointExtensions.cs | 321 + .../Airgap/AuthorityAirgapAuditService.cs | 146 + .../LegacyAuthDeprecationMiddleware.cs | 254 + .../Notifications/Ack/AckTokenModels.cs | 84 + .../Notifications/Ack/AckTokenPayload.cs | 252 + .../Ack/AckTokenSigningUtilities.cs | 42 + .../Ack/AuthorityAckTokenIssuer.cs | 206 + .../Ack/AuthorityAckTokenKeyManager.cs | 397 + .../Ack/AuthorityAckTokenVerifier.cs | 143 + 
.../AuthorityWebhookAllowlistEvaluator.cs | 107 + .../IncidentAuditEndpointExtensions.cs | 102 + .../AuthorityOpenApiDocumentProvider.cs | 2 +- .../OpenApiDiscoveryEndpointExtensions.cs | 6 +- .../AuthorityOpenIddictConstants.cs | 7 + .../Handlers/ClientCredentialsHandlers.cs | 178 +- .../OpenIddict/Handlers/DiscoveryHandlers.cs | 77 + .../Handlers/PasswordGrantHandlers.cs | 611 +- .../Handlers/RefreshTokenHandlers.cs | 40 + .../Handlers/TokenPersistenceHandlers.cs | 20 +- .../Handlers/TokenValidationHandlers.cs | 52 +- .../OpenIddict/TokenRequestTamperInspector.cs | 16 +- .../StellaOps.Authority/Program.cs | 1162 +- .../Signing/AuthorityJwksService.cs | 24 +- src/Authority/StellaOps.Authority/TASKS.md | 30 +- .../StellaOps.Cli/Commands/CommandFactory.cs | 362 +- .../StellaOps.Cli/Commands/CommandHandlers.cs | 11360 ++++++++-------- .../Services/BackendOperationsClient.cs | 5005 +++---- .../Services/IBackendOperationsClient.cs | 2 + .../Models/EntryTraceResponseModel.cs | 12 + src/Cli/StellaOps.Cli/StellaOps.Cli.csproj | 1 + src/Cli/StellaOps.Cli/TASKS.md | 1 + .../Commands/CommandHandlersTests.cs | 196 +- .../Services/BackendOperationsClientTests.cs | 128 +- .../StellaOps.Concelier.WebService/Program.cs | 45 + .../OpenApiDiscoveryDocumentProvider.cs | 383 + .../StellaOps.Concelier.WebService/TASKS.md | 2 +- .../Program.cs | 87 +- ...StellaOps.EvidenceLocker.WebService.csproj | 61 +- .../appsettings.json | 29 +- .../StellaOps.Excititor.Worker/Program.cs | 10 + .../Signature/VerifyingVexRawDocumentSink.cs | 36 +- .../Signature/WorkerSignatureVerifier.cs | 221 +- .../StellaOps.Excititor.Worker.csproj | 3 +- src/Excititor/StellaOps.Excititor.sln | 14 + .../StellaOps.Excititor.Attestation.csproj | 35 +- .../StellaOps.Excititor.Core/VexClaim.cs | 105 +- .../VexMongoModels.cs | 107 +- .../DefaultVexProviderRunnerTests.cs | 89 +- .../Signature/WorkerSignatureVerifierTests.cs | 197 +- .../Program.cs | 87 +- .../StellaOps.ExportCenter.WebService.csproj | 61 +- 
.../appsettings.json | 29 +- .../Services/IssuerDirectoryServiceTests.cs | 191 + .../Services/IssuerKeyServiceTests.cs | 198 + .../Services/IssuerTrustServiceTests.cs | 153 + ...tellaOps.IssuerDirectory.Core.Tests.csproj | 16 + .../Abstractions/IIssuerAuditSink.cs | 11 + .../Abstractions/IIssuerKeyRepository.cs | 19 + .../Abstractions/IIssuerRepository.cs | 19 + .../Abstractions/IIssuerTrustRepository.cs | 15 + .../Domain/IssuerAuditEntry.cs | 51 + .../Domain/IssuerContact.cs | 28 + .../Domain/IssuerEndpoint.cs | 33 + .../Domain/IssuerKeyMaterial.cs | 21 + .../Domain/IssuerKeyRecord.cs | 112 + .../Domain/IssuerKeyStatus.cs | 11 + .../Domain/IssuerKeyType.cs | 11 + .../Domain/IssuerMetadata.cs | 61 + .../Domain/IssuerRecord.cs | 160 + .../Domain/IssuerTenants.cs | 12 + .../Domain/IssuerTrustOverrideRecord.cs | 72 + .../Observability/IssuerDirectoryMetrics.cs | 54 + .../Services/IssuerDirectoryService.cs | 252 + .../Services/IssuerKeyService.cs | 322 + .../Services/IssuerTrustService.cs | 137 + .../StellaOps.IssuerDirectory.Core.csproj | 9 + .../Validation/IssuerKeyValidationResult.cs | 25 + .../Validation/IssuerKeyValidator.cs | 126 + .../Audit/MongoIssuerAuditSink.cs | 35 + .../Documents/IssuerAuditDocument.cs | 31 + .../Documents/IssuerDocument.cs | 103 + .../Documents/IssuerKeyDocument.cs | 55 + .../Documents/IssuerTrustDocument.cs | 34 + .../Internal/IssuerDirectoryMongoContext.cs | 103 + .../Options/IssuerDirectoryMongoOptions.cs | 54 + .../Repositories/MongoIssuerKeyRepository.cs | 131 + .../Repositories/MongoIssuerRepository.cs | 177 + .../MongoIssuerTrustRepository.cs | 88 + .../Seed/CsafPublisherSeedLoader.cs | 146 + .../ServiceCollectionExtensions.cs | 36 + ...aOps.IssuerDirectory.Infrastructure.csproj | 21 + .../Constants/IssuerDirectoryHeaders.cs | 6 + .../Contracts/IssuerDtos.cs | 178 + .../Contracts/IssuerKeyDtos.cs | 63 + .../Contracts/IssuerTrustDtos.cs | 44 + .../Endpoints/IssuerEndpoints.cs | 166 + .../Endpoints/IssuerKeyEndpoints.cs | 190 + 
.../Endpoints/IssuerTrustEndpoints.cs | 110 + .../IssuerDirectoryWebServiceOptions.cs | 77 + .../Program.cs | 213 + .../Security/IssuerDirectoryPolicies.cs | 8 + .../Services/ActorResolver.cs | 17 + .../Services/ScopeAuthorization.cs | 53 + .../Services/TenantResolver.cs | 37 + ...tellaOps.IssuerDirectory.WebService.csproj | 30 + .../StellaOps.IssuerDirectory.sln | 43 + .../StellaOps.IssuerDirectory/TASKS.md | 14 +- .../data/csaf-publishers.json | 94 + .../Options/NotifyWebServiceOptions.cs | 12 +- .../NotifyWebServiceOptionsValidator.cs | 8 +- .../StellaOps.Notify.WebService/Program.cs | 1705 +-- .../Security/AllowAllAuthenticationHandler.cs | 31 + .../Security/NotifyPolicies.cs | 11 +- .../CrudEndpointsTests.cs | 866 +- .../StellaOps.PacksRegistry/TASKS.md | 2 +- .../GatewayActivationTests.cs | 2 +- .../Contracts/EntryTraceResponse.cs | 10 + .../Endpoints/ScanEndpoints.cs | 665 +- .../Options/ScannerWorkerOptions.cs | 2 + .../Processing/EntryTraceExecutionService.cs | 1042 +- .../StellaOps.Scanner.Worker/Program.cs | 183 +- .../StellaOps.Scanner.Worker.csproj | 7 +- src/Scanner/StellaOps.Scanner.sln | 64 +- .../Program.cs | 15 + .../RustBenchmarkShared.cs | 109 + .../RustBenchmarkUtility.cs | 56 + .../RustLanguageAnalyzerBenchmark.cs | 79 + ...nner.Analyzers.Lang.Rust.Benchmarks.csproj | 21 + .../Internal/RustCargoLockParser.cs | 6 +- .../TASKS.md | 4 +- .../Contracts/ScanAnalysisKeys.cs | 2 + .../Contracts/ScanMetadataKeys.cs | 1 + .../EntryTraceAnalyzer.cs | 625 +- .../EntryTraceCacheEnvelope.cs | 8 + .../EntryTraceCacheSerializer.cs | 35 + .../EntryTraceContext.cs | 12 +- .../EntryTraceImageContextFactory.cs | 472 +- .../EntryTraceResult.cs | 32 + .../EntryTraceTypes.cs | 140 +- .../FileSystem/DirectoryRootFileSystem.cs | 325 + .../FileSystem/IRootFileSystem.cs | 25 +- .../FileSystem/LayeredRootFileSystem.cs | 333 +- .../Oci/OciImageConfig.cs | 87 +- .../Runtime/EntryTraceRuntimeReconciler.cs | 321 + .../Runtime/ProcFileSystemSnapshot.cs | 230 + 
.../Runtime/ProcGraph.cs | 8 + .../Runtime/ProcGraphBuilder.cs | 104 + .../Runtime/ProcProcess.cs | 11 + .../EntryTraceGraphSerializer.cs | 309 + .../Serialization/EntryTraceNdjsonWriter.cs | 333 + .../ServiceCollectionExtensions.cs | 61 +- .../StellaOps.Scanner.EntryTrace.csproj | 4 +- .../StellaOps.Scanner.EntryTrace/TASKS.md | 15 +- .../Catalog/EntryTraceDocument.cs | 23 + .../Extensions/ServiceCollectionExtensions.cs | 9 +- .../Mongo/MongoCollectionProvider.cs | 1 + .../Repositories/EntryTraceRepository.cs | 33 + .../ScannerStorageDefaults.cs | 1 + .../Services/EntryTraceResultStore.cs | 54 + .../StellaOps.Scanner.Storage.csproj | 23 +- .../AssemblyInfo.cs | 3 + .../ISurfaceEnvironment.cs | 19 + .../ServiceCollectionExtensions.cs | 19 + .../StellaOps.Scanner.Surface.Env.csproj | 20 + .../SurfaceEnvironment.cs | 41 + .../SurfaceEnvironmentBuilder.cs | 295 + .../SurfaceEnvironmentException.cs | 20 + .../SurfaceEnvironmentOptions.cs | 53 + .../SurfaceEnvironmentSettings.cs | 25 + .../SurfaceSecretsConfiguration.cs | 15 + .../SurfaceTlsConfiguration.cs | 14 + .../StellaOps.Scanner.Surface.Env/TASKS.md | 4 +- .../FileSurfaceCache.cs | 151 + .../ISurfaceCache.cs | 24 + .../ServiceCollectionExtensions.cs | 59 + .../StellaOps.Scanner.Surface.FS.csproj | 26 + .../SurfaceCacheJsonSerializer.cs | 31 + .../SurfaceCacheKey.cs | 10 + .../SurfaceCacheOptions.cs | 22 + .../StellaOps.Scanner.Surface.FS/TASKS.md | 4 +- .../AssemblyInfo.cs | 3 + .../ISurfaceSecretProvider.cs | 10 + .../CompositeSurfaceSecretProvider.cs | 39 + .../Providers/FileSurfaceSecretProvider.cs | 65 + .../InMemorySurfaceSecretProvider.cs | 38 + .../Providers/InlineSurfaceSecretProvider.cs | 48 + .../KubernetesSurfaceSecretProvider.cs | 51 + .../ServiceCollectionExtensions.cs | 66 + .../StellaOps.Scanner.Surface.Secrets.csproj | 22 + .../SurfaceSecretHandle.cs | 89 + .../SurfaceSecretNotFoundException.cs | 12 + .../SurfaceSecretRequest.cs | 10 + .../SurfaceSecretsOptions.cs | 17 + .../TASKS.md | 4 +- 
.../ISurfaceValidationReporter.cs | 9 + .../ISurfaceValidator.cs | 14 + .../ISurfaceValidatorRunner.cs | 16 + .../LoggingSurfaceValidationReporter.cs | 49 + .../ServiceCollectionExtensions.cs | 44 + ...tellaOps.Scanner.Surface.Validation.csproj | 23 + .../SurfaceValidationBuilder.cs | 33 + .../SurfaceValidationContext.cs | 43 + .../SurfaceValidationException.cs | 14 + .../SurfaceValidationIssue.cs | 25 + .../SurfaceValidationIssueCodes.cs | 15 + .../SurfaceValidationOptions.cs | 18 + .../SurfaceValidationResult.cs | 29 + .../SurfaceValidationSeverity.cs | 11 + .../SurfaceValidatorRunner.cs | 98 + .../TASKS.md | 2 +- .../Validators/SurfaceCacheValidator.cs | 56 + .../Validators/SurfaceEndpointValidator.cs | 35 + .../Validators/SurfaceSecretsValidator.cs | 68 + .../Fixtures/lang/rust/fallback/expected.json | 25 + .../rust/heuristics/competitor-baseline.json | 8 + .../lang/rust/heuristics/expected.json | 68 + .../RustHeuristicCoverageComparisonTests.cs | 77 + .../Rust/RustLanguageAnalyzerTests.cs | 108 +- .../EntryTraceAnalyzerTests.cs | 455 +- .../EntryTraceNdjsonWriterTests.cs | 152 + .../LayeredRootFileSystemTests.cs | 60 +- .../EntryTraceRuntimeReconcilerTests.cs | 121 + .../Runtime/ProcFileSystemSnapshotTests.cs | 87 + .../Runtime/ProcGraphBuilderTests.cs | 57 + .../TestRootFileSystem.cs | 281 +- .../EntryTraceResultStoreTests.cs | 139 + .../RustFsArtifactObjectStoreTests.cs | 2 +- ...StellaOps.Scanner.Surface.Env.Tests.csproj | 17 + .../SurfaceEnvironmentBuilderTests.cs | 80 + .../SurfaceEnvironmentFeatureFlagTests.cs | 43 + .../FileSurfaceCacheTests.cs | 50 + .../StellaOps.Scanner.Surface.FS.Tests.csproj | 17 + .../FileSurfaceSecretProviderTests.cs | 47 + .../InlineSurfaceSecretProviderTests.cs | 38 + ...laOps.Scanner.Surface.Secrets.Tests.csproj | 18 + ...SecretsServiceCollectionExtensionsTests.cs | 52 + ...ps.Scanner.Surface.Validation.Tests.csproj | 18 + .../SurfaceValidatorRunnerTests.cs | 87 + .../ScansEndpointsTests.cs | 206 +- 
.../EntryTraceExecutionServiceTests.cs | 671 +- .../RedisWorkerSmokeTests.cs | 10 +- src/StellaOps.sln | 126 +- .../TestManifests.cs | 8 +- src/TaskRunner/StellaOps.TaskRunner/TASKS.md | 2 +- .../Program.cs | 84 +- ...tellaOps.TimelineIndexer.WebService.csproj | 61 +- .../appsettings.json | 29 +- .../LanguageAnalyzerSmoke.csproj | 10 +- src/Tools/LanguageAnalyzerSmoke/Program.cs | 338 +- .../AuthorityAdvisoryAiOptions.cs | 163 + .../AuthorityApiLifecycleOptions.cs | 77 + .../AuthorityNotificationsOptions.cs | 246 + .../StellaOpsAuthorityOptions.cs | 1496 +- .../IIssuerDirectoryClient.cs | 16 + .../IssuerDirectoryClient.cs | 120 + .../IssuerDirectoryClientOptions.cs | 44 + .../IssuerDirectoryModels.cs | 30 + .../ServiceCollectionExtensions.cs | 57 + .../StellaOps.IssuerDirectory.Client.csproj | 14 + .../StellaOpsAuthorityOptionsTests.cs | 161 +- 516 files changed, 68157 insertions(+), 24754 deletions(-) create mode 100644 docs/api/authority-legacy-auth-endpoints.md create mode 100644 docs/benchmarks/scanner-rust-analyzer.md create mode 100644 docs/modules/attestor/payloads.md create mode 100644 docs/modules/attestor/ttl-validation.md create mode 100644 docs/modules/attestor/workflows.md create mode 100644 docs/modules/issuer-directory/architecture.md create mode 100644 docs/updates/2025-11-01-orch-admin-scope.md create mode 100644 etc/issuer-directory.yaml.sample create mode 100644 etc/packs-registry.yaml.sample create mode 100644 etc/task-runner.yaml.sample create mode 100644 ops/offline-kit/run-rust-analyzer-smoke.sh create mode 100644 out/analyzers/python/StellaOps.Auth.Abstractions.xml create mode 100644 out/analyzers/python/StellaOps.Auth.Client.xml create mode 100644 out/analyzers/python/StellaOps.Scanner.Analyzers.Lang.Python.deps.json create mode 100644 out/analyzers/rust/StellaOps.Auth.Abstractions.xml create mode 100644 out/analyzers/rust/StellaOps.Auth.Client.xml create mode 100644 out/analyzers/rust/StellaOps.Scanner.Analyzers.Lang.Rust.deps.json create 
mode 100644 scripts/run-attestor-ttl-validation.sh create mode 100644 scripts/validate-attestation-schemas.mjs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/DsseCompressionAlgorithm.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/DsseDetachedPayloadReference.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelope.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelopeSerializationOptions.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelopeSerializationResult.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelopeSerializer.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/DsseSignature.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/EnvelopeKey.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/EnvelopeKeyIdCalculator.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/EnvelopeSignature.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/EnvelopeSignatureResult.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/EnvelopeSignatureService.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.Tests/DsseEnvelopeSerializerTests.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.Tests/EnvelopeSignatureServiceTests.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.Tests/StellaOps.Attestor.Envelope.Tests.csproj create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.csproj create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/__Tests/StellaOps.Attestor.Envelope.Tests/DsseEnvelopeSerializerTests.cs create mode 100644 src/Attestor/StellaOps.Attestor.Envelope/__Tests/StellaOps.Attestor.Envelope.Tests/StellaOps.Attestor.Envelope.Tests.csproj create mode 100644 
src/Attestor/StellaOps.Attestor.Types/Tools/StellaOps.Attestor.Types.Generator/Program.cs create mode 100644 src/Attestor/StellaOps.Attestor.Types/Tools/StellaOps.Attestor.Types.Generator/StellaOps.Attestor.Types.Generator.csproj create mode 100644 src/Attestor/StellaOps.Attestor.Types/fixtures/v1/build-provenance.sample.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/fixtures/v1/custom-evidence.sample.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/fixtures/v1/policy-evaluation.sample.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/fixtures/v1/risk-profile-evidence.sample.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/fixtures/v1/sbom-attestation.sample.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/fixtures/v1/scan-results.sample.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/fixtures/v1/vex-attestation.sample.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/generated/go/go.mod create mode 100644 src/Attestor/StellaOps.Attestor.Types/generated/go/types.go create mode 100644 src/Attestor/StellaOps.Attestor.Types/generated/go/types_test.go create mode 100644 src/Attestor/StellaOps.Attestor.Types/generated/ts/index.test.ts create mode 100644 src/Attestor/StellaOps.Attestor.Types/generated/ts/index.ts create mode 100644 src/Attestor/StellaOps.Attestor.Types/generated/ts/package-lock.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/generated/ts/package.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/generated/ts/tsconfig.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/samples/README.md create mode 100644 src/Attestor/StellaOps.Attestor.Types/samples/build-provenance.v1.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/samples/custom-evidence.v1.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/samples/policy-evaluation.v1.json create mode 100644 
src/Attestor/StellaOps.Attestor.Types/samples/risk-profile-evidence.v1.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/samples/sbom-attestation.v1.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/samples/scan-results.v1.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/samples/vex-attestation.v1.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/schemas/attestation-common.v1.schema.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-build-provenance.v1.schema.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-custom-evidence.v1.schema.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-policy-evaluation.v1.schema.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-risk-profile.v1.schema.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-sbom-attestation.v1.schema.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-scan-results.v1.schema.json create mode 100644 src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-vex-attestation.v1.schema.json create mode 100644 src/Attestor/StellaOps.Attestor.Verify/AttestorVerificationEngine.cs create mode 100644 src/Attestor/StellaOps.Attestor.Verify/IAttestorVerificationEngine.cs create mode 100644 src/Attestor/StellaOps.Attestor.Verify/StellaOps.Attestor.Verify.csproj create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Bulk/BulkVerificationModels.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Bulk/IBulkVerificationJobStore.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorActivitySource.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorTelemetryTags.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Offline/AttestorOfflineBundle.cs 
create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Offline/IAttestorBundleService.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/AttestationSignRequest.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/AttestationSignResult.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/AttestorSigningException.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/DssePreAuthenticationEncoding.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/IAttestationSigningService.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntryContinuationToken.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntryQuery.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Transparency/ITransparencyWitnessClient.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Transparency/TransparencyWitnessObservation.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Transparency/TransparencyWitnessRequest.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/IAttestorVerificationCache.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/VerificationReport.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/VerificationSectionStatus.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Bulk/BulkVerificationWorker.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Bulk/MongoBulkVerificationJobStore.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Offline/AttestorBundleService.cs create mode 100644 
src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningKeyRegistry.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningService.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/CachingAttestorDedupeStore.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorDedupeStore.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Transparency/HttpTransparencyWitnessClient.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Transparency/NullTransparencyWitnessClient.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/CachedAttestorVerificationService.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/InMemoryAttestorVerificationCache.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/NoOpAttestorVerificationCache.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestationBundleEndpointsTests.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestationQueryTests.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorEntryRepositoryTests.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSigningServiceTests.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorStorageTests.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionValidatorHardeningTests.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/BulkVerificationContractsTests.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/BulkVerificationWorkerTests.cs 
create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/CachedAttestorVerificationServiceTests.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/HttpTransparencyWitnessClientTests.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/LiveDedupeStoreTests.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/TestSupport/TestAttestorDoubles.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/AttestationBundleContracts.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/AttestationListContracts.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/AttestationSignContracts.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/BulkVerificationContracts.cs create mode 100644 src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Properties/AssemblyInfo.cs create mode 100644 src/Attestor/__Tests/StellaOps.Attestor.Types.Tests/AttestationGoldenSamplesTests.cs create mode 100644 src/Attestor/__Tests/StellaOps.Attestor.Types.Tests/StellaOps.Attestor.Types.Tests.csproj create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsHttpHeaderNames.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsApiAuthMode.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsApiAuthenticationOptions.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsBearerTokenHandler.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsResourceServerPoliciesTests.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsResourceServerPolicies.cs create mode 100644 
src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityAirgapAuditDocument.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityAirgapAuditCollectionInitializer.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityAirgapAuditStore.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityAirgapAuditStore.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/AdvisoryAi/AdvisoryAiRemoteInferenceEndpointTests.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/AdvisoryAi/AuthorityAdvisoryAiConsentEvaluatorTests.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Airgap/AirgapAuditEndpointsTests.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/TestAuthHandler.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/AuthorityAckTokenIssuerTests.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/AuthorityAckTokenKeyManagerTests.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/AuthorityWebhookAllowlistEvaluatorTests.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/NotifyAckTokenRotationEndpointTests.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/DiscoveryMetadataTests.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/LegacyAuthDeprecationTests.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority/AdvisoryAi/AdvisoryAiRemoteInferenceLogRequest.cs create mode 100644 
src/Authority/StellaOps.Authority/StellaOps.Authority/AdvisoryAi/AuthorityAdvisoryAiConsentEvaluator.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority/Airgap/AirgapAuditEndpointExtensions.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority/Airgap/AuthorityAirgapAuditService.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority/LegacyAuthDeprecationMiddleware.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AckTokenModels.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AckTokenPayload.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AckTokenSigningUtilities.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AuthorityAckTokenIssuer.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AuthorityAckTokenKeyManager.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AuthorityAckTokenVerifier.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/AuthorityWebhookAllowlistEvaluator.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority/Observability/IncidentAuditEndpointExtensions.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/DiscoveryHandlers.cs create mode 100644 src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RefreshTokenHandlers.cs create mode 100644 src/Cli/StellaOps.Cli/Services/Models/EntryTraceResponseModel.cs create mode 100644 src/Concelier/StellaOps.Concelier.WebService/Services/OpenApiDiscoveryDocumentProvider.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.cs create mode 100644 
src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerKeyServiceTests.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerTrustServiceTests.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/StellaOps.IssuerDirectory.Core.Tests.csproj create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerAuditSink.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerKeyRepository.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerRepository.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerTrustRepository.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerAuditEntry.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerContact.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerEndpoint.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyMaterial.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyRecord.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyStatus.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyType.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerMetadata.cs create mode 100644 
src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerTenants.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerTrustOverrideRecord.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Observability/IssuerDirectoryMetrics.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerTrustService.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/StellaOps.IssuerDirectory.Core.csproj create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidationResult.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Validation/IssuerKeyValidator.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Audit/MongoIssuerAuditSink.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Documents/IssuerAuditDocument.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Documents/IssuerDocument.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Documents/IssuerKeyDocument.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Documents/IssuerTrustDocument.cs create mode 100644 
src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Internal/IssuerDirectoryMongoContext.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Options/IssuerDirectoryMongoOptions.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Repositories/MongoIssuerKeyRepository.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Repositories/MongoIssuerRepository.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Repositories/MongoIssuerTrustRepository.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Seed/CsafPublisherSeedLoader.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/ServiceCollectionExtensions.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/StellaOps.IssuerDirectory.Infrastructure.csproj create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Constants/IssuerDirectoryHeaders.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Contracts/IssuerDtos.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Contracts/IssuerKeyDtos.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Contracts/IssuerTrustDtos.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Endpoints/IssuerEndpoints.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Endpoints/IssuerKeyEndpoints.cs create mode 100644 
src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Endpoints/IssuerTrustEndpoints.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Options/IssuerDirectoryWebServiceOptions.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Program.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Security/IssuerDirectoryPolicies.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Services/ActorResolver.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Services/ScopeAuthorization.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Services/TenantResolver.cs create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/StellaOps.IssuerDirectory.WebService.csproj create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.sln create mode 100644 src/IssuerDirectory/StellaOps.IssuerDirectory/data/csaf-publishers.json create mode 100644 src/Notify/StellaOps.Notify.WebService/Security/AllowAllAuthenticationHandler.cs create mode 100644 src/Scanner/StellaOps.Scanner.WebService/Contracts/EntryTraceResponse.cs create mode 100644 src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/Program.cs create mode 100644 src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/RustBenchmarkShared.cs create mode 100644 src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/RustBenchmarkUtility.cs create mode 100644 src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/RustLanguageAnalyzerBenchmark.cs create mode 100644 
src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks.csproj create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceCacheEnvelope.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceCacheSerializer.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceResult.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/DirectoryRootFileSystem.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/EntryTraceRuntimeReconciler.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcFileSystemSnapshot.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcGraph.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcGraphBuilder.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcProcess.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Serialization/EntryTraceGraphSerializer.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Serialization/EntryTraceNdjsonWriter.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/EntryTraceDocument.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/EntryTraceRepository.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Storage/Services/EntryTraceResultStore.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/AssemblyInfo.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/ISurfaceEnvironment.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/ServiceCollectionExtensions.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/StellaOps.Scanner.Surface.Env.csproj create mode 100644 
src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironment.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentBuilder.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentException.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentOptions.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentSettings.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceSecretsConfiguration.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceTlsConfiguration.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/FileSurfaceCache.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/ISurfaceCache.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/ServiceCollectionExtensions.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/StellaOps.Scanner.Surface.FS.csproj create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/SurfaceCacheJsonSerializer.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/SurfaceCacheKey.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/SurfaceCacheOptions.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/AssemblyInfo.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/ISurfaceSecretProvider.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/CompositeSurfaceSecretProvider.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/FileSurfaceSecretProvider.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/InMemorySurfaceSecretProvider.cs create mode 100644 
src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/InlineSurfaceSecretProvider.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/KubernetesSurfaceSecretProvider.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/ServiceCollectionExtensions.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/StellaOps.Scanner.Surface.Secrets.csproj create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretHandle.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretNotFoundException.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretRequest.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretsOptions.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ISurfaceValidationReporter.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ISurfaceValidator.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ISurfaceValidatorRunner.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/LoggingSurfaceValidationReporter.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ServiceCollectionExtensions.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/StellaOps.Scanner.Surface.Validation.csproj create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationBuilder.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationContext.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationException.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationIssue.cs create mode 100644 
src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationIssueCodes.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationOptions.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationResult.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationSeverity.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidatorRunner.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/Validators/SurfaceCacheValidator.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/Validators/SurfaceEndpointValidator.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/Validators/SurfaceSecretsValidator.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/fallback/expected.json create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/heuristics/competitor-baseline.json create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/heuristics/expected.json create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Rust/RustHeuristicCoverageComparisonTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/EntryTraceNdjsonWriterTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/Runtime/EntryTraceRuntimeReconcilerTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/Runtime/ProcFileSystemSnapshotTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/Runtime/ProcGraphBuilderTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/EntryTraceResultStoreTests.cs create mode 100644 
src/Scanner/__Tests/StellaOps.Scanner.Surface.Env.Tests/StellaOps.Scanner.Surface.Env.Tests.csproj create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Surface.Env.Tests/SurfaceEnvironmentBuilderTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Surface.Env.Tests/SurfaceEnvironmentFeatureFlagTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Surface.FS.Tests/FileSurfaceCacheTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Surface.FS.Tests/StellaOps.Scanner.Surface.FS.Tests.csproj create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/FileSurfaceSecretProviderTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/InlineSurfaceSecretProviderTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/StellaOps.Scanner.Surface.Secrets.Tests.csproj create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/SurfaceSecretsServiceCollectionExtensionsTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Surface.Validation.Tests/StellaOps.Scanner.Surface.Validation.Tests.csproj create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Surface.Validation.Tests/SurfaceValidatorRunnerTests.cs create mode 100644 src/__Libraries/StellaOps.Configuration/AuthorityAdvisoryAiOptions.cs create mode 100644 src/__Libraries/StellaOps.Configuration/AuthorityApiLifecycleOptions.cs create mode 100644 src/__Libraries/StellaOps.Configuration/AuthorityNotificationsOptions.cs create mode 100644 src/__Libraries/StellaOps.IssuerDirectory.Client/IIssuerDirectoryClient.cs create mode 100644 src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.cs create mode 100644 src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClientOptions.cs create mode 100644 src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryModels.cs create mode 100644 
src/__Libraries/StellaOps.IssuerDirectory.Client/ServiceCollectionExtensions.cs create mode 100644 src/__Libraries/StellaOps.IssuerDirectory.Client/StellaOps.IssuerDirectory.Client.csproj diff --git a/deploy/helm/stellaops/values.yaml b/deploy/helm/stellaops/values.yaml index af20ed89..33a81fe8 100644 --- a/deploy/helm/stellaops/values.yaml +++ b/deploy/helm/stellaops/values.yaml @@ -8,33 +8,91 @@ global: pullPolicy: IfNotPresent labels: {} -telemetry: - collector: - enabled: false - replicas: 1 - image: otel/opentelemetry-collector:0.105.0 - requireClientCert: true - defaultTenant: unknown - logLevel: info - tls: - secretName: "" - certPath: /etc/otel/tls/tls.crt - keyPath: /etc/otel/tls/tls.key - caPath: /etc/otel/tls/ca.crt - items: - - key: tls.crt - path: tls.crt - - key: tls.key - path: tls.key - - key: ca.crt - path: ca.crt - service: - grpcPort: 4317 - httpPort: 4318 - metricsPort: 9464 - resources: {} - +telemetry: + collector: + enabled: false + replicas: 1 + image: otel/opentelemetry-collector:0.105.0 + requireClientCert: true + defaultTenant: unknown + logLevel: info + tls: + secretName: "" + certPath: /etc/otel/tls/tls.crt + keyPath: /etc/otel/tls/tls.key + caPath: /etc/otel/tls/ca.crt + items: + - key: tls.crt + path: tls.crt + - key: tls.key + path: tls.key + - key: ca.crt + path: ca.crt + service: + grpcPort: 4317 + httpPort: 4318 + metricsPort: 9464 + resources: {} + +configMaps: + issuer-directory-config: + data: + issuer-directory.yaml: | + IssuerDirectory: + telemetry: + minimumLogLevel: Information + authority: + enabled: true + issuer: https://authority.svc.cluster.local/realms/stellaops + requireHttpsMetadata: true + audiences: + - stellaops-platform + readScope: issuer-directory:read + writeScope: issuer-directory:write + adminScope: issuer-directory:admin + tenantHeader: X-StellaOps-Tenant + seedCsafPublishers: true + csafSeedPath: data/csaf-publishers.json + Mongo: + connectionString: mongodb://mongo:27017 + database: issuer-directory + 
issuersCollection: issuers + issuerKeysCollection: issuer_keys + issuerTrustCollection: issuer_trust_overrides + auditCollection: issuer_audit + services: + issuer-directory: + image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge + replicas: 1 + configMounts: + - name: issuer-directory-config + configMap: issuer-directory-config + mountPath: /etc/issuer-directory.yaml + subPath: issuer-directory.yaml + envFrom: + - secretRef: + name: issuer-directory-secrets + env: + ISSUERDIRECTORY__CONFIG: /etc/issuer-directory.yaml + ISSUERDIRECTORY__AUTHORITY__BASEURL: https://authority:8440 + ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "true" + ports: + - containerPort: 8080 + service: + port: 8080 + readinessProbe: + httpGet: + path: /health/live + port: 8080 + initialDelaySeconds: 5 + periodSeconds: 10 + livenessProbe: + httpGet: + path: /health/live + port: 8080 + initialDelaySeconds: 10 + periodSeconds: 20 scheduler-worker: image: registry.stella-ops.org/stellaops/scheduler-worker:2025.10.0-edge replicas: 1 diff --git a/docs/11_AUTHORITY.md b/docs/11_AUTHORITY.md index 0a95b978..6464a84f 100644 --- a/docs/11_AUTHORITY.md +++ b/docs/11_AUTHORITY.md @@ -1,380 +1,412 @@ -# StellaOps Authority Service - -> **Status:** Drafted 2025-10-12 (CORE5B.DOC / DOC1.AUTH) – aligns with Authority revocation store, JWKS rotation, and bootstrap endpoints delivered in Sprint 1. - -## 1. Purpose -The **StellaOps Authority** service issues OAuth2/OIDC tokens for every StellaOps module (Concelier, Backend, Agent, Zastava) and exposes the policy controls required in sovereign/offline environments. 
Authority is built as a minimal ASP.NET host that: - -- brokers password, client-credentials, and device-code flows through pluggable identity providers; -- persists access/refresh/device tokens in MongoDB with deterministic schemas for replay analysis and air-gapped audit copies; -- distributes revocation bundles and JWKS material so downstream services can enforce lockouts without direct database access; -- offers bootstrap APIs for first-run provisioning and key rotation without redeploying binaries. - -Authority is deployed alongside Concelier in air-gapped environments and never requires outbound internet access. All trusted metadata (OpenIddict discovery, JWKS, revocation bundles) is cacheable, signed, and reproducible. - -## 2. Component Architecture -Authority is composed of five cooperating subsystems: - -1. **Minimal API host** – configures OpenIddict endpoints (`/token`, `/authorize`, `/revoke`, `/jwks`), publishes the OpenAPI contract at `/.well-known/openapi`, and enables structured logging/telemetry. Rate limiting hooks (`AuthorityRateLimiter`) wrap every request. -2. **Plugin host** – loads `StellaOps.Authority.Plugin.*.dll` assemblies, applies capability metadata, and exposes password/client provisioning surfaces through dependency injection. -3. **Mongo storage** – persists tokens, revocations, bootstrap invites, and plugin state in deterministic collections indexed for offline sync (`authority_tokens`, `authority_revocations`, etc.). -4. **Cryptography layer** – `StellaOps.Cryptography` abstractions manage password hashing, signing keys, JWKS export, and detached JWS generation. -5. **Offline ops APIs** – internal endpoints under `/internal/*` provide administrative flows (bootstrap users/clients, revocation export) guarded by API keys and deterministic audit events. 
- -A high-level sequence for password logins: - -``` -Client -> /token (password grant) - -> Rate limiter & audit hooks - -> Plugin credential store (Argon2id verification) - -> Token persistence (Mongo authority_tokens) - -> Response (access/refresh tokens + deterministic claims) -``` - -## 3. Token Lifecycle & Persistence -Authority persists every issued token in MongoDB so operators can audit or revoke without scanning distributed caches. - -- **Collection:** `authority_tokens` -- **Key fields:** -- `tokenId`, `type` (`access_token`, `refresh_token`, `device_code`, `authorization_code`) -- `subjectId`, `clientId`, ordered `scope` array -- `tenant` (lower-cased tenant hint from the issuing client, omitted for global clients) - -### Console OIDC client - -- **Client ID**: `console-web` -- **Grants**: `authorization_code` (PKCE required), `refresh_token` -- **Audience**: `console` -- **Scopes**: `openid`, `profile`, `email`, `advisory:read`, `vex:read`, `aoc:verify`, `findings:read`, `orch:read`, `vuln:read` -- **Redirect URIs** (defaults): `https://console.stella-ops.local/oidc/callback` -- **Post-logout redirect**: `https://console.stella-ops.local/` -- **Tokens**: Access tokens inherit the global 2 minute lifetime; refresh tokens remain short-lived (30 days) and can be exchanged silently via `/token`. -- **Roles**: Assign Authority role `Orch.Viewer` (exposed to tenants as `role/orch-viewer`) when operators need read-only access to Orchestrator telemetry via Console dashboards. Policy Studio ships dedicated roles (`role/policy-author`, `role/policy-reviewer`, `role/policy-approver`, `role/policy-operator`, `role/policy-auditor`) that align with the new `policy:*` scope family; issue them per tenant so audit trails remain scoped. - -Configuration sample (`etc/authority.yaml.sample`) seeds the client with a confidential secret so Console can negotiate the code exchange on the backend while browsers execute the PKCE dance. 
- -### Console Authority endpoints - -- `/console/tenants` — Requires `authority:tenants.read`; returns the tenant catalogue for the authenticated principal. Requests lacking the `X-Stella-Tenant` header are rejected (`tenant_header_missing`) and logged. -- `/console/profile` — Requires `ui.read`; exposes subject metadata (roles, scopes, audiences) and indicates whether the session is within the five-minute fresh-auth window. -- `/console/token/introspect` — Requires `ui.read`; introspects the active access token so the SPA can prompt for re-authentication before privileged actions. - -All endpoints demand DPoP-bound tokens and propagate structured audit events (`authority.console.*`). Gateways must forward the `X-Stella-Tenant` header derived from the access token; downstream services rely on the same value for isolation. Keep Console access tokens short-lived (default 15 minutes) and enforce the fresh-auth window for admin actions (`ui.admin`, `authority:*`, `policy:activate`, `exceptions:approve`). -- `status` (`valid`, `revoked`, `expired`), `createdAt`, optional `expiresAt` -- `revokedAt`, machine-readable `revokedReason`, optional `revokedReasonDescription` -- `revokedMetadata` (string dictionary for plugin-specific context) -- **Persistence flow:** `PersistTokensHandler` stamps missing JWT IDs, normalises scopes, and stores every principal emitted by OpenIddict. -- **Revocation flow:** `AuthorityTokenStore.UpdateStatusAsync` flips status, records the reason metadata, and is invoked by token revocation handlers and plugin provisioning events (e.g., disabling a user). -- **Expiry maintenance:** `AuthorityTokenStore.DeleteExpiredAsync` prunes non-revoked tokens past their `expiresAt` timestamp. Operators should schedule this in maintenance windows if large volumes of tokens are issued. - -### Expectations for resource servers -Resource servers (Concelier WebService, Backend, Agent) **must not** assume in-memory caches are authoritative. 
They should: - -- cache `/jwks` and `/revocations/export` responses within configured lifetimes; -- honour `revokedReason` metadata when shaping audit trails; -- treat `status != "valid"` or missing tokens as immediate denial conditions. -- propagate the `tenant` claim (`X-Stella-Tenant` header in REST calls) and reject requests when the tenant supplied by Authority does not match the resource server's scope; Concelier and Excititor guard endpoints refuse cross-tenant tokens. - -### Tenant propagation - -- Client provisioning (bootstrap or plug-in) accepts a `tenant` hint. Authority normalises the value (`trim().ToLowerInvariant()`) and persists it alongside the registration. Clients without an explicit tenant remain global. -- Issued principals include the `stellaops:tenant` claim. `PersistTokensHandler` mirrors this claim into `authority_tokens.tenant`, enabling per-tenant revocation and reporting. -- Rate limiter metadata now tags requests with `authority.tenant`, unlocking per-tenant throughput metrics and diagnostic filters. Audit events (`authority.client_credentials.grant`, `authority.password.grant`, bootstrap flows) surface the tenant and login attempt documents index on `{tenant, occurredAt}` for quick queries. -- Client credentials that request `advisory:ingest`, `advisory:read`, `vex:ingest`, `vex:read`, `signals:read`, `signals:write`, `signals:admin`, or `aoc:verify` now fail fast when the client registration lacks a tenant hint. Issued tokens are re-validated against persisted tenant metadata, and Authority rejects any cross-tenant replay (`invalid_client`/`invalid_token`), ensuring aggregation-only workloads remain tenant-scoped. -- Client credentials that request `export.viewer`, `export.operator`, or `export.admin` must provide a tenant hint. 
Requests for `export.admin` also need accompanying `export_reason` and `export_ticket` parameters; Authority returns `invalid_request` when either value is missing and records the denial in token audit events. -- Policy Studio scopes (`policy:author`, `policy:review`, `policy:approve`, `policy:operate`, `policy:audit`, `policy:simulate`, `policy:run`, `policy:activate`) require a tenant assignment; Authority rejects tokens missing the hint with `invalid_client` and records `scope.invalid` metadata for auditing. -- **AOC pairing guardrails** – Tokens that request `advisory:read`, `vex:read`, or any `signals:*` scope must also request `aoc:verify`. Authority rejects mismatches with `invalid_scope` (`Scope 'aoc:verify' is required when requesting advisory/vex read scopes.` or `Scope 'aoc:verify' is required when requesting signals scopes.`) so automation surfaces deterministic errors. -- **Signals ingestion guardrails** – Sensors and services requesting `signals:write`/`signals:admin` must also request `aoc:verify`; Authority records the `authority.aoc_scope_violation` tag when the pairing is missing so operators can trace failing sensors immediately. -- Password grant flows reuse the client registration's tenant and enforce the configured scope allow-list. Requested scopes outside that list (or mismatched tenants) trigger `invalid_scope`/`invalid_client` failures, ensuring cross-tenant access is denied before token issuance. 
- -### Default service scopes - -| Client ID | Purpose | Scopes granted | Sender constraint | Tenant | -|----------------------|---------------------------------------|--------------------------------------|-------------------|-----------------| -| `concelier-ingest` | Concelier raw advisory ingestion | `advisory:ingest`, `advisory:read` | `dpop` | `tenant-default` | -| `excititor-ingest` | Excititor raw VEX ingestion | `vex:ingest`, `vex:read` | `dpop` | `tenant-default` | -| `aoc-verifier` | Aggregation-only contract verification | `aoc:verify`, `advisory:read`, `vex:read` | `dpop` | `tenant-default` | -| `cartographer-service` | Graph snapshot construction | `graph:write`, `graph:read` | `dpop` | `tenant-default` | -| `graph-api` | Graph Explorer gateway/API | `graph:read`, `graph:export`, `graph:simulate` | `dpop` | `tenant-default` | -| `export-center-operator` | Export Center operator automation | `export.viewer`, `export.operator` | `dpop` | `tenant-default` | -| `export-center-admin` | Export Center administrative automation | `export.viewer`, `export.operator`, `export.admin` | `dpop` | `tenant-default` | -| `vuln-explorer-ui` | Vuln Explorer UI/API | `vuln:read` | `dpop` | `tenant-default` | -| `signals-uploader` | Reachability sensor ingestion | `signals:write`, `signals:read`, `aoc:verify` | `dpop` | `tenant-default` | - -> **Secret hygiene (2025‑10‑27):** The repository includes a convenience `etc/authority.yaml` for compose/helm smoke tests. Every entry’s `secretFile` points to `etc/secrets/*.secret`, which ship with `*-change-me` placeholders—replace them with strong values (and wire them through your vault/secret manager) before issuing tokens in CI, staging, or production. - -For factory provisioning, issue sensors the **SignalsUploader** role template (`signals:write`, `signals:read`, `aoc:verify`). Authority rejects ingestion tokens that omit `aoc:verify`, preserving aggregation-only contract guarantees for reachability signals. 
- -These registrations are provided as examples in `etc/authority.yaml.sample`. Clone them per tenant (for example `concelier-tenant-a`, `concelier-tenant-b`) so tokens remain tenant-scoped by construction. - -Graph Explorer introduces dedicated scopes: `graph:write` for Cartographer build jobs, `graph:read` for query/read operations, `graph:export` for long-running export downloads, and `graph:simulate` for what-if overlays. Assign only the scopes a client actually needs to preserve least privilege—UI-facing clients should typically request read/export access, while background services (Cartographer, Scheduler) require write privileges. - -#### Least-privilege guidance for graph clients - -- **Service identities** – The Cartographer worker should request `graph:write` and `graph:read` only; grant `graph:simulate` exclusively to pipeline automation that invokes Policy Engine overlays on demand. Keep `graph:export` scoped to API gateway components responsible for streaming GraphML/JSONL artifacts. Authority enforces this by rejecting `graph:write` tokens that lack `properties.serviceIdentity: cartographer`. -- **Tenant propagation** – Every client registration must pin a `tenant` hint. Authority normalises the value and stamps it into issued tokens (`stellaops:tenant`) so downstream services (Scheduler, Graph API, Console) can enforce tenant isolation without custom headers. Graph scopes (`graph:read`, `graph:write`, `graph:export`, `graph:simulate`) are denied if the tenant hint is missing. -- **SDK alignment** – Use the generated `StellaOpsScopes` constants in service code to request graph scopes. Hard-coded strings risk falling out of sync as additional graph capabilities are added. -- **DPOP for automation** – Maintain sender-constrained (`dpop`) flows for Cartographer and Scheduler to limit reuse of access tokens if a build host is compromised. 
For UI-facing tokens, pair `graph:read`/`graph:export` with short lifetimes and enforce refresh-token rotation at the gateway. - -#### Export Center scope guardrails - -- **Viewer vs operator** – `export.viewer` grants read-only access to export profiles, manifests, and bundles. Automation that schedules or reruns exports should request `export.operator` (and typically `export.viewer`). Tenant hints remain mandatory; Authority refuses tokens without them. -- **Administrative mutations** – Changes to retention policies, encryption key references, or schedule defaults require `export.admin`. When requesting tokens with this scope, clients must supply `export_reason` and `export_ticket` parameters; Authority persists the values for audit records and rejects missing metadata with `invalid_request`. -- **Operational hygiene** – Rotate `export.admin` credentials infrequently and run them through fresh-auth workflows where possible. Prefer distributing verification tooling with `export.viewer` tokens for day-to-day bundle validation. - -#### Vuln Explorer permalinks - -- **Scope** – `vuln:read` authorises Vuln Explorer to fetch advisory/linkset evidence and issue shareable links. Assign it only to front-end/API clients that must render vulnerability details. -- **Signed links** – `POST /permalinks/vuln` (requires `vuln:read`) accepts `{ "tenant": "tenant-a", "resourceKind": "vulnerability", "state": { ... }, "expiresInSeconds": 86400 }` and returns a JWT (`token`) plus `issuedAt`/`expiresAt`. The token embeds the tenant, requested state, and `vuln:read` scope and is signed with the same Authority signing keys published via `/jwks`. -- **Validation** – Resource servers verify the permalink using cached JWKS: check signature, ensure the tenant matches the current request context, honour the expiry, and enforce the contained `vuln:read` scope. The payload’s `resource.state` block is opaque JSON so UIs can round-trip filters/search terms without new schema changes. - -## 4. 
Revocation Pipeline -Authority centralises revocation in `authority_revocations` with deterministic categories: - -| Category | Meaning | Required fields | -| --- | --- | --- | -| `token` | Specific OAuth token revoked early. | `revocationId` (token id), `tokenType`, optional `clientId`, `subjectId` | -| `subject` | All tokens for a subject disabled. | `revocationId` (= subject id) | -| `client` | OAuth client registration revoked. | `revocationId` (= client id) | -| `key` | Signing/JWE key withdrawn. | `revocationId` (= key id) | - -`RevocationBundleBuilder` flattens Mongo documents into canonical JSON, sorts entries by (`category`, `revocationId`, `revokedAt`), and signs exports using detached JWS (RFC 7797) with cosign-compatible headers. - -**Export surfaces** (deterministic output, suitable for Offline Kit): - -- CLI: `stella auth revoke export --output ./out` writes `revocation-bundle.json`, `.jws`, `.sha256`. -- Verification: `stella auth revoke verify --bundle <path> --signature <path> --key <key>` validates detached JWS signatures before distribution, selecting the crypto provider advertised in the detached header (see `docs/security/revocation-bundle.md`). -- API: `GET /internal/revocations/export` (requires bootstrap API key) returns the same payload. -- Verification: `stella auth revoke verify` validates schema, digest, and detached JWS using cached JWKS or offline keys, automatically preferring the hinted provider (libsodium builds honour `provider=libsodium`; other builds fall back to the managed provider). - -**Consumer guidance:** - -1. Mirror `revocation-bundle.json*` alongside Concelier exports. Offline agents fetch both over the existing update channel. -2. Use bundle `sequence` and `bundleId` to detect replay or monotonicity regressions. Ignore bundles with older sequence numbers unless `bundleId` changes and `issuedAt` advances. -3. Treat `revokedReason` taxonomy as machine-friendly codes (`compromised`, `rotation`, `policy`, `lifecycle`). 
Translating to human-readable logs is the consumer’s responsibility. - -## 5. Signing Keys & JWKS Rotation -Authority signs revocation bundles and publishes JWKS entries via the new signing manager: - -- **Configuration (`authority.yaml`):** - ```yaml - signing: - enabled: true - algorithm: ES256 # Defaults to ES256 - keySource: file # Loader identifier (file, vault, etc.) - provider: default # Optional preferred crypto provider - activeKeyId: authority-signing-dev - keyPath: "../certificates/authority-signing-dev.pem" - additionalKeys: - - keyId: authority-signing-dev-2024 - path: "../certificates/authority-signing-dev-2024.pem" - source: "file" - ``` -- **Sources:** The default loader supports PEM files relative to the content root; additional loaders can be registered via `IAuthoritySigningKeySource`. -- **Providers:** Keys are registered against the `ICryptoProviderRegistry`, so alternative implementations (HSM, libsodium) can be plugged in without changing host code. -- **OpenAPI discovery:** `GET /.well-known/openapi` returns the published authentication contract (JSON by default, YAML when requested). Responses include `X-StellaOps-Service`, `X-StellaOps-Api-Version`, `X-StellaOps-Build-Version`, plus grant and scope headers, and honour conditional requests via `ETag`/`If-None-Match`. -- **JWKS output:** `GET /jwks` lists every signing key with `status` metadata (`active`, `retired`). Old keys remain until operators remove them from configuration, allowing verification of historical bundles/tokens. - -### Rotation SOP (no downtime) -1. Generate a new P-256 private key (PEM) on an offline workstation and place it where the Authority host can read it (e.g., `../certificates/authority-signing-2025.pem`). -2. 
Call the authenticated admin API: - ```bash - curl -sS -X POST https://authority.example.com/internal/signing/rotate \ - -H "x-stellaops-bootstrap-key: ${BOOTSTRAP_KEY}" \ - -H "Content-Type: application/json" \ - -d '{ - "keyId": "authority-signing-2025", - "location": "../certificates/authority-signing-2025.pem", - "source": "file" - }' - ``` -3. Verify the response reports the previous key as retired and fetch `/jwks` to confirm the new `kid` appears with `status: "active"`. -4. Persist the old key path in `signing.additionalKeys` (the rotation API updates in-memory options; rewrite the YAML to match so restarts remain consistent). -5. If you prefer automation, trigger the `.gitea/workflows/authority-key-rotation.yml` workflow with the new `keyId`/`keyPath`; it wraps `ops/authority/key-rotation.sh` and reads environment-specific secrets. The older key will be marked `retired` and appended to `signing.additionalKeys`. -6. Re-run `stella auth revoke export` so revocation bundles are signed with the new key. Downstream caches should refresh JWKS within their configured lifetime (`StellaOpsAuthorityOptions.Signing` + client cache tolerance). - -The rotation API leverages the same cryptography abstractions as revocation signing; no restart is required and the previous key is marked `retired` but kept available for verification. - -## 6. Bootstrap & Administrative Endpoints -Administrative APIs live under `/internal/*` and require the bootstrap API key plus rate-limiter compliance. - -| Endpoint | Method | Description | -| --- | --- | --- | -| `/internal/users` | `POST` | Provision initial administrative accounts through the registered password-capable plug-in. Emits structured audit events. | -| `/internal/clients` | `POST` | Provision OAuth clients (client credentials / device code). | -| `/internal/revocations/export` | `GET` | Export revocation bundle + detached JWS + digest. | -| `/internal/signing/rotate` | `POST` | Promote a new signing key (see SOP above). 
Request body accepts `keyId`, `location`, optional `source`, `algorithm`, `provider`, and metadata. | - -All administrative calls emit `AuthEventRecord` entries enriched with correlation IDs, PII tags, and network metadata for offline SOC ingestion. - -> **Tenant hint:** include a `tenant` entry inside `properties` when bootstrapping clients. Authority normalises the value, stores it on the registration, and stamps future tokens/audit events with the tenant. - -### Bootstrap client example - -```jsonc -POST /internal/clients -{ - "clientId": "concelier", - "confidential": true, - "displayName": "Concelier Backend", - "allowedGrantTypes": ["client_credentials"], - "allowedScopes": ["concelier.jobs.trigger", "advisory:ingest", "advisory:read"], - "properties": { - "tenant": "tenant-default" - } -} -``` - -For environments with multiple tenants, repeat the call per tenant-specific client (e.g. `concelier-tenant-a`, `concelier-tenant-b`) or append suffixes to the client identifier. - -### Aggregation-only verification tokens - -- Issue a dedicated client (e.g. `aoc-verifier`) with the scopes `aoc:verify`, `advisory:read`, and `vex:read` for each tenant that runs guard checks. Authority refuses to mint tokens for these scopes unless the client registration provides a tenant hint. -- The CLI (`stella aoc verify --tenant <tenant>`) and Console verification panel both call `/aoc/verify` on Concelier and Excititor. Tokens that omit the tenant claim or present a tenant that does not match the stored registration are rejected with `invalid_client`/`invalid_token`. -- Audit: `authority.client_credentials.grant` entries record `scope.invalid="aoc:verify"` when requests are rejected because the tenant hint is missing or mismatched. - -### Exception approvals & routing - -- New scopes `exceptions:read`, `exceptions:write`, and `exceptions:approve` govern access to the exception lifecycle. 
Map these via tenant roles (`exceptions-service`, `exceptions-approver`) as described in `/docs/security/authority-scopes.md`. -- Configure approval routing in `authority.yaml` with declarative templates. Each template exposes an `authorityRouteId` for downstream services (Policy Engine, Console) and an optional `requireMfa` flag: - -```yaml -exceptions: - routingTemplates: - - id: "secops" - authorityRouteId: "approvals/secops" - requireMfa: true - description: "Security Operations approval chain" - - id: "governance" - authorityRouteId: "approvals/governance" - requireMfa: false - description: "Non-production waiver review" -``` - -- Clients requesting exception scopes must include a tenant assignment. Authority rejects client-credential flows that request `exceptions:*` with `invalid_client` and logs `scope.invalid="exceptions:write"` (or the requested scope) in `authority.client_credentials.grant` audit events when the tenant hint is missing. -- When any configured routing template sets `requireMfa: true`, user-facing tokens that contain `exceptions:approve` must be acquired through an MFA-capable identity provider. Password/OIDC flows that lack MFA support are rejected with `authority.password.grant` audit events where `reason="Exception approval scope requires an MFA-capable identity provider."` -- Update interactive clients (Console) to request `exceptions:read` by default and elevate to `exceptions:approve` only inside fresh-auth workflows for approvers. Documented examples live in `etc/authority.yaml.sample`. -- Verification responses map guard failures to `ERR_AOC_00x` codes and Authority emits `authority.client_credentials.grant` + `authority.token.validate_access` audit records containing the tenant and scopes so operators can trace who executed a run. 
-- For air-gapped or offline replicas, pre-issue verification tokens per tenant and rotate them alongside ingest credentials; the guard endpoints never mutate data and remain safe to expose through the offline kit schedule. - -## 7. Configuration Reference - -| Section | Key | Description | Notes | -| --- | --- | --- | --- | -| Root | `issuer` | Absolute HTTPS issuer advertised to clients. | Required. Loopback HTTP allowed only for development. | -| Tokens | `accessTokenLifetime`, `refreshTokenLifetime`, etc. | Lifetimes for each grant (access, refresh, device, authorization code, identity). | Enforced during issuance; persisted on each token document. | -| Storage | `storage.connectionString` | MongoDB connection string. | Required even for tests; offline kits ship snapshots for seeding. | -| Signing | `signing.enabled` | Enable JWKS/revocation signing. | Disable only for development. | -| Signing | `signing.algorithm` | Signing algorithm identifier. | Currently ES256; additional curves can be wired through crypto providers. | -| Signing | `signing.keySource` | Loader identifier (`file`, `vault`, custom). | Determines which `IAuthoritySigningKeySource` resolves keys. | -| Signing | `signing.keyPath` | Relative/absolute path understood by the loader. | Stored as-is; rotation request should keep it in sync with filesystem layout. | -| Signing | `signing.activeKeyId` | Active JWKS / revocation signing key id. | Exposed as `kid` in JWKS and bundles. | -| Signing | `signing.additionalKeys[].keyId` | Retired key identifier retained for verification. | Manager updates this automatically after rotation; keep YAML aligned. | -| Signing | `signing.additionalKeys[].source` | Loader identifier per retired key. | Defaults to `signing.keySource` if omitted. | -| Security | `security.rateLimiting` | Fixed-window limits for `/token`, `/authorize`, `/internal/*`. | See `docs/security/rate-limits.md` for tuning. 
| -| Bootstrap | `bootstrap.apiKey` | Shared secret required for `/internal/*`. | Only required when `bootstrap.enabled` is true. | - -### 7.1 Sender-constrained clients (DPoP & mTLS) - -Authority now understands two flavours of sender-constrained OAuth clients: - -- **DPoP proof-of-possession** – clients sign a `DPoP` header for `/token` requests. Authority validates the JWK thumbprint, HTTP method/URI, and replay window, then stamps the resulting access token with `cnf.jkt` so downstream services can verify the same key is reused. - - Configure under `security.senderConstraints.dpop`. `allowedAlgorithms`, `proofLifetime`, and `replayWindow` are enforced at validation time. - - `security.senderConstraints.dpop.nonce.enabled` enables nonce challenges for high-value audiences (`requiredAudiences`, normalised to case-insensitive strings). When a nonce is required but missing or expired, `/token` replies with `WWW-Authenticate: DPoP error="use_dpop_nonce"` (and, when available, a fresh `DPoP-Nonce` header). Clients must retry with the issued nonce embedded in the proof. - - `security.senderConstraints.dpop.nonce.store` selects `memory` (default) or `redis`. When `redis` is configured, set `security.senderConstraints.dpop.nonce.redisConnectionString` so replicas share nonce issuance and high-value clients avoid replay gaps during failover. - - Example (enabling Redis-backed nonces; adjust audiences per deployment): - ```yaml - security: - senderConstraints: - dpop: - enabled: true - proofLifetime: "00:02:00" - replayWindow: "00:05:00" - allowedAlgorithms: [ "ES256", "ES384" ] - nonce: - enabled: true - ttl: "00:10:00" - maxIssuancePerMinute: 120 - store: "redis" - redisConnectionString: "redis://authority-redis:6379?ssl=false" - requiredAudiences: - - "signer" - - "attestor" - ``` - Operators can override any field via environment variables (e.g. `STELLAOPS_AUTHORITY__SECURITY__SENDERCONSTRAINTS__DPOP__NONCE__STORE=redis`). 
- - Declare client `audiences` in bootstrap manifests or plug-in provisioning metadata; Authority now defaults the token `aud` claim and `resource` indicator from this list, which is also used to trigger nonce enforcement for audiences such as `signer` and `attestor`. -- **Mutual TLS clients** – client registrations may declare an mTLS binding (`senderConstraint: mtls`). When enabled via `security.senderConstraints.mtls`, Authority validates the presented client certificate against stored bindings (`certificateBindings[]`), optional chain verification, and timing windows. Successful requests embed `cnf.x5t#S256` into the access token (and introspection output) so resource servers can enforce the certificate thumbprint. - - `security.senderConstraints.mtls.enforceForAudiences` forces mTLS whenever the requested `aud`/`resource` (or the client's configured audiences) intersect the configured allow-list (default includes `signer`). Clients configured for different sender constraints are rejected early so operator policy remains consistent. - - Certificate bindings now act as an allow-list: Authority verifies thumbprint, subject, issuer, serial number, and any declared SAN values against the presented certificate, with rotation grace windows applied to `notBefore/notAfter`. Operators can enforce subject regexes, SAN type allow-lists (`dns`, `uri`, `ip`), trusted certificate authorities, and rotation grace via `security.senderConstraints.mtls.*`. - -Both modes persist additional metadata in `authority_tokens`: `senderConstraint` records the enforced policy, while `senderKeyThumbprint` stores the DPoP JWK thumbprint or mTLS certificate hash captured at issuance. Downstream services can rely on these fields (and the corresponding `cnf` claim) when auditing offline copies of the token store. - -### 7.2 Policy Engine clients & scopes - -Policy Engine v2 introduces dedicated scopes and a service identity that materialises effective findings. 
Configure Authority as follows when provisioning policy clients: - -| Client | Scopes | Notes | -| --- | --- | --- | -| `policy-engine` (service) | `policy:run`, `findings:read`, `effective:write` | Must include `properties.serviceIdentity: policy-engine` and a tenant. Authority rejects `effective:write` tokens without the marker or tenant. | -| `policy-cli` / automation | `policy:read`, `policy:author`, `policy:review`, `policy:simulate`, `findings:read` *(optionally add `policy:approve` / `policy:operate` / `policy:activate` for promotion pipelines)* | Keep scopes minimal; reroll CLI/CI tokens issued before 2025‑10‑27 so they drop legacy scope names and adopt the new set. | -| UI/editor sessions | `policy:read`, `policy:author`, `policy:simulate` (+ reviewer/approver/operator scopes as appropriate) | Issue tenant-specific clients so audit and rate limits remain scoped. | - -Sample YAML entry: - -```yaml - - clientId: "policy-engine" - displayName: "Policy Engine Service" - grantTypes: [ "client_credentials" ] - audiences: [ "api://policy-engine" ] - scopes: [ "policy:run", "findings:read", "effective:write" ] - tenant: "tenant-default" - properties: - serviceIdentity: "policy-engine" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/policy-engine.secret" -``` - -Compliance checklist: - -- [ ] `policy-engine` client includes `properties.serviceIdentity: policy-engine` and a tenant hint; logins missing either are rejected. -- [ ] Non-service clients omit `effective:write` and receive only the scopes required for their role (`policy:read`, `policy:author`, `policy:review`, `policy:approve`, `policy:operate`, `policy:simulate`, etc.). -- [ ] Legacy tokens using `policy:write`/`policy:submit`/`policy:edit` are rotated to the new scope set before Production change freeze (see release migration note below). 
-- [ ] Approval/activation workflows use identities distinct from authoring identities; tenants are provisioned per client to keep telemetry segregated. -- [ ] Operators document reviewer assignments and incident procedures alongside `/docs/security/policy-governance.md` and archive policy evidence bundles (`stella policy bundle export`) with each release. - -### 7.3 Orchestrator roles & scopes - -| Role / Client | Scopes | Notes | -| --- | --- | --- | -| `Orch.Viewer` role | `orch:read` | Read-only access to Orchestrator dashboards, queues, and telemetry. | -| `Orch.Operator` role | `orch:read`, `orch:operate` | Issue short-lived tokens for control actions (pause/resume, retry, sync). Token requests **must** include `operator_reason` (≤256 chars) and `operator_ticket` (≤128 chars); Authority rejects requests missing either value and records both in audit events. | - -Token request example via client credentials: - -```bash -curl -u orch-operator:s3cr3t! \ - -d 'grant_type=client_credentials' \ - -d 'scope=orch:operate' \ - -d 'operator_reason=resume source after maintenance' \ - -d 'operator_ticket=INC-2045' \ - https://authority.example.com/token -``` - -Tokens lacking `operator_reason` or `operator_ticket` receive `invalid_request`; audit events (`authority.client_credentials.grant`) surface the supplied values under `request.reason` and `request.ticket` for downstream review. -CLI clients set these parameters via `Authority.OperatorReason` / `Authority.OperatorTicket` (environment variables `STELLAOPS_ORCH_REASON` and `STELLAOPS_ORCH_TICKET`). - -## 8. Offline & Sovereign Operation -- **No outbound dependencies:** Authority only contacts MongoDB and local plugins. Discovery and JWKS are cached by clients with offline tolerances (`AllowOfflineCacheFallback`, `OfflineCacheTolerance`). Operators should mirror these responses for air-gapped use. 
-- **Structured logging:** Every revocation export, signing rotation, bootstrap action, and token issuance emits structured logs with `traceId`, `client_id`, `subjectId`, and `network.remoteIp` where applicable. Mirror logs to your SIEM to retain audit trails without central connectivity. -- **Determinism:** Sorting rules in token and revocation exports guarantee byte-for-byte identical artefacts given the same datastore state. Hashes and signatures remain stable across machines. - -## 9. Operational Checklist -- [ ] Protect the bootstrap API key and disable bootstrap endpoints (`bootstrap.enabled: false`) once initial setup is complete. -- [ ] Schedule `stella auth revoke export` (or `/internal/revocations/export`) at the same cadence as Concelier exports so bundles remain in lockstep. -- [ ] Rotate signing keys before expiration; keep at least one retired key until all cached bundles/tokens signed with it have expired. -- [ ] Monitor `/health` and `/ready` plus rate-limiter metrics to detect plugin outages early. -- [ ] Ensure downstream services cache JWKS and revocation bundles within tolerances; stale caches risk accepting revoked tokens. - -For plug-in specific requirements, refer to **[Authority Plug-in Developer Guide](dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md)**. For revocation bundle validation workflow, see **[Authority Revocation Bundle](security/revocation-bundle.md)**. +# StellaOps Authority Service + +> **Status:** Drafted 2025-10-12 (CORE5B.DOC / DOC1.AUTH) – aligns with Authority revocation store, JWKS rotation, and bootstrap endpoints delivered in Sprint 1. + +## 1. Purpose +The **StellaOps Authority** service issues OAuth2/OIDC tokens for every StellaOps module (Concelier, Backend, Agent, Zastava) and exposes the policy controls required in sovereign/offline environments. 
Authority is built as a minimal ASP.NET host that: + +- brokers password, client-credentials, and device-code flows through pluggable identity providers; +- persists access/refresh/device tokens in MongoDB with deterministic schemas for replay analysis and air-gapped audit copies; +- distributes revocation bundles and JWKS material so downstream services can enforce lockouts without direct database access; +- offers bootstrap APIs for first-run provisioning and key rotation without redeploying binaries. + +Authority is deployed alongside Concelier in air-gapped environments and never requires outbound internet access. All trusted metadata (OpenIddict discovery, JWKS, revocation bundles) is cacheable, signed, and reproducible. + +## 2. Component Architecture +Authority is composed of five cooperating subsystems: + +1. **Minimal API host** – configures OpenIddict endpoints (`/token`, `/authorize`, `/revoke`, `/jwks`), publishes the OpenAPI contract at `/.well-known/openapi`, and enables structured logging/telemetry. Rate limiting hooks (`AuthorityRateLimiter`) wrap every request. +2. **Plugin host** – loads `StellaOps.Authority.Plugin.*.dll` assemblies, applies capability metadata, and exposes password/client provisioning surfaces through dependency injection. +3. **Mongo storage** – persists tokens, revocations, bootstrap invites, and plugin state in deterministic collections indexed for offline sync (`authority_tokens`, `authority_revocations`, etc.). +4. **Cryptography layer** – `StellaOps.Cryptography` abstractions manage password hashing, signing keys, JWKS export, and detached JWS generation. +5. **Offline ops APIs** – internal endpoints under `/internal/*` provide administrative flows (bootstrap users/clients, revocation export) guarded by API keys and deterministic audit events. 
+ +A high-level sequence for password logins: + +``` +Client -> /token (password grant) + -> Rate limiter & audit hooks + -> Plugin credential store (Argon2id verification) + -> Token persistence (Mongo authority_tokens) + -> Response (access/refresh tokens + deterministic claims) +``` + +## 3. Token Lifecycle & Persistence +Authority persists every issued token in MongoDB so operators can audit or revoke without scanning distributed caches. + +- **Collection:** `authority_tokens` +- **Key fields:** +- `tokenId`, `type` (`access_token`, `refresh_token`, `device_code`, `authorization_code`) +- `subjectId`, `clientId`, ordered `scope` array +- `tenant` (lower-cased tenant hint from the issuing client, omitted for global clients) + +### Console OIDC client + +- **Client ID**: `console-web` +- **Grants**: `authorization_code` (PKCE required), `refresh_token` +- **Audience**: `console` +- **Scopes**: `openid`, `profile`, `email`, `advisory:read`, `advisory-ai:view`, `vex:read`, `aoc:verify`, `findings:read`, `orch:read`, `vuln:read` +- **Redirect URIs** (defaults): `https://console.stella-ops.local/oidc/callback` +- **Post-logout redirect**: `https://console.stella-ops.local/` +- **Tokens**: Access tokens inherit the global 2 minute lifetime; refresh tokens remain short-lived (30 days) and can be exchanged silently via `/token`. +- **Roles**: Assign Authority role `Orch.Viewer` (exposed to tenants as `role/orch-viewer`) when operators need read-only access to Orchestrator telemetry via Console dashboards. Policy Studio ships dedicated roles (`role/policy-author`, `role/policy-reviewer`, `role/policy-approver`, `role/policy-operator`, `role/policy-auditor`) that align with the new `policy:*` scope family; issue them per tenant so audit trails remain scoped. + +Configuration sample (`etc/authority.yaml.sample`) seeds the client with a confidential secret so Console can negotiate the code exchange on the backend while browsers execute the PKCE dance. 
+ +### Advisory AI scopes & remote inference + +- `advisory-ai:view` — read Advisory AI artefacts (summaries, remediation packs, cached outputs). +- `advisory-ai:operate` — submit Advisory AI inference jobs and remediation requests. +- `advisory-ai:admin` — administer Advisory AI configuration, profile selection, and remote execution controls. + +Authority publishes the trio in OpenID discovery (`stellaops_advisory_ai_scopes_supported`) so clients can self-discover capability. Remote/cloud inference is disabled by default; set `advisoryAi.remoteInference.enabled: true` and provide an explicit `allowedProfiles` whitelist (for example `cloud-openai`) when an installation opts in. The `requireTenantConsent` toggle (default `true`) enforces per-tenant opt-in before remote profiles are invoked, mirroring regulatory expectations for sovereign or air-gapped deployments. + +### Console Authority endpoints + +- `/console/tenants` — Requires `authority:tenants.read`; returns the tenant catalogue for the authenticated principal. Requests lacking the `X-Stella-Tenant` header are rejected (`tenant_header_missing`) and logged. +- `/console/profile` — Requires `ui.read`; exposes subject metadata (roles, scopes, audiences) and indicates whether the session is within the five-minute fresh-auth window. +- `/console/token/introspect` — Requires `ui.read`; introspects the active access token so the SPA can prompt for re-authentication before privileged actions. + +All endpoints demand DPoP-bound tokens and propagate structured audit events (`authority.console.*`). Gateways must forward the `X-Stella-Tenant` header derived from the access token; downstream services rely on the same value for isolation. Keep Console access tokens short-lived (default 15 minutes) and enforce the fresh-auth window for admin actions (`ui.admin`, `authority:*`, `policy:activate`, `exceptions:approve`). 
+- `status` (`valid`, `revoked`, `expired`), `createdAt`, optional `expiresAt` +- `revokedAt`, machine-readable `revokedReason`, optional `revokedReasonDescription` +- `revokedMetadata` (string dictionary for plugin-specific context) +- **Persistence flow:** `PersistTokensHandler` stamps missing JWT IDs, normalises scopes, and stores every principal emitted by OpenIddict. +- **Revocation flow:** `AuthorityTokenStore.UpdateStatusAsync` flips status, records the reason metadata, and is invoked by token revocation handlers and plugin provisioning events (e.g., disabling a user). +- **Expiry maintenance:** `AuthorityTokenStore.DeleteExpiredAsync` prunes non-revoked tokens past their `expiresAt` timestamp. Operators should schedule this in maintenance windows if large volumes of tokens are issued. + +### Expectations for resource servers +Resource servers (Concelier WebService, Backend, Agent) **must not** assume in-memory caches are authoritative. They should: + +- cache `/jwks` and `/revocations/export` responses within configured lifetimes; +- honour `revokedReason` metadata when shaping audit trails; +- treat `status != "valid"` or missing tokens as immediate denial conditions. +- propagate the `tenant` claim (`X-Stella-Tenant` header in REST calls) and reject requests when the tenant supplied by Authority does not match the resource server's scope; Concelier and Excititor guard endpoints refuse cross-tenant tokens. + +### Tenant propagation + +- Client provisioning (bootstrap or plug-in) accepts a `tenant` hint. Authority normalises the value (`trim().ToLowerInvariant()`) and persists it alongside the registration. Clients without an explicit tenant remain global. +- Issued principals include the `stellaops:tenant` claim. `PersistTokensHandler` mirrors this claim into `authority_tokens.tenant`, enabling per-tenant revocation and reporting. 
+- Rate limiter metadata now tags requests with `authority.tenant`, unlocking per-tenant throughput metrics and diagnostic filters. Audit events (`authority.client_credentials.grant`, `authority.password.grant`, bootstrap flows) surface the tenant, and login-attempt documents are indexed on `{tenant, occurredAt}` for quick queries.
+- Client credentials that request `advisory:ingest`, `advisory:read`, `advisory-ai:view`, `advisory-ai:operate`, `advisory-ai:admin`, `vex:ingest`, `vex:read`, `signals:read`, `signals:write`, `signals:admin`, or `aoc:verify` now fail fast when the client registration lacks a tenant hint. Issued tokens are re-validated against persisted tenant metadata, and Authority rejects any cross-tenant replay (`invalid_client`/`invalid_token`), ensuring aggregation-only workloads remain tenant-scoped.
+- Client credentials that request `export.viewer`, `export.operator`, or `export.admin` must provide a tenant hint. Requests for `export.admin` also need accompanying `export_reason` and `export_ticket` parameters; Authority returns `invalid_request` when either value is missing and records the denial in token audit events.
+- Client credentials that request `notify.viewer`, `notify.operator`, or `notify.admin` must provide a tenant hint. Authority records scope violations when tenancy is missing and emits `authority.notify.scope_violation` audit metadata so operators can trace denied requests.
+- Policy Studio scopes (`policy:author`, `policy:review`, `policy:approve`, `policy:operate`, `policy:audit`, `policy:simulate`, `policy:run`, `policy:activate`) require a tenant assignment; Authority rejects tokens missing the hint with `invalid_client` and records `scope.invalid` metadata for auditing.
+- Task Pack scopes (`packs.read`, `packs.write`, `packs.run`, `packs.approve`) require a tenant assignment; Authority rejects tokens missing the hint with `invalid_client` and logs `authority.pack_scope_violation` metadata for audit correlation.
+- **AOC pairing guardrails** – Tokens that request `advisory:read`, `advisory-ai:view`, `advisory-ai:operate`, `advisory-ai:admin`, `vex:read`, or any `signals:*` scope must also request `aoc:verify`. Authority rejects mismatches with `invalid_scope` (e.g., `Scope 'aoc:verify' is required when requesting advisory/advisory-ai/vex read scopes.` or `Scope 'aoc:verify' is required when requesting signals scopes.`) so automation surfaces deterministic errors. +- **Signals ingestion guardrails** – Sensors and services requesting `signals:write`/`signals:admin` must also request `aoc:verify`; Authority records the `authority.aoc_scope_violation` tag when the pairing is missing so operators can trace failing sensors immediately. +- Password grant flows reuse the client registration's tenant and enforce the configured scope allow-list. Requested scopes outside that list (or mismatched tenants) trigger `invalid_scope`/`invalid_client` failures, ensuring cross-tenant access is denied before token issuance. 
+
+### Default service scopes
+
+| Client ID | Purpose | Scopes granted | Sender constraint | Tenant |
+|----------------------|---------------------------------------|--------------------------------------|-------------------|-----------------|
+| `concelier-ingest` | Concelier raw advisory ingestion | `advisory:ingest`, `advisory:read` | `dpop` | `tenant-default` |
+| `excititor-ingest` | Excititor raw VEX ingestion | `vex:ingest`, `vex:read` | `dpop` | `tenant-default` |
+| `aoc-verifier` | Aggregation-only contract verification | `aoc:verify`, `advisory:read`, `vex:read` | `dpop` | `tenant-default` |
+| `cartographer-service` | Graph snapshot construction | `graph:write`, `graph:read` | `dpop` | `tenant-default` |
+| `graph-api` | Graph Explorer gateway/API | `graph:read`, `graph:export`, `graph:simulate` | `dpop` | `tenant-default` |
+| `export-center-operator` | Export Center operator automation | `export.viewer`, `export.operator` | `dpop` | `tenant-default` |
+| `export-center-admin` | Export Center administrative automation | `export.viewer`, `export.operator`, `export.admin` | `dpop` | `tenant-default` |
+| `notify-service` | Notify WebService API | `notify.viewer`, `notify.operator` | `dpop` | `tenant-default` |
+| `notify-admin` | Notify administrative automation | `notify.viewer`, `notify.operator`, `notify.admin` | `dpop` | `tenant-default` |
+| `vuln-explorer-ui` | Vuln Explorer UI/API | `vuln:read` | `dpop` | `tenant-default` |
+| `signals-uploader` | Reachability sensor ingestion | `signals:write`, `signals:read`, `aoc:verify` | `dpop` | `tenant-default` |
+
+> **Secret hygiene (2025‑10‑27):** The repository includes a convenience `etc/authority.yaml` for compose/helm smoke tests. Every entry’s `secretFile` points to `etc/secrets/*.secret`, which ship with `*-change-me` placeholders—replace them with strong values (and wire them through your vault/secret manager) before issuing tokens in CI, staging, or production.
+ +For factory provisioning, issue sensors the **SignalsUploader** role template (`signals:write`, `signals:read`, `aoc:verify`). Authority rejects ingestion tokens that omit `aoc:verify`, preserving aggregation-only contract guarantees for reachability signals. + +These registrations are provided as examples in `etc/authority.yaml.sample`. Clone them per tenant (for example `concelier-tenant-a`, `concelier-tenant-b`) so tokens remain tenant-scoped by construction. + +Graph Explorer introduces dedicated scopes: `graph:write` for Cartographer build jobs, `graph:read` for query/read operations, `graph:export` for long-running export downloads, and `graph:simulate` for what-if overlays. Assign only the scopes a client actually needs to preserve least privilege—UI-facing clients should typically request read/export access, while background services (Cartographer, Scheduler) require write privileges. + +#### Least-privilege guidance for graph clients + +- **Service identities** – The Cartographer worker should request `graph:write` and `graph:read` only; grant `graph:simulate` exclusively to pipeline automation that invokes Policy Engine overlays on demand. Keep `graph:export` scoped to API gateway components responsible for streaming GraphML/JSONL artifacts. Authority enforces this by rejecting `graph:write` tokens that lack `properties.serviceIdentity: cartographer`. +- **Tenant propagation** – Every client registration must pin a `tenant` hint. Authority normalises the value and stamps it into issued tokens (`stellaops:tenant`) so downstream services (Scheduler, Graph API, Console) can enforce tenant isolation without custom headers. Graph scopes (`graph:read`, `graph:write`, `graph:export`, `graph:simulate`) are denied if the tenant hint is missing. +- **SDK alignment** – Use the generated `StellaOpsScopes` constants in service code to request graph scopes. Hard-coded strings risk falling out of sync as additional graph capabilities are added. 
+- **DPOP for automation** – Maintain sender-constrained (`dpop`) flows for Cartographer and Scheduler to limit reuse of access tokens if a build host is compromised. For UI-facing tokens, pair `graph:read`/`graph:export` with short lifetimes and enforce refresh-token rotation at the gateway. + +#### Export Center scope guardrails + +- **Viewer vs operator** – `export.viewer` grants read-only access to export profiles, manifests, and bundles. Automation that schedules or reruns exports should request `export.operator` (and typically `export.viewer`). Tenant hints remain mandatory; Authority refuses tokens without them. +- **Administrative mutations** – Changes to retention policies, encryption key references, or schedule defaults require `export.admin`. When requesting tokens with this scope, clients must supply `export_reason` and `export_ticket` parameters; Authority persists the values for audit records and rejects missing metadata with `invalid_request`. +- **Operational hygiene** – Rotate `export.admin` credentials infrequently and run them through fresh-auth workflows where possible. Prefer distributing verification tooling with `export.viewer` tokens for day-to-day bundle validation. + +#### Notify scope guardrails + +- **Viewer vs operator** – `notify.viewer` grants read-only access to rules, channels, and delivery history. Automation that edits rules or triggers test notifications must request `notify.operator` (and usually `notify.viewer`). Tenant hints remain mandatory. +- **Administrative controls** – Changes to channel secrets, quiet hours, or escalation policies require `notify.admin`. Authority logs these operations and surfaces `authority.notify.scope_violation` when tokens omit the scope or tenant. +- **Least privilege** – Assign `notify.admin` sparingly (platform operations, DR automation). Day-to-day rule editing should rely on `notify.operator` scoped per tenant. 
+ +#### Vuln Explorer permalinks + +- **Scope** – `vuln:read` authorises Vuln Explorer to fetch advisory/linkset evidence and issue shareable links. Assign it only to front-end/API clients that must render vulnerability details. +- **Signed links** – `POST /permalinks/vuln` (requires `vuln:read`) accepts `{ "tenant": "tenant-a", "resourceKind": "vulnerability", "state": { ... }, "expiresInSeconds": 86400 }` and returns a JWT (`token`) plus `issuedAt`/`expiresAt`. The token embeds the tenant, requested state, and `vuln:read` scope and is signed with the same Authority signing keys published via `/jwks`. +- **Validation** – Resource servers verify the permalink using cached JWKS: check signature, ensure the tenant matches the current request context, honour the expiry, and enforce the contained `vuln:read` scope. The payload’s `resource.state` block is opaque JSON so UIs can round-trip filters/search terms without new schema changes. + +## 4. Revocation Pipeline +Authority centralises revocation in `authority_revocations` with deterministic categories: + +| Category | Meaning | Required fields | +| --- | --- | --- | +| `token` | Specific OAuth token revoked early. | `revocationId` (token id), `tokenType`, optional `clientId`, `subjectId` | +| `subject` | All tokens for a subject disabled. | `revocationId` (= subject id) | +| `client` | OAuth client registration revoked. | `revocationId` (= client id) | +| `key` | Signing/JWE key withdrawn. | `revocationId` (= key id) | + +`RevocationBundleBuilder` flattens Mongo documents into canonical JSON, sorts entries by (`category`, `revocationId`, `revokedAt`), and signs exports using detached JWS (RFC 7797) with cosign-compatible headers. + +**Export surfaces** (deterministic output, suitable for Offline Kit): + +- CLI: `stella auth revoke export --output ./out` writes `revocation-bundle.json`, `.jws`, `.sha256`. 
+- Verification: `stella auth revoke verify --bundle <bundle> --signature <signature> --key <key>` validates detached JWS signatures before distribution, selecting the crypto provider advertised in the detached header (see `docs/security/revocation-bundle.md`).
+- API: `GET /internal/revocations/export` (requires bootstrap API key) returns the same payload.
+- Verification: `stella auth revoke verify` validates schema, digest, and detached JWS using cached JWKS or offline keys, automatically preferring the hinted provider (libsodium builds honour `provider=libsodium`; other builds fall back to the managed provider).
+
+**Consumer guidance:**
+
+1. Mirror `revocation-bundle.json*` alongside Concelier exports. Offline agents fetch both over the existing update channel.
+2. Use bundle `sequence` and `bundleId` to detect replay or monotonicity regressions. Ignore bundles with older sequence numbers unless `bundleId` changes and `issuedAt` advances.
+3. Treat `revokedReason` taxonomy as machine-friendly codes (`compromised`, `rotation`, `policy`, `lifecycle`). Translating to human-readable logs is the consumer’s responsibility.
+
+## 5. Signing Keys & JWKS Rotation
+Authority signs revocation bundles and publishes JWKS entries via the new signing manager:
+
+- **Configuration (`authority.yaml`):**
+  ```yaml
+  signing:
+    enabled: true
+    algorithm: ES256 # Defaults to ES256
+    keySource: file # Loader identifier (file, vault, etc.)
+    provider: default # Optional preferred crypto provider
+    activeKeyId: authority-signing-dev
+    keyPath: "../certificates/authority-signing-dev.pem"
+    additionalKeys:
+      - keyId: authority-signing-dev-2024
+        path: "../certificates/authority-signing-dev-2024.pem"
+        source: "file"
+  ```
+- **Sources:** The default loader supports PEM files relative to the content root; additional loaders can be registered via `IAuthoritySigningKeySource`.
+- **Providers:** Keys are registered against the `ICryptoProviderRegistry`, so alternative implementations (HSM, libsodium) can be plugged in without changing host code. +- **OpenAPI discovery:** `GET /.well-known/openapi` returns the published authentication contract (JSON by default, YAML when requested). Responses include `X-StellaOps-Service`, `X-StellaOps-Api-Version`, `X-StellaOps-Build-Version`, plus grant and scope headers, and honour conditional requests via `ETag`/`If-None-Match`. +- **JWKS output:** `GET /jwks` lists every signing key with `status` metadata (`active`, `retired`). Old keys remain until operators remove them from configuration, allowing verification of historical bundles/tokens. + +### Rotation SOP (no downtime) +1. Generate a new P-256 private key (PEM) on an offline workstation and place it where the Authority host can read it (e.g., `../certificates/authority-signing-2025.pem`). +2. Call the authenticated admin API: + ```bash + curl -sS -X POST https://authority.example.com/internal/signing/rotate \ + -H "x-stellaops-bootstrap-key: ${BOOTSTRAP_KEY}" \ + -H "Content-Type: application/json" \ + -d '{ + "keyId": "authority-signing-2025", + "location": "../certificates/authority-signing-2025.pem", + "source": "file" + }' + ``` +3. Verify the response reports the previous key as retired and fetch `/jwks` to confirm the new `kid` appears with `status: "active"`. +4. Persist the old key path in `signing.additionalKeys` (the rotation API updates in-memory options; rewrite the YAML to match so restarts remain consistent). +5. If you prefer automation, trigger the `.gitea/workflows/authority-key-rotation.yml` workflow with the new `keyId`/`keyPath`; it wraps `ops/authority/key-rotation.sh` and reads environment-specific secrets. The older key will be marked `retired` and appended to `signing.additionalKeys`. +6. Re-run `stella auth revoke export` so revocation bundles are signed with the new key. 
Downstream caches should refresh JWKS within their configured lifetime (`StellaOpsAuthorityOptions.Signing` + client cache tolerance). + +The rotation API leverages the same cryptography abstractions as revocation signing; no restart is required and the previous key is marked `retired` but kept available for verification. + +## 6. Bootstrap & Administrative Endpoints +Administrative APIs live under `/internal/*` and require the bootstrap API key plus rate-limiter compliance. + +| Endpoint | Method | Description | +| --- | --- | --- | +| `/internal/users` | `POST` | Provision initial administrative accounts through the registered password-capable plug-in. Emits structured audit events. | +| `/internal/clients` | `POST` | Provision OAuth clients (client credentials / device code). | +| `/internal/revocations/export` | `GET` | Export revocation bundle + detached JWS + digest. | +| `/internal/signing/rotate` | `POST` | Promote a new signing key (see SOP above). Request body accepts `keyId`, `location`, optional `source`, `algorithm`, `provider`, and metadata. | + +All administrative calls emit `AuthEventRecord` entries enriched with correlation IDs, PII tags, and network metadata for offline SOC ingestion. + +> **Tenant hint:** include a `tenant` entry inside `properties` when bootstrapping clients. Authority normalises the value, stores it on the registration, and stamps future tokens/audit events with the tenant. + +### Bootstrap client example + +```jsonc +POST /internal/clients +{ + "clientId": "concelier", + "confidential": true, + "displayName": "Concelier Backend", + "allowedGrantTypes": ["client_credentials"], + "allowedScopes": ["concelier.jobs.trigger", "advisory:ingest", "advisory:read"], + "properties": { + "tenant": "tenant-default" + } +} +``` + +For environments with multiple tenants, repeat the call per tenant-specific client (e.g. `concelier-tenant-a`, `concelier-tenant-b`) or append suffixes to the client identifier. 
+
+### Aggregation-only verification tokens
+
+- Issue a dedicated client (e.g. `aoc-verifier`) with the scopes `aoc:verify`, `advisory:read`, and `vex:read` for each tenant that runs guard checks. Authority refuses to mint tokens for these scopes unless the client registration provides a tenant hint.
+- The CLI (`stella aoc verify --tenant <tenant>`) and Console verification panel both call `/aoc/verify` on Concelier and Excititor. Tokens that omit the tenant claim or present a tenant that does not match the stored registration are rejected with `invalid_client`/`invalid_token`.
+- Audit: `authority.client_credentials.grant` entries record `scope.invalid="aoc:verify"` when requests are rejected because the tenant hint is missing or mismatched.
+
+### Exception approvals & routing
+
+- New scopes `exceptions:read`, `exceptions:write`, and `exceptions:approve` govern access to the exception lifecycle. Map these via tenant roles (`exceptions-service`, `exceptions-approver`) as described in `/docs/security/authority-scopes.md`.
+- Configure approval routing in `authority.yaml` with declarative templates. Each template exposes an `authorityRouteId` for downstream services (Policy Engine, Console) and an optional `requireMfa` flag:
+
+```yaml
+exceptions:
+  routingTemplates:
+    - id: "secops"
+      authorityRouteId: "approvals/secops"
+      requireMfa: true
+      description: "Security Operations approval chain"
+    - id: "governance"
+      authorityRouteId: "approvals/governance"
+      requireMfa: false
+      description: "Non-production waiver review"
+```
+
+- Clients requesting exception scopes must include a tenant assignment. Authority rejects client-credential flows that request `exceptions:*` with `invalid_client` and logs `scope.invalid="exceptions:write"` (or the requested scope) in `authority.client_credentials.grant` audit events when the tenant hint is missing.
+- When any configured routing template sets `requireMfa: true`, user-facing tokens that contain `exceptions:approve` must be acquired through an MFA-capable identity provider. Password/OIDC flows that lack MFA support are rejected with `authority.password.grant` audit events where `reason="Exception approval scope requires an MFA-capable identity provider."` +- Update interactive clients (Console) to request `exceptions:read` by default and elevate to `exceptions:approve` only inside fresh-auth workflows for approvers. Documented examples live in `etc/authority.yaml.sample`. +- Verification responses map guard failures to `ERR_AOC_00x` codes and Authority emits `authority.client_credentials.grant` + `authority.token.validate_access` audit records containing the tenant and scopes so operators can trace who executed a run. +- For air-gapped or offline replicas, pre-issue verification tokens per tenant and rotate them alongside ingest credentials; the guard endpoints never mutate data and remain safe to expose through the offline kit schedule. + +## 7. Configuration Reference + +| Section | Key | Description | Notes | +| --- | --- | --- | --- | +| Root | `issuer` | Absolute HTTPS issuer advertised to clients. | Required. Loopback HTTP allowed only for development. | +| Tokens | `accessTokenLifetime`, `refreshTokenLifetime`, etc. | Lifetimes for each grant (access, refresh, device, authorization code, identity). | Enforced during issuance; persisted on each token document. | +| Storage | `storage.connectionString` | MongoDB connection string. | Required even for tests; offline kits ship snapshots for seeding. | +| Signing | `signing.enabled` | Enable JWKS/revocation signing. | Disable only for development. | +| Signing | `signing.algorithm` | Signing algorithm identifier. | Currently ES256; additional curves can be wired through crypto providers. | +| Signing | `signing.keySource` | Loader identifier (`file`, `vault`, custom). 
| Determines which `IAuthoritySigningKeySource` resolves keys. | +| Signing | `signing.keyPath` | Relative/absolute path understood by the loader. | Stored as-is; rotation request should keep it in sync with filesystem layout. | +| Signing | `signing.activeKeyId` | Active JWKS / revocation signing key id. | Exposed as `kid` in JWKS and bundles. | +| Signing | `signing.additionalKeys[].keyId` | Retired key identifier retained for verification. | Manager updates this automatically after rotation; keep YAML aligned. | +| Signing | `signing.additionalKeys[].source` | Loader identifier per retired key. | Defaults to `signing.keySource` if omitted. | +| Security | `security.rateLimiting` | Fixed-window limits for `/token`, `/authorize`, `/internal/*`. | See `docs/security/rate-limits.md` for tuning. | +| Bootstrap | `bootstrap.apiKey` | Shared secret required for `/internal/*`. | Only required when `bootstrap.enabled` is true. | + +### 7.1 Sender-constrained clients (DPoP & mTLS) + +Authority now understands two flavours of sender-constrained OAuth clients: + +- **DPoP proof-of-possession** – clients sign a `DPoP` header for `/token` requests. Authority validates the JWK thumbprint, HTTP method/URI, and replay window, then stamps the resulting access token with `cnf.jkt` so downstream services can verify the same key is reused. + - Configure under `security.senderConstraints.dpop`. `allowedAlgorithms`, `proofLifetime`, and `replayWindow` are enforced at validation time. + - `security.senderConstraints.dpop.nonce.enabled` enables nonce challenges for high-value audiences (`requiredAudiences`, normalised to case-insensitive strings). When a nonce is required but missing or expired, `/token` replies with `WWW-Authenticate: DPoP error="use_dpop_nonce"` (and, when available, a fresh `DPoP-Nonce` header). Clients must retry with the issued nonce embedded in the proof. + - `security.senderConstraints.dpop.nonce.store` selects `memory` (default) or `redis`. 
When `redis` is configured, set `security.senderConstraints.dpop.nonce.redisConnectionString` so replicas share nonce issuance and high-value clients avoid replay gaps during failover. + - Example (enabling Redis-backed nonces; adjust audiences per deployment): + ```yaml + security: + senderConstraints: + dpop: + enabled: true + proofLifetime: "00:02:00" + replayWindow: "00:05:00" + allowedAlgorithms: [ "ES256", "ES384" ] + nonce: + enabled: true + ttl: "00:10:00" + maxIssuancePerMinute: 120 + store: "redis" + redisConnectionString: "redis://authority-redis:6379?ssl=false" + requiredAudiences: + - "signer" + - "attestor" + ``` + Operators can override any field via environment variables (e.g. `STELLAOPS_AUTHORITY__SECURITY__SENDERCONSTRAINTS__DPOP__NONCE__STORE=redis`). + - Declare client `audiences` in bootstrap manifests or plug-in provisioning metadata; Authority now defaults the token `aud` claim and `resource` indicator from this list, which is also used to trigger nonce enforcement for audiences such as `signer` and `attestor`. +- **Mutual TLS clients** – client registrations may declare an mTLS binding (`senderConstraint: mtls`). When enabled via `security.senderConstraints.mtls`, Authority validates the presented client certificate against stored bindings (`certificateBindings[]`), optional chain verification, and timing windows. Successful requests embed `cnf.x5t#S256` into the access token (and introspection output) so resource servers can enforce the certificate thumbprint. + - `security.senderConstraints.mtls.enforceForAudiences` forces mTLS whenever the requested `aud`/`resource` (or the client's configured audiences) intersect the configured allow-list (default includes `signer`). Clients configured for different sender constraints are rejected early so operator policy remains consistent. 
+ - Certificate bindings now act as an allow-list: Authority verifies thumbprint, subject, issuer, serial number, and any declared SAN values against the presented certificate, with rotation grace windows applied to `notBefore/notAfter`. Operators can enforce subject regexes, SAN type allow-lists (`dns`, `uri`, `ip`), trusted certificate authorities, and rotation grace via `security.senderConstraints.mtls.*`. + +Both modes persist additional metadata in `authority_tokens`: `senderConstraint` records the enforced policy, while `senderKeyThumbprint` stores the DPoP JWK thumbprint or mTLS certificate hash captured at issuance. Downstream services can rely on these fields (and the corresponding `cnf` claim) when auditing offline copies of the token store. + +### 7.2 Policy Engine clients & scopes + +Policy Engine v2 introduces dedicated scopes and a service identity that materialises effective findings. Configure Authority as follows when provisioning policy clients: + +| Client | Scopes | Notes | +| --- | --- | --- | +| `policy-engine` (service) | `policy:run`, `findings:read`, `effective:write` | Must include `properties.serviceIdentity: policy-engine` and a tenant. Authority rejects `effective:write` tokens without the marker or tenant. | +| `policy-cli` / automation | `policy:read`, `policy:author`, `policy:review`, `policy:simulate`, `findings:read` *(optionally add `policy:approve` / `policy:operate` / `policy:activate` for promotion pipelines)* | Keep scopes minimal; reroll CLI/CI tokens issued before 2025‑10‑27 so they drop legacy scope names and adopt the new set. | +| UI/editor sessions | `policy:read`, `policy:author`, `policy:simulate` (+ reviewer/approver/operator scopes as appropriate) | Issue tenant-specific clients so audit and rate limits remain scoped. 
| + +Sample YAML entry: + +```yaml + - clientId: "policy-engine" + displayName: "Policy Engine Service" + grantTypes: [ "client_credentials" ] + audiences: [ "api://policy-engine" ] + scopes: [ "policy:run", "findings:read", "effective:write" ] + tenant: "tenant-default" + properties: + serviceIdentity: "policy-engine" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/policy-engine.secret" +``` + +Compliance checklist: + +- [ ] `policy-engine` client includes `properties.serviceIdentity: policy-engine` and a tenant hint; logins missing either are rejected. +- [ ] Non-service clients omit `effective:write` and receive only the scopes required for their role (`policy:read`, `policy:author`, `policy:review`, `policy:approve`, `policy:operate`, `policy:simulate`, etc.). +- [ ] Legacy tokens using `policy:write`/`policy:submit`/`policy:edit` are rotated to the new scope set before Production change freeze (see release migration note below). +- [ ] Approval/activation workflows use identities distinct from authoring identities; tenants are provisioned per client to keep telemetry segregated. +- [ ] Operators document reviewer assignments and incident procedures alongside `/docs/security/policy-governance.md` and archive policy evidence bundles (`stella policy bundle export`) with each release. + +### 7.3 Orchestrator roles & scopes + +| Role / Client | Scopes | Notes | +| --- | --- | --- | +| `Orch.Viewer` role | `orch:read` | Read-only access to Orchestrator dashboards, queues, and telemetry. | +| `Orch.Operator` role | `orch:read`, `orch:operate` | Issue short-lived tokens for control actions (pause/resume, retry, sync). Token requests **must** include `operator_reason` (≤256 chars) and `operator_ticket` (≤128 chars); Authority rejects requests missing either value and records both in audit events. | +| `Orch.Admin` role | `orch:read`, `orch:operate`, `orch:quota` | Manage tenant quotas/burst ceilings/backfill allowances. 
Tokens **must** include `quota_reason` (≤256 chars); optional `quota_ticket` (≤128 chars) is stored for audit trails. | + +Token request example via client credentials: + +```bash +curl -u orch-operator:s3cr3t! \ + -d 'grant_type=client_credentials' \ + -d 'scope=orch:operate' \ + -d 'operator_reason=resume source after maintenance' \ + -d 'operator_ticket=INC-2045' \ + https://authority.example.com/token +``` + +Tokens lacking `operator_reason` or `operator_ticket` receive `invalid_request`; audit events (`authority.client_credentials.grant`) surface the supplied values under `request.reason` and `request.ticket` for downstream review. +CLI clients set these parameters via `Authority.OperatorReason` / `Authority.OperatorTicket` (environment variables `STELLAOPS_ORCH_REASON` and `STELLAOPS_ORCH_TICKET`). + +Quota administration tokens follow the same pattern: + +```bash +curl -u orch-admin:s3cr3t! \ + -d 'grant_type=client_credentials' \ + -d 'scope=orch:quota' \ + -d 'quota_reason=temporary burst for release catch-up' \ + -d 'quota_ticket=CHG-8821' \ + https://authority.example.com/token +``` + +CLI automation should supply these values via `Authority.QuotaReason` / `Authority.QuotaTicket` (environment variables `STELLAOPS_ORCH_QUOTA_REASON` and `STELLAOPS_ORCH_QUOTA_TICKET`). Missing `quota_reason` yields `invalid_request`; when provided, both reason and ticket are captured in audit properties (`quota.reason`, `quota.ticket`). + +## 8. Offline & Sovereign Operation +- **No outbound dependencies:** Authority only contacts MongoDB and local plugins. Discovery and JWKS are cached by clients with offline tolerances (`AllowOfflineCacheFallback`, `OfflineCacheTolerance`). Operators should mirror these responses for air-gapped use. +- **Structured logging:** Every revocation export, signing rotation, bootstrap action, and token issuance emits structured logs with `traceId`, `client_id`, `subjectId`, and `network.remoteIp` where applicable. 
Mirror logs to your SIEM to retain audit trails without central connectivity. +- **Determinism:** Sorting rules in token and revocation exports guarantee byte-for-byte identical artefacts given the same datastore state. Hashes and signatures remain stable across machines. + +## 9. Operational Checklist +- [ ] Protect the bootstrap API key and disable bootstrap endpoints (`bootstrap.enabled: false`) once initial setup is complete. +- [ ] Schedule `stella auth revoke export` (or `/internal/revocations/export`) at the same cadence as Concelier exports so bundles remain in lockstep. +- [ ] Rotate signing keys before expiration; keep at least one retired key until all cached bundles/tokens signed with it have expired. +- [ ] Monitor `/health` and `/ready` plus rate-limiter metrics to detect plugin outages early. +- [ ] Ensure downstream services cache JWKS and revocation bundles within tolerances; stale caches risk accepting revoked tokens. + +For plug-in specific requirements, refer to **[Authority Plug-in Developer Guide](dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md)**. For revocation bundle validation workflow, see **[Authority Revocation Bundle](security/revocation-bundle.md)**. diff --git a/docs/24_OFFLINE_KIT.md b/docs/24_OFFLINE_KIT.md index 3c913f65..70975e55 100755 --- a/docs/24_OFFLINE_KIT.md +++ b/docs/24_OFFLINE_KIT.md @@ -17,13 +17,13 @@ completely isolated network: | **Provenance** | Cosign signature, SPDX 2.3 SBOM, in‑toto SLSA attestation | | **Attested manifest** | `offline-manifest.json` + detached JWS covering bundle metadata, signed during export. | | **Delta patches** | Daily diff bundles keep size \< 350 MB | -| **Scanner plug-ins** | OS analyzers plus the Node.js, Go, .NET, and Python language analyzers packaged under `plugins/scanner/analyzers/**` with manifests so Workers load deterministically offline. 
| +| **Scanner plug-ins** | OS analyzers plus the Node.js, Go, .NET, Python, and Rust language analyzers packaged under `plugins/scanner/analyzers/**` with manifests so Workers load deterministically offline. | | **Debug store** | `.debug` artefacts laid out under `debug/.build-id//.debug` with `debug/debug-manifest.json` mapping build-ids to originating images for symbol retrieval. | | **Telemetry collector bundle** | `telemetry/telemetry-offline-bundle.tar.gz` plus `.sha256`, containing OTLP collector config, Helm/Compose overlays, and operator instructions. | **RU BDU note:** ship the official Russian Trusted Root/Sub CA bundle (`certificates/russian_trusted_bundle.pem`) inside the kit so `concelier:httpClients:source.bdu:trustedRootPaths` can resolve it when the service runs in an air‑gapped network. Drop the most recent `vulxml.zip` alongside the kit if operators need a cold-start cache. -**Language analyzers:** the kit now carries the restart-only Node.js, Go, .NET, and Python analyzer plug-ins (`plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Node/`, `...Lang.Go/`, `...Lang.DotNet/`, `...Lang.Python/`). Drop the directories alongside Worker binaries so the unified plug-in catalog can load them without outbound fetches; Rust remains on the Wave 4 roadmap. +**Language analyzers:** the kit now carries the restart-only Node.js, Go, .NET, Python, and Rust plug-ins (`plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Node/`, `...Lang.Go/`, `...Lang.DotNet/`, `...Lang.Python/`, `...Lang.Rust/`). Drop the directories alongside Worker binaries so the unified plug-in catalog can load them without outbound fetches. *Scanner core:* C# 12 on **.NET {{ dotnet }}**. 
*Imports are idempotent and atomic — no service downtime.* @@ -162,13 +162,31 @@ Example excerpt (2025-10-23 kit) showing the Go and .NET analyzer plug-in payloa "size": 31896, "capturedAt": "2025-10-26T00:00:00Z" } -{ - "name": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python/manifest.json", - "sha256": "668ad9a1a35485628677b639db4d996d1e25f62021680a81a22482483800e557", - "size": 648, - "capturedAt": "2025-10-26T00:00:00Z" -} -``` +{ + "name": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python/manifest.json", + "sha256": "668ad9a1a35485628677b639db4d996d1e25f62021680a81a22482483800e557", + "size": 648, + "capturedAt": "2025-10-26T00:00:00Z" +} +{ + "name": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.dll", + "sha256": "d90ba8b6ace7d98db563b1dec178d57ac09df474e1342fa1daa38bd55e17b185", + "size": 54784, + "capturedAt": "2025-11-01T00:00:00Z" +} +{ + "name": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.pdb", + "sha256": "6fac88640a4980d2bb8f7ea2dd2f3d0a521b90fd30ae3a84981575d5f76fa3df", + "size": 36636, + "capturedAt": "2025-11-01T00:00:00Z" +} +{ + "name": "plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Rust/manifest.json", + "sha256": "1ec47d1a2103ad5eff23e903532cb76b1ed7ded85d301c1a6631ff21aa966ed4", + "size": 658, + "capturedAt": "2025-11-01T00:00:00Z" +} +``` --- @@ -214,11 +232,15 @@ The Offline Kit carries the same helper scripts under `scripts/`: ### Authority scope sanity check Offline installs rely on the bundled `etc/authority.yaml.sample`. Before promoting the kit, confirm the sample clients keep the Aggregation-Only guardrails: - -- `aoc-verifier` requests `aoc:verify`, `advisory:read`, and `vex:read`. -- `signals-uploader` requests `signals:write`, `signals:read`, and `aoc:verify`. 
- -Authority now rejects tokens that request `advisory:read`, `vex:read`, or any `signals:*` scope without `aoc:verify`; the sample has been updated to match. If you maintain tenant-specific overlays, mirror the same pairing so air-gapped automation fails deterministically with `invalid_scope` when misconfigured. + +- `aoc-verifier` requests `aoc:verify`, `advisory:read`, and `vex:read`. +- `signals-uploader` requests `signals:write`, `signals:read`, and `aoc:verify`. +- `airgap-operator` requests `airgap:status:read`, `airgap:import`, and `airgap:seal`. +- `task-runner` requests `packs.run` and `packs.read` for execution flows. +- `pack-approver` requests `packs.approve` (plus `packs.read`) for automation that resumes runs after approvals. +- `packs-registry` requests `packs.write` and `packs.read` for publishing bundles. + +Authority now rejects tokens that request `advisory:read`, `vex:read`, or any `signals:*` scope without `aoc:verify`; the sample has been updated to match. Air-gap scopes (`airgap:*`) also require an explicit tenant assignment—match the updated roles (`airgap-viewer`, `airgap-operator`, `airgap-admin`) so automation fails closed when misconfigured. **Quick smoke test:** before import, verify the tarball carries the Go analyzer plug-in: @@ -228,7 +250,7 @@ tar -tzf stella-ops-offline-kit-.tgz 'plugins/scanner/analyzers/lang/Stell The manifest lookup above and this `tar` listing should both surface the Go analyzer DLL, PDB, and manifest entries before the kit is promoted. -> **Release guardrail.** The automated release pipeline now publishes the Python plug-in from source and executes `dotnet run --project src/Tools/LanguageAnalyzerSmoke --configuration Release -- --repo-root ` to validate manifest integrity and cold/warm determinism within the < 30 s / < 5 s budgets (differences versus repository goldens are logged for triage). 
Run `ops/offline-kit/run-python-analyzer-smoke.sh` locally before shipping a refreshed kit if you rebuild artefacts outside CI or when preparing the air-gap bundle. +> **Release guardrail.** The automated release pipeline now publishes the Python and Rust plug-ins from source and executes `dotnet run --project src/Tools/LanguageAnalyzerSmoke --configuration Release -- --repo-root --analyzer ` to validate manifest integrity and cold/warm determinism within the < 30 s / < 5 s budgets (differences versus repository goldens are logged for triage). Run `ops/offline-kit/run-python-analyzer-smoke.sh` and `ops/offline-kit/run-rust-analyzer-smoke.sh` locally before shipping a refreshed kit if you rebuild artefacts outside CI or when preparing the air-gap bundle. ### Debug store mirror diff --git a/docs/TASKS.md b/docs/TASKS.md index ce1cee07..a9a74a6e 100644 --- a/docs/TASKS.md +++ b/docs/TASKS.md @@ -68,7 +68,7 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| | DOCS-ATTEST-73-001 | TODO | Docs Guild, Attestor Service Guild | ATTEST-TYPES-73-001 | Publish `/docs/modules/attestor/overview.md` with imposed rule banner. | Doc merged; terminology validated. | -| DOCS-ATTEST-73-002 | TODO | Docs Guild, Attestation Payloads Guild | ATTEST-TYPES-73-002 | Write `/docs/modules/attestor/payloads.md` with schemas/examples. | Doc merged; examples validated via tests. | +| DOCS-ATTEST-73-002 | DONE | Docs Guild, Attestation Payloads Guild | ATTEST-TYPES-73-002 | Write `/docs/modules/attestor/payloads.md` with schemas/examples. | Doc merged; examples validated via tests. | | DOCS-ATTEST-73-003 | TODO | Docs Guild, Policy Guild | POLICY-ATTEST-73-002 | Publish `/docs/modules/attestor/policies.md` covering verification policies. | Doc merged; policy examples validated. 
| | DOCS-ATTEST-73-004 | TODO | Docs Guild, Attestor Service Guild | ATTESTOR-73-002 | Add `/docs/modules/attestor/workflows.md` detailing ingest, verify, bulk operations. | Doc merged; workflows tested. | | DOCS-ATTEST-74-001 | TODO | Docs Guild, KMS Guild | KMS-73-001 | Publish `/docs/modules/attestor/keys-and-issuers.md`. | Doc merged; rotation guidance verified. | diff --git a/docs/airgap/airgap-mode.md b/docs/airgap/airgap-mode.md index 97161042..91a59f58 100644 --- a/docs/airgap/airgap-mode.md +++ b/docs/airgap/airgap-mode.md @@ -1,71 +1,72 @@ -# Air-Gapped Mode Playbook - -> Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -## Overview - -Air-Gapped Mode is the supported operating profile for deployments with **zero external egress**. All inputs arrive via signed mirror bundles, and every surface (CLI, Console, APIs, schedulers, scanners) operates under sealed-network constraints while preserving Aggregation-Only Contract invariants. - -- **Primary components:** Web Services API, Console, CLI, Orchestrator, Task Runner, Conseiller (Feedser), Excitator (VEXer), Policy Engine, Findings Ledger, Export Center, Authority & Tenancy, Notifications, Observability & Forensics. -- **Surfaces:** offline bootstrap, mirror ingestion, deterministic jobs, offline advisories/VEX/policy packs/notifications, evidence exports. -- **Dependencies:** Export Center, Containerized Distribution, Authority-backed scopes & tenancy, Observability & Forensics, Policy Studio. - -## Guiding principles - -1. **Zero egress:** all outbound network calls are disabled unless explicitly allowed. Any feature requiring online data must degrade gracefully with clear UX messaging. -2. **Deterministic inputs:** the platform accepts only signed Mirror Bundles (advisories, VEX, policy packs, vendor feeds, images, dashboards). Bundles carry provenance attestations and chain-of-custody manifests. -3. 
**Auditable exchange:** every import/export records provenance, signatures, and operator identity. Evidence bundles and reports remain verifiable offline. -4. **Aggregation-Only Contract compliance:** Conseiller and Excitator continue to aggregate without mutating source records, even when ingesting mirrored feeds. -5. **Operator ergonomics:** offline bootstrap, upgrade, and verification steps are reproducible and scripted. - -## Lifecycle & modes - -| Mode | Description | Tooling | -| --- | --- | --- | -| Connected | Standard deployment with online feeds. Operators use Export Center to build mirror bundles for offline environments. | `stella export bundle create --profile mirror:full` | -| Staging mirror | Sealed host that fetches upstream feeds, runs validation, and signs mirror bundles. | Export Center, cosign, bundle validation scripts | -| Air-gapped | Production cluster with egress sealed, consuming validated bundles, issuing provenance for inward/outward transfers. | Mirror import CLI, sealed-mode runtime flags | - -### Installation & bootstrap - -1. Prepare mirror bundles (images, charts, advisories/VEX, policy packs, dashboards, telemetry configs). -2. Transfer bundles via approved media and validate signatures (`cosign verify`, bundle manifest hash). -3. Deploy platform using offline artefacts (`helm install --set airgap.enabled=true`), referencing local registry/object storage. - -### Updates - -1. Staging host generates incremental bundles (mirror delta) with provenance. -2. Offline site imports bundles via the CLI (`stella airgap import --bundle`) and records chain-of-custody. -3. Scheduler triggers replay jobs with deterministic timelines; results remain reproducible across imports. - -## Component responsibilities - -| Component | Offline duties | -| --- | --- | -| Export Center | Produce full/delta mirror bundles, signed manifests, provenance attestations. 
| -| Authority & Tenancy | Provide offline scope enforcement, short-lived tokens, revocation via local CRLs. | -| Conseiller / Excitator | Ingest mirrored advisories/VEX, enforce AOC, versioned observations. | -| Policy Engine & Findings Ledger | Replay evaluations using offline feeds, emit explain traces, support sealed-mode hints. | -| Notifications | Deliver locally via approved channels (email relay, webhook proxies) or queue for manual export. | -| Observability | Collect metrics/logs/traces locally, generate forensic bundles for external analysis. | - -## Operational guardrails - -- **Network policy:** enforce allowlists (`airgap.egressAllowlist=[]`). Any unexpected outbound request raises an alert. -- **Bundle validation:** double-sign manifests (bundle signer + site-specific cosign key); reject on mismatch. -- **Time synchronization:** rely on local NTP or manual clock audits; many signatures require monotonic time. -- **Key rotation:** plan for offline key ceremonies; Export Center and Authority document rotation playbooks. -- **Incident response:** maintain scripts for replaying imports, regenerating manifests, and exporting forensic data without egress. - -## Testing & verification - -- Integration tests mimic offline installs by running with `AIRGAP_ENABLED=true` in CI. -- Mirror bundles include validation scripts to compare hash manifests across staging and production. -- Sealed-mode smoke tests ensure services fail closed when attempting egress. - -## References - -- Export workflows: `docs/modules/export-center/overview.md` -- Policy sealed-mode hints: `docs/policy/overview.md` -- Observability forensic bundles: `docs/modules/telemetry/architecture.md` -- Runtime posture enforcement: `docs/modules/zastava/operations/runtime.md` +# Air-Gapped Mode Playbook + +> Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. 
+
+## Overview
+
+Air-Gapped Mode is the supported operating profile for deployments with **zero external egress**. All inputs arrive via signed mirror bundles, and every surface (CLI, Console, APIs, schedulers, scanners) operates under sealed-network constraints while preserving Aggregation-Only Contract invariants.
+
+- **Primary components:** Web Services API, Console, CLI, Orchestrator, Task Runner, Concelier (Feedser), Excititor (VEXer), Policy Engine, Findings Ledger, Export Center, Authority & Tenancy, Notifications, Observability & Forensics.
+- **Surfaces:** offline bootstrap, mirror ingestion, deterministic jobs, offline advisories/VEX/policy packs/notifications, evidence exports.
+- **Dependencies:** Export Center, Containerized Distribution, Authority-backed scopes & tenancy, Observability & Forensics, Policy Studio.
+
+## Guiding principles
+
+1. **Zero egress:** all outbound network calls are disabled unless explicitly allowed. Any feature requiring online data must degrade gracefully with clear UX messaging.
+2. **Deterministic inputs:** the platform accepts only signed Mirror Bundles (advisories, VEX, policy packs, vendor feeds, images, dashboards). Bundles carry provenance attestations and chain-of-custody manifests.
+3. **Auditable exchange:** every import/export records provenance, signatures, and operator identity. Evidence bundles and reports remain verifiable offline.
+4. **Aggregation-Only Contract compliance:** Concelier and Excititor continue to aggregate without mutating source records, even when ingesting mirrored feeds.
+5. **Operator ergonomics:** offline bootstrap, upgrade, and verification steps are reproducible and scripted.
+
+## Lifecycle & modes
+
+| Mode | Description | Tooling |
+| --- | --- | --- |
+| Connected | Standard deployment with online feeds. Operators use Export Center to build mirror bundles for offline environments.
| `stella export bundle create --profile mirror:full` |
+| Staging mirror | Sealed host that fetches upstream feeds, runs validation, and signs mirror bundles. | Export Center, cosign, bundle validation scripts |
+| Air-gapped | Production cluster with egress sealed, consuming validated bundles, issuing provenance for inward/outward transfers. | Mirror import CLI, sealed-mode runtime flags |
+
+### Installation & bootstrap
+
+1. Prepare mirror bundles (images, charts, advisories/VEX, policy packs, dashboards, telemetry configs).
+2. Transfer bundles via approved media and validate signatures (`cosign verify`, bundle manifest hash).
+3. Deploy platform using offline artefacts (`helm install --set airgap.enabled=true`), referencing local registry/object storage.
+
+### Updates
+
+1. Staging host generates incremental bundles (mirror delta) with provenance.
+2. Offline site imports bundles via the CLI (`stella airgap import --bundle`) and records chain-of-custody.
+3. Scheduler triggers replay jobs with deterministic timelines; results remain reproducible across imports.
+
+## Component responsibilities
+
+| Component | Offline duties |
+| --- | --- |
+| Export Center | Produce full/delta mirror bundles, signed manifests, provenance attestations. |
+| Authority & Tenancy | Provide offline scope enforcement, short-lived tokens, revocation via local CRLs. |
+| Concelier / Excititor | Ingest mirrored advisories/VEX, enforce AOC, versioned observations. |
+| Policy Engine & Findings Ledger | Replay evaluations using offline feeds, emit explain traces, support sealed-mode hints. |
+| Notifications | Deliver locally via approved channels (email relay, webhook proxies) or queue for manual export. |
+| Observability | Collect metrics/logs/traces locally, generate forensic bundles for external analysis. |
+
+## Operational guardrails
+
+- **Network policy:** enforce allowlists (`airgap.egressAllowlist=[]`). Any unexpected outbound request raises an alert.
+- **Bundle validation:** double-sign manifests (bundle signer + site-specific cosign key); reject on mismatch. +- **Time synchronization:** rely on local NTP or manual clock audits; many signatures require monotonic time. +- **Key rotation:** plan for offline key ceremonies; Export Center and Authority document rotation playbooks. +- **Authority scopes:** enforce `airgap:status:read`, `airgap:import`, and `airgap:seal` via tenant-scoped roles; require operator reason/ticket metadata for sealing. +- **Incident response:** maintain scripts for replaying imports, regenerating manifests, and exporting forensic data without egress. + +## Testing & verification + +- Integration tests mimic offline installs by running with `AIRGAP_ENABLED=true` in CI. +- Mirror bundles include validation scripts to compare hash manifests across staging and production. +- Sealed-mode smoke tests ensure services fail closed when attempting egress. + +## References + +- Export workflows: `docs/modules/export-center/overview.md` +- Policy sealed-mode hints: `docs/policy/overview.md` +- Observability forensic bundles: `docs/modules/telemetry/architecture.md` +- Runtime posture enforcement: `docs/modules/zastava/operations/runtime.md` diff --git a/docs/api/authority-legacy-auth-endpoints.md b/docs/api/authority-legacy-auth-endpoints.md new file mode 100644 index 00000000..ec3bece1 --- /dev/null +++ b/docs/api/authority-legacy-auth-endpoints.md @@ -0,0 +1,32 @@ +# Legacy Authority Authentication Endpoints — Deprecation Guidance + +**Announced:** 1 November 2025 +**Sunset (removal no earlier than):** 1 May 2026 + +## Summary + +StellaOps Authority previously exposed OAuth 2.1 endpoints at `/oauth/token`, `/oauth/revoke`, and `/oauth/introspect` to ease migration from early previews. Those aliases are now **deprecated** in favour of the canonical paths (`/token`, `/revoke`, `/introspect`). 
All responses from the legacy routes include: + +- `Deprecation` — RFC 7231 HTTP-date set to 1 November 2025. +- `Sunset` — HTTP-date advertising the planned removal on 1 May 2026. +- `Warning` — RFC 7234 `299` warning describing the migration requirement. +- `Link` — `rel="sunset"` URI pointing back to this guidance. + +No new features (DPoP nonces, audit upgrades, policy scopes) will ship on the legacy routes. After 1 May 2026 the aliases will return `410 Gone` and be removed in the next major release. + +## Required Actions + +- **Service identities / CI pipelines** – Update token, revocation, and introspection calls to target the canonical `/token`, `/revoke`, and `/introspect` endpoints. Regenerate OpenAPI clients if they relied on the deprecated paths. +- **Gateway / proxy rules** – Remove explicit rewrites that target `/oauth/*` so traffic flows directly to the canonical paths. +- **Custom SDKs** – Regenerate against the refreshed Authority OpenAPI spec (`/.well-known/openapi`) which marks legacy operations as `deprecated: true`. +- **Monitoring** – Alert on the `authority.api.legacy_endpoint` audit event or the `299` Warning header to verify migrations are complete. + +## Timeline & Support + +| Date | Milestone | +|------|-----------| +| 1 Nov 2025 | Deprecation headers emitted, documentation published | +| Jan–Apr 2026 | Observability dashboards highlight remaining usage; support assists with migrations | +| 1 May 2026 | Legacy routes return HTTP 410 and will be removed in the next major release | + +Questions? Contact the **Authority Core** guild or open a ticket with the **API Governance Guild** referencing AUTH-OAS-63-001. 
diff --git a/docs/benchmarks/scanner-rust-analyzer.md b/docs/benchmarks/scanner-rust-analyzer.md new file mode 100644 index 00000000..e23c783e --- /dev/null +++ b/docs/benchmarks/scanner-rust-analyzer.md @@ -0,0 +1,47 @@ +# Scanner Rust Analyzer Benchmarks (Sprint 130) + +## Summary + +- New fixtures under `src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/` capture heuristic and fallback behaviours. +- `RustLanguageAnalyzerBenchmark` (see `src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks`) exercises the new fixtures and emits heuristic coverage metrics. +- March 2025 snapshot: heuristic detection covers 3 crates (`reqwest`, `serde`, `tokio`) vs competitor baseline (1 crate), yielding 300% relative coverage. + +## Running the benchmarks + +```bash +# Build once in Release for accurate metrics +DOTNET_CLI_UI_LANGUAGE=en dotnet build src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks.csproj -c Release + +# Dump heuristic output +DOTNET_CLI_UI_LANGUAGE=en dotnet run --no-build --project src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks.csproj -- --dump-heuristics + +# Dump fallback binary output (bin provenance) +DOTNET_CLI_UI_LANGUAGE=en dotnet run --no-build --project src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks.csproj -- --dump-fallback + +# Run benchmarks (optional) +DOTNET_CLI_UI_LANGUAGE=en dotnet run --project src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks.csproj -c Release +``` + +> BenchmarkDotNet emits warnings when running against Debug builds. Use `-c Release` or set `config.WithOptions(ConfigOptions.DisableOptimizationsValidator)` if you need debugging traces. 
+
+## Heuristic coverage comparison
+
+| Fixture | Our detector | Competitor baseline | Relative gain |
+|---------|--------------|---------------------|---------------|
+| `lang/rust/heuristics` | `reqwest`, `serde`, `tokio` | `serde` | **+200%** (3/1) |
+
+Competitor baseline data lives in `competitor-baseline.json` alongside the heuristics fixture and is used by `RustHeuristicCoverageComparisonTests`.
+
+## Fallback binary snapshot
+
+```
+componentKey: bin::sha256:10f3c03766e4403be40add0467a2b2d07fd7006e4b8515ab88740ffa327ea775
+metadata:
+  binary.path -> usr/local/bin/opaque_bin
+  binary.sha256 -> 10f3c03766e4403be40add0467a2b2d07fd7006e4b8515ab88740ffa327ea775
+  provenance -> binary
+evidence:
+  source=binary locator=usr/local/bin/opaque_bin sha256=10f3c03766e4403be40add0467a2b2d07fd7006e4b8515ab88740ffa327ea775
+```
+
+This snapshot is persisted in `expected.json` for repeatable regression tests.
diff --git a/docs/dev/32_AUTH_CLIENT_GUIDE.md b/docs/dev/32_AUTH_CLIENT_GUIDE.md
index 96dde2bd..7b3554ca 100644
--- a/docs/dev/32_AUTH_CLIENT_GUIDE.md
+++ b/docs/dev/32_AUTH_CLIENT_GUIDE.md
@@ -81,11 +81,38 @@ CLI and Concelier teams should expose these knobs once they adopt the auth clien
 3. **Observability:** watch for `StellaOps.Auth.Client.HttpRetry` warnings in your logs. Excessive retries mean the upstream Authority cluster needs attention.
 4. **Determinism:** keep retry delays deterministic. Avoid random jitter—operators can introduce jitter at the infrastructure layer if desired.
-## 5. Rollout checklist
-
-- [ ] Update consuming service/CLI configuration schema to include the new settings.
-- [ ] Document recommended defaults for offline (air-gapped) versus connected deployments.
-- [ ] Extend smoke tests to cover Authority outage scenarios.
-- [ ] Coordinate with Docs Guild so user-facing quickstarts reference the new knobs.
-
-Once Concelier and CLI integrate these changes, we can mark LIB5 **DONE**; further packaging work is deferred until the backlog reintroduces it.
+## 5. Rollout checklist
+
+- [ ] Update consuming service/CLI configuration schema to include the new settings.
+- [ ] Document recommended defaults for offline (air-gapped) versus connected deployments.
+- [ ] Extend smoke tests to cover Authority outage scenarios.
+- [ ] Coordinate with Docs Guild so user-facing quickstarts reference the new knobs.
+
+Once Concelier and CLI integrate these changes, we can mark LIB5 **DONE**; further packaging work is deferred until the backlog reintroduces it.
+
+## 6. Authenticating downstream API clients
+
+`StellaOps.Auth.Client` now ships a DI helper for wiring authenticated `HttpClient` instances:
+
+```csharp
+services.AddHttpClient("notify", client =>
+    {
+        client.BaseAddress = new Uri(configuration["StellaOps:Notify:BaseUrl"]!);
+    })
+    .AddStellaOpsApiAuthentication(options =>
+    {
+        options.Mode = StellaOpsApiAuthMode.ClientCredentials;
+        options.Scope = "notify.read notify.admin";
+        options.Tenant = configuration["StellaOps:Tenant"]!;
+        // To use a PAT instead, set options.Mode = StellaOpsApiAuthMode.PersonalAccessToken
+        // and supply options.PersonalAccessToken = configuration["StellaOps:Notify:Pat"].
+    });
+```
+
+The handler automatically:
+
+- Requests OAuth access tokens (password or client credentials) via `IStellaOpsTokenClient`, or attaches a pre-issued personal access token.
+- Refreshes tokens ahead of expiry using the larger of the handler refresh buffer (`options.RefreshBuffer`) and `StellaOpsAuthClientOptions.ExpirationSkew`.
+- Injects the tenancy header (`X-StellaOps-Tenant` by default) when `options.Tenant` is supplied; the header name is configurable via `options.TenantHeader`.
+
+This keeps downstream API calls consistent with the platform’s multi-tenant requirements while avoiding handwritten plumbing in each service.
diff --git a/docs/implplan/SPRINTS.md b/docs/implplan/SPRINTS.md index e2a3947b..4fd84454 100644 --- a/docs/implplan/SPRINTS.md +++ b/docs/implplan/SPRINTS.md @@ -13,3 +13,46 @@ Follow the sprint files below in order. Update task status in both `SPRINTS` and - [Experience & SDKs](./SPRINT_180_experience_sdks.md) - [Ops & Offline](./SPRINT_190_ops_offline.md) - [Documentation & Process](./SPRINT_200_documentation_process.md) + +> 2025-11-01: SCANNER-ANALYZERS-LANG-10-308R marked DONE (Language Analyzer Guild) – heuristics fixtures, benchmarks, and coverage comparison published. +> 2025-11-01: SCANNER-ANALYZERS-LANG-10-309R marked DONE (Language Analyzer Guild) – Rust analyzer packaged with offline kit smoke tests and docs. +> 2025-11-01: ENTRYTRACE-SURFACE-01 moved to DOING (EntryTrace Guild) – wiring Surface.Validation and Surface.FS reuse ahead of EntryTrace runs. +> 2025-11-01: AUTH-OBS-50-001 (Sprint 50 – Observability & Forensics) moved to DOING (Authority Core & Security Guild). +> 2025-11-01: AUTH-PACKS-41-001 moved to DOING (Authority Core & Security Guild) – add Packs.* scopes to Authority. +> 2025-11-01: AUTH-OBS-55-001 (Sprint 55 – Observability & Forensics) moved to DOING (Authority Core & Security Guild, Ops Guild). +> 2025-11-01: TASKRUN-41-001 moved to DOING (Task Runner Guild) – request packs.* scopes when calling Authority. +> 2025-11-01: PACKS-REG-41-001 moved to DOING (Packs Registry Guild) – enforce packs.* scopes for registry publish/run flows. +> 2025-11-01: ATTEST-VERIFY-74-001 re-opened and set to DOING to unblock build/test regressions (Verification Guild, Observability Guild). +> 2025-11-01: ATTEST-VERIFY-74-001 marked DONE after configuration and test fixes (Verification Guild, Observability Guild). +> 2025-11-01: AUTH-AIAI-31-001 marked DONE (Authority Core & Security Guild) – Advisory AI scopes published and remote inference toggles documented. 
+> 2025-11-01: AUTH-AIRGAP-56-001 moved to DOING (Authority Core & Security Guild) – add airgap scope catalogue and defaults. +> 2025-11-01: AUTH-AIRGAP-56-002 moved to DOING (Authority Core & Security Guild) – implement airgap audit endpoint and logging. +> 2025-11-01: ISSUER-30-001 marked DONE (Issuer Directory Guild) – Issuer Directory service scaffolded with CRUD APIs, audit sink, CSAF seed import, and unit tests. +> 2025-11-01: ISSUER-30-002 marked DONE (Issuer Directory Guild, Security Guild) – Key management domain, Mongo persistence, CRUD/rotate/revoke endpoints, validation, and tests delivered. +> 2025-11-01: ISSUER-30-004 marked DONE (Issuer Directory Guild, VEX Lens Guild) – Excititor worker consumes issuer directory client for key/trust lookup with cached offline support. +> 2025-11-01: ISSUER-30-005 marked DONE (Issuer Directory Guild, Observability Guild) – Issuer Directory service emits structured logs + metrics for issuer/key flows with OTEL meter. +> 2025-11-02: SURFACE-ENV-01 moved to DOING (Surface Env Guild) – drafting shared environment spec for Scanner/Zastava. +> 2025-11-02: SURFACE-ENV-02 moved to DOING (Surface Env Guild) – implementing typed environment resolver and unit tests. +> 2025-11-02: SURFACE-VAL-01 moved to DOING (Surface Validation Guild) – aligning design document with implementation plan. +> 2025-11-02: SURFACE-FS-01 moved to DOING (Surface FS Guild) – finalising cache layout and manifest spec. +> 2025-11-02: SURFACE-FS-02 moved to DOING (Surface FS Guild) – building core abstractions and deterministic serializers. +> 2025-11-02: SURFACE-SECRETS-01 moved to DOING (Surface Secrets Guild) – updating secrets design for provider matrix. +> 2025-11-02: SURFACE-SECRETS-02 moved to DOING (Surface Secrets Guild) – implementing base providers + tests. +> 2025-11-02: SCANNER-ENTRYTRACE-18-506 moved to DOING (EntryTrace Guild, Scanner WebService Guild) – surfacing EntryTrace results via WebService/CLI with confidence metadata. 
+> 2025-11-02: ATTESTOR-74-001 marked DONE (Attestor Service Guild) – witness client integration, repository schema, and verification/reporting updates landed with tests. +> 2025-11-02: AUTH-OAS-63-001 moved to DOING (Authority Core & Security Guild, API Governance Guild) – verifying legacy `/oauth/*` deprecation signalling and notifications ahead of sunset. +> 2025-11-02: AUTH-OAS-63-001 marked DONE (Authority Core & Security Guild, API Governance Guild) – legacy shims emit Deprecation/Sunset/Warning headers, audit event coverage validated, and migration guide published. +> 2025-11-02: AUTH-NOTIFY-40-001 marked DONE (Authority Core & Security Guild) – `/notify/ack-tokens/rotate` (notify.admin) now rotates DSSE keys with audit trails and integration tests. +> 2025-11-02: AUTH-OAS-62-001 moved to DOING (Authority Core & Security Guild, SDK Generator Guild) – wiring SDK helpers for OAuth2/PAT flows and tenancy override header. +> 2025-11-02: AUTH-OAS-62-001 marked DONE (Authority Core & Security Guild, SDK Generator Guild) – HttpClient auth helper (OAuth2/PAT) shipped with tenant header support and unit tests. +> 2025-11-02: AUTH-OBS-50-001 moved to DOING (Authority Core & Security Guild) – defining observability scopes and updating discovery/offline defaults. +> 2025-11-02: AUTH-OBS-52-001 moved to DOING (Authority Core & Security Guild) – rolling observability scopes through resource server policies and audit wiring. +> 2025-11-02: AUTH-OBS-55-001 marked DONE (Authority Core & Security Guild, Ops Guild) – incident-mode tokens now require fresh auth, audit records expose `incident.reason`, and `/authority/audit/incident` verification path documented. +> 2025-11-02: ENTRYTRACE-SURFACE-02 moved to DOING (EntryTrace Guild) – replacing direct env/secret access with Surface.Secrets provider for EntryTrace runs. +> 2025-11-02: ENTRYTRACE-SURFACE-01 marked DONE (EntryTrace Guild) – Surface.Validation + Surface.FS cache now drive EntryTrace reuse with regression tests. 
+> 2025-11-02: ENTRYTRACE-SURFACE-02 marked DONE (EntryTrace Guild) – EntryTrace environment placeholders resolved via Surface.Secrets with updated docs/tests. +> 2025-11-02: SCANNER-ENTRYTRACE-18-506 marked DONE (EntryTrace Guild, Scanner WebService Guild) – EntryTrace graph surfaced via WebService and CLI with confidence metadata. +> 2025-11-02: SCANNER-ENTRYTRACE-18-509 moved to DOING (EntryTrace Guild, QA Guild) – adding regression coverage for EntryTrace surfaces and NDJSON hashing. +> 2025-11-02: SCANNER-ENTRYTRACE-18-509 marked DONE (EntryTrace Guild, QA Guild) – regression coverage landed for result store/WebService/CLI with NDJSON hashing snapshot. +> 2025-11-02: CONCELIER-WEB-OAS-61-001 moved to DOING (Concelier WebService Guild) – implementing discovery endpoint for `.well-known/openapi` with version metadata and ETag. +> 2025-11-02: CONCELIER-WEB-OAS-61-001 marked DONE (Concelier WebService Guild) – discovery endpoint now serves signed OpenAPI 3.1 document with ETag support. diff --git a/docs/implplan/SPRINTS_PRIOR_20251031.md b/docs/implplan/SPRINTS_PRIOR_20251031.md index eb86f30e..63673ae4 100644 --- a/docs/implplan/SPRINTS_PRIOR_20251031.md +++ b/docs/implplan/SPRINTS_PRIOR_20251031.md @@ -712,7 +712,7 @@ This file describe implementation of Stella Ops (docs/README.md). Implementation | Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | docs/TASKS.md | TODO | Docs Guild | DOCS-SEC-OBS-50-001 | Update `/docs/security/redaction-and-privacy.md` for telemetry privacy controls. | | Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | ops/devops/TASKS.md | DOING (2025-10-26) | DevOps Guild | DEVOPS-OBS-50-002 | Stand up multi-tenant metrics/logs/traces backends with retention and isolation. | > Staging rollout plan recorded in `docs/modules/telemetry/operations/storage.md`; waiting on Authority-issued tokens and namespace bootstrap. 
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OBS-50-001 | Introduce observability/timeline/evidence/attestation scopes and update discovery metadata. | +| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Authority/StellaOps.Authority/TASKS.md | DOING (2025-11-01) | Authority Core & Security Guild | AUTH-OBS-50-001 | Introduce observability/timeline/evidence/attestation scopes and update discovery metadata. | | Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-OBS-50-001 | Propagate trace headers from CLI commands and print correlation IDs. | | Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OBS-50-001 | Replace ad-hoc logging with telemetry core across advisory ingestion/linking. | | Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-50-001 | Adopt telemetry core in Concelier APIs and surface correlation IDs. | @@ -797,7 +797,7 @@ This file describe implementation of Stella Ops (docs/README.md). Implementation | Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OBS-54-001 | Generate pack run attestations and link to timeline/evidence. | | Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | docs/TASKS.md | TODO | Docs Guild | DOCS-RUNBOOK-55-001 | Publish `/docs/runbooks/incidents.md` covering activation, escalation, and verification checklist. 
| | Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-OBS-55-001 | Automate incident mode activation via SLO alerts, retention override management, and reset job. | -| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OBS-55-001 | Enforce `obs:incident` scope with fresh-auth requirement and audit export for toggles. | +| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Authority/StellaOps.Authority/TASKS.md | DOING (2025-11-01) | Authority Core & Security Guild | AUTH-OBS-55-001 | Enforce `obs:incident` scope with fresh-auth requirement and audit export for toggles. | | Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-OBS-55-001 | Ship `stella obs incident-mode` commands with safeguards and audit logging. | | Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OBS-55-001 | Increase sampling and raw payload retention under incident mode with redaction guards. | | Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-55-001 | Provide incident mode toggle endpoints and propagate to services. | @@ -972,7 +972,7 @@ This file describe implementation of Stella Ops (docs/README.md). Implementation | Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/Sdk/StellaOps.Sdk.Release/TASKS.md | TODO | SDK Release Guild | SDKREL-64-002 | Produce devportal offline bundle. | | Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-DEVPORT-64-001 | (Carry) ensure offline doc published; update as necessary. 
| | Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Api/StellaOps.Api.Governance/TASKS.md | TODO | API Governance Guild | APIGOV-63-001 | (Carry) compatibility gating monitoring. | -| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OAS-63-001 | Deprecation headers for auth endpoints. | +| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-11-01) | Authority Core & Security Guild | AUTH-OAS-63-001 | Deprecation headers for auth endpoints. | | Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-SDK-64-001 | SDK update awareness command. | | Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OAS-63-001 | Deprecation metadata for Concelier APIs. | | Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OAS-63-001 | Deprecation metadata for VEX APIs. | @@ -1055,19 +1055,19 @@ This file describe implementation of Stella Ops (docs/README.md). Implementation | Sprint 72 | Attestor Console Phase 1 – Foundations | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ATTEST-73-001 | (Prep) align CI secrets for Attestor service. | | Sprint 72 | Attestor Console Phase 1 – Foundations | src/Attestor/StellaOps.Attestor.Envelope/TASKS.md | TODO | Envelope Guild | ATTEST-ENVELOPE-72-001 | Implement DSSE canonicalization and hashing helpers. | | Sprint 72 | Attestor Console Phase 1 – Foundations | src/Attestor/StellaOps.Attestor.Envelope/TASKS.md | TODO | Envelope Guild | ATTEST-ENVELOPE-72-002 | Support compact/expanded output and detached payloads. 
| -| Sprint 72 | Attestor Console Phase 1 – Foundations | src/Attestor/StellaOps.Attestor.Types/TASKS.md | TODO | Attestation Payloads Guild | ATTEST-TYPES-72-001 | Draft schemas for all attestation payload types. | -| Sprint 72 | Attestor Console Phase 1 – Foundations | src/Attestor/StellaOps.Attestor.Types/TASKS.md | TODO | Attestation Payloads Guild | ATTEST-TYPES-72-002 | Generate models/validators from schemas. | +| Sprint 72 | Attestor Console Phase 1 – Foundations | src/Attestor/StellaOps.Attestor.Types/TASKS.md | DONE | Attestation Payloads Guild | ATTEST-TYPES-72-001 | Draft schemas for all attestation payload types. | +| Sprint 72 | Attestor Console Phase 1 – Foundations | src/Attestor/StellaOps.Attestor.Types/TASKS.md | DONE | Attestation Payloads Guild | ATTEST-TYPES-72-002 | Generate models/validators from schemas. | | Sprint 72 | Attestor Console Phase 1 – Foundations | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-72-001 | Scaffold attestor service skeleton. | | Sprint 72 | Attestor Console Phase 1 – Foundations | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-72-002 | Implement attestation store + storage integration. | | Sprint 72 | Attestor Console Phase 1 – Foundations | src/__Libraries/StellaOps.Cryptography.Kms/TASKS.md | DONE | KMS Guild | KMS-72-001 | Implement KMS interface + file driver. | | Sprint 73 | Attestor CLI Phase 2 – Signing & Policies | src/Cli/StellaOps.Cli/TASKS.md | TODO | CLI Attestor Guild | CLI-ATTEST-73-001 | Implement `stella attest sign` (payload selection, subject digest, key reference, output format) using official SDK transport. | | Sprint 73 | Attestor CLI Phase 2 – Signing & Policies | src/Cli/StellaOps.Cli/TASKS.md | TODO | CLI Attestor Guild | CLI-ATTEST-73-002 | Implement `stella attest verify` with policy selection, explainability output, and JSON/table formatting. 
| | Sprint 73 | Attestor Console Phase 2 – Signing & Policies | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-73-001 | Publish attestor overview. | -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-73-002 | Publish payload docs. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | docs/TASKS.md | DONE | Docs Guild | DOCS-ATTEST-73-002 | Publish payload docs. | | Sprint 73 | Attestor Console Phase 2 – Signing & Policies | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-73-003 | Publish policies doc. | | Sprint 73 | Attestor Console Phase 2 – Signing & Policies | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-73-004 | Publish workflows doc. | | Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/Attestor/StellaOps.Attestor.Envelope/TASKS.md | TODO | Envelope Guild | ATTEST-ENVELOPE-73-001 | Add signing/verification helpers with KMS integration. | -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/Attestor/StellaOps.Attestor.Types/TASKS.md | TODO | Attestation Payloads Guild | ATTEST-TYPES-73-001 | Create golden payload fixtures. | -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-73-001 | Ship signing endpoint. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/Attestor/StellaOps.Attestor.Types/TASKS.md | DONE | Attestation Payloads Guild | ATTEST-TYPES-73-001 | Create golden payload fixtures. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/Attestor/StellaOps.Attestor/TASKS.md | DOING | Attestor Service Guild | ATTESTOR-73-001 | Ship signing endpoint. | | Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-73-002 | Ship verification pipeline and reports. 
| | Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-73-003 | Implement list/fetch APIs. | | Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/__Libraries/StellaOps.Cryptography.Kms/TASKS.md | DONE (2025-10-30) | KMS Guild | KMS-72-002 | CLI support for key import/export. | @@ -1083,7 +1083,7 @@ This file describe implementation of Stella Ops (docs/README.md). Implementation | Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/Attestor/StellaOps.Attestor.Envelope/TASKS.md | TODO | Envelope Guild | ATTEST-ENVELOPE-73-002 | Run fuzz tests for envelope handling. | | Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/Attestor/StellaOps.Attestor.Verify/TASKS.md | TODO | Verification Guild | ATTEST-VERIFY-74-001 | Add telemetry for verification pipeline. | | Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/Attestor/StellaOps.Attestor.Verify/TASKS.md | TODO | Verification Guild | ATTEST-VERIFY-74-002 | Document verification explainability. | -| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-74-001 | Integrate transparency witness client. | +| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/Attestor/StellaOps.Attestor/TASKS.md | DOING | Attestor Service Guild | ATTESTOR-74-001 | Integrate transparency witness client. | | Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-74-002 | Implement bulk verification worker. 
| | Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md | TODO | Attestation Bundle Guild | EXPORT-ATTEST-74-001 | Build attestation bundle export job. | | Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-ATTEST-74-001 | Add verification/key notifications. | @@ -1094,6 +1094,6 @@ This file describe implementation of Stella Ops (docs/README.md). Implementation | Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ATTEST-74-002 | Integrate bundle builds into release/offline pipelines. | | Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ATTEST-75-001 | Dashboards/alerts for attestor metrics. | | Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-75-001 | Support attestation bundle export/import for air gap. | -| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-75-002 | Harden APIs (rate limits, fuzz tests, threat model actions). | +| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | src/Attestor/StellaOps.Attestor/TASKS.md | DONE | Attestor Service Guild | ATTESTOR-75-002 | Harden APIs (rate limits, fuzz tests, threat model actions). | | Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md | TODO | Attestation Bundle Guild | EXPORT-ATTEST-75-001 | CLI bundle verify/import. | | Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md | TODO | Attestation Bundle Guild | EXPORT-ATTEST-75-002 | Document attestor airgap workflow. 
| diff --git a/docs/implplan/SPRINT_100_identity_signing.md b/docs/implplan/SPRINT_100_identity_signing.md index feff1c9d..43c93b0e 100644 --- a/docs/implplan/SPRINT_100_identity_signing.md +++ b/docs/implplan/SPRINT_100_identity_signing.md @@ -4,21 +4,21 @@ Summary: Identity & Signing focus on Attestor (phase I). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -ATTEST-ENVELOPE-72-001 | TODO | Implement DSSE canonicalization, JSON normalization, multi-signature structures, and hashing helpers. | Envelope Guild (src/Attestor/StellaOps.Attestor.Envelope/TASKS.md) -ATTEST-ENVELOPE-72-002 | TODO | Support compact and expanded JSON output, payload compression, and detached payload references. | Envelope Guild (src/Attestor/StellaOps.Attestor.Envelope/TASKS.md) -ATTEST-ENVELOPE-73-001 | TODO | Implement Ed25519 & ECDSA signature create/verify helpers, key identification (`keyid`) scheme, and error mapping. | Envelope Guild, KMS Guild (src/Attestor/StellaOps.Attestor.Envelope/TASKS.md) -ATTEST-ENVELOPE-73-002 | TODO | Add fuzz tests for envelope parsing, signature verification, and canonical JSON round-trips. | Envelope Guild (src/Attestor/StellaOps.Attestor.Envelope/TASKS.md) -ATTEST-TYPES-72-001 | TODO | Draft JSON Schemas for BuildProvenance v1, SBOMAttestation v1, VEXAttestation v1, ScanResults v1, PolicyEvaluation v1, RiskProfileEvidence v1, CustomEvidence v1. | Attestation Payloads Guild (src/Attestor/StellaOps.Attestor.Types/TASKS.md) -ATTEST-TYPES-72-002 | TODO | Generate Go/TS models from schemas with validation helpers and canonical JSON serialization. | Attestation Payloads Guild (src/Attestor/StellaOps.Attestor.Types/TASKS.md) -ATTEST-TYPES-73-001 | TODO | Create golden payload samples for each type; integrate into tests and documentation. 
| Attestation Payloads Guild (src/Attestor/StellaOps.Attestor.Types/TASKS.md) -ATTEST-TYPES-73-002 | TODO | Publish schema reference docs (`/docs/modules/attestor/payloads.md`) with annotated JSON examples. | Attestation Payloads Guild, Docs Guild (src/Attestor/StellaOps.Attestor.Types/TASKS.md) -ATTEST-VERIFY-73-001 | TODO | Implement verification engine: policy evaluation, issuer trust resolution, freshness, signature count, transparency checks; produce structured reports. | Verification Guild, Policy Guild (src/Attestor/StellaOps.Attestor.Verify/TASKS.md) -ATTEST-VERIFY-73-002 | TODO | Add caching layer keyed by `(subject, envelope_id, policy_version)` with TTL and invalidation on new evidence. | Verification Guild (src/Attestor/StellaOps.Attestor.Verify/TASKS.md) -ATTEST-VERIFY-74-001 | TODO | Emit telemetry (spans/metrics) tagged by subject, issuer, policy, result; integrate with dashboards. | Verification Guild, Observability Guild (src/Attestor/StellaOps.Attestor.Verify/TASKS.md) -ATTEST-VERIFY-74-002 | TODO | Document verification report schema and explainability in `/docs/modules/attestor/workflows.md`. | Verification Guild, Docs Guild (src/Attestor/StellaOps.Attestor.Verify/TASKS.md) -ATTESTOR-72-001 | TODO | Scaffold service (REST API skeleton, storage interfaces, KMS integration stubs) and DSSE validation pipeline. | Attestor Service Guild (src/Attestor/StellaOps.Attestor/TASKS.md) -ATTESTOR-72-002 | TODO | Implement attestation store (DB tables, object storage integration), CRUD, and indexing strategies. | Attestor Service Guild (src/Attestor/StellaOps.Attestor/TASKS.md) -ATTESTOR-73-001 | TODO | Implement signing endpoint with Ed25519/ECDSA support, KMS integration, and audit logging. | Attestor Service Guild, KMS Guild (src/Attestor/StellaOps.Attestor/TASKS.md) +ATTEST-ENVELOPE-72-001 | DONE (2025-11-01) | Implement DSSE canonicalization, JSON normalization, multi-signature structures, and hashing helpers. 
| Envelope Guild (src/Attestor/StellaOps.Attestor.Envelope/TASKS.md) +ATTEST-ENVELOPE-72-002 | DONE (2025-11-01) | Support compact and expanded JSON output, payload compression, and detached payload references. Dependencies: ATTEST-ENVELOPE-72-001. | Envelope Guild (src/Attestor/StellaOps.Attestor.Envelope/TASKS.md) +ATTEST-ENVELOPE-73-001 | DONE | Implement Ed25519 & ECDSA signature create/verify helpers, key identification (`keyid`) scheme, and error mapping. Dependencies: ATTEST-ENVELOPE-72-002. | Envelope Guild, KMS Guild (src/Attestor/StellaOps.Attestor.Envelope/TASKS.md) +ATTEST-ENVELOPE-73-002 | DONE | Add fuzz tests for envelope parsing, signature verification, and canonical JSON round-trips. Dependencies: ATTEST-ENVELOPE-73-001. | Envelope Guild (src/Attestor/StellaOps.Attestor.Envelope/TASKS.md) +ATTEST-TYPES-72-001 | DONE | Draft JSON Schemas for BuildProvenance v1, SBOMAttestation v1, VEXAttestation v1, ScanResults v1, PolicyEvaluation v1, RiskProfileEvidence v1, CustomEvidence v1. | Attestation Payloads Guild (src/Attestor/StellaOps.Attestor.Types/TASKS.md) +ATTEST-TYPES-72-002 | DONE | Generate Go/TS models from schemas with validation helpers and canonical JSON serialization. Dependencies: ATTEST-TYPES-72-001. | Attestation Payloads Guild (src/Attestor/StellaOps.Attestor.Types/TASKS.md) +ATTEST-TYPES-73-001 | DONE | Create golden payload samples for each type; integrate into tests and documentation. Dependencies: ATTEST-TYPES-72-002. | Attestation Payloads Guild (src/Attestor/StellaOps.Attestor.Types/TASKS.md) +ATTEST-TYPES-73-002 | DONE | Publish schema reference docs (`/docs/modules/attestor/payloads.md`) with annotated JSON examples. Dependencies: ATTEST-TYPES-73-001. | Attestation Payloads Guild, Docs Guild (src/Attestor/StellaOps.Attestor.Types/TASKS.md) +ATTEST-VERIFY-73-001 | DONE | Implement verification engine: policy evaluation, issuer trust resolution, freshness, signature count, transparency checks; produce structured reports. 
| Verification Guild, Policy Guild (src/Attestor/StellaOps.Attestor.Verify/TASKS.md) +ATTEST-VERIFY-73-002 | DONE | Add caching layer keyed by `(subject, envelope_id, policy_version)` with TTL and invalidation on new evidence. Dependencies: ATTEST-VERIFY-73-001. | Verification Guild (src/Attestor/StellaOps.Attestor.Verify/TASKS.md) +ATTEST-VERIFY-74-001 | DONE | Emit telemetry (spans/metrics) tagged by subject, issuer, policy, result; integrate with dashboards. Dependencies: ATTEST-VERIFY-73-002. | Verification Guild, Observability Guild (src/Attestor/StellaOps.Attestor.Verify/TASKS.md) +ATTEST-VERIFY-74-002 | DONE (2025-11-01) | Document verification report schema and explainability in `/docs/modules/attestor/workflows.md`. Dependencies: ATTEST-VERIFY-74-001. | Verification Guild, Docs Guild (src/Attestor/StellaOps.Attestor.Verify/TASKS.md) +ATTESTOR-72-001 | DONE | Scaffold service (REST API skeleton, storage interfaces, KMS integration stubs) and DSSE validation pipeline. | Attestor Service Guild (src/Attestor/StellaOps.Attestor/TASKS.md) +ATTESTOR-72-002 | DONE | Implement attestation store (DB tables, object storage integration), CRUD, and indexing strategies. Dependencies: ATTESTOR-72-001. | Attestor Service Guild (src/Attestor/StellaOps.Attestor/TASKS.md) +ATTESTOR-73-001 | DONE (2025-11-01) | Implement signing endpoint with Ed25519/ECDSA support, KMS integration, and audit logging. Dependencies: ATTESTOR-72-002. | Attestor Service Guild, KMS Guild (src/Attestor/StellaOps.Attestor/TASKS.md) [Identity & Signing] 100.A) Attestor.II @@ -26,33 +26,44 @@ Depends on: Sprint 100.A - Attestor.I Summary: Identity & Signing focus on Attestor (phase II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -ATTESTOR-73-002 | TODO | Build verification pipeline evaluating DSSE signatures, issuer trust, and verification policies; persist reports. 
| Attestor Service Guild, Policy Guild (src/Attestor/StellaOps.Attestor/TASKS.md) -ATTESTOR-73-003 | TODO | Implement listing/fetch APIs with filters (subject, type, issuer, scope, date). | Attestor Service Guild (src/Attestor/StellaOps.Attestor/TASKS.md) -ATTESTOR-74-001 | TODO | Integrate transparency witness client, inclusion proof verification, and caching. | Attestor Service Guild (src/Attestor/StellaOps.Attestor/TASKS.md) -ATTESTOR-74-002 | TODO | Implement bulk verification worker + API with progress tracking, rate limits, and caching. | Attestor Service Guild (src/Attestor/StellaOps.Attestor/TASKS.md) -ATTESTOR-75-001 | TODO | Add export/import flows for attestation bundles and offline verification mode. | Attestor Service Guild, Export Guild (src/Attestor/StellaOps.Attestor/TASKS.md) -ATTESTOR-75-002 | TODO | Harden APIs with rate limits, auth scopes, threat model mitigations, and fuzz testing. | Attestor Service Guild, Security Guild (src/Attestor/StellaOps.Attestor/TASKS.md) +ATTESTOR-73-002 | DONE (2025-11-01) | Build verification pipeline evaluating DSSE signatures, issuer trust, and verification policies; persist reports. Dependencies: ATTESTOR-73-001. | Attestor Service Guild, Policy Guild (src/Attestor/StellaOps.Attestor/TASKS.md) +ATTESTOR-73-003 | DONE | Implement listing/fetch APIs with filters (subject, type, issuer, scope, date). Dependencies: ATTESTOR-73-002. | Attestor Service Guild (src/Attestor/StellaOps.Attestor/TASKS.md) +ATTESTOR-74-001 | DONE (2025-11-02) | Integrate transparency witness client, inclusion proof verification, and caching. Dependencies: ATTESTOR-73-003. | Attestor Service Guild (src/Attestor/StellaOps.Attestor/TASKS.md) +ATTESTOR-74-002 | DONE | Implement bulk verification worker + API with progress tracking, rate limits, and caching. Dependencies: ATTESTOR-74-001. 
| Attestor Service Guild (src/Attestor/StellaOps.Attestor/TASKS.md) +ATTESTOR-75-001 | DONE | Add export/import flows for attestation bundles and offline verification mode. Dependencies: ATTESTOR-74-002. | Attestor Service Guild, Export Guild (src/Attestor/StellaOps.Attestor/TASKS.md) +ATTESTOR-75-002 | DONE | Harden APIs with rate limits, auth scopes, threat model mitigations, and fuzz testing. Dependencies: ATTESTOR-75-001. | Attestor Service Guild, Security Guild (src/Attestor/StellaOps.Attestor/TASKS.md) + +> 2025-11-01: ATTESTOR-73-002 completed — verification endpoints emit structured reports, cache hits, and telemetry; Attestor verification test suites cover success, failure, and cached paths. Transparency witness integration continues under ATTESTOR-74-001. +> 2025-11-02: ATTESTOR-74-001 completed — witness client wired into proof refresh, repository model stores witness statements, and verification warns on missing endorsements. Tests updated for witness refresh, bundle export/import, and signing stubs. [Identity & Signing] 100.B) Authority.I Summary: Identity & Signing focus on Authority (phase I). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -AUTH-AIAI-31-001 | TODO | Define Advisory AI scopes (`advisory-ai:view`, `advisory-ai:operate`, `advisory-ai:admin`) and remote inference toggles; update discovery metadata/offline defaults. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-AIAI-31-002 | TODO | Enforce anonymized prompt logging, tenant consent for remote inference, and audit logging of assistant tasks. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-AIRGAP-56-001 | TODO | Provision new scopes (`airgap:seal`, `airgap:import`, `airgap:status:read`) in configuration metadata, offline kit defaults, and issuer templates. 
| Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-AIRGAP-56-002 | TODO | Audit import actions with actor, tenant, bundle ID, and trace ID; expose `/authority/audit/airgap` endpoint. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-AIRGAP-57-001 | TODO | Enforce sealed-mode CI gating by refusing token issuance when declared sealed install lacks sealing confirmation. | Authority Core & Security Guild, DevOps Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-NOTIFY-38-001 | TODO | Define `Notify.Viewer`, `Notify.Operator`, `Notify.Admin` scopes/roles, update discovery metadata, offline defaults, and issuer templates. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-NOTIFY-40-001 | TODO | Implement signed ack token key rotation, webhook allowlists, admin-only escalation settings, and audit logging of ack actions. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-OAS-62-001 | TODO | Provide SDK helpers for OAuth2/PAT flows, tenancy override header; add integration tests. | Authority Core & Security Guild, SDK Generator Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-OAS-63-001 | TODO | Emit deprecation headers and notifications for legacy auth endpoints. | Authority Core & Security Guild, API Governance Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-OBS-50-001 | TODO | Introduce scopes `obs:read`, `timeline:read`, `timeline:write`, `evidence:create`, `evidence:read`, `evidence:hold`, `attest:read`, and `obs:incident` (all tenant-scoped). Update discovery metadata, offline defaults, and scope grammar docs. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-OBS-52-001 | TODO | Configure resource server policies for Timeline Indexer, Evidence Locker, Exporter, and Observability APIs enforcing new scopes + tenant claims. Emit audit events including scope usage and trace IDs. 
| Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-OBS-55-001 | TODO | Harden incident mode authorization: require `obs:incident` scope + fresh auth, log activation reason, and expose verification endpoint for auditors. Update docs/runbooks. | Authority Core & Security Guild, Ops Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-ORCH-34-001 | TODO | Introduce `Orch.Admin` role with quota/backfill scopes, enforce audit reason on quota changes, and update offline defaults/docs. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) +AUTH-AIAI-31-001 | DONE (2025-11-01) | Define Advisory AI scopes (`advisory-ai:view`, `advisory-ai:operate`, `advisory-ai:admin`) and remote inference toggles; update discovery metadata/offline defaults. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) +AUTH-AIAI-31-002 | DONE (2025-11-01) | Enforce anonymized prompt logging, tenant consent for remote inference, and audit logging of assistant tasks. Dependencies: AUTH-AIAI-31-001. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) +AUTH-AIRGAP-56-001 | DOING (2025-11-01) | Provision new scopes (`airgap:seal`, `airgap:import`, `airgap:status:read`) in configuration metadata, offline kit defaults, and issuer templates. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) +AUTH-AIRGAP-56-002 | DOING (2025-11-01) | Audit import actions with actor, tenant, bundle ID, and trace ID; expose `/authority/audit/airgap` endpoint. Dependencies: AUTH-AIRGAP-56-001. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) +AUTH-AIRGAP-57-001 | BLOCKED (2025-11-01) | Enforce sealed-mode CI gating by refusing token issuance when declared sealed install lacks sealing confirmation. Dependencies: AUTH-AIRGAP-56-002. 
| Authority Core & Security Guild, DevOps Guild (src/Authority/StellaOps.Authority/TASKS.md) +> 2025-11-01: AUTH-AIRGAP-57-001 blocked pending definition of sealed-confirmation evidence and configuration shape before gating (Authority Core & Security Guild, DevOps Guild). +AUTH-NOTIFY-38-001 | DONE (2025-11-01) | Define `Notify.Viewer`, `Notify.Operator`, `Notify.Admin` scopes/roles, update discovery metadata, offline defaults, and issuer templates. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) +> 2025-11-01: AUTH-NOTIFY-38-001 completed—Notify scope catalog, discovery metadata, docs, configuration samples, and service tests updated for new roles. +AUTH-NOTIFY-40-001 | DONE (2025-11-02) | Implement signed ack token key rotation, webhook allowlists, admin-only escalation settings, and audit logging of ack actions. Dependencies: AUTH-NOTIFY-38-001. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) +> 2025-11-02: `/notify/ack-tokens/rotate` (notify.admin) now rotates DSSE keys with audit coverage and integration tests. Webhook allowlist + escalation scope enforcement verified. +AUTH-OAS-62-001 | DONE (2025-11-02) | Provide SDK helpers for OAuth2/PAT flows, tenancy override header; add integration tests. | Authority Core & Security Guild, SDK Generator Guild (src/Authority/StellaOps.Authority/TASKS.md) +> 2025-11-02: Added HttpClient auth helper (OAuth2 + PAT) with tenant header support, plus coverage in `StellaOps.Auth.Client.Tests`. +AUTH-OAS-63-001 | DONE (2025-11-02) | Emit deprecation headers and notifications for legacy auth endpoints. Dependencies: AUTH-OAS-62-001. 
| Authority Core & Security Guild, API Governance Guild (src/Authority/StellaOps.Authority/TASKS.md) +> 2025-11-02: AUTH-OAS-63-001 marked DONE — legacy `/oauth/*` shims now emit Deprecation/Sunset/Warning headers, audit events (`authority.api.legacy_endpoint`) validated by tests, and migration guide `docs/api/authority-legacy-auth-endpoints.md` published (Authority Core & Security Guild, API Governance Guild). +AUTH-OBS-50-001 | DONE (2025-11-02) | Introduce scopes `obs:read`, `timeline:read`, `timeline:write`, `evidence:create`, `evidence:read`, `evidence:hold`, `attest:read`, and `obs:incident` (all tenant-scoped). Update discovery metadata, offline defaults, and scope grammar docs. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) +> 2025-11-02: Observability scope bundle published in discovery metadata, OpenAPI, docs, and offline configs; issuer templates + roles updated with deterministic scope ordering and tests refreshed. +AUTH-OBS-52-001 | DONE (2025-11-02) | Configure resource server policies for Timeline Indexer, Evidence Locker, Exporter, and Observability APIs enforcing new scopes + tenant claims. Emit audit events including scope usage and trace IDs. Dependencies: AUTH-OBS-50-001. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) +> 2025-11-02: Timeline/Evidence/Export resource servers now register observability policies, enforce tenant claims, and emit enriched authorization audit events; config samples + tests updated. +AUTH-OBS-55-001 | DONE (2025-11-02) | Harden incident mode authorization: require `obs:incident` scope + fresh auth, log activation reason, and expose verification endpoint for auditors. Update docs/runbooks. Dependencies: AUTH-OBS-52-001. 
| Authority Core & Security Guild, Ops Guild (src/Authority/StellaOps.Authority/TASKS.md) +> 2025-11-02: Resource servers now enforce a five-minute fresh-auth window for `obs:incident`, incident reasons are stamped into authorization audits and `/authority/audit/incident`, and sample configs/tests updated to require tenant headers across observability endpoints. +AUTH-ORCH-34-001 | DOING (2025-11-02) | Introduce `Orch.Admin` role with quota/backfill scopes, enforce audit reason on quota changes, and update offline defaults/docs. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) AUTH-PACKS-41-001 | TODO | Define CLI SSO profiles and pack scopes (`Packs.Read`, `Packs.Write`, `Packs.Run`, `Packs.Approve`), update discovery metadata, offline defaults, and issuer templates. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-PACKS-43-001 | BLOCKED (2025-10-27) | Enforce pack signing policies, approval RBAC checks, CLI CI token scopes, and audit logging for approvals. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) +AUTH-PACKS-43-001 | BLOCKED (2025-10-27) | Enforce pack signing policies, approval RBAC checks, CLI CI token scopes, and audit logging for approvals. Dependencies: AUTH-PACKS-41-001. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) [Identity & Signing] 100.B) Authority.II @@ -61,13 +72,13 @@ Summary: Identity & Signing focus on Authority (phase II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- AUTH-POLICY-23-002 | BLOCKED (2025-10-29) | Implement optional two-person rule for activation: require two distinct `policy:activate` approvals when configured; emit audit logs. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-POLICY-23-003 | BLOCKED (2025-10-29) | Update documentation and sample configs for policy roles, approval workflow, and signing requirements. 
| Authority Core & Docs Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-POLICY-27-002 | TODO | Provide attestation signing service bindings (OIDC token exchange, cosign integration) and enforce publish/promote scope checks, fresh-auth requirements, and audit logging. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-POLICY-27-003 | TODO | Update Authority configuration/docs for Policy Studio roles, signing policies, approval workflows, and CLI integration; include compliance checklist. | Authority Core & Docs Guild (src/Authority/StellaOps.Authority/TASKS.md) +AUTH-POLICY-23-003 | BLOCKED (2025-10-29) | Update documentation and sample configs for policy roles, approval workflow, and signing requirements. Dependencies: AUTH-POLICY-23-002. | Authority Core & Docs Guild (src/Authority/StellaOps.Authority/TASKS.md) +AUTH-POLICY-27-002 | TODO | Provide attestation signing service bindings (OIDC token exchange, cosign integration) and enforce publish/promote scope checks, fresh-auth requirements, and audit logging. Dependencies: AUTH-POLICY-23-003. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) +AUTH-POLICY-27-003 | TODO | Update Authority configuration/docs for Policy Studio roles, signing policies, approval workflows, and CLI integration; include compliance checklist. Dependencies: AUTH-POLICY-27-002. | Authority Core & Docs Guild (src/Authority/StellaOps.Authority/TASKS.md) AUTH-TEN-49-001 | TODO | Implement service accounts & delegation tokens (`act` chain), per-tenant quotas, audit stream of auth decisions, and revocation APIs. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) AUTH-VULN-29-001 | TODO | Define Vuln Explorer scopes/roles (`vuln:view`, `vuln:investigate`, `vuln:operate`, `vuln:audit`) with ABAC attributes (env, owner, business_tier) and update discovery metadata/offline kit defaults. 
| Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-VULN-29-002 | TODO | Enforce CSRF/anti-forgery tokens for workflow actions, sign attachment tokens, and record audit logs with ledger event hashes. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) -AUTH-VULN-29-003 | TODO | Update security docs/config samples for Vuln Explorer roles, ABAC policies, attachment signing, and ledger verification guidance. | Authority Core & Docs Guild (src/Authority/StellaOps.Authority/TASKS.md) +AUTH-VULN-29-002 | TODO | Enforce CSRF/anti-forgery tokens for workflow actions, sign attachment tokens, and record audit logs with ledger event hashes. Dependencies: AUTH-VULN-29-001. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md) +AUTH-VULN-29-003 | TODO | Update security docs/config samples for Vuln Explorer roles, ABAC policies, attachment signing, and ledger verification guidance. Dependencies: AUTH-VULN-29-002. | Authority Core & Docs Guild (src/Authority/StellaOps.Authority/TASKS.md) PLG4-6.CAPABILITIES | BLOCKED (2025-10-12) | Finalise capability metadata exposure, config validation, and developer guide updates; remaining action is Docs polish/diagram export. | BE-Auth Plugin, Docs Guild (src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md) PLG6.DIAGRAM | TODO | Export final sequence/component diagrams for the developer guide and add offline-friendly assets under `docs/assets/authority`. | Docs Guild (src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md) PLG7.RFC | REVIEW | Socialize LDAP plugin RFC (`docs/rfcs/authority-plugin-ldap.md`) and capture guild feedback. | BE-Auth Plugin, Security Guild (src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md) @@ -80,12 +91,12 @@ SEC5.PLG | BLOCKED (2025-10-21) | Address plugin-specific mitigations (bootstrap Summary: Identity & Signing focus on IssuerDirectory. 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -ISSUER-30-001 | TODO | Implement issuer CRUD API with RBAC, audit logging, and tenant scoping; seed CSAF publisher metadata. | Issuer Directory Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md) -ISSUER-30-002 | TODO | Implement key management endpoints (add/rotate/revoke keys), enforce expiry, validate formats (Ed25519, X.509, DSSE). | Issuer Directory Guild, Security Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md) -ISSUER-30-003 | TODO | Provide trust weight APIs and tenant overrides with validation (+/- bounds) and audit trails. | Issuer Directory Guild, Policy Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md) -ISSUER-30-004 | TODO | Integrate with VEX Lens and Excitator signature verification (client SDK, caching, retries). | Issuer Directory Guild, VEX Lens Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md) -ISSUER-30-005 | TODO | Instrument metrics/logs (issuer changes, key rotation, verification failures) and dashboards/alerts. | Issuer Directory Guild, Observability Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md) -ISSUER-30-006 | TODO | Provide deployment manifests, backup/restore, secure secret storage, and offline kit instructions. | Issuer Directory Guild, DevOps Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md) +ISSUER-30-001 | DONE (2025-11-01) | Implement issuer CRUD API with RBAC, audit logging, and tenant scoping; seed CSAF publisher metadata. | Issuer Directory Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md) +ISSUER-30-002 | DONE (2025-11-01) | Implement key management endpoints (add/rotate/revoke keys), enforce expiry, validate formats (Ed25519, X.509, DSSE). Dependencies: ISSUER-30-001. 
| Issuer Directory Guild, Security Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md) +ISSUER-30-003 | DOING | Provide trust weight APIs and tenant overrides with validation (+/- bounds) and audit trails. Dependencies: ISSUER-30-002. | Issuer Directory Guild, Policy Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md) +ISSUER-30-004 | DONE (2025-11-01) | Integrate with VEX Lens and Excititor signature verification (client SDK, caching, retries). Dependencies: ISSUER-30-003. | Issuer Directory Guild, VEX Lens Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md) +ISSUER-30-005 | DONE (2025-11-01) | Instrument metrics/logs (issuer changes, key rotation, verification failures) and dashboards/alerts. Dependencies: ISSUER-30-004. | Issuer Directory Guild, Observability Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md) +ISSUER-30-006 | TODO | Provide deployment manifests, backup/restore, secure secret storage, and offline kit instructions. Dependencies: ISSUER-30-005. | Issuer Directory Guild, DevOps Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md) [Identity & Signing] 100.D) __Libraries @@ -93,7 +104,7 @@ Summary: Identity & Signing focus on Libraries. Task ID | State | Task description | Owners (Source) --- | --- | --- | --- KMS-73-001 | TODO | Add cloud KMS driver (e.g., AWS KMS, GCP KMS) with signing and key metadata retrieval. | KMS Guild (src/__Libraries/StellaOps.Cryptography.Kms/TASKS.md) -KMS-73-002 | TODO | Implement PKCS#11/HSM driver plus FIDO2 signing support for high assurance workflows. | KMS Guild (src/__Libraries/StellaOps.Cryptography.Kms/TASKS.md) +KMS-73-002 | TODO | Implement PKCS#11/HSM driver plus FIDO2 signing support for high assurance workflows. Dependencies: KMS-73-001. 
| KMS Guild (src/__Libraries/StellaOps.Cryptography.Kms/TASKS.md) If all tasks are done - read next sprint section - SPRINT_110_ingestion_evidence.md diff --git a/docs/implplan/SPRINT_110_ingestion_evidence.md b/docs/implplan/SPRINT_110_ingestion_evidence.md index 9707a42b..1626025a 100644 --- a/docs/implplan/SPRINT_110_ingestion_evidence.md +++ b/docs/implplan/SPRINT_110_ingestion_evidence.md @@ -6,14 +6,14 @@ Summary: Ingestion & Evidence focus on AdvisoryAI). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- AIAI-31-001 | TODO | Implement structured and vector retrievers for advisories/VEX with paragraph anchors and citation metadata. | Advisory AI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) -AIAI-31-002 | TODO | Build SBOM context retriever (purl version timelines, dependency paths, env flags, blast radius estimator). | Advisory AI Guild, SBOM Service Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) -AIAI-31-003 | TODO | Implement deterministic toolset (version comparators, range checks, dependency analysis, policy lookup) exposed via orchestrator. | Advisory AI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) -AIAI-31-004 | TODO | Build orchestration pipeline for Summary/Conflict/Remediation tasks (prompt templates, tool calls, token budgets, caching). | Advisory AI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) -AIAI-31-005 | TODO | Implement guardrails (redaction, injection defense, output validation, citation enforcement) and fail-safe handling. | Advisory AI Guild, Security Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) -AIAI-31-006 | TODO | Expose REST API endpoints (`/advisory/ai/*`) with RBAC, rate limits, OpenAPI schemas, and batching support. | Advisory AI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) -AIAI-31-007 | TODO | Instrument metrics (`advisory_ai_latency`, `guardrail_blocks`, `validation_failures`, `citation_coverage`), logs, and traces; publish dashboards/alerts. 
| Advisory AI Guild, Observability Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) -AIAI-31-008 | TODO | Package inference on-prem container, remote inference toggle, Helm/Compose manifests, scaling guidance, offline kit instructions. | Advisory AI Guild, DevOps Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) -AIAI-31-009 | TODO | Develop unit/golden/property/perf tests, injection harness, and regression suite; ensure determinism with seeded caches. | Advisory AI Guild, QA Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) +AIAI-31-002 | TODO | Build SBOM context retriever (purl version timelines, dependency paths, env flags, blast radius estimator). Dependencies: AIAI-31-001. | Advisory AI Guild, SBOM Service Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) +AIAI-31-003 | TODO | Implement deterministic toolset (version comparators, range checks, dependency analysis, policy lookup) exposed via orchestrator. Dependencies: AIAI-31-002. | Advisory AI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) +AIAI-31-004 | TODO | Build orchestration pipeline for Summary/Conflict/Remediation tasks (prompt templates, tool calls, token budgets, caching). Dependencies: AIAI-31-003. | Advisory AI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) +AIAI-31-005 | TODO | Implement guardrails (redaction, injection defense, output validation, citation enforcement) and fail-safe handling. Dependencies: AIAI-31-004. | Advisory AI Guild, Security Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) +AIAI-31-006 | TODO | Expose REST API endpoints (`/advisory/ai/*`) with RBAC, rate limits, OpenAPI schemas, and batching support. Dependencies: AIAI-31-005. | Advisory AI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) +AIAI-31-007 | TODO | Instrument metrics (`advisory_ai_latency`, `guardrail_blocks`, `validation_failures`, `citation_coverage`), logs, and traces; publish dashboards/alerts. Dependencies: AIAI-31-006. 
| Advisory AI Guild, Observability Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) +AIAI-31-008 | TODO | Package inference on-prem container, remote inference toggle, Helm/Compose manifests, scaling guidance, offline kit instructions. Dependencies: AIAI-31-007. | Advisory AI Guild, DevOps Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) +AIAI-31-009 | TODO | Develop unit/golden/property/perf tests, injection harness, and regression suite; ensure determinism with seeded caches. Dependencies: AIAI-31-008. | Advisory AI Guild, QA Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md) [Ingestion & Evidence] 110.B) Concelier.I @@ -22,20 +22,20 @@ Summary: Ingestion & Evidence focus on Concelier (phase I). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- CONCELIER-AIAI-31-001 `Paragraph anchors` | TODO | Expose advisory chunk API returning paragraph anchors, section metadata, and token-safe text for Advisory AI retrieval. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-AIAI-31-002 `Structured fields` | TODO | Ensure observation APIs expose upstream workaround/fix/CVSS fields with provenance; add caching for summary queries. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-AIAI-31-003 `Advisory AI telemetry` | TODO | Emit metrics/logs for chunk requests, cache hits, and guardrail blocks triggered by advisory payloads. | Concelier WebService Guild, Observability Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-AIAI-31-002 `Structured fields` | TODO | Ensure observation APIs expose upstream workaround/fix/CVSS fields with provenance; add caching for summary queries. Dependencies: CONCELIER-AIAI-31-001. 
| Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-AIAI-31-003 `Advisory AI telemetry` | TODO | Emit metrics/logs for chunk requests, cache hits, and guardrail blocks triggered by advisory payloads. Dependencies: CONCELIER-AIAI-31-002. | Concelier WebService Guild, Observability Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) CONCELIER-AIRGAP-56-001 `Mirror ingestion adapters` | TODO | Add mirror source adapters reading advisories from imported bundles, preserving source metadata and bundle IDs. Ensure ingestion remains append-only. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-AIRGAP-56-002 `Bundle catalog linking` | TODO | Persist `bundle_id`, `merkle_root`, and time anchor references on observations/linksets for provenance. | Concelier Core Guild, AirGap Importer Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-AIRGAP-57-001 `Sealed-mode source restrictions` | TODO | Enforce sealed-mode egress rules by disallowing non-mirror connectors and surfacing remediation errors. | Concelier Core Guild, AirGap Policy Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-AIRGAP-57-002 `Staleness annotations` | TODO | Compute staleness metadata for advisories per bundle and expose via API for Console/CLI badges. | Concelier Core Guild, AirGap Time Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-AIRGAP-58-001 `Portable advisory evidence` | TODO | Package advisory evidence fragments into portable evidence bundles for cross-domain transfer. | Concelier Core Guild, Evidence Locker Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-AIRGAP-56-002 `Bundle catalog linking` | TODO | Persist `bundle_id`, `merkle_root`, and time anchor references on observations/linksets for provenance. Dependencies: CONCELIER-AIRGAP-56-001. 
| Concelier Core Guild, AirGap Importer Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-AIRGAP-57-001 `Sealed-mode source restrictions` | TODO | Enforce sealed-mode egress rules by disallowing non-mirror connectors and surfacing remediation errors. Dependencies: CONCELIER-AIRGAP-56-002. | Concelier Core Guild, AirGap Policy Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-AIRGAP-57-002 `Staleness annotations` | TODO | Compute staleness metadata for advisories per bundle and expose via API for Console/CLI badges. Dependencies: CONCELIER-AIRGAP-57-001. | Concelier Core Guild, AirGap Time Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-AIRGAP-58-001 `Portable advisory evidence` | TODO | Package advisory evidence fragments into portable evidence bundles for cross-domain transfer. Dependencies: CONCELIER-AIRGAP-57-002. | Concelier Core Guild, Evidence Locker Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) CONCELIER-ATTEST-73-001 `ScanResults attestation inputs` | TODO | Provide observation artifacts and linkset digests needed for ScanResults attestations (raw data + provenance, no merge outputs). | Concelier Core Guild, Attestor Service Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-ATTEST-73-002 `Transparency metadata` | TODO | Ensure Conseiller exposes source digests for transparency proofs and explainability. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-ATTEST-73-002 `Transparency metadata` | TODO | Ensure Concelier exposes source digests for transparency proofs and explainability. Dependencies: CONCELIER-ATTEST-73-001. 
| Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) CONCELIER-CONSOLE-23-001 `Advisory aggregation views` | TODO | Expose `/console/advisories` endpoints returning aggregation groups (per linkset) with source chips, provider-reported severity columns (no local consensus), and provenance metadata for Console list + dashboard cards. Support filters by source, ecosystem, published/modified window, tenant enforcement. | Concelier WebService Guild, BE-Base Platform Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-CONSOLE-23-002 `Dashboard deltas API` | TODO | Provide aggregated advisory delta counts (new, modified, conflicting) for Console dashboard + live status ticker; emit structured events for queue lag metrics. Ensure deterministic counts across repeated queries. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-CONSOLE-23-003 `Search fan-out helpers` | TODO | Deliver fast lookup endpoints for CVE/GHSA/purl search (linksets, observations) returning evidence fragments for Console global search; implement caching + scope guards. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-CONSOLE-23-002 `Dashboard deltas API` | TODO | Provide aggregated advisory delta counts (new, modified, conflicting) for Console dashboard + live status ticker; emit structured events for queue lag metrics. Ensure deterministic counts across repeated queries. Dependencies: CONCELIER-CONSOLE-23-001. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-CONSOLE-23-003 `Search fan-out helpers` | TODO | Deliver fast lookup endpoints for CVE/GHSA/purl search (linksets, observations) returning evidence fragments for Console global search; implement caching + scope guards. Dependencies: CONCELIER-CONSOLE-23-002. 
| Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) CONCELIER-CORE-AOC-19-004 `Remove ingestion normalization` | DOING (2025-10-28) | Strip normalization/dedup/severity logic from ingestion pipelines, delegate derived computations to Policy Engine, and update exporters/tests to consume raw documents only.
2025-10-29 19:05Z: Audit completed for `AdvisoryRawService`/Mongo repo to confirm alias order/dedup removal persists; identified remaining normalization in observation/linkset factory that will be revised to surface raw duplicates for Policy ingestion. Change sketch + regression matrix drafted under `docs/dev/aoc-normalization-removal-notes.md` (pending commit).
2025-10-31 20:45Z: Added raw linkset projection to observations/storage, exposing canonical+raw views, refreshed fixtures/tests, and documented behaviour in models/doc factory.
2025-10-31 21:10Z: Coordinated with Policy Engine (POLICY-ENGINE-20-003) on adoption timeline; backfill + consumer readiness tracked in `docs/dev/raw-linkset-backfill-plan.md`. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-CORE-AOC-19-013 `Authority tenant scope smoke coverage` | TODO | Extend Concelier smoke/e2e fixtures to configure `requiredTenants` and assert cross-tenant rejection with updated Authority tokens. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-CORE-AOC-19-013 `Authority tenant scope smoke coverage` | TODO | Extend Concelier smoke/e2e fixtures to configure `requiredTenants` and assert cross-tenant rejection with updated Authority tokens. Dependencies: CONCELIER-CORE-AOC-19-004. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) [Ingestion & Evidence] 110.B) Concelier.II @@ -44,20 +44,20 @@ Summary: Ingestion & Evidence focus on Concelier (phase II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- CONCELIER-GRAPH-21-001 `SBOM projection enrichment` | BLOCKED (2025-10-27) | Extend SBOM normalization to emit full relationship graph (depends_on/contains/provides), scope tags, entrypoint annotations, and component metadata required by Cartographer. | Concelier Core Guild, Cartographer Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-GRAPH-21-002 `Change events` | BLOCKED (2025-10-27) | Publish change events (new SBOM version, relationship delta) for Cartographer build queue; ensure events include tenant/context metadata. | Concelier Core Guild, Scheduler Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-GRAPH-24-101 `Advisory summary API` | TODO | Expose `/advisories/summary` returning raw linkset/observation metadata for overlay services; no derived severity or fix hints. 
| Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-GRAPH-28-102 `Evidence batch API` | TODO | Add batch fetch for advisory observations/linksets keyed by component sets to feed Graph overlay tooltips efficiently. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-GRAPH-21-002 `Change events` | BLOCKED (2025-10-27) | Publish change events (new SBOM version, relationship delta) for Cartographer build queue; ensure events include tenant/context metadata. Dependencies: CONCELIER-GRAPH-21-001. | Concelier Core Guild, Scheduler Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-GRAPH-24-101 `Advisory summary API` | TODO | Expose `/advisories/summary` returning raw linkset/observation metadata for overlay services; no derived severity or fix hints. Dependencies: CONCELIER-GRAPH-21-002. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-GRAPH-28-102 `Evidence batch API` | TODO | Add batch fetch for advisory observations/linksets keyed by component sets to feed Graph overlay tooltips efficiently. Dependencies: CONCELIER-GRAPH-24-101. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) CONCELIER-LNM-21-001 `Advisory observation schema` | TODO | Introduce immutable `advisory_observations` model with AOC metadata, raw payload pointers, structured per-source fields (version ranges, severity, CVSS), and tenancy guardrails; publish schema definition. `DOCS-LNM-22-001` blocked pending this deliverable. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-LNM-21-002 `Linkset builder` | TODO | Implement correlation pipeline (alias graph, PURL overlap, CVSS vector equality, fuzzy title match) that produces `advisory_linksets` with confidence + conflict annotations. Docs note: unblock `DOCS-LNM-22-001` once builder lands. 
| Concelier Core Guild, Data Science Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-LNM-21-003 `Conflict annotator` | TODO | Detect field disagreements (severity, CVSS, ranges, references) and record structured conflicts on linksets; surface to API/UI. Docs awaiting structured conflict payloads. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-LNM-21-004 `Merge code removal` | TODO | Excise existing merge/dedup logic, enforce immutability on observations, and add guards/tests to prevent future merges. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-LNM-21-005 `Event emission` | TODO | Emit `advisory.linkset.updated` events with delta payloads for downstream Policy Engine/Cartographer consumers; ensure idempotent delivery. | Concelier Core Guild, Platform Events Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-LNM-21-101 `Observations collections` | TODO | Provision `advisory_observations` and `advisory_linksets` collections with hashed shard keys, TTL for ingest metadata, and required indexes (`aliases`, `purls`, `observation_ids`). | Concelier Storage Guild (src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md) -CONCELIER-LNM-21-102 `Migration tooling` | TODO | Backfill legacy merged advisories into observation/linkset collections, create tombstones for merged docs, and supply rollback scripts. | Concelier Storage Guild, DevOps Guild (src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md) -CONCELIER-LNM-21-103 `Blob/store wiring` | TODO | Store large raw payloads in object storage with pointers from observations; update bootstrapper/offline kit to seed sample blobs. 
| Concelier Storage Guild (src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md) -CONCELIER-LNM-21-201 `Observation APIs` | TODO | Add REST endpoints for advisory observations (`GET /advisories/observations`) with filters (alias, purl, source), pagination, and tenancy enforcement. | Concelier WebService Guild, BE-Base Platform Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-LNM-21-202 `Linkset APIs` | TODO | Implement linkset read/export endpoints (`/advisories/linksets/{id}`, `/advisories/by-purl/{purl}`, `/advisories/linksets/{id}/export`, `/evidence`) with correlation/conflict payloads and `ERR_AGG_*` mapping. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-LNM-21-203 `Ingest events` | TODO | Publish NATS/Redis events for new observations/linksets and ensure idempotent consumer contracts; document event schemas. | Concelier WebService Guild, Platform Events Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-LNM-21-002 `Linkset builder` | TODO | Implement correlation pipeline (alias graph, PURL overlap, CVSS vector equality, fuzzy title match) that produces `advisory_linksets` with confidence + conflict annotations. Docs note: unblock `DOCS-LNM-22-001` once builder lands. Dependencies: CONCELIER-LNM-21-001. | Concelier Core Guild, Data Science Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-LNM-21-003 `Conflict annotator` | TODO | Detect field disagreements (severity, CVSS, ranges, references) and record structured conflicts on linksets; surface to API/UI. Docs awaiting structured conflict payloads. Dependencies: CONCELIER-LNM-21-002. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-LNM-21-004 `Merge code removal` | TODO | Excise existing merge/dedup logic, enforce immutability on observations, and add guards/tests to prevent future merges. Dependencies: CONCELIER-LNM-21-003. 
| Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-LNM-21-005 `Event emission` | TODO | Emit `advisory.linkset.updated` events with delta payloads for downstream Policy Engine/Cartographer consumers; ensure idempotent delivery. Dependencies: CONCELIER-LNM-21-004. | Concelier Core Guild, Platform Events Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-LNM-21-101 `Observations collections` | TODO | Provision `advisory_observations` and `advisory_linksets` collections with hashed shard keys, TTL for ingest metadata, and required indexes (`aliases`, `purls`, `observation_ids`). Dependencies: CONCELIER-LNM-21-005. | Concelier Storage Guild (src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md) +CONCELIER-LNM-21-102 `Migration tooling` | TODO | Backfill legacy merged advisories into observation/linkset collections, create tombstones for merged docs, and supply rollback scripts. Dependencies: CONCELIER-LNM-21-101. | Concelier Storage Guild, DevOps Guild (src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md) +CONCELIER-LNM-21-103 `Blob/store wiring` | TODO | Store large raw payloads in object storage with pointers from observations; update bootstrapper/offline kit to seed sample blobs. Dependencies: CONCELIER-LNM-21-102. | Concelier Storage Guild (src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md) +CONCELIER-LNM-21-201 `Observation APIs` | TODO | Add REST endpoints for advisory observations (`GET /advisories/observations`) with filters (alias, purl, source), pagination, and tenancy enforcement. Dependencies: CONCELIER-LNM-21-103. 
| Concelier WebService Guild, BE-Base Platform Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-LNM-21-202 `Linkset APIs` | TODO | Implement linkset read/export endpoints (`/advisories/linksets/{id}`, `/advisories/by-purl/{purl}`, `/advisories/linksets/{id}/export`, `/evidence`) with correlation/conflict payloads and `ERR_AGG_*` mapping. Dependencies: CONCELIER-LNM-21-201. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-LNM-21-203 `Ingest events` | TODO | Publish NATS/Redis events for new observations/linksets and ensure idempotent consumer contracts; document event schemas. Dependencies: CONCELIER-LNM-21-202. | Concelier WebService Guild, Platform Events Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) [Ingestion & Evidence] 110.B) Concelier.III @@ -66,19 +66,19 @@ Summary: Ingestion & Evidence focus on Concelier (phase III). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- CONCELIER-OAS-61-001 `Spec coverage` | TODO | Update Concelier OAS with advisory observation/linkset endpoints, standard pagination, and source provenance fields. | Concelier Core Guild, API Contracts Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-OAS-61-002 `Examples library` | TODO | Provide rich examples for advisories, linksets, conflict annotations used by SDK + docs. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-OAS-62-001 `SDK smoke tests` | TODO | Add SDK tests covering advisory search, pagination, and conflict handling; ensure source metadata surfaced. | Concelier Core Guild, SDK Generator Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-OAS-63-001 `Deprecation headers` | TODO | Implement deprecation header support and timeline events for retiring endpoints. 
| Concelier Core Guild, API Governance Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-OAS-61-002 `Examples library` | TODO | Provide rich examples for advisories, linksets, conflict annotations used by SDK + docs. Dependencies: CONCELIER-OAS-61-001. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-OAS-62-001 `SDK smoke tests` | TODO | Add SDK tests covering advisory search, pagination, and conflict handling; ensure source metadata surfaced. Dependencies: CONCELIER-OAS-61-002. | Concelier Core Guild, SDK Generator Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-OAS-63-001 `Deprecation headers` | TODO | Implement deprecation header support and timeline events for retiring endpoints. Dependencies: CONCELIER-OAS-62-001. | Concelier Core Guild, API Governance Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) CONCELIER-OBS-50-001 `Telemetry adoption` | TODO | Replace ad-hoc logging with telemetry core across ingestion/linking pipelines; ensure spans/logs include tenant, source vendor, upstream id, content hash, and trace IDs. | Concelier Core Guild, Observability Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-OBS-51-001 `Metrics & SLOs` | TODO | Emit metrics for ingest latency (cold/warm), queue depth, aoc violation rate, and publish SLO burn-rate alerts (ingest P95 <30s cold / <5s warm). Ship dashboards + alert configs. | Concelier Core Guild, DevOps Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-OBS-52-001 `Timeline events` | TODO | Emit `timeline_event` records for advisory ingest/normalization/linkset creation with provenance, trace IDs, conflict summaries, and evidence placeholders. 
| Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-OBS-53-001 `Evidence snapshots` | TODO | Produce advisory evaluation bundle payloads (raw doc, linkset, normalization diff) for evidence locker; ensure Merkle manifests seeded with content hashes. | Concelier Core Guild, Evidence Locker Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-OBS-54-001 `Attestation & verification` | TODO | Attach DSSE attestations for advisory processing batches, expose verification API to confirm bundle integrity, and link attestation IDs back to timeline + ledger. | Concelier Core Guild, Provenance Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-OBS-55-001 `Incident mode hooks` | TODO | Increase sampling, capture raw payload snapshots, and extend retention under incident mode; emit activation events + guardrails against PII leak. | Concelier Core Guild, DevOps Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-OBS-51-001 `Metrics & SLOs` | TODO | Emit metrics for ingest latency (cold/warm), queue depth, aoc violation rate, and publish SLO burn-rate alerts (ingest P95 <30s cold / <5s warm). Ship dashboards + alert configs. Dependencies: CONCELIER-OBS-50-001. | Concelier Core Guild, DevOps Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-OBS-52-001 `Timeline events` | TODO | Emit `timeline_event` records for advisory ingest/normalization/linkset creation with provenance, trace IDs, conflict summaries, and evidence placeholders. Dependencies: CONCELIER-OBS-51-001. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-OBS-53-001 `Evidence snapshots` | TODO | Produce advisory evaluation bundle payloads (raw doc, linkset, normalization diff) for evidence locker; ensure Merkle manifests seeded with content hashes. Dependencies: CONCELIER-OBS-52-001. 
| Concelier Core Guild, Evidence Locker Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-OBS-54-001 `Attestation & verification` | TODO | Attach DSSE attestations for advisory processing batches, expose verification API to confirm bundle integrity, and link attestation IDs back to timeline + ledger. Dependencies: CONCELIER-OBS-53-001. | Concelier Core Guild, Provenance Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-OBS-55-001 `Incident mode hooks` | TODO | Increase sampling, capture raw payload snapshots, and extend retention under incident mode; emit activation events + guardrails against PII leak. Dependencies: CONCELIER-OBS-54-001. | Concelier Core Guild, DevOps Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) CONCELIER-ORCH-32-001 `Source registry integration` | TODO | Register Concelier data sources with orchestrator (metadata, schedules, rate policies) and wire provenance IDs/security scopes. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-ORCH-32-002 `Worker SDK adoption` | TODO | Embed orchestrator worker SDK in ingestion loops, emit heartbeats/progress/artifact hashes, and enforce idempotency keys. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-ORCH-33-001 `Control hook compliance` | TODO | Honor orchestrator throttle/pause/retry actions, surface structured error classes, and persist safe checkpoints for resume. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-ORCH-34-001 `Backfill + ledger linkage` | TODO | Execute orchestrator-driven backfills, reuse artifact hashes to avoid duplicates, and link provenance to run ledger exports. 
| Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-ORCH-32-002 `Worker SDK adoption` | TODO | Embed orchestrator worker SDK in ingestion loops, emit heartbeats/progress/artifact hashes, and enforce idempotency keys. Dependencies: CONCELIER-ORCH-32-001. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-ORCH-33-001 `Control hook compliance` | TODO | Honor orchestrator throttle/pause/retry actions, surface structured error classes, and persist safe checkpoints for resume. Dependencies: CONCELIER-ORCH-32-002. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-ORCH-34-001 `Backfill + ledger linkage` | TODO | Execute orchestrator-driven backfills, reuse artifact hashes to avoid duplicates, and link provenance to run ledger exports. Dependencies: CONCELIER-ORCH-33-001. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) CONCELIER-POLICY-20-001 `Policy selection endpoints` | TODO | Add batch advisory lookup APIs (`/policy/select/advisories`, `/policy/select/vex`) optimized for PURL/ID lists with pagination, tenant scoping, and explain metadata. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) @@ -87,21 +87,21 @@ Depends on: Sprint 110.B - Concelier.III Summary: Ingestion & Evidence focus on Concelier (phase IV). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -CONCELIER-POLICY-20-002 `Linkset enrichment for policy` | TODO | Strengthen linkset builders with vendor-specific equivalence tables, NEVRA/PURL normalization, and version range parsing to maximize policy join recall; update fixtures + docs. 
| Concelier Core Guild, Policy Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-POLICY-20-003 `Selection cursors` | TODO | Add advisory/vex selection cursors (per policy run) with change stream checkpoints, indexes, and offline migration scripts to support incremental evaluations. | Concelier Storage Guild (src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md) -CONCELIER-POLICY-23-001 `Evidence indexes` | TODO | Add secondary indexes/materialized views to accelerate policy lookups (alias, provider severity per observation, correlation confidence). Document query contracts for runtime. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-POLICY-23-002 `Event guarantees` | TODO | Ensure `advisory.linkset.updated` emits at-least-once with idempotent keys and include policy-relevant metadata (confidence, conflict summary). | Concelier Core Guild, Platform Events Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-POLICY-20-002 `Linkset enrichment for policy` | TODO | Strengthen linkset builders with vendor-specific equivalence tables, NEVRA/PURL normalization, and version range parsing to maximize policy join recall; update fixtures + docs. Dependencies: CONCELIER-POLICY-20-001. | Concelier Core Guild, Policy Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-POLICY-20-003 `Selection cursors` | TODO | Add advisory/vex selection cursors (per policy run) with change stream checkpoints, indexes, and offline migration scripts to support incremental evaluations. Dependencies: CONCELIER-POLICY-20-002. | Concelier Storage Guild (src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md) +CONCELIER-POLICY-23-001 `Evidence indexes` | TODO | Add secondary indexes/materialized views to accelerate policy lookups (alias, provider severity per observation, correlation confidence). Document query contracts for runtime. 
Dependencies: CONCELIER-POLICY-20-003. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-POLICY-23-002 `Event guarantees` | TODO | Ensure `advisory.linkset.updated` emits at-least-once with idempotent keys and include policy-relevant metadata (confidence, conflict summary). Dependencies: CONCELIER-POLICY-23-001. | Concelier Core Guild, Platform Events Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) CONCELIER-RISK-66-001 `CVSS/KEV providers` | TODO | Expose CVSS, KEV, fix availability data via provider APIs with source metadata preserved. | Concelier Core Guild, Risk Engine Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-RISK-66-002 `Fix availability signals` | TODO | Provide structured fix availability and release metadata consumable by risk engine; document provenance. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-RISK-67-001 `Source coverage metrics` | TODO | Add per-source coverage metrics for linked advisories (observation counts, conflicting statuses) without computing consensus scores; ensure explainability includes source digests. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-RISK-68-001 `Policy Studio integration` | TODO | Surface advisory fields in Policy Studio profile editor (signal pickers, reducers). | Concelier Core Guild, Policy Studio Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) -CONCELIER-RISK-69-001 `Notification hooks` | TODO | Emit events when advisory signals change impacting risk scores (e.g., fix available). | Concelier Core Guild, Notifications Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-RISK-66-002 `Fix availability signals` | TODO | Provide structured fix availability and release metadata consumable by risk engine; document provenance. Dependencies: CONCELIER-RISK-66-001. 
| Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-RISK-67-001 `Source coverage metrics` | TODO | Add per-source coverage metrics for linked advisories (observation counts, conflicting statuses) without computing consensus scores; ensure explainability includes source digests. Dependencies: CONCELIER-RISK-66-002. | Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-RISK-68-001 `Policy Studio integration` | TODO | Surface advisory fields in Policy Studio profile editor (signal pickers, reducers). Dependencies: CONCELIER-RISK-67-001. | Concelier Core Guild, Policy Studio Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) +CONCELIER-RISK-69-001 `Notification hooks` | TODO | Emit events when advisory signals change impacting risk scores (e.g., fix available). Dependencies: CONCELIER-RISK-68-001. | Concelier Core Guild, Notifications Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) CONCELIER-SIG-26-001 `Vulnerable symbol exposure` | TODO | Expose advisory metadata (affected symbols/functions) via API to enrich reachability scoring; update fixtures. | Concelier Core Guild, Signals Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) CONCELIER-STORE-AOC-19-005 `Raw linkset backfill` | TODO (2025-11-04) | Plan and execute advisory_observations `rawLinkset` backfill (online + Offline Kit bundles), supply migration scripts + rehearse rollback. Follow the coordination plan in `docs/dev/raw-linkset-backfill-plan.md`. | Concelier Storage Guild, DevOps Guild (src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md) CONCELIER-TEN-48-001 `Tenant-aware linking` | TODO | Ensure advisory normalization/linking runs per tenant with RLS enforcing isolation; emit capability endpoint reporting `merge=false`; update events with tenant context. 
| Concelier Core Guild (src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md) CONCELIER-VEXLENS-30-001 `Advisory rationale bridges` | TODO | Guarantee advisory key consistency and cross-links for consensus rationale; Label: VEX-Lens. | Concelier WebService Guild, VEX Lens Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) CONCELIER-VULN-29-001 `Advisory key canonicalization` | TODO | Canonicalize (lossless) advisory identifiers (CVE/GHSA/vendor) into `advisory_key`, persist `links[]`, expose raw payload snapshots for Explorer evidence tabs; AOC-compliant: no merge, no derived fields, no suppression. Include migration/backfill scripts. | Concelier WebService Guild, Data Integrity Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-VULN-29-002 `Evidence retrieval API` | TODO | Provide `/vuln/evidence/advisories/{advisory_key}` returning raw advisory docs with provenance, filtering by tenant and source. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-VULN-29-002 `Evidence retrieval API` | TODO | Provide `/vuln/evidence/advisories/{advisory_key}` returning raw advisory docs with provenance, filtering by tenant and source. Dependencies: CONCELIER-VULN-29-001. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) [Ingestion & Evidence] 110.B) Concelier.V @@ -109,21 +109,21 @@ Depends on: Sprint 110.B - Concelier.IV Summary: Ingestion & Evidence focus on Concelier (phase V). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -CONCELIER-VULN-29-004 `Observability enhancements` | TODO | Instrument metrics/logs for observation + linkset pipelines (identifier collisions, withdrawn flags) and emit events consumed by Vuln Explorer resolver. 
| Concelier WebService Guild, Observability Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-VULN-29-004 `Observability enhancements` | TODO | Instrument metrics/logs for observation + linkset pipelines (identifier collisions, withdrawn flags) and emit events consumed by Vuln Explorer resolver. Dependencies: CONCELIER-VULN-29-002. | Concelier WebService Guild, Observability Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) CONCELIER-WEB-AIRGAP-56-001 `Mirror import APIs` | TODO | Extend ingestion endpoints to register mirror bundle sources, expose bundle catalog queries, and block external feed URLs in sealed mode. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-WEB-AIRGAP-56-002 `Airgap status surfaces` | TODO | Add staleness metadata and bundle provenance to advisory APIs (`/advisories/observations`, `/advisories/linksets`). | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-WEB-AIRGAP-57-001 `Error remediation` | TODO | Map sealed-mode violations to `AIRGAP_EGRESS_BLOCKED` responses with user guidance. | Concelier WebService Guild, AirGap Policy Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-WEB-AIRGAP-58-001 `Import timeline emission` | TODO | Emit timeline events for bundle ingestion operations with bundle ID, scope, and actor metadata. | Concelier WebService Guild, AirGap Importer Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-WEB-AIRGAP-56-002 `Airgap status surfaces` | TODO | Add staleness metadata and bundle provenance to advisory APIs (`/advisories/observations`, `/advisories/linksets`). Dependencies: CONCELIER-WEB-AIRGAP-56-001. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-WEB-AIRGAP-57-001 `Error remediation` | TODO | Map sealed-mode violations to `AIRGAP_EGRESS_BLOCKED` responses with user guidance. 
Dependencies: CONCELIER-WEB-AIRGAP-56-002. | Concelier WebService Guild, AirGap Policy Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-WEB-AIRGAP-58-001 `Import timeline emission` | TODO | Emit timeline events for bundle ingestion operations with bundle ID, scope, and actor metadata. Dependencies: CONCELIER-WEB-AIRGAP-57-001. | Concelier WebService Guild, AirGap Importer Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) CONCELIER-WEB-AOC-19-002 `AOC observability` | TODO | Emit `ingestion_write_total`, `aoc_violation_total`, latency histograms, and tracing spans (`ingest.fetch/transform/write`, `aoc.guard`). Wire structured logging to include tenant, source vendor, upstream id, and content hash. | Concelier WebService Guild, Observability Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-WEB-AOC-19-003 `Schema/guard unit tests` | TODO | Add unit tests covering schema validation failures, forbidden field rejections (`ERR_AOC_001/002/006/007`), idempotent upserts, and supersedes chains using deterministic fixtures. | QA Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-WEB-AOC-19-004 `End-to-end ingest verification` | TODO | Create integration tests ingesting large advisory batches (cold/warm) validating linkset enrichment, metrics emission, and reproducible outputs. Capture load-test scripts + doc notes for Offline Kit dry runs. | Concelier WebService Guild, QA Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-WEB-OAS-61-001 `/.well-known/openapi` | TODO | Implement discovery endpoint emitting Concelier spec with version metadata and ETag. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-WEB-OAS-61-002 `Error envelope migration` | TODO | Ensure all API responses use standardized error envelope; update controllers/tests. 
| Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-WEB-OAS-62-001 `Examples expansion` | TODO | Add curated examples for advisory observations/linksets/conflicts; integrate into dev portal. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-WEB-OAS-63-001 `Deprecation headers` | TODO | Add Sunset/Deprecation headers for retiring endpoints and update documentation/notifications. | Concelier WebService Guild, API Governance Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-WEB-AOC-19-003 `Schema/guard unit tests` | TODO | Add unit tests covering schema validation failures, forbidden field rejections (`ERR_AOC_001/002/006/007`), idempotent upserts, and supersedes chains using deterministic fixtures. Dependencies: CONCELIER-WEB-AOC-19-002. | QA Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-WEB-AOC-19-004 `End-to-end ingest verification` | TODO | Create integration tests ingesting large advisory batches (cold/warm) validating linkset enrichment, metrics emission, and reproducible outputs. Capture load-test scripts + doc notes for Offline Kit dry runs. Dependencies: CONCELIER-WEB-AOC-19-003. | Concelier WebService Guild, QA Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-WEB-OAS-61-001 `/.well-known/openapi` | DONE (2025-11-02) | Implement discovery endpoint emitting Concelier spec with version metadata and ETag. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-WEB-OAS-61-002 `Error envelope migration` | TODO | Ensure all API responses use standardized error envelope; update controllers/tests. Dependencies: CONCELIER-WEB-OAS-61-001. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-WEB-OAS-62-001 `Examples expansion` | TODO | Add curated examples for advisory observations/linksets/conflicts; integrate into dev portal. 
Dependencies: CONCELIER-WEB-OAS-61-002. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-WEB-OAS-63-001 `Deprecation headers` | TODO | Add Sunset/Deprecation headers for retiring endpoints and update documentation/notifications. Dependencies: CONCELIER-WEB-OAS-62-001. | Concelier WebService Guild, API Governance Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) CONCELIER-WEB-OBS-50-001 `Telemetry adoption` | TODO | Adopt telemetry core in web service host, ensure ingest + read endpoints emit trace/log fields (`tenant_id`, `route`, `decision_effect`), and add correlation IDs to responses. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-WEB-OBS-51-001 `Observability APIs` | TODO | Surface ingest health metrics, queue depth, and SLO status via `/obs/concelier/health` endpoint for Console widgets, with caching and tenant partitioning. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-WEB-OBS-52-001 `Timeline streaming` | TODO | Provide SSE stream `/obs/concelier/timeline` bridging to Timeline Indexer with paging tokens, guardrails, and audit logging. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-WEB-OBS-51-001 `Observability APIs` | TODO | Surface ingest health metrics, queue depth, and SLO status via `/obs/concelier/health` endpoint for Console widgets, with caching and tenant partitioning. Dependencies: CONCELIER-WEB-OBS-50-001. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-WEB-OBS-52-001 `Timeline streaming` | TODO | Provide SSE stream `/obs/concelier/timeline` bridging to Timeline Indexer with paging tokens, guardrails, and audit logging. Dependencies: CONCELIER-WEB-OBS-51-001. 
| Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) [Ingestion & Evidence] 110.B) Concelier.VI @@ -131,9 +131,9 @@ Depends on: Sprint 110.B - Concelier.V Summary: Ingestion & Evidence focus on Concelier (phase VI). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -CONCELIER-WEB-OBS-53-001 `Evidence locker integration` | TODO | Add `/evidence/advisories/*` routes invoking evidence locker snapshots, verifying tenant scopes (`evidence:read`), and returning signed manifest metadata. | Concelier WebService Guild, Evidence Locker Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-WEB-OBS-54-001 `Attestation exposure` | TODO | Provide `/attestations/advisories/*` read APIs surfacing DSSE status, verification summary, and provenance chain for Console/CLI. | Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) -CONCELIER-WEB-OBS-55-001 `Incident mode toggles` | TODO | Implement incident mode toggle endpoints, propagate to orchestrator/locker, and document cooldown/backoff semantics. | Concelier WebService Guild, DevOps Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-WEB-OBS-53-001 `Evidence locker integration` | TODO | Add `/evidence/advisories/*` routes invoking evidence locker snapshots, verifying tenant scopes (`evidence:read`), and returning signed manifest metadata. Dependencies: CONCELIER-WEB-OBS-52-001. | Concelier WebService Guild, Evidence Locker Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-WEB-OBS-54-001 `Attestation exposure` | TODO | Provide `/attestations/advisories/*` read APIs surfacing DSSE status, verification summary, and provenance chain for Console/CLI. Dependencies: CONCELIER-WEB-OBS-53-001. 
| Concelier WebService Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) +CONCELIER-WEB-OBS-55-001 `Incident mode toggles` | TODO | Implement incident mode toggle endpoints, propagate to orchestrator/locker, and document cooldown/backoff semantics. Dependencies: CONCELIER-WEB-OBS-54-001. | Concelier WebService Guild, DevOps Guild (src/Concelier/StellaOps.Concelier.WebService/TASKS.md) FEEDCONN-CCCS-02-009 Version range provenance (Oct 2025) | BE-Conn-CCCS | **TODO (due 2025-10-21)** – Map CCCS advisories into the new `advisory_observations.affected.versions[]` structure, preserving each upstream range with provenance anchors (`cccs:{serial}:{index}`) and normalized comparison keys. Update mapper tests/fixtures for the Link-Not-Merge schema and verify linkset builders consume the ranges without relying on legacy merge counters.
2025-10-29: `docs/dev/normalized-rule-recipes.md` now documents helper snippets for building observation version entries—use them instead of merge-specific builders and refresh fixtures with `UPDATE_CCCS_FIXTURES=1`. | CONCELIER-LNM-21-001 (src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/TASKS.md) FEEDCONN-CERTBUND-02-010 Version range provenance | BE-Conn-CERTBUND | **TODO (due 2025-10-22)** – Translate `product.Versions` phrases (e.g., `2023.1 bis 2024.2`, `alle`) into comparison helpers for `advisory_observations.affected.versions[]`, capturing provenance (`certbund:{advisoryId}:{vendor}`) and localisation notes. Update mapper/tests for the Link-Not-Merge schema and refresh documentation accordingly. | CONCELIER-LNM-21-001 (src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/TASKS.md) FEEDCONN-CISCO-02-009 SemVer range provenance | BE-Conn-Cisco | **TODO (due 2025-10-21)** – Emit Cisco SemVer ranges into `advisory_observations.affected.versions[]` with provenance identifiers (`cisco:{productId}`) and deterministic comparison keys. Update mapper/tests for the Link-Not-Merge schema and replace legacy merge counter checks with observation/linkset validation. | CONCELIER-LNM-21-001 (src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/TASKS.md) @@ -141,10 +141,10 @@ FEEDCONN-ICSCISA-02-012 Version range provenance | BE-Conn-ICS-CISA | **TODO (du FEEDCONN-KISA-02-008 Firmware range provenance | BE-Conn-KISA, Models | **TODO (due 2025-10-24)** – Define comparison helpers for Hangul-labelled firmware ranges (`XFU 1.0.1.0084 ~ 2.0.1.0034`) and map them into `advisory_observations.affected.versions[]` with provenance tags. Coordinate with Models only if a new comparison scheme is required, then update localisation notes and fixtures for the Link-Not-Merge schema. 
| CONCELIER-LNM-21-001 (src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/TASKS.md) FEEDCONN-SHARED-STATE-003 Source state seeding helper | Tools Guild, BE-Conn-MSRC | **DOING (2025-10-19)** – Provide a reusable CLI/utility to seed `pendingDocuments`/`pendingMappings` for connectors (MSRC backfills require scripted CVRF + detail injection). Coordinate with MSRC team for expected JSON schema and handoff once prototype lands. Prereqs confirmed none (2025-10-19). | Tools (src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/TASKS.md) FEEDMERGE-COORD-02-901 Connector deadline check-ins | BE-Merge | **TODO (due 2025-10-21)** – Confirm Cccs/Cisco version-provenance updates land, capture `LinksetVersionCoverage` dashboard snapshots (expect zero missing-range warnings), and update coordination docs with the results.
2025-10-29: Observation metrics now surface `version_entries_total`/`missing_version_entries_total`; include screenshots for both when closing this task. | FEEDMERGE-COORD-02-900 (src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md) -FEEDMERGE-COORD-02-902 ICS-CISA version comparison support | BE-Merge, Models | **TODO (due 2025-10-23)** – Review ICS-CISA sample advisories, validate reuse of existing comparison helpers, and pre-stage Models ticket template only if a new firmware comparator is required. Document the outcome and observation coverage logs in coordination docs + tracker files.
2025-10-29: `docs/dev/normalized-rule-recipes.md` (§2–§3) now covers observation entries; attach decision summary + log sample when handing off to Models. | FEEDMERGE-COORD-02-900 (src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md) -FEEDMERGE-COORD-02-903 KISA firmware scheme review | BE-Merge, Models | **TODO (due 2025-10-24)** – Pair with KISA team on proposed firmware comparison helper (`kisa.build` or variant), ensure observation mapper alignment, and open Models ticket only if a new comparator is required. Log the final helper signature and observation coverage metrics in coordination docs + tracker files. | FEEDMERGE-COORD-02-900 (src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md) +FEEDMERGE-COORD-02-902 ICS-CISA version comparison support | BE-Merge, Models | **TODO (due 2025-10-23)** – Review ICS-CISA sample advisories, validate reuse of existing comparison helpers, and pre-stage Models ticket template only if a new firmware comparator is required. Document the outcome and observation coverage logs in coordination docs + tracker files.
2025-10-29: `docs/dev/normalized-rule-recipes.md` (§2–§3) now covers observation entries; attach decision summary + log sample when handing off to Models. Dependencies: FEEDMERGE-COORD-02-901. | FEEDMERGE-COORD-02-900 (src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md) +FEEDMERGE-COORD-02-903 KISA firmware scheme review | BE-Merge, Models | **TODO (due 2025-10-24)** – Pair with KISA team on proposed firmware comparison helper (`kisa.build` or variant), ensure observation mapper alignment, and open Models ticket only if a new comparator is required. Log the final helper signature and observation coverage metrics in coordination docs + tracker files. Dependencies: FEEDMERGE-COORD-02-902. | FEEDMERGE-COORD-02-900 (src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md) Fixture validation sweep | QA | **DOING (2025-10-19)** – Prereqs confirmed none; continuing RHSA fixture regeneration and diff review alongside mapper provenance updates.
2025-10-29: Added `scripts/update-redhat-fixtures.sh` to regenerate golden snapshots with `UPDATE_GOLDENS=1`; run it before reviews to capture CSAF contract deltas. | None (src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md) -Link-Not-Merge version provenance coordination | BE-Merge | **DOING** – Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) so they emit `advisory_observations.affected.versions[]` entries with provenance tags and deterministic comparison keys. Track rollout status in `docs/dev/normalized-rule-recipes.md` (now updated for Link-Not-Merge) and retire the legacy merge counters as coverage transitions to linkset validation metrics.
2025-10-29: Added new guidance in the doc for recording observation version metadata and logging gaps via `LinksetVersionCoverage` warnings to replace prior `concelier.merge.normalized_rules*` alerts. | CONCELIER-LNM-21-001 (src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md) +Link-Not-Merge version provenance coordination | BE-Merge | **DOING** – Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) so they emit `advisory_observations.affected.versions[]` entries with provenance tags and deterministic comparison keys. Track rollout status in `docs/dev/normalized-rule-recipes.md` (now updated for Link-Not-Merge) and retire the legacy merge counters as coverage transitions to linkset validation metrics.
2025-10-29: Added new guidance in the doc for recording observation version metadata and logging gaps via `LinksetVersionCoverage` warnings to replace prior `concelier.merge.normalized_rules*` alerts. Dependencies: CONCELIER-LNM-21-203. | CONCELIER-LNM-21-001 (src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md) MERGE-LNM-21-001 Migration plan authoring | BE-Merge, Architecture Guild | Draft `no-merge` migration playbook, documenting backfill strategy, feature flag rollout, and rollback steps for legacy merge pipeline deprecation. | CONCELIER-LNM-21-101 (src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md) @@ -153,8 +153,8 @@ Depends on: Sprint 110.B - Concelier.VI Summary: Ingestion & Evidence focus on Concelier (phase VII). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -MERGE-LNM-21-002 Merge service deprecation | BE-Merge | Refactor or retire `AdvisoryMergeService` and related pipelines, ensuring callers transition to observation/linkset APIs; add compile-time analyzer preventing merge service usage. | MERGE-LNM-21-001 (src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md) -MERGE-LNM-21-003 Determinism/test updates | QA Guild, BE-Merge | Replace merge determinism suites with observation/linkset regression tests verifying no data mutation and conflicts remain visible. | MERGE-LNM-21-002 (src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md) +MERGE-LNM-21-002 Merge service deprecation | BE-Merge | Refactor or retire `AdvisoryMergeService` and related pipelines, ensuring callers transition to observation/linkset APIs; add compile-time analyzer preventing merge service usage. Dependencies: MERGE-LNM-21-001. | MERGE-LNM-21-001 (src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md) +MERGE-LNM-21-003 Determinism/test updates | QA Guild, BE-Merge | Replace merge determinism suites with observation/linkset regression tests verifying no data mutation and conflicts remain visible. 
Dependencies: MERGE-LNM-21-002. | MERGE-LNM-21-002 (src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md) [Ingestion & Evidence] 110.C) Excititor.I @@ -163,20 +163,20 @@ Summary: Ingestion & Evidence focus on Excititor (phase I). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- EXCITITOR-AIAI-31-001 `Justification enrichment` | TODO | Expose normalized VEX justifications, product trees, and paragraph anchors for Advisory AI conflict explanations. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-AIAI-31-002 `VEX chunk API` | TODO | Provide `/vex/evidence/chunks` endpoint returning tenant-scoped VEX statements with signature metadata and scope scores for RAG. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-AIAI-31-003 `Telemetry` | TODO | Emit metrics/logs for VEX chunk usage, signature verification failures, and guardrail triggers. | Excititor WebService Guild, Observability Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-AIAI-31-002 `VEX chunk API` | TODO | Provide `/vex/evidence/chunks` endpoint returning tenant-scoped VEX statements with signature metadata and scope scores for RAG. Dependencies: EXCITITOR-AIAI-31-001. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-AIAI-31-003 `Telemetry` | TODO | Emit metrics/logs for VEX chunk usage, signature verification failures, and guardrail triggers. Dependencies: EXCITITOR-AIAI-31-002. | Excititor WebService Guild, Observability Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) EXCITITOR-AIRGAP-56-001 `Mirror ingestion adapters` | TODO | Add mirror-based VEX ingestion, preserving statement digests and bundle IDs. 
| Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-AIRGAP-56-002 `Bundle provenance` | TODO | Persist bundle metadata on VEX observations/linksets with provenance references. | Excititor Core Guild, AirGap Importer Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-AIRGAP-57-001 `Sealed-mode enforcement` | TODO | Block non-mirror connectors in sealed mode and surface remediation errors. | Excititor Core Guild, AirGap Policy Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-AIRGAP-57-002 `Staleness annotations` | TODO | Annotate VEX statements with staleness metrics and expose via API. | Excititor Core Guild, AirGap Time Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-AIRGAP-58-001 `Portable VEX evidence` | TODO | Package VEX evidence segments into portable evidence bundles linked to timeline. | Excititor Core Guild, Evidence Locker Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-AIRGAP-56-002 `Bundle provenance` | TODO | Persist bundle metadata on VEX observations/linksets with provenance references. Dependencies: EXCITITOR-AIRGAP-56-001. | Excititor Core Guild, AirGap Importer Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-AIRGAP-57-001 `Sealed-mode enforcement` | TODO | Block non-mirror connectors in sealed mode and surface remediation errors. Dependencies: EXCITITOR-AIRGAP-56-002. | Excititor Core Guild, AirGap Policy Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-AIRGAP-57-002 `Staleness annotations` | TODO | Annotate VEX statements with staleness metrics and expose via API. Dependencies: EXCITITOR-AIRGAP-57-001. 
| Excititor Core Guild, AirGap Time Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-AIRGAP-58-001 `Portable VEX evidence` | TODO | Package VEX evidence segments into portable evidence bundles linked to timeline. Dependencies: EXCITITOR-AIRGAP-57-002. | Excititor Core Guild, Evidence Locker Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) EXCITITOR-ATTEST-01-003 – Verification suite & observability | Team Excititor Attestation | DOING (2025-10-22) – Continuing implementation: build `IVexAttestationVerifier`, wire metrics/logging, and add regression tests. Draft plan in `EXCITITOR-ATTEST-01-003-plan.md` (2025-10-19) guides scope; updating with worknotes as progress lands.
2025-10-31: Verifier now tolerates duplicate source providers from AOC raw projections, downgrades offline Rekor verification to a degraded result, and enforces trusted signer registry checks with detailed diagnostics/tests. | EXCITITOR-ATTEST-01-002 (src/Excititor/__Libraries/StellaOps.Excititor.Attestation/TASKS.md) -EXCITITOR-ATTEST-73-001 `VEX attestation payloads` | TODO | Provide VEX statement metadata (supplier identity, justification, scope) required for VEXAttestation payloads. | Excititor Core Guild, Attestation Payloads Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-ATTEST-73-002 `Chain provenance` | TODO | Expose linkage from VEX statements to subject/product for chain of custody graph. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-ATTEST-73-001 `VEX attestation payloads` | TODO | Provide VEX statement metadata (supplier identity, justification, scope) required for VEXAttestation payloads. Dependencies: EXCITITOR-ATTEST-01-003. | Excititor Core Guild, Attestation Payloads Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-ATTEST-73-002 `Chain provenance` | TODO | Expose linkage from VEX statements to subject/product for chain of custody graph. Dependencies: EXCITITOR-ATTEST-73-001. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) EXCITITOR-CONN-MS-01-003 – Trust metadata & provenance hints | Team Excititor Connectors – MSRC | TODO – Emit cosign/AAD issuer metadata, attach provenance details, and document policy integration. | EXCITITOR-CONN-MS-01-002, EXCITITOR-POLICY-01-001 (src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md) EXCITITOR-CONN-ORACLE-01-003 – Trust provenance enrichment | Team Excititor Connectors – Oracle | TODO – Emit Oracle signing metadata (PGP/cosign fingerprint list, issuer trust tier) into raw provenance so downstream services can evaluate trust. 
Connector must not apply consensus weighting during ingestion. | EXCITITOR-CONN-ORACLE-01-002, EXCITITOR-POLICY-01-001 (src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md) EXCITITOR-CONN-STELLA-07-002 | TODO | Parse mirror bundles into raw `VexClaim` batches, preserving original provider metadata and mirror provenance without applying consensus or weighting. | Excititor Connectors – Stella (src/Excititor/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md) -EXCITITOR-CONN-STELLA-07-003 | TODO | Implement incremental cursor handling per-export digest for raw claim replays, support resume, and document configuration for downstream Excititor mirrors. | Excititor Connectors – Stella (src/Excititor/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md) +EXCITITOR-CONN-STELLA-07-003 | TODO | Implement incremental cursor handling per-export digest for raw claim replays, support resume, and document configuration for downstream Excititor mirrors. Dependencies: EXCITITOR-CONN-STELLA-07-002. | Excititor Connectors – Stella (src/Excititor/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md) [Ingestion & Evidence] 110.C) Excititor.II @@ -187,17 +187,17 @@ Task ID | State | Task description | Owners (Source) EXCITITOR-CONN-SUSE-01-003 – Trust metadata provenance | Team Excititor Connectors – SUSE | TODO – Emit provider trust configuration (signer fingerprints, trust tier notes) into the raw provenance envelope so downstream VEX Lens/Policy components can weigh issuers. Connector must not apply weighting or consensus inside ingestion. | EXCITITOR-CONN-SUSE-01-002, EXCITITOR-POLICY-01-001 (src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md) EXCITITOR-CONN-UBUNTU-01-003 – Trust provenance enrichment | Team Excititor Connectors – Ubuntu | TODO – Emit Ubuntu signing metadata (GPG fingerprints, issuer trust tier) inside raw provenance artifacts so downstream Policy/VEX Lens consumers can weigh issuers. 
Connector must remain aggregation-only with no inline weighting. | EXCITITOR-CONN-UBUNTU-01-002, EXCITITOR-POLICY-01-001 (src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md) EXCITITOR-CONSOLE-23-001 `VEX aggregation views` | TODO | Expose `/console/vex` endpoints returning grouped VEX statements per advisory/component with status chips, justification metadata, precedence trace pointers, and tenant-scoped filters for Console explorer. | Excititor WebService Guild, BE-Base Platform Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-CONSOLE-23-002 `Dashboard VEX deltas` | TODO | Provide aggregated counts for VEX overrides (new, not_affected, revoked) powering Console dashboard + live status ticker; emit metrics for policy explain integration. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-CONSOLE-23-003 `VEX search helpers` | TODO | Deliver rapid lookup endpoints of VEX by advisory/component for Console global search; ensure response includes provenance and precedence context; include caching and RBAC. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-CONSOLE-23-002 `Dashboard VEX deltas` | TODO | Provide aggregated counts for VEX overrides (new, not_affected, revoked) powering Console dashboard + live status ticker; emit metrics for policy explain integration. Dependencies: EXCITITOR-CONSOLE-23-001. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-CONSOLE-23-003 `VEX search helpers` | TODO | Deliver rapid lookup endpoints of VEX by advisory/component for Console global search; ensure response includes provenance and precedence context; include caching and RBAC. Dependencies: EXCITITOR-CONSOLE-23-002. 
| Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) EXCITITOR-CORE-AOC-19-002 `VEX linkset extraction` | TODO | Implement deterministic extraction of advisory IDs, component PURLs, and references into `linkset`, capturing reconciled-from metadata for traceability. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-CORE-AOC-19-003 `Idempotent VEX raw upsert` | TODO | Enforce `(vendor, upstreamId, contentHash, tenant)` uniqueness, generate supersedes chains, and ensure append-only versioning of raw VEX documents. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-CORE-AOC-19-004 `Remove ingestion consensus` | TODO | Excise consensus/merge/severity logic from Excititor ingestion paths, updating exports/tests to rely on Policy Engine materializations instead. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-CORE-AOC-19-013 `Authority tenant scope smoke coverage` | TODO | Update Excititor smoke/e2e suites to seed tenant-aware Authority clients and ensure cross-tenant VEX ingestion is rejected. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-CORE-AOC-19-003 `Idempotent VEX raw upsert` | TODO | Enforce `(vendor, upstreamId, contentHash, tenant)` uniqueness, generate supersedes chains, and ensure append-only versioning of raw VEX documents. Dependencies: EXCITITOR-CORE-AOC-19-002. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-CORE-AOC-19-004 `Remove ingestion consensus` | TODO | Excise consensus/merge/severity logic from Excititor ingestion paths, updating exports/tests to rely on Policy Engine materializations instead. Dependencies: EXCITITOR-CORE-AOC-19-003. 
| Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-CORE-AOC-19-013 `Authority tenant scope smoke coverage` | TODO | Update Excititor smoke/e2e suites to seed tenant-aware Authority clients and ensure cross-tenant VEX ingestion is rejected. Dependencies: EXCITITOR-CORE-AOC-19-004. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) EXCITITOR-GRAPH-21-001 `Inspector linkouts` | BLOCKED (2025-10-27) | Provide batched VEX/advisory reference fetches keyed by graph node PURLs so UI inspector can display raw documents and justification metadata. | Excititor Core Guild, Cartographer Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-GRAPH-21-002 `Overlay enrichment` | BLOCKED (2025-10-27) | Ensure overlay metadata includes VEX justification summaries and document versions for Cartographer overlays; update fixtures/tests. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-GRAPH-21-005 `Inspector indexes` | BLOCKED (2025-10-27) | Add indexes/materialized views for VEX lookups by PURL/policy to support Cartographer inspector performance; document migrations. | Excititor Storage Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) -EXCITITOR-GRAPH-24-101 `VEX summary API` | TODO | Provide endpoints delivering VEX status summaries per component/asset for Vuln Explorer integration. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-GRAPH-24-102 `Evidence batch API` | TODO | Add batch VEX observation retrieval optimized for Graph overlays/tooltips. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-GRAPH-21-002 `Overlay enrichment` | BLOCKED (2025-10-27) | Ensure overlay metadata includes VEX justification summaries and document versions for Cartographer overlays; update fixtures/tests. 
Dependencies: EXCITITOR-GRAPH-21-001. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-GRAPH-21-005 `Inspector indexes` | BLOCKED (2025-10-27) | Add indexes/materialized views for VEX lookups by PURL/policy to support Cartographer inspector performance; document migrations. Dependencies: EXCITITOR-GRAPH-21-002. | Excititor Storage Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) +EXCITITOR-GRAPH-24-101 `VEX summary API` | TODO | Provide endpoints delivering VEX status summaries per component/asset for Vuln Explorer integration. Dependencies: EXCITITOR-GRAPH-21-005. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-GRAPH-24-102 `Evidence batch API` | TODO | Add batch VEX observation retrieval optimized for Graph overlays/tooltips. Dependencies: EXCITITOR-GRAPH-24-101. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) EXCITITOR-LNM-21-001 `VEX observation model` | TODO | Define immutable `vex_observations` schema capturing raw statements, product PURLs, justification, and AOC metadata. `DOCS-LNM-22-002` blocked pending this schema. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) @@ -206,21 +206,21 @@ Depends on: Sprint 110.C - Excititor.II Summary: Ingestion & Evidence focus on Excititor (phase III). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -EXCITITOR-LNM-21-002 `Linkset correlator` | TODO | Build correlation pipeline combining alias + product PURL signals to form `vex_linksets` with confidence metrics. Docs waiting to finalize VEX aggregation guide. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-LNM-21-003 `Conflict annotator` | TODO | Record status/justification disagreements within linksets and expose structured conflicts. Provide structured payloads for `DOCS-LNM-22-002`. 
| Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-LNM-21-004 `Merge removal` | TODO | Remove legacy VEX merge logic, enforce immutability, and add guards/tests to prevent future merges. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-LNM-21-005 `Event emission` | TODO | Emit `vex.linkset.updated` events for downstream consumers with delta descriptions and tenant context. | Excititor Core Guild, Platform Events Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-LNM-21-101 `Observations collections` | TODO | Provision `vex_observations`/`vex_linksets` collections with shard keys, indexes over aliases & product PURLs, and multi-tenant guards. | Excititor Storage Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) -EXCITITOR-LNM-21-102 `Migration/backfill` | TODO | Backfill legacy merged VEX docs into observations/linksets, add provenance notes, and produce rollback scripts. | Excititor Storage Guild, DevOps Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) -EXCITITOR-LNM-21-201 `Observation APIs` | TODO | Add VEX observation read endpoints with filters, pagination, RBAC, and tenant scoping. | Excititor WebService Guild, BE-Base Platform Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-LNM-21-202 `Linkset APIs` | TODO | Implement linkset read/export/evidence endpoints returning correlation/conflict payloads and map errors to `ERR_AGG_*`. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-LNM-21-203 `Event publishing` | TODO | Publish `vex.linkset.updated` events, document schema, and ensure idempotent delivery. 
| Excititor WebService Guild, Platform Events Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-LNM-21-002 `Linkset correlator` | TODO | Build correlation pipeline combining alias + product PURL signals to form `vex_linksets` with confidence metrics. Docs waiting to finalize VEX aggregation guide. Dependencies: EXCITITOR-LNM-21-001. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-LNM-21-003 `Conflict annotator` | TODO | Record status/justification disagreements within linksets and expose structured conflicts. Provide structured payloads for `DOCS-LNM-22-002`. Dependencies: EXCITITOR-LNM-21-002. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-LNM-21-004 `Merge removal` | TODO | Remove legacy VEX merge logic, enforce immutability, and add guards/tests to prevent future merges. Dependencies: EXCITITOR-LNM-21-003. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-LNM-21-005 `Event emission` | TODO | Emit `vex.linkset.updated` events for downstream consumers with delta descriptions and tenant context. Dependencies: EXCITITOR-LNM-21-004. | Excititor Core Guild, Platform Events Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-LNM-21-101 `Observations collections` | TODO | Provision `vex_observations`/`vex_linksets` collections with shard keys, indexes over aliases & product PURLs, and multi-tenant guards. Dependencies: EXCITITOR-LNM-21-005. | Excititor Storage Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) +EXCITITOR-LNM-21-102 `Migration/backfill` | TODO | Backfill legacy merged VEX docs into observations/linksets, add provenance notes, and produce rollback scripts. Dependencies: EXCITITOR-LNM-21-101. 
| Excititor Storage Guild, DevOps Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) +EXCITITOR-LNM-21-201 `Observation APIs` | TODO | Add VEX observation read endpoints with filters, pagination, RBAC, and tenant scoping. Dependencies: EXCITITOR-LNM-21-102. | Excititor WebService Guild, BE-Base Platform Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-LNM-21-202 `Linkset APIs` | TODO | Implement linkset read/export/evidence endpoints returning correlation/conflict payloads and map errors to `ERR_AGG_*`. Dependencies: EXCITITOR-LNM-21-201. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-LNM-21-203 `Event publishing` | TODO | Publish `vex.linkset.updated` events, document schema, and ensure idempotent delivery. Dependencies: EXCITITOR-LNM-21-202. | Excititor WebService Guild, Platform Events Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) EXCITITOR-OAS-61-001 `Spec coverage` | TODO | Update VEX OAS to include observation/linkset endpoints with provenance fields and examples. | Excititor Core Guild, API Contracts Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-OAS-61-002 `Example catalog` | TODO | Provide examples for VEX justifications, statuses, conflicts; ensure SDK docs reference them. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-OAS-62-001 `SDK smoke tests` | TODO | Add SDK scenarios for VEX observation queries and conflict handling to language smoke suites. | Excititor Core Guild, SDK Generator Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-OAS-63-001 `Deprecation headers` | TODO | Add deprecation metadata and notifications for legacy VEX routes. 
| Excititor Core Guild, API Governance Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-OAS-61-002 `Example catalog` | TODO | Provide examples for VEX justifications, statuses, conflicts; ensure SDK docs reference them. Dependencies: EXCITITOR-OAS-61-001. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-OAS-62-001 `SDK smoke tests` | TODO | Add SDK scenarios for VEX observation queries and conflict handling to language smoke suites. Dependencies: EXCITITOR-OAS-61-002. | Excititor Core Guild, SDK Generator Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-OAS-63-001 `Deprecation headers` | TODO | Add deprecation metadata and notifications for legacy VEX routes. Dependencies: EXCITITOR-OAS-62-001. | Excititor Core Guild, API Governance Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) EXCITITOR-OBS-50-001 `Telemetry adoption` | TODO | Integrate telemetry core across VEX ingestion/linking, ensuring spans/logs capture tenant, product scope, upstream id, justification hash, and trace IDs. | Excititor Core Guild, Observability Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-OBS-51-001 `Metrics & SLOs` | TODO | Publish metrics for VEX ingest latency, scope resolution success, conflict rate, signature verification failures. Define SLOs (link latency P95 <30s) and configure burn-rate alerts. | Excititor Core Guild, DevOps Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-OBS-51-001 `Metrics & SLOs` | TODO | Publish metrics for VEX ingest latency, scope resolution success, conflict rate, signature verification failures. Define SLOs (link latency P95 <30s) and configure burn-rate alerts. Dependencies: EXCITITOR-OBS-50-001. 
| Excititor Core Guild, DevOps Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) [Ingestion & Evidence] 110.C) Excititor.IV @@ -228,21 +228,21 @@ Depends on: Sprint 110.C - Excititor.III Summary: Ingestion & Evidence focus on Excititor (phase IV). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -EXCITITOR-OBS-52-001 `Timeline events` | TODO | Emit `timeline_event` entries for VEX ingest/linking/outcome changes with trace IDs, justification summaries, and evidence placeholders. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-OBS-53-001 `Evidence snapshots` | TODO | Build evidence payloads for VEX statements (raw doc, normalization diff, precedence notes) and push to evidence locker with Merkle manifests. | Excititor Core Guild, Evidence Locker Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-OBS-54-001 `Attestation & verification` | TODO | Attach DSSE attestations to VEX batch processing, verify chain-of-custody via Provenance library, and link attestation IDs to timeline + ledger. | Excititor Core Guild, Provenance Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-OBS-55-001 `Incident mode` | TODO | Implement incident sampling bump, additional raw payload retention, and activation events for VEX pipelines with redaction guard rails. | Excititor Core Guild, DevOps Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-OBS-52-001 `Timeline events` | TODO | Emit `timeline_event` entries for VEX ingest/linking/outcome changes with trace IDs, justification summaries, and evidence placeholders. Dependencies: EXCITITOR-OBS-51-001. 
| Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-OBS-53-001 `Evidence snapshots` | TODO | Build evidence payloads for VEX statements (raw doc, normalization diff, precedence notes) and push to evidence locker with Merkle manifests. Dependencies: EXCITITOR-OBS-52-001. | Excititor Core Guild, Evidence Locker Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-OBS-54-001 `Attestation & verification` | TODO | Attach DSSE attestations to VEX batch processing, verify chain-of-custody via Provenance library, and link attestation IDs to timeline + ledger. Dependencies: EXCITITOR-OBS-53-001. | Excititor Core Guild, Provenance Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-OBS-55-001 `Incident mode` | TODO | Implement incident sampling bump, additional raw payload retention, and activation events for VEX pipelines with redaction guard rails. Dependencies: EXCITITOR-OBS-54-001. | Excititor Core Guild, DevOps Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) EXCITITOR-ORCH-32-001 `Worker SDK adoption` | TODO | Integrate orchestrator worker SDK in Excititor ingestion jobs, emit heartbeats/progress/artifact hashes, and register source metadata. | Excititor Worker Guild (src/Excititor/StellaOps.Excititor.Worker/TASKS.md) -EXCITITOR-ORCH-33-001 `Control compliance` | TODO | Honor orchestrator pause/throttle/retry actions, classify error outputs, and persist restart checkpoints. | Excititor Worker Guild (src/Excititor/StellaOps.Excititor.Worker/TASKS.md) -EXCITITOR-ORCH-34-001 `Backfill & circuit breaker` | TODO | Implement orchestrator-driven backfills, apply circuit breaker reset rules, and ensure artifact dedupe alignment. | Excititor Worker Guild (src/Excititor/StellaOps.Excititor.Worker/TASKS.md) +EXCITITOR-ORCH-33-001 `Control compliance` | TODO | Honor orchestrator pause/throttle/retry actions, classify error outputs, and persist restart checkpoints. 
Dependencies: EXCITITOR-ORCH-32-001. | Excititor Worker Guild (src/Excititor/StellaOps.Excititor.Worker/TASKS.md) +EXCITITOR-ORCH-34-001 `Backfill & circuit breaker` | TODO | Implement orchestrator-driven backfills, apply circuit breaker reset rules, and ensure artifact dedupe alignment. Dependencies: EXCITITOR-ORCH-33-001. | Excititor Worker Guild (src/Excititor/StellaOps.Excititor.Worker/TASKS.md) EXCITITOR-POLICY-02-002 – Diagnostics for scoring signals | Team Excititor Policy | BACKLOG – Update diagnostics reports to surface missing severity/KEV/EPSS mappings, coefficient overrides, and provide actionable recommendations for policy tuning. | EXCITITOR-POLICY-02-001 (src/Excititor/__Libraries/StellaOps.Excititor.Policy/TASKS.md) -EXCITITOR-POLICY-20-001 `Policy selection endpoints` | TODO | Provide VEX lookup APIs supporting PURL/advisory batching, scope filtering, and tenant enforcement with deterministic ordering + pagination. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-POLICY-20-002 `Scope-aware linksets` | TODO | Enhance VEX linkset extraction with scope resolution (product/component) + version range matching to boost policy join accuracy; refresh fixtures/tests. | Excititor Core Guild, Policy Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-POLICY-20-003 `Selection cursors` | TODO | Introduce VEX selection cursor collections + indexes powering incremental policy runs; bundle change-stream checkpoint migrations and Offline Kit tooling. | Excititor Storage Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) -EXCITITOR-POLICY-23-001 `Evidence indexes` | TODO | Provide indexes/materialized views for policy runtime (status, justification, product PURL) to accelerate queries; document contract. 
| Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-POLICY-23-002 `Event guarantees` | TODO | Ensure `vex.linkset.updated` events include correlation confidence, conflict summaries, and idempotent ids for evaluator consumption. | Excititor Core Guild, Platform Events Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-POLICY-20-001 `Policy selection endpoints` | TODO | Provide VEX lookup APIs supporting PURL/advisory batching, scope filtering, and tenant enforcement with deterministic ordering + pagination. Dependencies: EXCITITOR-POLICY-02-002. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-POLICY-20-002 `Scope-aware linksets` | TODO | Enhance VEX linkset extraction with scope resolution (product/component) + version range matching to boost policy join accuracy; refresh fixtures/tests. Dependencies: EXCITITOR-POLICY-20-001. | Excititor Core Guild, Policy Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-POLICY-20-003 `Selection cursors` | TODO | Introduce VEX selection cursor collections + indexes powering incremental policy runs; bundle change-stream checkpoint migrations and Offline Kit tooling. Dependencies: EXCITITOR-POLICY-20-002. | Excititor Storage Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) +EXCITITOR-POLICY-23-001 `Evidence indexes` | TODO | Provide indexes/materialized views for policy runtime (status, justification, product PURL) to accelerate queries; document contract. Dependencies: EXCITITOR-POLICY-20-003. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-POLICY-23-002 `Event guarantees` | TODO | Ensure `vex.linkset.updated` events include correlation confidence, conflict summaries, and idempotent ids for evaluator consumption. Dependencies: EXCITITOR-POLICY-23-001. 
| Excititor Core Guild, Platform Events Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) EXCITITOR-RISK-66-001 `VEX gate provider` | TODO | Supply VEX status and justification data for risk engine gating with full source provenance. | Excititor Core Guild, Risk Engine Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-RISK-66-002 `Reachability inputs` | TODO | Provide component/product scoping metadata enabling reachability and runtime factor mapping. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-RISK-66-002 `Reachability inputs` | TODO | Provide component/product scoping metadata enabling reachability and runtime factor mapping. Dependencies: EXCITITOR-RISK-66-001. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) [Ingestion & Evidence] 110.C) Excititor.V @@ -250,21 +250,21 @@ Depends on: Sprint 110.C - Excititor.IV Summary: Ingestion & Evidence focus on Excititor (phase V). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -EXCITITOR-RISK-67-001 `Explainability metadata` | TODO | Include VEX justification, status reasoning, and source digests in explainability artifacts. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) -EXCITITOR-RISK-68-001 `Policy Studio integration` | TODO | Surface VEX-specific gates/weights within profile editor UI and validation messages. | Excititor Core Guild, Policy Studio Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-RISK-67-001 `Explainability metadata` | TODO | Include VEX justification, status reasoning, and source digests in explainability artifacts. Dependencies: EXCITITOR-RISK-66-002. 
| Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) +EXCITITOR-RISK-68-001 `Policy Studio integration` | TODO | Surface VEX-specific gates/weights within profile editor UI and validation messages. Dependencies: EXCITITOR-RISK-67-001. | Excititor Core Guild, Policy Studio Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) EXCITITOR-SIG-26-001 `Vendor exploitability hints` | TODO | Surface vendor-provided exploitability indicators and affected symbol lists to Signals service via projection endpoints. | Excititor Core Guild, Signals Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) EXCITITOR-STORE-AOC-19-001 `vex_raw schema validator` | TODO | Define Mongo JSON schema for `vex_raw` enforcing required fields and forbidding derived/consensus/severity fields. Ship unit tests with Mongo2Go to validate rejects. | Excititor Storage Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) -EXCITITOR-STORE-AOC-19-002 `idempotency unique index` | TODO | Create `(source.vendor, upstream.upstream_id, upstream.content_hash, tenant)` unique index with backfill checker, updating migrations + bootstrapper for offline installs. | Excititor Storage Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) -EXCITITOR-STORE-AOC-19-003 `append-only migration plan` | TODO | Migrate legacy consensus collections to `_backup_*`, seed supersedes chain for raw docs, and document rollback path + dry-run verification. | Excititor Storage Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) -EXCITITOR-STORE-AOC-19-004 `validator deployment docset` | TODO | Update migration runbooks and Offline Kit packaging to bundle schema validator scripts, with smoke instructions for air-gapped clusters. 
| Excititor Storage Guild, DevOps Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) +EXCITITOR-STORE-AOC-19-002 `idempotency unique index` | TODO | Create `(source.vendor, upstream.upstream_id, upstream.content_hash, tenant)` unique index with backfill checker, updating migrations + bootstrapper for offline installs. Dependencies: EXCITITOR-STORE-AOC-19-001. | Excititor Storage Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) +EXCITITOR-STORE-AOC-19-003 `append-only migration plan` | TODO | Migrate legacy consensus collections to `_backup_*`, seed supersedes chain for raw docs, and document rollback path + dry-run verification. Dependencies: EXCITITOR-STORE-AOC-19-002. | Excititor Storage Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) +EXCITITOR-STORE-AOC-19-004 `validator deployment docset` | TODO | Update migration runbooks and Offline Kit packaging to bundle schema validator scripts, with smoke instructions for air-gapped clusters. Dependencies: EXCITITOR-STORE-AOC-19-003. | Excititor Storage Guild, DevOps Guild (src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md) EXCITITOR-TEN-48-001 `Tenant-aware VEX linking` | TODO | Apply tenant context to VEX linkers, enable RLS, and expose capability endpoint confirming aggregation-only behavior. | Excititor Core Guild (src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md) EXCITITOR-VEXLENS-30-001 `VEX evidence enrichers` | TODO | Include issuer hints, signatures, and product trees in evidence payloads for VEX Lens; Label: VEX-Lens. | Excititor WebService Guild, VEX Lens Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) EXCITITOR-VULN-29-001 `VEX key canonicalization` | TODO | Canonicalize (lossless) VEX advisory/product keys (map to `advisory_key`, capture product scopes); expose original sources in `links[]`; AOC-compliant: no merge, no derived fields, no suppression; backfill existing records. 
| Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-VULN-29-002 `Evidence retrieval` | TODO | Provide `/vuln/evidence/vex/{advisory_key}` returning raw VEX statements filtered by tenant/product scope for Explorer evidence tabs. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-VULN-29-004 `Observability` | TODO | Add metrics/logs for VEX normalization, suppression scopes, withdrawn statements; emit events consumed by Vuln Explorer resolver. | Excititor WebService Guild, Observability Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-VULN-29-002 `Evidence retrieval` | TODO | Provide `/vuln/evidence/vex/{advisory_key}` returning raw VEX statements filtered by tenant/product scope for Explorer evidence tabs. Dependencies: EXCITITOR-VULN-29-001. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-VULN-29-004 `Observability` | TODO | Add metrics/logs for VEX normalization, suppression scopes, withdrawn statements; emit events consumed by Vuln Explorer resolver. Dependencies: EXCITITOR-VULN-29-002. | Excititor WebService Guild, Observability Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) EXCITITOR-WEB-AIRGAP-56-001 | TODO | Support mirror bundle registration via APIs, expose bundle provenance in VEX responses, and block external connectors in sealed mode. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-WEB-AIRGAP-56-002 | TODO | Return VEX staleness metrics and time anchor info in API responses for Console/CLI use. | Excititor WebService Guild, AirGap Time Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-WEB-AIRGAP-57-001 | TODO | Map sealed-mode violations to standardized error payload with remediation guidance. 
| Excititor WebService Guild, AirGap Policy Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-WEB-AIRGAP-56-002 | TODO | Return VEX staleness metrics and time anchor info in API responses for Console/CLI use. Dependencies: EXCITITOR-WEB-AIRGAP-56-001. | Excititor WebService Guild, AirGap Time Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-WEB-AIRGAP-57-001 | TODO | Map sealed-mode violations to standardized error payload with remediation guidance. Dependencies: EXCITITOR-WEB-AIRGAP-56-002. | Excititor WebService Guild, AirGap Policy Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) [Ingestion & Evidence] 110.C) Excititor.VI @@ -272,21 +272,21 @@ Depends on: Sprint 110.C - Excititor.V Summary: Ingestion & Evidence focus on Excititor (phase VI). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -EXCITITOR-WEB-AIRGAP-58-001 | TODO | Emit timeline events for VEX bundle imports with bundle ID, scope, and actor metadata. | Excititor WebService Guild, AirGap Importer Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-WEB-AIRGAP-58-001 | TODO | Emit timeline events for VEX bundle imports with bundle ID, scope, and actor metadata. Dependencies: EXCITITOR-WEB-AIRGAP-57-001. | Excititor WebService Guild, AirGap Importer Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) EXCITITOR-WEB-AOC-19-001 `Raw VEX ingestion APIs` | TODO | Implement `POST /ingest/vex`, `GET /vex/raw*`, and `POST /aoc/verify` endpoints. Enforce Authority scopes, tenant injection, and guard pipeline to ensure only immutable VEX facts are persisted. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-WEB-AOC-19-002 `AOC observability + metrics` | TODO | Export metrics (`ingestion_write_total`, `aoc_violation_total`, signature verification counters) and tracing spans matching Conseiller naming. 
Ensure structured logging includes tenant, source vendor, upstream id, and content hash. | Excititor WebService Guild, Observability Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-WEB-AOC-19-003 `Guard + schema test harness` | TODO | Add unit/integration tests for schema validation, forbidden field rejection (`ERR_AOC_001/006/007`), and supersedes behavior using CycloneDX-VEX & CSAF fixtures with deterministic expectations. | QA Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-WEB-AOC-19-004 `Batch ingest validation` | TODO | Build large fixture ingest covering mixed VEX statuses, verifying raw storage parity, metrics, and CLI `aoc verify` compatibility. Document load test/runbook updates. | Excititor WebService Guild, QA Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-WEB-AOC-19-002 `AOC observability + metrics` | TODO | Export metrics (`ingestion_write_total`, `aoc_violation_total`, signature verification counters) and tracing spans matching Conseiller naming. Ensure structured logging includes tenant, source vendor, upstream id, and content hash. Dependencies: EXCITITOR-WEB-AOC-19-001. | Excititor WebService Guild, Observability Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-WEB-AOC-19-003 `Guard + schema test harness` | TODO | Add unit/integration tests for schema validation, forbidden field rejection (`ERR_AOC_001/006/007`), and supersedes behavior using CycloneDX-VEX & CSAF fixtures with deterministic expectations. Dependencies: EXCITITOR-WEB-AOC-19-002. | QA Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-WEB-AOC-19-004 `Batch ingest validation` | TODO | Build large fixture ingest covering mixed VEX statuses, verifying raw storage parity, metrics, and CLI `aoc verify` compatibility. Document load test/runbook updates. Dependencies: EXCITITOR-WEB-AOC-19-003. 
| Excititor WebService Guild, QA Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) EXCITITOR-WEB-OAS-61-001 | TODO | Implement `/.well-known/openapi` discovery endpoint with spec version metadata. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-WEB-OAS-61-002 | TODO | Standardize error envelope responses and update controller/unit tests. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-WEB-OAS-62-001 | TODO | Add curated examples for VEX observation/linkset endpoints and ensure portal displays them. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-WEB-OAS-63-001 | TODO | Emit deprecation headers and update docs for retiring VEX APIs. | Excititor WebService Guild, API Governance Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-WEB-OAS-61-002 | TODO | Standardize error envelope responses and update controller/unit tests. Dependencies: EXCITITOR-WEB-OAS-61-001. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-WEB-OAS-62-001 | TODO | Add curated examples for VEX observation/linkset endpoints and ensure portal displays them. Dependencies: EXCITITOR-WEB-OAS-61-002. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-WEB-OAS-63-001 | TODO | Emit deprecation headers and update docs for retiring VEX APIs. Dependencies: EXCITITOR-WEB-OAS-62-001. | Excititor WebService Guild, API Governance Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) EXCITITOR-WEB-OBS-50-001 `Telemetry adoption` | TODO | Adopt telemetry core for VEX APIs, ensure responses include trace IDs & correlation headers, and update structured logging for read endpoints. 
| Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-WEB-OBS-51-001 `Observability health endpoints` | TODO | Implement `/obs/excititor/health` summarizing ingest/link SLOs, signature failure counts, and conflict trends for Console dashboards. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-WEB-OBS-52-001 `Timeline streaming` | TODO | Provide SSE bridge for VEX timeline events with tenant filters, pagination, and guardrails. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-WEB-OBS-53-001 `Evidence APIs` | TODO | Expose `/evidence/vex/*` endpoints that fetch locker bundles, enforce scopes, and surface verification metadata. | Excititor WebService Guild, Evidence Locker Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-WEB-OBS-54-001 `Attestation APIs` | TODO | Add `/attestations/vex/*` endpoints returning DSSE verification state, builder identity, and chain-of-custody links. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) -EXCITITOR-WEB-OBS-55-001 `Incident mode toggles` | TODO | Provide incident mode API for VEX pipelines with activation audit logs and retention override previews. | Excititor WebService Guild, DevOps Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-WEB-OBS-51-001 `Observability health endpoints` | TODO | Implement `/obs/excititor/health` summarizing ingest/link SLOs, signature failure counts, and conflict trends for Console dashboards. Dependencies: EXCITITOR-WEB-OBS-50-001. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-WEB-OBS-52-001 `Timeline streaming` | TODO | Provide SSE bridge for VEX timeline events with tenant filters, pagination, and guardrails. Dependencies: EXCITITOR-WEB-OBS-51-001. 
| Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-WEB-OBS-53-001 `Evidence APIs` | TODO | Expose `/evidence/vex/*` endpoints that fetch locker bundles, enforce scopes, and surface verification metadata. Dependencies: EXCITITOR-WEB-OBS-52-001. | Excititor WebService Guild, Evidence Locker Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-WEB-OBS-54-001 `Attestation APIs` | TODO | Add `/attestations/vex/*` endpoints returning DSSE verification state, builder identity, and chain-of-custody links. Dependencies: EXCITITOR-WEB-OBS-53-001. | Excititor WebService Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) +EXCITITOR-WEB-OBS-55-001 `Incident mode toggles` | TODO | Provide incident mode API for VEX pipelines with activation audit logs and retention override previews. Dependencies: EXCITITOR-WEB-OBS-54-001. | Excititor WebService Guild, DevOps Guild (src/Excititor/StellaOps.Excititor.WebService/TASKS.md) [Ingestion & Evidence] 110.D) Mirror @@ -295,11 +295,11 @@ Summary: Ingestion & Evidence focus on Mirror). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- MIRROR-CRT-56-001 | TODO | Implement deterministic bundle assembler supporting advisories, VEX, policy packs with Zstandard compression and manifest generation. | Mirror Creator Guild (src/Mirror/StellaOps.Mirror.Creator/TASKS.md) -MIRROR-CRT-56-002 | TODO | Integrate DSSE signing and TUF metadata generation (`root`, `snapshot`, `timestamp`, `targets`). | Mirror Creator Guild, Security Guild (src/Mirror/StellaOps.Mirror.Creator/TASKS.md) -MIRROR-CRT-57-001 | TODO | Add optional OCI image collection producing oci-archive layout with digests recorded in manifest. | Mirror Creator Guild, DevOps Guild (src/Mirror/StellaOps.Mirror.Creator/TASKS.md) -MIRROR-CRT-57-002 | TODO | Embed signed time anchor metadata (`meta/time-anchor.json`) sourced from trusted authority. 
| Mirror Creator Guild, AirGap Time Guild (src/Mirror/StellaOps.Mirror.Creator/TASKS.md) -MIRROR-CRT-58-001 | TODO | Deliver CLI `stella mirror create | Mirror Creator Guild, CLI Guild (src/Mirror/StellaOps.Mirror.Creator/TASKS.md) -MIRROR-CRT-58-002 | TODO | Integrate with Export Center scheduling to automate mirror bundle creation with audit logs. | Mirror Creator Guild, Exporter Guild (src/Mirror/StellaOps.Mirror.Creator/TASKS.md) +MIRROR-CRT-56-002 | TODO | Integrate DSSE signing and TUF metadata generation (`root`, `snapshot`, `timestamp`, `targets`). Dependencies: MIRROR-CRT-56-001. | Mirror Creator Guild, Security Guild (src/Mirror/StellaOps.Mirror.Creator/TASKS.md) +MIRROR-CRT-57-001 | TODO | Add optional OCI image collection producing oci-archive layout with digests recorded in manifest. Dependencies: MIRROR-CRT-56-002. | Mirror Creator Guild, DevOps Guild (src/Mirror/StellaOps.Mirror.Creator/TASKS.md) +MIRROR-CRT-57-002 | TODO | Embed signed time anchor metadata (`meta/time-anchor.json`) sourced from trusted authority. Dependencies: MIRROR-CRT-57-001. | Mirror Creator Guild, AirGap Time Guild (src/Mirror/StellaOps.Mirror.Creator/TASKS.md) +MIRROR-CRT-58-001 | TODO | Deliver CLI `stella mirror create`. Dependencies: MIRROR-CRT-57-002. | Mirror Creator Guild, CLI Guild (src/Mirror/StellaOps.Mirror.Creator/TASKS.md) +MIRROR-CRT-58-002 | TODO | Integrate with Export Center scheduling to automate mirror bundle creation with audit logs. Dependencies: MIRROR-CRT-58-001. | Mirror Creator Guild, Exporter Guild (src/Mirror/StellaOps.Mirror.Creator/TASKS.md) If all tasks are done - read next sprint section - SPRINT_120_policy_reasoning.md diff --git a/docs/implplan/SPRINT_120_policy_reasoning.md b/docs/implplan/SPRINT_120_policy_reasoning.md index ffa6e83f..99ab8ee2 100644 --- a/docs/implplan/SPRINT_120_policy_reasoning.md +++ b/docs/implplan/SPRINT_120_policy_reasoning.md @@ -6,11 +6,11 @@ Summary: Policy & Reasoning focus on AirGap). 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- AIRGAP-POL-56-001 | TODO | Implement `StellaOps.AirGap.Policy` package exposing `EgressPolicy` facade with sealed/unsealed branches and remediation-friendly errors. | AirGap Policy Guild (src/AirGap/StellaOps.AirGap.Policy/TASKS.md) -AIRGAP-POL-56-002 | TODO | Create Roslyn analyzer/code fix warning on raw `HttpClient` usage outside approved wrappers; add CI integration. | AirGap Policy Guild, DevEx Guild (src/AirGap/StellaOps.AirGap.Policy/TASKS.md) -AIRGAP-POL-57-001 | TODO | Update core web services (Web, Exporter, Policy, Findings, Authority) to use `EgressPolicy`; ensure configuration wiring for sealed mode. | AirGap Policy Guild, BE-Base Platform Guild (src/AirGap/StellaOps.AirGap.Policy/TASKS.md) -AIRGAP-POL-57-002 | TODO | Implement Task Runner job plan validator rejecting network steps unless marked internal allow-list. | AirGap Policy Guild, Task Runner Guild (src/AirGap/StellaOps.AirGap.Policy/TASKS.md) -AIRGAP-POL-58-001 | TODO | Ensure Observability exporters only target local endpoints in sealed mode; disable remote sinks with warning. | AirGap Policy Guild, Observability Guild (src/AirGap/StellaOps.AirGap.Policy/TASKS.md) -AIRGAP-POL-58-002 | TODO | Add CLI sealed-mode guard that refuses commands needing egress and surfaces remediation. | AirGap Policy Guild, CLI Guild (src/AirGap/StellaOps.AirGap.Policy/TASKS.md) +AIRGAP-POL-56-002 | TODO | Create Roslyn analyzer/code fix warning on raw `HttpClient` usage outside approved wrappers; add CI integration. Dependencies: AIRGAP-POL-56-001. | AirGap Policy Guild, DevEx Guild (src/AirGap/StellaOps.AirGap.Policy/TASKS.md) +AIRGAP-POL-57-001 | TODO | Update core web services (Web, Exporter, Policy, Findings, Authority) to use `EgressPolicy`; ensure configuration wiring for sealed mode. Dependencies: AIRGAP-POL-56-002. 
| AirGap Policy Guild, BE-Base Platform Guild (src/AirGap/StellaOps.AirGap.Policy/TASKS.md) +AIRGAP-POL-57-002 | TODO | Implement Task Runner job plan validator rejecting network steps unless marked internal allow-list. Dependencies: AIRGAP-POL-57-001. | AirGap Policy Guild, Task Runner Guild (src/AirGap/StellaOps.AirGap.Policy/TASKS.md) +AIRGAP-POL-58-001 | TODO | Ensure Observability exporters only target local endpoints in sealed mode; disable remote sinks with warning. Dependencies: AIRGAP-POL-57-002. | AirGap Policy Guild, Observability Guild (src/AirGap/StellaOps.AirGap.Policy/TASKS.md) +AIRGAP-POL-58-002 | TODO | Add CLI sealed-mode guard that refuses commands needing egress and surfaces remediation. Dependencies: AIRGAP-POL-58-001. | AirGap Policy Guild, CLI Guild (src/AirGap/StellaOps.AirGap.Policy/TASKS.md) [Policy & Reasoning] 120.B) Findings.I @@ -19,19 +19,19 @@ Summary: Policy & Reasoning focus on Findings (phase I). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- LEDGER-29-001 | TODO | Design ledger & projection schemas (tables/indexes), canonical JSON format, hashing strategy, and migrations. Publish schema doc + fixtures. | Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-29-002 | TODO | Implement ledger write API (`POST /vuln/ledger/events`) with validation, idempotency, hash chaining, and Merkle root computation job. | Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-29-003 | TODO | Build projector worker that derives `findings_projection` rows from ledger events + policy determinations; ensure idempotent replay keyed by `(tenant,finding_id,policy_version)`. | Findings Ledger Guild, Scheduler Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-29-004 | TODO | Integrate Policy Engine batch evaluation (baseline + simulate) with projector; cache rationale references. 
| Findings Ledger Guild, Policy Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-29-005 | TODO | Implement workflow mutation handlers (assign, comment, accept-risk, target-fix, verify-fix, reopen) producing ledger events with validation and attachments metadata. | Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-29-006 | TODO | Integrate attachment encryption (KMS envelope), signed URL issuance, CSRF protection hooks for Console. | Findings Ledger Guild, Security Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-29-007 | TODO | Instrument metrics (`ledger_write_latency`, `projection_lag_seconds`, `ledger_events_total`), structured logs, and Merkle anchoring alerts; publish dashboards. | Findings Ledger Guild, Observability Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-29-008 | TODO | Develop unit/property/integration tests, replay/restore tooling, determinism harness, and load tests at 5M findings/tenant. | Findings Ledger Guild, QA Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-29-009 | TODO | Provide deployment manifests (Helm/Compose), backup/restore guidance, Merkle anchor externalization (optional), and offline kit instructions. | Findings Ledger Guild, DevOps Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-34-101 | TODO | Link orchestrator run ledger exports into Findings Ledger provenance chain, index by artifact hash, and expose audit queries. | Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-29-002 | TODO | Implement ledger write API (`POST /vuln/ledger/events`) with validation, idempotency, hash chaining, and Merkle root computation job. Dependencies: LEDGER-29-001. 
| Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-29-003 | TODO | Build projector worker that derives `findings_projection` rows from ledger events + policy determinations; ensure idempotent replay keyed by `(tenant,finding_id,policy_version)`. Dependencies: LEDGER-29-002. | Findings Ledger Guild, Scheduler Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-29-004 | TODO | Integrate Policy Engine batch evaluation (baseline + simulate) with projector; cache rationale references. Dependencies: LEDGER-29-003. | Findings Ledger Guild, Policy Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-29-005 | TODO | Implement workflow mutation handlers (assign, comment, accept-risk, target-fix, verify-fix, reopen) producing ledger events with validation and attachments metadata. Dependencies: LEDGER-29-004. | Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-29-006 | TODO | Integrate attachment encryption (KMS envelope), signed URL issuance, CSRF protection hooks for Console. Dependencies: LEDGER-29-005. | Findings Ledger Guild, Security Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-29-007 | TODO | Instrument metrics (`ledger_write_latency`, `projection_lag_seconds`, `ledger_events_total`), structured logs, and Merkle anchoring alerts; publish dashboards. Dependencies: LEDGER-29-006. | Findings Ledger Guild, Observability Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-29-008 | TODO | Develop unit/property/integration tests, replay/restore tooling, determinism harness, and load tests at 5M findings/tenant. Dependencies: LEDGER-29-007. | Findings Ledger Guild, QA Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-29-009 | TODO | Provide deployment manifests (Helm/Compose), backup/restore guidance, Merkle anchor externalization (optional), and offline kit instructions. Dependencies: LEDGER-29-008. 
| Findings Ledger Guild, DevOps Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-34-101 | TODO | Link orchestrator run ledger exports into Findings Ledger provenance chain, index by artifact hash, and expose audit queries. Dependencies: LEDGER-29-009. | Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) LEDGER-AIRGAP-56-001 | TODO | Record bundle provenance (`bundle_id`, `merkle_root`, `time_anchor`) on ledger events for advisories/VEX/policies imported via Mirror Bundles. | Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-AIRGAP-56-002 | TODO | Surface staleness metrics for findings and block risk-critical exports when stale beyond thresholds; provide remediation messaging. | Findings Ledger Guild, AirGap Time Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-AIRGAP-57-001 | TODO | Link findings evidence snapshots to portable evidence bundles and ensure cross-enclave verification works. | Findings Ledger Guild, Evidence Locker Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-AIRGAP-58-001 | TODO | Emit timeline events for bundle import impacts (new findings, remediation changes) with sealed-mode context. | Findings Ledger Guild, AirGap Controller Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-AIRGAP-56-002 | TODO | Surface staleness metrics for findings and block risk-critical exports when stale beyond thresholds; provide remediation messaging. Dependencies: LEDGER-AIRGAP-56-001. | Findings Ledger Guild, AirGap Time Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-AIRGAP-57-001 | TODO | Link findings evidence snapshots to portable evidence bundles and ensure cross-enclave verification works. Dependencies: LEDGER-AIRGAP-56-002. 
| Findings Ledger Guild, Evidence Locker Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-AIRGAP-58-001 | TODO | Emit timeline events for bundle import impacts (new findings, remediation changes) with sealed-mode context. Dependencies: LEDGER-AIRGAP-57-001. | Findings Ledger Guild, AirGap Controller Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) LEDGER-ATTEST-73-001 | TODO | Persist pointers from findings to verification reports and attestation envelopes for explainability. | Findings Ledger Guild, Attestor Service Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) @@ -40,21 +40,21 @@ Depends on: Sprint 120.B - Findings.I Summary: Policy & Reasoning focus on Findings (phase II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -LEDGER-ATTEST-73-002 | TODO | Enable search/filter in findings projections by verification result and attestation status. | Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-ATTEST-73-002 | TODO | Enable search/filter in findings projections by verification result and attestation status. Dependencies: LEDGER-ATTEST-73-001. | Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) LEDGER-EXPORT-35-001 | TODO | Provide paginated streaming endpoints for advisories, VEX, SBOMs, and findings aligned with export filters, including deterministic ordering and provenance metadata. | Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) LEDGER-OAS-61-001 | TODO | Expand Findings Ledger OAS to include projections, evidence lookups, and filter parameters with examples. | Findings Ledger Guild, API Contracts Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-OAS-61-002 | TODO | Implement `/.well-known/openapi` endpoint and ensure version metadata matches release. 
| Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-OAS-62-001 | TODO | Provide SDK test cases for findings pagination, filtering, evidence links; ensure typed models expose provenance. | Findings Ledger Guild, SDK Generator Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-OAS-63-001 | TODO | Support deprecation headers and Notifications for retiring finding endpoints. | Findings Ledger Guild, API Governance Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-OAS-61-002 | TODO | Implement `/.well-known/openapi` endpoint and ensure version metadata matches release. Dependencies: LEDGER-OAS-61-001. | Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-OAS-62-001 | TODO | Provide SDK test cases for findings pagination, filtering, evidence links; ensure typed models expose provenance. Dependencies: LEDGER-OAS-61-002. | Findings Ledger Guild, SDK Generator Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-OAS-63-001 | TODO | Support deprecation headers and Notifications for retiring finding endpoints. Dependencies: LEDGER-OAS-62-001. | Findings Ledger Guild, API Governance Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) LEDGER-OBS-50-001 | TODO | Integrate telemetry core within ledger writer/projector services, emitting structured logs and trace spans for ledger append, projector replay, and query APIs with tenant context. | Findings Ledger Guild, Observability Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-OBS-51-001 | TODO | Publish metrics for ledger latency, projector lag, event throughput, and policy evaluation linkage. Define SLOs (ledger append P95 < 1s, replay lag < 30s) with burn-rate alerts and dashboards. 
| Findings Ledger Guild, DevOps Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-OBS-52-001 | TODO | Emit timeline events for ledger writes and projector commits (`ledger.event.appended`, `ledger.projection.updated`) with trace ID, policy version, evidence bundle reference placeholders. | Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-OBS-53-001 | TODO | Persist evidence bundle references (evaluation/job capsules) alongside ledger entries, exposing lookup API linking findings to evidence manifests and timeline. | Findings Ledger Guild, Evidence Locker Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-OBS-54-001 | TODO | Verify attestation references for ledger-derived exports; expose `/ledger/attestations` endpoint returning DSSE verification state and chain-of-custody summary. | Findings Ledger Guild, Provenance Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-OBS-55-001 | TODO | Enhance incident mode to record additional replay diagnostics (lag traces, conflict snapshots) and extend retention while active. Emit activation events to timeline + notifier. | Findings Ledger Guild, DevOps Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-OBS-51-001 | TODO | Publish metrics for ledger latency, projector lag, event throughput, and policy evaluation linkage. Define SLOs (ledger append P95 < 1s, replay lag < 30s) with burn-rate alerts and dashboards. Dependencies: LEDGER-OBS-50-001. | Findings Ledger Guild, DevOps Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-OBS-52-001 | TODO | Emit timeline events for ledger writes and projector commits (`ledger.event.appended`, `ledger.projection.updated`) with trace ID, policy version, evidence bundle reference placeholders. Dependencies: LEDGER-OBS-51-001. 
| Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-OBS-53-001 | TODO | Persist evidence bundle references (evaluation/job capsules) alongside ledger entries, exposing lookup API linking findings to evidence manifests and timeline. Dependencies: LEDGER-OBS-52-001. | Findings Ledger Guild, Evidence Locker Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-OBS-54-001 | TODO | Verify attestation references for ledger-derived exports; expose `/ledger/attestations` endpoint returning DSSE verification state and chain-of-custody summary. Dependencies: LEDGER-OBS-53-001. | Findings Ledger Guild, Provenance Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-OBS-55-001 | TODO | Enhance incident mode to record additional replay diagnostics (lag traces, conflict snapshots) and extend retention while active. Emit activation events to timeline + notifier. Dependencies: LEDGER-OBS-54-001. | Findings Ledger Guild, DevOps Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) LEDGER-PACKS-42-001 | TODO | Provide snapshot/time-travel APIs and digestable exports for task pack simulation and CLI offline mode. | Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) LEDGER-RISK-66-001 | TODO | Add schema migrations for `risk_score`, `risk_severity`, `profile_version`, `explanation_id`, and supporting indexes. | Findings Ledger Guild, Risk Engine Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-RISK-66-002 | TODO | Implement deterministic upsert of scoring results keyed by finding hash/profile version with history audit. | Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-RISK-66-002 | TODO | Implement deterministic upsert of scoring results keyed by finding hash/profile version with history audit. Dependencies: LEDGER-RISK-66-001. 
| Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) [Policy & Reasoning] 120.B) Findings.III @@ -62,9 +62,9 @@ Depends on: Sprint 120.B - Findings.II Summary: Policy & Reasoning focus on Findings (phase III). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -LEDGER-RISK-67-001 | TODO | Expose query APIs for scored findings with score/severity filters, pagination, and explainability links. | Findings Ledger Guild, Risk Engine Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-RISK-68-001 | TODO | Enable export of scored findings and simulation results via Export Center integration. | Findings Ledger Guild, Export Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) -LEDGER-RISK-69-001 | TODO | Emit metrics/dashboards for scoring latency, result freshness, severity distribution, provider gaps. | Findings Ledger Guild, Observability Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-RISK-67-001 | TODO | Expose query APIs for scored findings with score/severity filters, pagination, and explainability links. Dependencies: LEDGER-RISK-66-002. | Findings Ledger Guild, Risk Engine Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-RISK-68-001 | TODO | Enable export of scored findings and simulation results via Export Center integration. Dependencies: LEDGER-RISK-67-001. | Findings Ledger Guild, Export Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) +LEDGER-RISK-69-001 | TODO | Emit metrics/dashboards for scoring latency, result freshness, severity distribution, provider gaps. Dependencies: LEDGER-RISK-68-001. | Findings Ledger Guild, Observability Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) LEDGER-TEN-48-001 | TODO | Partition ledger tables by tenant/project, enable RLS, update queries/events, and stamp audit metadata. 
| Findings Ledger Guild (src/Findings/StellaOps.Findings.Ledger/TASKS.md) @@ -75,18 +75,18 @@ Task ID | State | Task description | Owners (Source) --- | --- | --- | --- EXPORT-CONSOLE-23-001 | TODO | Build evidence bundle/export generator producing signed manifests, CSV/JSON replay endpoints, and trace attachments; integrate with scheduler jobs and expose progress telemetry. | Policy Guild, Scheduler Guild, Observability Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) POLICY-AIRGAP-56-001 | TODO | Support policy pack imports from Mirror Bundles, track `bundle_id` metadata, and ensure deterministic caching. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-AIRGAP-56-002 | TODO | Export policy sub-bundles (`stella policy bundle export`) with DSSE signatures for outbound transfer. | Policy Guild, Policy Studio Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-AIRGAP-57-001 | TODO | Enforce sealed-mode guardrails in evaluation (no outbound fetch), surface `AIRGAP_EGRESS_BLOCKED` errors with remediation. | Policy Guild, AirGap Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-AIRGAP-57-002 | TODO | Annotate rule explanations with staleness information and fallback data (cached EPSS, vendor risk). | Policy Guild, AirGap Time Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-AIRGAP-58-001 | TODO | Emit notifications when policy packs near staleness thresholds or missing required bundles. | Policy Guild, Notifications Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-AIRGAP-56-002 | TODO | Export policy sub-bundles (`stella policy bundle export`) with DSSE signatures for outbound transfer. Dependencies: POLICY-AIRGAP-56-001. | Policy Guild, Policy Studio Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-AIRGAP-57-001 | TODO | Enforce sealed-mode guardrails in evaluation (no outbound fetch), surface `AIRGAP_EGRESS_BLOCKED` errors with remediation. Dependencies: POLICY-AIRGAP-56-002. 
| Policy Guild, AirGap Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-AIRGAP-57-002 | TODO | Annotate rule explanations with staleness information and fallback data (cached EPSS, vendor risk). Dependencies: POLICY-AIRGAP-57-001. | Policy Guild, AirGap Time Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-AIRGAP-58-001 | TODO | Emit notifications when policy packs near staleness thresholds or missing required bundles. Dependencies: POLICY-AIRGAP-57-002. | Policy Guild, Notifications Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) POLICY-AOC-19-001 | TODO | Add Roslyn/CI lint preventing ingestion projects from referencing Policy merge/severity helpers; block forbidden writes at compile time. | Policy Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) -POLICY-AOC-19-002 | TODO | Enforce `effective_finding_*` write gate ensuring only Policy Engine identity can create/update materializations. | Policy Guild, Platform Security (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) -POLICY-AOC-19-003 | TODO | Update readers/processors to consume only `content.raw`, `identifiers`, and `linkset`. Remove dependencies on legacy normalized fields and refresh fixtures. | Policy Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) -POLICY-AOC-19-004 | TODO | Add regression tests ensuring policy derived outputs remain deterministic when ingesting revised raw docs (supersedes) and when violations occur. | Policy Guild, QA Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) +POLICY-AOC-19-002 | TODO | Enforce `effective_finding_*` write gate ensuring only Policy Engine identity can create/update materializations. Dependencies: POLICY-AOC-19-001. | Policy Guild, Platform Security (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) +POLICY-AOC-19-003 | TODO | Update readers/processors to consume only `content.raw`, `identifiers`, and `linkset`. Remove dependencies on legacy normalized fields and refresh fixtures. 
Dependencies: POLICY-AOC-19-002. | Policy Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) +POLICY-AOC-19-004 | TODO | Add regression tests ensuring policy derived outputs remain deterministic when ingesting revised raw docs (supersedes) and when violations occur. Dependencies: POLICY-AOC-19-003. | Policy Guild, QA Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) POLICY-ATTEST-73-001 | TODO | Introduce VerificationPolicy object: schema, persistence, versioning, and lifecycle. | Policy Guild, Attestor Service Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ATTEST-73-002 | TODO | Provide Policy Studio editor with validation, dry-run simulation, and version diff. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ATTEST-74-001 | TODO | Integrate verification policies into attestor verification pipeline with caching and waiver support. | Policy Guild, Attestor Service Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ATTEST-74-002 | TODO | Surface policy evaluations in Console verification reports with rule explanations. | Policy Guild, Console Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ATTEST-73-002 | TODO | Provide Policy Studio editor with validation, dry-run simulation, and version diff. Dependencies: POLICY-ATTEST-73-001. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ATTEST-74-001 | TODO | Integrate verification policies into attestor verification pipeline with caching and waiver support. Dependencies: POLICY-ATTEST-73-002. | Policy Guild, Attestor Service Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ATTEST-74-002 | TODO | Surface policy evaluations in Console verification reports with rule explanations. Dependencies: POLICY-ATTEST-74-001. 
| Policy Guild, Console Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) POLICY-CONSOLE-23-001 | TODO | Optimize findings/explain APIs for Console: cursor-based pagination at scale, global filter parameters (severity bands, policy version, time window), rule trace summarization, and aggregation hints for dashboard cards. Ensure deterministic ordering and expose provenance refs. | Policy Guild, BE-Base Platform Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) @@ -95,21 +95,21 @@ Depends on: Sprint 120.C - Policy.I Summary: Policy & Reasoning focus on Policy (phase II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -POLICY-CONSOLE-23-002 | TODO | Produce simulation diff metadata (before/after counts, severity deltas, rule impact summaries) and approval state endpoints consumed by Console policy workspace; expose RBAC-aware status transitions. | Policy Guild, Product Ops (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-CONSOLE-23-002 | TODO | Produce simulation diff metadata (before/after counts, severity deltas, rule impact summaries) and approval state endpoints consumed by Console policy workspace; expose RBAC-aware status transitions. Dependencies: POLICY-CONSOLE-23-001. | Policy Guild, Product Ops (src/Policy/StellaOps.Policy.Engine/TASKS.md) POLICY-ENGINE-20-002 | BLOCKED (2025-10-26) | Build deterministic evaluator honoring lexical/priority order, first-match semantics, and safe value types (no wall-clock/network access). | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-20-003 | TODO | Implement selection joiners resolving SBOM↔advisory↔VEX tuples using linksets and PURL equivalence tables, with deterministic batching. | Policy Guild, Concelier Core Guild, Excititor Core Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-20-004 | TODO | Ship materialization writer that upserts into `effective_finding_{policyId}` with append-only history, tenant scoping, and trace references. 
| Policy Guild, Platform Storage Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-20-005 | TODO | Enforce determinism guard banning wall-clock, RNG, and network usage during evaluation via static analysis + runtime sandbox. | Policy Guild, Security Engineering (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-20-006 | TODO | Implement incremental orchestrator reacting to advisory/vex/SBOM change streams and scheduling partial policy re-evaluations. | Policy Guild, Scheduler Worker Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-20-007 | TODO | Emit structured traces/logs of rule hits with sampling controls, metrics (`rules_fired_total`, `vex_overrides_total`), and expose explain trace exports. | Policy Guild, Observability Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-20-008 | TODO | Add unit/property/golden/perf suites covering policy compilation, evaluation correctness, determinism, and SLA targets. | Policy Guild, QA Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-20-009 | TODO | Define Mongo schemas/indexes for `policies`, `policy_runs`, and `effective_finding_*`; implement migrations and tenant enforcement. | Policy Guild, Storage Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-27-001 | TODO | Extend compile outputs to include rule coverage metadata, symbol table, inline documentation, and rule index for editor autocomplete; persist deterministic hashes. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-27-002 | TODO | Enhance simulate endpoints to emit rule firing counts, heatmap aggregates, sampled explain traces with deterministic ordering, and delta summaries for quick/batch sims. 
| Policy Guild, Observability Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-27-003 | TODO | Implement complexity/time limit enforcement with compiler scoring, configurable thresholds, and structured diagnostics (`ERR_POL_COMPLEXITY`). | Policy Guild, Security Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-27-004 | TODO | Update golden/property tests to cover new coverage metrics, symbol tables, explain traces, and complexity limits; provide fixtures for Registry/Console integration. | Policy Guild, QA Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-29-001 | TODO | Implement batch evaluation endpoint (`POST /policy/eval/batch`) returning determinations + rationale chain for sets of `(artifact,purl,version,advisory)` tuples; support pagination and cost budgets. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-29-002 | TODO | Provide streaming simulation API comparing two policy versions, returning per-finding deltas without writes; align determinism with Vuln Explorer simulation. | Policy Guild, Findings Ledger Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-20-003 | TODO | Implement selection joiners resolving SBOM↔advisory↔VEX tuples using linksets and PURL equivalence tables, with deterministic batching. Dependencies: POLICY-ENGINE-20-002. | Policy Guild, Concelier Core Guild, Excititor Core Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-20-004 | TODO | Ship materialization writer that upserts into `effective_finding_{policyId}` with append-only history, tenant scoping, and trace references. Dependencies: POLICY-ENGINE-20-003. | Policy Guild, Platform Storage Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-20-005 | TODO | Enforce determinism guard banning wall-clock, RNG, and network usage during evaluation via static analysis + runtime sandbox. Dependencies: POLICY-ENGINE-20-004. 
| Policy Guild, Security Engineering (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-20-006 | TODO | Implement incremental orchestrator reacting to advisory/vex/SBOM change streams and scheduling partial policy re-evaluations. Dependencies: POLICY-ENGINE-20-005. | Policy Guild, Scheduler Worker Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-20-007 | TODO | Emit structured traces/logs of rule hits with sampling controls, metrics (`rules_fired_total`, `vex_overrides_total`), and expose explain trace exports. Dependencies: POLICY-ENGINE-20-006. | Policy Guild, Observability Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-20-008 | TODO | Add unit/property/golden/perf suites covering policy compilation, evaluation correctness, determinism, and SLA targets. Dependencies: POLICY-ENGINE-20-007. | Policy Guild, QA Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-20-009 | TODO | Define Mongo schemas/indexes for `policies`, `policy_runs`, and `effective_finding_*`; implement migrations and tenant enforcement. Dependencies: POLICY-ENGINE-20-008. | Policy Guild, Storage Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-27-001 | TODO | Extend compile outputs to include rule coverage metadata, symbol table, inline documentation, and rule index for editor autocomplete; persist deterministic hashes. Dependencies: POLICY-ENGINE-20-009. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-27-002 | TODO | Enhance simulate endpoints to emit rule firing counts, heatmap aggregates, sampled explain traces with deterministic ordering, and delta summaries for quick/batch sims. Dependencies: POLICY-ENGINE-27-001. | Policy Guild, Observability Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-27-003 | TODO | Implement complexity/time limit enforcement with compiler scoring, configurable thresholds, and structured diagnostics (`ERR_POL_COMPLEXITY`). 
Dependencies: POLICY-ENGINE-27-002. | Policy Guild, Security Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-27-004 | TODO | Update golden/property tests to cover new coverage metrics, symbol tables, explain traces, and complexity limits; provide fixtures for Registry/Console integration. Dependencies: POLICY-ENGINE-27-003. | Policy Guild, QA Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-29-001 | TODO | Implement batch evaluation endpoint (`POST /policy/eval/batch`) returning determinations + rationale chain for sets of `(artifact,purl,version,advisory)` tuples; support pagination and cost budgets. Dependencies: POLICY-ENGINE-27-004. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-29-002 | TODO | Provide streaming simulation API comparing two policy versions, returning per-finding deltas without writes; align determinism with Vuln Explorer simulation. Dependencies: POLICY-ENGINE-29-001. | Policy Guild, Findings Ledger Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) [Policy & Reasoning] 120.C) Policy.III @@ -117,21 +117,21 @@ Depends on: Sprint 120.C - Policy.II Summary: Policy & Reasoning focus on Policy (phase III). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -POLICY-ENGINE-29-003 | TODO | Surface path/scope awareness in determinations (signal optional/dev/test downgrade, runtime boost) for Vuln Explorer display. | Policy Guild, SBOM Service Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-29-004 | TODO | Add metrics/logs for batch evaluation (latency, queue depth) and simulation diff counts; update dashboards. | Policy Guild, Observability Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-30-001 | TODO | Define overlay contract for graph nodes/edges (status, severity, rationale refs, path relevance), expose projection API for Cartographer, and document schema versioning. 
| Policy Guild, Cartographer Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-30-002 | TODO | Implement simulation bridge returning on-the-fly overlays for Cartographer/Graph Explorer when invoking Policy Engine simulate; ensure no writes and deterministic outputs. | Policy Guild, Cartographer Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-30-003 | TODO | Emit change events (`policy.effective.updated`) with graph-friendly payloads so Cartographer overlay worker refreshes nodes/edges within 2 minutes. | Policy Guild, Scheduler Guild, Cartographer Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-30-101 | TODO | Surface trust weighting configuration (issuer base weights, signature modifiers, recency decay, scope adjustments) for VEX Lens via Policy Studio + API; ensure deterministic evaluation. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-31-001 | TODO | Expose policy knobs for Advisory AI (trust presets, temperature, token limits, plan ranking weights, TTLs) via Policy Studio and config APIs. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-31-002 | TODO | Provide batch endpoint delivering policy context (thresholds, obligations) consumed by Advisory AI remediation planner. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-32-101 | TODO | Define orchestrator `policy_eval` job schema, idempotency keys, and enqueue hooks triggered by advisory/VEX/SBOM events. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-33-101 | TODO | Implement orchestrator-driven policy evaluation workers using SDK heartbeats, respecting throttles, and emitting SLO metrics. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-34-101 | TODO | Publish policy run ledger exports + SLO burn-rate metrics to orchestrator; ensure provenance chain links to Findings Ledger. 
| Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-35-201 | TODO | Expose deterministic policy snapshot API and evaluated findings stream keyed by policy version for exporter consumption. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-38-201 | TODO | Emit enriched policy violation events (decision rationale ids, risk bands) via orchestrator event bus for Notifications Studio. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-40-001 | TODO | Update severity/status evaluation pipelines to consume multiple source severities per linkset, supporting selection strategies (max, preferred source, policy-defined). | Policy Guild, Concelier Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-40-002 | TODO | Accept VEX linkset conflicts and provide rationale references in effective findings; ensure explain traces cite observation IDs. | Policy Guild, Excititor Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-29-003 | TODO | Surface path/scope awareness in determinations (signal optional/dev/test downgrade, runtime boost) for Vuln Explorer display. Dependencies: POLICY-ENGINE-29-002. | Policy Guild, SBOM Service Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-29-004 | TODO | Add metrics/logs for batch evaluation (latency, queue depth) and simulation diff counts; update dashboards. Dependencies: POLICY-ENGINE-29-003. | Policy Guild, Observability Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-30-001 | TODO | Define overlay contract for graph nodes/edges (status, severity, rationale refs, path relevance), expose projection API for Cartographer, and document schema versioning. Dependencies: POLICY-ENGINE-29-004. 
| Policy Guild, Cartographer Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-30-002 | TODO | Implement simulation bridge returning on-the-fly overlays for Cartographer/Graph Explorer when invoking Policy Engine simulate; ensure no writes and deterministic outputs. Dependencies: POLICY-ENGINE-30-001. | Policy Guild, Cartographer Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-30-003 | TODO | Emit change events (`policy.effective.updated`) with graph-friendly payloads so Cartographer overlay worker refreshes nodes/edges within 2 minutes. Dependencies: POLICY-ENGINE-30-002. | Policy Guild, Scheduler Guild, Cartographer Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-30-101 | TODO | Surface trust weighting configuration (issuer base weights, signature modifiers, recency decay, scope adjustments) for VEX Lens via Policy Studio + API; ensure deterministic evaluation. Dependencies: POLICY-ENGINE-30-003. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-31-001 | TODO | Expose policy knobs for Advisory AI (trust presets, temperature, token limits, plan ranking weights, TTLs) via Policy Studio and config APIs. Dependencies: POLICY-ENGINE-30-101. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-31-002 | TODO | Provide batch endpoint delivering policy context (thresholds, obligations) consumed by Advisory AI remediation planner. Dependencies: POLICY-ENGINE-31-001. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-32-101 | TODO | Define orchestrator `policy_eval` job schema, idempotency keys, and enqueue hooks triggered by advisory/VEX/SBOM events. Dependencies: POLICY-ENGINE-31-002. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-33-101 | TODO | Implement orchestrator-driven policy evaluation workers using SDK heartbeats, respecting throttles, and emitting SLO metrics. Dependencies: POLICY-ENGINE-32-101. 
| Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-34-101 | TODO | Publish policy run ledger exports + SLO burn-rate metrics to orchestrator; ensure provenance chain links to Findings Ledger. Dependencies: POLICY-ENGINE-33-101. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-35-201 | TODO | Expose deterministic policy snapshot API and evaluated findings stream keyed by policy version for exporter consumption. Dependencies: POLICY-ENGINE-34-101. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-38-201 | TODO | Emit enriched policy violation events (decision rationale ids, risk bands) via orchestrator event bus for Notifications Studio. Dependencies: POLICY-ENGINE-35-201. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-40-001 | TODO | Update severity/status evaluation pipelines to consume multiple source severities per linkset, supporting selection strategies (max, preferred source, policy-defined). Dependencies: POLICY-ENGINE-38-201. | Policy Guild, Concelier Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-40-002 | TODO | Accept VEX linkset conflicts and provide rationale references in effective findings; ensure explain traces cite observation IDs. Dependencies: POLICY-ENGINE-40-001. | Policy Guild, Excititor Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) [Policy & Reasoning] 120.C) Policy.IV @@ -139,21 +139,21 @@ Depends on: Sprint 120.C - Policy.III Summary: Policy & Reasoning focus on Policy (phase IV). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -POLICY-ENGINE-40-003 | TODO | Provide API/SDK utilities for consumers (Web Scanner, Graph Explorer) to request policy decisions with source evidence summaries (top severity sources, conflict counts). 
| Policy Guild, Web Scanner Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-50-001 | TODO | Implement SPL compiler: validate YAML, canonicalize, produce signed bundle, store artifact in object storage, write `policy_revisions` with AOC metadata. | Policy Guild, Platform Security (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-50-002 | TODO | Build runtime evaluator executing compiled plans over advisory/vex linksets + SBOM asset metadata with deterministic caching (Redis) and fallback path. | Policy Guild, Runtime Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-50-003 | TODO | Implement evaluation/compilation metrics, tracing, and structured logs (`policy_eval_seconds`, `policy_compiles_total`, explanation sampling). | Policy Guild, Observability Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-50-004 | TODO | Build event pipeline: subscribe to linkset/SBOM updates, schedule re-eval jobs, emit `policy.effective.updated` events with diff metadata. | Policy Guild, Platform Events Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-50-005 | TODO | Design and implement `policy_packs`, `policy_revisions`, `policy_runs`, `policy_artifacts` collections with indexes, TTL, and tenant scoping. | Policy Guild, Storage Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-50-006 | TODO | Implement explainer persistence + retrieval APIs linking decisions to explanation tree and AOC chain. | Policy Guild, QA Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-50-007 | TODO | Provide evaluation worker host/DI wiring and job orchestration hooks for batch re-evaluations after policy activation. | Policy Guild, Scheduler Worker Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-60-001 | TODO | Maintain Redis effective decision maps per asset/snapshot for Graph overlays; implement versioning and eviction strategy. 
| Policy Guild, SBOM Service Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-60-002 | TODO | Expose simulation bridge for Graph What-if APIs, supporting hypothetical SBOM diffs and draft policies without persisting results. | Policy Guild, BE-Base Platform Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-70-002 | TODO | Design and create Mongo collections (`exceptions`, `exception_reviews`, `exception_bindings`) with indexes and migrations; expose repository APIs. | Policy Guild, Storage Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-70-003 | TODO | Build Redis exception decision cache (`exceptions_effective_map`) with warm/invalidation logic reacting to `exception.*` events. | Policy Guild, Runtime Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-70-004 | TODO | Extend metrics/tracing/logging for exception application (latency, counts, expiring events) and include AOC references in logs. | Policy Guild, Observability Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-70-005 | TODO | Provide APIs/workers hook for exception activation/expiry (auto start/end) and event emission (`exception.activated/expired`). | Policy Guild, Scheduler Worker Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-80-001 | TODO | Integrate reachability/exploitability inputs into evaluation pipeline (state/score/confidence) with caching and explain support. | Policy Guild, Signals Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-40-003 | TODO | Provide API/SDK utilities for consumers (Web Scanner, Graph Explorer) to request policy decisions with source evidence summaries (top severity sources, conflict counts). Dependencies: POLICY-ENGINE-40-002. 
| Policy Guild, Web Scanner Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-50-001 | TODO | Implement SPL compiler: validate YAML, canonicalize, produce signed bundle, store artifact in object storage, write `policy_revisions` with AOC metadata. Dependencies: POLICY-ENGINE-40-003. | Policy Guild, Platform Security (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-50-002 | TODO | Build runtime evaluator executing compiled plans over advisory/vex linksets + SBOM asset metadata with deterministic caching (Redis) and fallback path. Dependencies: POLICY-ENGINE-50-001. | Policy Guild, Runtime Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-50-003 | TODO | Implement evaluation/compilation metrics, tracing, and structured logs (`policy_eval_seconds`, `policy_compiles_total`, explanation sampling). Dependencies: POLICY-ENGINE-50-002. | Policy Guild, Observability Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-50-004 | TODO | Build event pipeline: subscribe to linkset/SBOM updates, schedule re-eval jobs, emit `policy.effective.updated` events with diff metadata. Dependencies: POLICY-ENGINE-50-003. | Policy Guild, Platform Events Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-50-005 | TODO | Design and implement `policy_packs`, `policy_revisions`, `policy_runs`, `policy_artifacts` collections with indexes, TTL, and tenant scoping. Dependencies: POLICY-ENGINE-50-004. | Policy Guild, Storage Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-50-006 | TODO | Implement explainer persistence + retrieval APIs linking decisions to explanation tree and AOC chain. Dependencies: POLICY-ENGINE-50-005. | Policy Guild, QA Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-50-007 | TODO | Provide evaluation worker host/DI wiring and job orchestration hooks for batch re-evaluations after policy activation. Dependencies: POLICY-ENGINE-50-006. 
| Policy Guild, Scheduler Worker Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-60-001 | TODO | Maintain Redis effective decision maps per asset/snapshot for Graph overlays; implement versioning and eviction strategy. Dependencies: POLICY-ENGINE-50-007. | Policy Guild, SBOM Service Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-60-002 | TODO | Expose simulation bridge for Graph What-if APIs, supporting hypothetical SBOM diffs and draft policies without persisting results. Dependencies: POLICY-ENGINE-60-001. | Policy Guild, BE-Base Platform Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-70-002 | TODO | Design and create Mongo collections (`exceptions`, `exception_reviews`, `exception_bindings`) with indexes and migrations; expose repository APIs. Dependencies: POLICY-ENGINE-60-002. | Policy Guild, Storage Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-70-003 | TODO | Build Redis exception decision cache (`exceptions_effective_map`) with warm/invalidation logic reacting to `exception.*` events. Dependencies: POLICY-ENGINE-70-002. | Policy Guild, Runtime Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-70-004 | TODO | Extend metrics/tracing/logging for exception application (latency, counts, expiring events) and include AOC references in logs. Dependencies: POLICY-ENGINE-70-003. | Policy Guild, Observability Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-70-005 | TODO | Provide APIs/workers hook for exception activation/expiry (auto start/end) and event emission (`exception.activated/expired`). Dependencies: POLICY-ENGINE-70-004. | Policy Guild, Scheduler Worker Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-80-001 | TODO | Integrate reachability/exploitability inputs into evaluation pipeline (state/score/confidence) with caching and explain support. Dependencies: POLICY-ENGINE-70-005. 
| Policy Guild, Signals Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) [Policy & Reasoning] 120.C) Policy.V @@ -161,21 +161,21 @@ Depends on: Sprint 120.C - Policy.IV Summary: Policy & Reasoning focus on Policy (phase V). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -POLICY-ENGINE-80-002 | TODO | Create joining layer to read `reachability_facts` efficiently (indexes, projections) and populate Redis overlay caches. | Policy Guild, Storage Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-80-003 | TODO | Extend SPL predicates/actions to reference reachability state/score/confidence; update compiler validation. | Policy Guild, Policy Editor Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-ENGINE-80-004 | TODO | Emit metrics (`policy_reachability_applied_total`, `policy_reachability_cache_hit_ratio`) and traces for signals usage. | Policy Guild, Observability Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-80-002 | TODO | Create joining layer to read `reachability_facts` efficiently (indexes, projections) and populate Redis overlay caches. Dependencies: POLICY-ENGINE-80-001. | Policy Guild, Storage Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-80-003 | TODO | Extend SPL predicates/actions to reference reachability state/score/confidence; update compiler validation. Dependencies: POLICY-ENGINE-80-002. | Policy Guild, Policy Editor Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-ENGINE-80-004 | TODO | Emit metrics (`policy_reachability_applied_total`, `policy_reachability_cache_hit_ratio`) and traces for signals usage. Dependencies: POLICY-ENGINE-80-003. | Policy Guild, Observability Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) POLICY-OBS-50-001 | TODO | Integrate telemetry core into policy API + worker hosts, ensuring spans/logs cover compile/evaluate flows with `tenant_id`, `policy_version`, `decision_effect`, and trace IDs. 
| Policy Guild, Observability Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-OBS-51-001 | TODO | Emit golden-signal metrics (compile latency, evaluate latency, rule hits, override counts) and define SLOs (evaluation P95 <2s). Publish Grafana dashboards + burn-rate alert rules. | Policy Guild, DevOps Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-OBS-52-001 | TODO | Emit timeline events `policy.evaluate.started`, `policy.evaluate.completed`, `policy.decision.recorded` with trace IDs, input digests, and rule summary. Provide contract tests and retry semantics. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-OBS-53-001 | TODO | Produce evaluation evidence bundles (inputs slice, rule trace, engine version, config snapshot) through evidence locker integration; ensure redaction + deterministic manifests. | Policy Guild, Evidence Locker Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-OBS-54-001 | TODO | Generate DSSE attestations for evaluation outputs, expose `/evaluations/{id}/attestation`, and link attestation IDs in timeline + console. Provide verification harness. | Policy Guild, Provenance Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-OBS-55-001 | TODO | Implement incident mode sampling overrides (full rule trace capture, extended retention) with auto-activation on SLO breach and manual override API. Emit activation events to timeline + notifier. | Policy Guild, DevOps Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-OBS-51-001 | TODO | Emit golden-signal metrics (compile latency, evaluate latency, rule hits, override counts) and define SLOs (evaluation P95 <2s). Publish Grafana dashboards + burn-rate alert rules. Dependencies: POLICY-OBS-50-001. 
| Policy Guild, DevOps Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-OBS-52-001 | TODO | Emit timeline events `policy.evaluate.started`, `policy.evaluate.completed`, `policy.decision.recorded` with trace IDs, input digests, and rule summary. Provide contract tests and retry semantics. Dependencies: POLICY-OBS-51-001. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-OBS-53-001 | TODO | Produce evaluation evidence bundles (inputs slice, rule trace, engine version, config snapshot) through evidence locker integration; ensure redaction + deterministic manifests. Dependencies: POLICY-OBS-52-001. | Policy Guild, Evidence Locker Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-OBS-54-001 | TODO | Generate DSSE attestations for evaluation outputs, expose `/evaluations/{id}/attestation`, and link attestation IDs in timeline + console. Provide verification harness. Dependencies: POLICY-OBS-53-001. | Policy Guild, Provenance Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-OBS-55-001 | TODO | Implement incident mode sampling overrides (full rule trace capture, extended retention) with auto-activation on SLO breach and manual override API. Emit activation events to timeline + notifier. Dependencies: POLICY-OBS-54-001. | Policy Guild, DevOps Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) POLICY-RISK-66-001 | TODO | Develop initial JSON Schema for RiskProfile (signals, transforms, weights, severity, overrides) with validator stubs. | Risk Profile Schema Guild (src/Policy/StellaOps.Policy.RiskProfile/TASKS.md) -POLICY-RISK-66-002 | TODO | Implement inheritance/merge logic with conflict detection and deterministic content hashing. | Risk Profile Schema Guild (src/Policy/StellaOps.Policy.RiskProfile/TASKS.md) -POLICY-RISK-66-003 | TODO | Integrate RiskProfile schema into Policy Engine configuration, ensuring validation and default profile deployment. 
| Policy Guild, Risk Profile Schema Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-RISK-66-004 | TODO | Extend Policy libraries to load/save RiskProfile documents, compute content hashes, and surface validation diagnostics. | Policy Guild, Risk Profile Schema Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) -POLICY-RISK-67-001 | TODO | Trigger scoring jobs on new/updated findings via Policy Engine orchestration hooks. | Policy Guild, Risk Engine Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-RISK-67-001 | TODO | Integrate profile storage and versioning into Policy Store with lifecycle states (draft/publish/deprecate). | Risk Profile Schema Guild, Policy Engine Guild (src/Policy/StellaOps.Policy.RiskProfile/TASKS.md) +POLICY-RISK-66-002 | TODO | Implement inheritance/merge logic with conflict detection and deterministic content hashing. Dependencies: POLICY-RISK-66-001. | Risk Profile Schema Guild (src/Policy/StellaOps.Policy.RiskProfile/TASKS.md) +POLICY-RISK-66-003 | TODO | Integrate RiskProfile schema into Policy Engine configuration, ensuring validation and default profile deployment. Dependencies: POLICY-RISK-66-002. | Policy Guild, Risk Profile Schema Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-RISK-66-004 | TODO | Extend Policy libraries to load/save RiskProfile documents, compute content hashes, and surface validation diagnostics. Dependencies: POLICY-RISK-66-003. | Policy Guild, Risk Profile Schema Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) +POLICY-RISK-67-001 | TODO | Trigger scoring jobs on new/updated findings via Policy Engine orchestration hooks. Dependencies: POLICY-RISK-66-004. | Policy Guild, Risk Engine Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-RISK-67-001 | TODO | Integrate profile storage and versioning into Policy Store with lifecycle states (draft/publish/deprecate). Dependencies: POLICY-RISK-67-001. 
| Risk Profile Schema Guild, Policy Engine Guild (src/Policy/StellaOps.Policy.RiskProfile/TASKS.md) [Policy & Reasoning] 120.C) Policy.VI @@ -183,21 +183,21 @@ Depends on: Sprint 120.C - Policy.V Summary: Policy & Reasoning focus on Policy (phase VI). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -POLICY-RISK-67-002 | TODO | Implement profile lifecycle APIs (`/risk/profiles` create/publish/deprecate) and scope attachment logic. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-RISK-67-002 | TODO | Publish `.well-known/risk-profile-schema` endpoint and CLI validation tooling. | Risk Profile Schema Guild (src/Policy/StellaOps.Policy.RiskProfile/TASKS.md) -POLICY-RISK-67-003 | TODO | Provide policy-layer APIs to trigger risk simulations and return distributions/contribution breakdowns. | Policy Guild, Risk Engine Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) -POLICY-RISK-68-001 | TODO | Provide simulation API bridging Policy Studio with risk engine; returns distributions and top movers. | Policy Guild, Policy Studio Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-RISK-68-001 | TODO | Implement scope selectors, precedence rules, and Authority attachment APIs. | Risk Profile Schema Guild, Authority Guild (src/Policy/StellaOps.Policy.RiskProfile/TASKS.md) -POLICY-RISK-68-002 | TODO | Add override/adjustment support with audit metadata and validation for conflicting rules. | Risk Profile Schema Guild (src/Policy/StellaOps.Policy.RiskProfile/TASKS.md) -POLICY-RISK-68-002 | TODO | Enable exporting/importing RiskProfiles with signatures via policy tooling (CLI + API). | Policy Guild, Export Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) -POLICY-RISK-69-001 | TODO | Emit events/notifications on profile publish, deprecate, and severity threshold changes. 
| Policy Guild, Notifications Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) -POLICY-RISK-70-001 | TODO | Support exporting/importing profiles with signatures for air-gapped bundles. | Policy Guild, Export Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-RISK-67-002 | TODO | Implement profile lifecycle APIs (`/risk/profiles` create/publish/deprecate) and scope attachment logic. Dependencies: POLICY-RISK-67-001. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-RISK-67-002 | TODO | Publish `.well-known/risk-profile-schema` endpoint and CLI validation tooling. Dependencies: POLICY-RISK-67-002. | Risk Profile Schema Guild (src/Policy/StellaOps.Policy.RiskProfile/TASKS.md) +POLICY-RISK-67-003 | TODO | Provide policy-layer APIs to trigger risk simulations and return distributions/contribution breakdowns. Dependencies: POLICY-RISK-67-002. | Policy Guild, Risk Engine Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) +POLICY-RISK-68-001 | TODO | Provide simulation API bridging Policy Studio with risk engine; returns distributions and top movers. Dependencies: POLICY-RISK-67-003. | Policy Guild, Policy Studio Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-RISK-68-001 | TODO | Implement scope selectors, precedence rules, and Authority attachment APIs. Dependencies: POLICY-RISK-68-001. | Risk Profile Schema Guild, Authority Guild (src/Policy/StellaOps.Policy.RiskProfile/TASKS.md) +POLICY-RISK-68-002 | TODO | Add override/adjustment support with audit metadata and validation for conflicting rules. Dependencies: POLICY-RISK-68-001. | Risk Profile Schema Guild (src/Policy/StellaOps.Policy.RiskProfile/TASKS.md) +POLICY-RISK-68-002 | TODO | Enable exporting/importing RiskProfiles with signatures via policy tooling (CLI + API). Dependencies: POLICY-RISK-68-002. 
| Policy Guild, Export Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) +POLICY-RISK-69-001 | TODO | Emit events/notifications on profile publish, deprecate, and severity threshold changes. Dependencies: POLICY-RISK-68-002. | Policy Guild, Notifications Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) +POLICY-RISK-70-001 | TODO | Support exporting/importing profiles with signatures for air-gapped bundles. Dependencies: POLICY-RISK-69-001. | Policy Guild, Export Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) POLICY-SPL-23-001 | TODO | Define SPL v1 YAML + JSON Schema, including advisory rules, VEX precedence, severity mapping, exceptions, and layering metadata. Publish schema resources and validation fixtures. | Policy Guild, Language Infrastructure Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) -POLICY-SPL-23-002 | TODO | Implement canonicalizer that normalizes policy packs (ordering, defaults), computes content hash, and prepares bundle metadata for AOC/signing. | Policy Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) -POLICY-SPL-23-003 | TODO | Build policy layering/override engine (global/org/project/env/exception) with field-level precedence matrices; add unit/property tests. | Policy Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) -POLICY-SPL-23-004 | TODO | Design explanation tree model (rule hits, inputs, decisions) and persistence structures reused by runtime, UI, and CLI. | Policy Guild, Audit Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) -POLICY-SPL-23-005 | TODO | Create migration tool to snapshot existing behavior into baseline SPL packs (`org.core.baseline`), including policy docs and sample bundles. | Policy Guild, DevEx Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) -POLICY-SPL-24-001 | TODO | Extend SPL schema to expose reachability/exploitability predicates and weighting functions; update documentation and fixtures. 
| Policy Guild, Signals Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) +POLICY-SPL-23-002 | TODO | Implement canonicalizer that normalizes policy packs (ordering, defaults), computes content hash, and prepares bundle metadata for AOC/signing. Dependencies: POLICY-SPL-23-001. | Policy Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) +POLICY-SPL-23-003 | TODO | Build policy layering/override engine (global/org/project/env/exception) with field-level precedence matrices; add unit/property tests. Dependencies: POLICY-SPL-23-002. | Policy Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) +POLICY-SPL-23-004 | TODO | Design explanation tree model (rule hits, inputs, decisions) and persistence structures reused by runtime, UI, and CLI. Dependencies: POLICY-SPL-23-003. | Policy Guild, Audit Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) +POLICY-SPL-23-005 | TODO | Create migration tool to snapshot existing behavior into baseline SPL packs (`org.core.baseline`), including policy docs and sample bundles. Dependencies: POLICY-SPL-23-004. | Policy Guild, DevEx Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) +POLICY-SPL-24-001 | TODO | Extend SPL schema to expose reachability/exploitability predicates and weighting functions; update documentation and fixtures. Dependencies: POLICY-SPL-23-005. | Policy Guild, Signals Guild (src/Policy/__Libraries/StellaOps.Policy/TASKS.md) [Policy & Reasoning] 120.C) Policy.VII @@ -207,15 +207,15 @@ Task ID | State | Task description | Owners (Source) --- | --- | --- | --- POLICY-TEN-48-001 | TODO | Add `tenant_id`/`project_id` columns, enable RLS, update evaluators to require tenant context, and emit rationale IDs including tenant metadata. | Policy Guild (src/Policy/StellaOps.Policy.Engine/TASKS.md) REGISTRY-API-27-001 | TODO | Define OpenAPI specification covering workspaces, versions, reviews, simulations, promotions, and attestations; publish typed clients for Console/CLI. 
| Policy Registry Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) -REGISTRY-API-27-002 | TODO | Implement workspace storage (Mongo collections, object storage buckets) with CRUD endpoints, diff history, and retention policies. | Policy Registry Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) -REGISTRY-API-27-003 | TODO | Integrate compile endpoint: forward source bundle to Policy Engine, persist diagnostics, symbol table, rule index, and complexity metrics. | Policy Registry Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) -REGISTRY-API-27-004 | TODO | Implement quick simulation API with request limits (sample size, timeouts), returning counts, heatmap, sampled explains. | Policy Registry Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) -REGISTRY-API-27-005 | TODO | Build batch simulation orchestration: enqueue shards, collect partials, reduce deltas, produce evidence bundles + signed manifest. | Policy Registry Guild, Scheduler Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) -REGISTRY-API-27-006 | TODO | Implement review workflow (comments, votes, required approvers, status transitions) with audit trails and webhooks. | Policy Registry Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) -REGISTRY-API-27-007 | TODO | Implement publish pipeline: sign source/compiled digests, create attestations, mark version immutable, emit events. | Policy Registry Guild, Security Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) -REGISTRY-API-27-008 | TODO | Implement promotion bindings per tenant/environment with canary subsets, rollback path, and environment history. | Policy Registry Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) -REGISTRY-API-27-009 | TODO | Instrument metrics/logs/traces (compile time, diagnostics rate, sim queue depth, approval latency) and expose dashboards. 
| Policy Registry Guild, Observability Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) -REGISTRY-API-27-010 | TODO | Build unit/integration/load test suites for compile/sim/review/publish/promote flows; provide seeded fixtures for CI. | Policy Registry Guild, QA Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) +REGISTRY-API-27-002 | TODO | Implement workspace storage (Mongo collections, object storage buckets) with CRUD endpoints, diff history, and retention policies. Dependencies: REGISTRY-API-27-001. | Policy Registry Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) +REGISTRY-API-27-003 | TODO | Integrate compile endpoint: forward source bundle to Policy Engine, persist diagnostics, symbol table, rule index, and complexity metrics. Dependencies: REGISTRY-API-27-002. | Policy Registry Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) +REGISTRY-API-27-004 | TODO | Implement quick simulation API with request limits (sample size, timeouts), returning counts, heatmap, sampled explains. Dependencies: REGISTRY-API-27-003. | Policy Registry Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) +REGISTRY-API-27-005 | TODO | Build batch simulation orchestration: enqueue shards, collect partials, reduce deltas, produce evidence bundles + signed manifest. Dependencies: REGISTRY-API-27-004. | Policy Registry Guild, Scheduler Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) +REGISTRY-API-27-006 | TODO | Implement review workflow (comments, votes, required approvers, status transitions) with audit trails and webhooks. Dependencies: REGISTRY-API-27-005. | Policy Registry Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) +REGISTRY-API-27-007 | TODO | Implement publish pipeline: sign source/compiled digests, create attestations, mark version immutable, emit events. Dependencies: REGISTRY-API-27-006. 
| Policy Registry Guild, Security Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) +REGISTRY-API-27-008 | TODO | Implement promotion bindings per tenant/environment with canary subsets, rollback path, and environment history. Dependencies: REGISTRY-API-27-007. | Policy Registry Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) +REGISTRY-API-27-009 | TODO | Instrument metrics/logs/traces (compile time, diagnostics rate, sim queue depth, approval latency) and expose dashboards. Dependencies: REGISTRY-API-27-008. | Policy Registry Guild, Observability Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) +REGISTRY-API-27-010 | TODO | Build unit/integration/load test suites for compile/sim/review/publish/promote flows; provide seeded fixtures for CI. Dependencies: REGISTRY-API-27-009. | Policy Registry Guild, QA Guild (src/Policy/StellaOps.Policy.Registry/TASKS.md) [Policy & Reasoning] 120.D) RiskEngine @@ -224,16 +224,16 @@ Summary: Policy & Reasoning focus on RiskEngine). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- RISK-ENGINE-66-001 | TODO | Scaffold scoring service (job queue, worker loop, provider registry) with deterministic execution harness. | Risk Engine Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) -RISK-ENGINE-66-002 | TODO | Implement default transforms (linear, minmax, logistic, piecewise), clamping, gating, and contribution calculator. | Risk Engine Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) -RISK-ENGINE-67-001 | TODO | Integrate CVSS and KEV providers pulling data from Conseiller; implement reducers (`max`, `any`, `consensus`). | Risk Engine Guild, Concelier Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) -RISK-ENGINE-67-002 | TODO | Integrate VEX gate provider and ensure gating short-circuits scoring as configured. 
| Risk Engine Guild, Excitator Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) -RISK-ENGINE-67-003 | TODO | Add fix availability, asset criticality, and internet exposure providers with caching + TTL enforcement. | Risk Engine Guild, Policy Engine Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) -RISK-ENGINE-68-001 | TODO | Persist scoring results + explanation pointers to Findings Ledger; handle incremental updates via input hash. | Risk Engine Guild, Findings Ledger Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) -RISK-ENGINE-68-002 | TODO | Expose APIs (`/risk/jobs`, `/risk/results`, `/risk/results/{id}/explanation`); include pagination, filtering, error codes. | Risk Engine Guild, API Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) -RISK-ENGINE-69-001 | TODO | Implement simulation mode producing distributions and top movers without mutating ledger. | Risk Engine Guild, Policy Studio Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) -RISK-ENGINE-69-002 | TODO | Add telemetry (spans, metrics, logs) for provider latency, job throughput, cache hits; define SLO dashboards. | Risk Engine Guild, Observability Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) -RISK-ENGINE-70-001 | TODO | Support offline provider bundles with manifest verification and missing-data reporting. | Risk Engine Guild, Export Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) -RISK-ENGINE-70-002 | TODO | Integrate runtime evidence provider and reachability provider outputs with caching + TTL. | Risk Engine Guild, Observability Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) +RISK-ENGINE-66-002 | TODO | Implement default transforms (linear, minmax, logistic, piecewise), clamping, gating, and contribution calculator. Dependencies: RISK-ENGINE-66-001. | Risk Engine Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) +RISK-ENGINE-67-001 | TODO | Integrate CVSS and KEV providers pulling data from Concelier; implement reducers (`max`, `any`, `consensus`). 
Dependencies: RISK-ENGINE-66-002. | Risk Engine Guild, Concelier Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) +RISK-ENGINE-67-002 | TODO | Integrate VEX gate provider and ensure gating short-circuits scoring as configured. Dependencies: RISK-ENGINE-67-001. | Risk Engine Guild, Excititor Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) +RISK-ENGINE-67-003 | TODO | Add fix availability, asset criticality, and internet exposure providers with caching + TTL enforcement. Dependencies: RISK-ENGINE-67-002. | Risk Engine Guild, Policy Engine Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) +RISK-ENGINE-68-001 | TODO | Persist scoring results + explanation pointers to Findings Ledger; handle incremental updates via input hash. Dependencies: RISK-ENGINE-67-003. | Risk Engine Guild, Findings Ledger Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) +RISK-ENGINE-68-002 | TODO | Expose APIs (`/risk/jobs`, `/risk/results`, `/risk/results/{id}/explanation`); include pagination, filtering, error codes. Dependencies: RISK-ENGINE-68-001. | Risk Engine Guild, API Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) +RISK-ENGINE-69-001 | TODO | Implement simulation mode producing distributions and top movers without mutating ledger. Dependencies: RISK-ENGINE-68-002. | Risk Engine Guild, Policy Studio Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) +RISK-ENGINE-69-002 | TODO | Add telemetry (spans, metrics, logs) for provider latency, job throughput, cache hits; define SLO dashboards. Dependencies: RISK-ENGINE-69-001. | Risk Engine Guild, Observability Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) +RISK-ENGINE-70-001 | TODO | Support offline provider bundles with manifest verification and missing-data reporting. Dependencies: RISK-ENGINE-69-002. | Risk Engine Guild, Export Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) +RISK-ENGINE-70-002 | TODO | Integrate runtime evidence provider and reachability provider outputs with caching + TTL. 
Dependencies: RISK-ENGINE-70-001. | Risk Engine Guild, Observability Guild (src/RiskEngine/StellaOps.RiskEngine/TASKS.md) [Policy & Reasoning] 120.E) VexLens.I @@ -242,18 +242,18 @@ Summary: Policy & Reasoning focus on VexLens (phase I). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- VEXLENS-30-001 | TODO | Implement normalization pipeline for CSAF VEX, OpenVEX, CycloneDX VEX (status mapping, justification mapping, product tree parsing). | VEX Lens Guild (src/VexLens/StellaOps.VexLens/TASKS.md) -VEXLENS-30-002 | TODO | Build product mapping library (CPE/CPE2.3/vendor tokens → purl/version) with scope quality scoring and path metadata. | VEX Lens Guild (src/VexLens/StellaOps.VexLens/TASKS.md) -VEXLENS-30-003 | TODO | Integrate signature verification (Ed25519, DSSE, PKIX) using issuer keys, annotate evidence with verification state and failure reasons. | VEX Lens Guild, Issuer Directory Guild (src/VexLens/StellaOps.VexLens/TASKS.md) -VEXLENS-30-004 | TODO | Implement trust weighting engine (issuer base weights, signature modifiers, recency decay, justification modifiers, scope score adjustments) controlled by policy config. | VEX Lens Guild, Policy Guild (src/VexLens/StellaOps.VexLens/TASKS.md) -VEXLENS-30-005 | TODO | Implement consensus algorithm producing `consensus_state`, `confidence`, `weights`, `quorum`, `rationale`; support states: NOT_AFFECTED, AFFECTED, FIXED, UNDER_INVESTIGATION, DISPUTED, INCONCLUSIVE. | VEX Lens Guild (src/VexLens/StellaOps.VexLens/TASKS.md) -VEXLENS-30-006 | TODO | Materialize consensus projection storage with idempotent workers triggered by VEX/Policy changes; expose change events for downstream consumers. | VEX Lens Guild, Findings Ledger Guild (src/VexLens/StellaOps.VexLens/TASKS.md) -VEXLENS-30-007 | TODO | Expose APIs (`/vex/consensus`, `/vex/consensus/query`, `/vex/consensus/{id}`, `/vex/consensus/simulate`, `/vex/consensus/export`) with pagination, cost budgets, and OpenAPI docs. 
| VEX Lens Guild (src/VexLens/StellaOps.VexLens/TASKS.md) -VEXLENS-30-008 | TODO | Integrate consensus signals with Policy Engine (thresholds, suppression, simulation inputs) and Vuln Explorer detail view. | VEX Lens Guild, Policy Guild (src/VexLens/StellaOps.VexLens/TASKS.md) -VEXLENS-30-009 | TODO | Instrument metrics (`vex_consensus_compute_latency`, `vex_consensus_disputed_total`, `vex_signature_verification_rate`), structured logs, and traces; publish dashboards/alerts. | VEX Lens Guild, Observability Guild (src/VexLens/StellaOps.VexLens/TASKS.md) -VEXLENS-30-010 | TODO | Develop unit/property/integration/load tests (10M records), determinism harness, fuzz testing for malformed product trees. | VEX Lens Guild, QA Guild (src/VexLens/StellaOps.VexLens/TASKS.md) -VEXLENS-30-011 | TODO | Provide deployment manifests, caching configuration, scaling guides, offline kit seeds, and runbooks. | VEX Lens Guild, DevOps Guild (src/VexLens/StellaOps.VexLens/TASKS.md) +VEXLENS-30-002 | TODO | Build product mapping library (CPE/CPE2.3/vendor tokens → purl/version) with scope quality scoring and path metadata. Dependencies: VEXLENS-30-001. | VEX Lens Guild (src/VexLens/StellaOps.VexLens/TASKS.md) +VEXLENS-30-003 | TODO | Integrate signature verification (Ed25519, DSSE, PKIX) using issuer keys, annotate evidence with verification state and failure reasons. Dependencies: VEXLENS-30-002. | VEX Lens Guild, Issuer Directory Guild (src/VexLens/StellaOps.VexLens/TASKS.md) +VEXLENS-30-004 | TODO | Implement trust weighting engine (issuer base weights, signature modifiers, recency decay, justification modifiers, scope score adjustments) controlled by policy config. Dependencies: VEXLENS-30-003. | VEX Lens Guild, Policy Guild (src/VexLens/StellaOps.VexLens/TASKS.md) +VEXLENS-30-005 | TODO | Implement consensus algorithm producing `consensus_state`, `confidence`, `weights`, `quorum`, `rationale`; support states: NOT_AFFECTED, AFFECTED, FIXED, UNDER_INVESTIGATION, DISPUTED, INCONCLUSIVE. 
Dependencies: VEXLENS-30-004. | VEX Lens Guild (src/VexLens/StellaOps.VexLens/TASKS.md) +VEXLENS-30-006 | TODO | Materialize consensus projection storage with idempotent workers triggered by VEX/Policy changes; expose change events for downstream consumers. Dependencies: VEXLENS-30-005. | VEX Lens Guild, Findings Ledger Guild (src/VexLens/StellaOps.VexLens/TASKS.md) +VEXLENS-30-007 | TODO | Expose APIs (`/vex/consensus`, `/vex/consensus/query`, `/vex/consensus/{id}`, `/vex/consensus/simulate`, `/vex/consensus/export`) with pagination, cost budgets, and OpenAPI docs. Dependencies: VEXLENS-30-006. | VEX Lens Guild (src/VexLens/StellaOps.VexLens/TASKS.md) +VEXLENS-30-008 | TODO | Integrate consensus signals with Policy Engine (thresholds, suppression, simulation inputs) and Vuln Explorer detail view. Dependencies: VEXLENS-30-007. | VEX Lens Guild, Policy Guild (src/VexLens/StellaOps.VexLens/TASKS.md) +VEXLENS-30-009 | TODO | Instrument metrics (`vex_consensus_compute_latency`, `vex_consensus_disputed_total`, `vex_signature_verification_rate`), structured logs, and traces; publish dashboards/alerts. Dependencies: VEXLENS-30-008. | VEX Lens Guild, Observability Guild (src/VexLens/StellaOps.VexLens/TASKS.md) +VEXLENS-30-010 | TODO | Develop unit/property/integration/load tests (10M records), determinism harness, fuzz testing for malformed product trees. Dependencies: VEXLENS-30-009. | VEX Lens Guild, QA Guild (src/VexLens/StellaOps.VexLens/TASKS.md) +VEXLENS-30-011 | TODO | Provide deployment manifests, caching configuration, scaling guides, offline kit seeds, and runbooks. Dependencies: VEXLENS-30-010. | VEX Lens Guild, DevOps Guild (src/VexLens/StellaOps.VexLens/TASKS.md) VEXLENS-AIAI-31-001 | TODO | Expose consensus rationale API enhancements (policy factors, issuer details, mapping issues) for Advisory AI conflict explanations. 
| VEX Lens Guild (src/VexLens/StellaOps.VexLens/TASKS.md) -VEXLENS-AIAI-31-002 | TODO | Provide caching hooks for consensus lookups used by Advisory AI (batch endpoints, TTL hints). | VEX Lens Guild (src/VexLens/StellaOps.VexLens/TASKS.md) +VEXLENS-AIAI-31-002 | TODO | Provide caching hooks for consensus lookups used by Advisory AI (batch endpoints, TTL hints). Dependencies: VEXLENS-AIAI-31-001. | VEX Lens Guild (src/VexLens/StellaOps.VexLens/TASKS.md) VEXLENS-EXPORT-35-001 | TODO | Provide consensus snapshot API delivering deterministic JSONL (state, confidence, provenance) for exporter mirror bundles. | VEX Lens Guild (src/VexLens/StellaOps.VexLens/TASKS.md) VEXLENS-ORCH-33-001 | TODO | Register `consensus_compute` job type with orchestrator, integrate worker SDK, and expose job planning hooks for consensus batches. | VEX Lens Guild (src/VexLens/StellaOps.VexLens/TASKS.md) @@ -263,7 +263,7 @@ Depends on: Sprint 120.E - VexLens.I Summary: Policy & Reasoning focus on VexLens (phase II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -VEXLENS-ORCH-34-001 | TODO | Emit consensus completion events into orchestrator run ledger and provenance chain, including confidence metadata. | VEX Lens Guild (src/VexLens/StellaOps.VexLens/TASKS.md) +VEXLENS-ORCH-34-001 | TODO | Emit consensus completion events into orchestrator run ledger and provenance chain, including confidence metadata. Dependencies: VEXLENS-ORCH-33-001. | VEX Lens Guild (src/VexLens/StellaOps.VexLens/TASKS.md) [Policy & Reasoning] 120.F) VulnExplorer @@ -272,16 +272,16 @@ Summary: Policy & Reasoning focus on VulnExplorer). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- VULN-API-29-001 | TODO | Define OpenAPI spec (list/detail/query/simulation/workflow/export), query JSON schema, pagination/grouping contracts, and error codes. 
| Vuln Explorer API Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) -VULN-API-29-002 | TODO | Implement list/query endpoints with policy parameter, grouping, server paging, caching, and cost budgets. | Vuln Explorer API Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) -VULN-API-29-003 | TODO | Implement detail endpoint aggregating evidence, policy rationale, paths (Graph Explorer deep link), and workflow summary. | Vuln Explorer API Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) -VULN-API-29-004 | TODO | Expose workflow endpoints (assign, comment, accept-risk, verify-fix, target-fix, reopen) that write ledger events with idempotency + validation. | Vuln Explorer API Guild, Findings Ledger Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) -VULN-API-29-005 | TODO | Implement simulation endpoint comparing `policy_from` vs `policy_to`, returning diffs without side effects; hook into Policy Engine batch eval. | Vuln Explorer API Guild, Policy Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) -VULN-API-29-006 | TODO | Integrate resolver results with Graph Explorer: include shortest path metadata, line up deep-link parameters, expose `paths` array in details. | Vuln Explorer API Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) -VULN-API-29-007 | TODO | Enforce RBAC/ABAC scopes; implement CSRF/anti-forgery checks for Console; secure attachment URLs; audit logging. | Vuln Explorer API Guild, Security Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) -VULN-API-29-008 | TODO | Build export orchestrator producing signed bundles (manifest, NDJSON, checksums, signature). Integrate with Findings Ledger for evidence and Policy Engine metadata. 
| Vuln Explorer API Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) -VULN-API-29-009 | TODO | Instrument metrics (`vuln_list_latency`, `vuln_simulation_latency`, `vuln_export_duration`, `vuln_workflow_events_total`), structured logs, and traces; publish dashboards/alerts. | Vuln Explorer API Guild, Observability Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) -VULN-API-29-010 | TODO | Provide unit/integration/perf tests (5M findings), fuzz query validation, determinism harness comparing repeated queries. | Vuln Explorer API Guild, QA Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) -VULN-API-29-011 | TODO | Package deployment (Helm/Compose), health checks, CI smoke, offline kit steps, and scaling guidance. | Vuln Explorer API Guild, DevOps Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) +VULN-API-29-002 | TODO | Implement list/query endpoints with policy parameter, grouping, server paging, caching, and cost budgets. Dependencies: VULN-API-29-001. | Vuln Explorer API Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) +VULN-API-29-003 | TODO | Implement detail endpoint aggregating evidence, policy rationale, paths (Graph Explorer deep link), and workflow summary. Dependencies: VULN-API-29-002. | Vuln Explorer API Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) +VULN-API-29-004 | TODO | Expose workflow endpoints (assign, comment, accept-risk, verify-fix, target-fix, reopen) that write ledger events with idempotency + validation. Dependencies: VULN-API-29-003. | Vuln Explorer API Guild, Findings Ledger Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) +VULN-API-29-005 | TODO | Implement simulation endpoint comparing `policy_from` vs `policy_to`, returning diffs without side effects; hook into Policy Engine batch eval. Dependencies: VULN-API-29-004. 
| Vuln Explorer API Guild, Policy Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) +VULN-API-29-006 | TODO | Integrate resolver results with Graph Explorer: include shortest path metadata, line up deep-link parameters, expose `paths` array in details. Dependencies: VULN-API-29-005. | Vuln Explorer API Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) +VULN-API-29-007 | TODO | Enforce RBAC/ABAC scopes; implement CSRF/anti-forgery checks for Console; secure attachment URLs; audit logging. Dependencies: VULN-API-29-006. | Vuln Explorer API Guild, Security Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) +VULN-API-29-008 | TODO | Build export orchestrator producing signed bundles (manifest, NDJSON, checksums, signature). Integrate with Findings Ledger for evidence and Policy Engine metadata. Dependencies: VULN-API-29-007. | Vuln Explorer API Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) +VULN-API-29-009 | TODO | Instrument metrics (`vuln_list_latency`, `vuln_simulation_latency`, `vuln_export_duration`, `vuln_workflow_events_total`), structured logs, and traces; publish dashboards/alerts. Dependencies: VULN-API-29-008. | Vuln Explorer API Guild, Observability Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) +VULN-API-29-010 | TODO | Provide unit/integration/perf tests (5M findings), fuzz query validation, determinism harness comparing repeated queries. Dependencies: VULN-API-29-009. | Vuln Explorer API Guild, QA Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) +VULN-API-29-011 | TODO | Package deployment (Helm/Compose), health checks, CI smoke, offline kit steps, and scaling guidance. Dependencies: VULN-API-29-010. 
| Vuln Explorer API Guild, DevOps Guild (src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md) If all tasks are done - read next sprint section - SPRINT_130_scanner_surface.md diff --git a/docs/implplan/SPRINT_130_scanner_surface.md b/docs/implplan/SPRINT_130_scanner_surface.md index 96c171d0..e590b04b 100644 --- a/docs/implplan/SPRINT_130_scanner_surface.md +++ b/docs/implplan/SPRINT_130_scanner_surface.md @@ -5,154 +5,149 @@ Depends on: Sprint 110.A - AdvisoryAI Summary: Scanner & Surface focus on Scanner (phase I). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -5 | SCANNER-ANALYZERS-LANG-10-308R | Determinism fixtures + performance benchmarks; compare against competitor heuristic coverage. | TODO (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md) -6 | SCANNER-ANALYZERS-LANG-10-309R | Package plug-in manifest + Offline Kit documentation; ensure Worker integration. | TODO (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md) -ENTRYTRACE-SURFACE-01 | TODO | Run Surface.Validation prereq checks and resolve cached entry fragments via Surface.FS to avoid duplicate parsing. | EntryTrace Guild (src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md) -ENTRYTRACE-SURFACE-02 | TODO | Replace direct env/secret access with Surface.Secrets provider when tracing runtime configs. | EntryTrace Guild (src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md) +5 | SCANNER-ANALYZERS-LANG-10-308R | Determinism fixtures + performance benchmarks; compare against competitor heuristic coverage. | DONE (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md) +6 | SCANNER-ANALYZERS-LANG-10-309R | Package plug-in manifest + Offline Kit documentation; ensure Worker integration. Dependencies: SCANNER-ANALYZERS-LANG-10-308R. 
| DONE (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md) +ENTRYTRACE-SURFACE-01 | DONE (2025-11-02) | Run Surface.Validation prereq checks and resolve cached entry fragments via Surface.FS to avoid duplicate parsing. | EntryTrace Guild (src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md) +ENTRYTRACE-SURFACE-02 | DONE (2025-11-02) | Replace direct env/secret access with Surface.Secrets provider when tracing runtime configs. Dependencies: ENTRYTRACE-SURFACE-01. | EntryTrace Guild (src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md) +SCANNER-ENTRYTRACE-18-509 | DONE (2025-11-02) | Add regression coverage for EntryTrace surfaces (result store, WebService endpoint, CLI renderer) and NDJSON hashing. | EntryTrace Guild, QA Guild (src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md) +SCANNER-ENTRYTRACE-18-507 | DOING (2025-11-02) | Expand candidate discovery beyond ENTRYPOINT/CMD by scanning Docker history metadata and default service directories (`/etc/services/**`, `/s6/**`, `/etc/supervisor/*.conf`, `/usr/local/bin/*-entrypoint`) when explicit commands are absent. Dependencies: SCANNER-ENTRYTRACE-18-509. | EntryTrace Guild (src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md) +SCANNER-ENTRYTRACE-18-508 | DOING (2025-11-02) | Extend wrapper catalogue to collapse language/package launchers (`bundle`, `bundle exec`, `docker-php-entrypoint`, `npm`, `yarn node`, `pipenv`, `poetry run`) and vendor init scripts before terminal classification. Dependencies: SCANNER-ENTRYTRACE-18-507. | EntryTrace Guild (src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md) LANG-SURFACE-01 | TODO | Invoke Surface.Validation checks (env/cache/secrets) before analyzer execution to ensure consistent prerequisites. 
| Language Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md) -LANG-SURFACE-02 | TODO | Consume Surface.FS APIs for layer/source caching (instead of bespoke caches) to improve determinism. | Language Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md) -LANG-SURFACE-03 | TODO | Replace direct secret/env reads with Surface.Secrets references when fetching package feeds or registry creds. | Language Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md) +LANG-SURFACE-02 | TODO | Consume Surface.FS APIs for layer/source caching (instead of bespoke caches) to improve determinism. Dependencies: LANG-SURFACE-01. | Language Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md) +LANG-SURFACE-03 | TODO | Replace direct secret/env reads with Surface.Secrets references when fetching package feeds or registry creds. Dependencies: LANG-SURFACE-02. | Language Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md) SCANNER-ANALYZERS-DENO-26-001 | TODO | Build input normalizer & VFS for Deno projects: merge `deno.json(c)`, import maps, lockfiles, vendor dirs, `$DENO_DIR` caches, and container layers. Detect runtime/toolchain hints deterministically. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) -SCANNER-ANALYZERS-DENO-26-002 | TODO | Module graph builder: resolve static/dynamic imports using import map, `deno.lock`, vendor/, cache, npm bridge, node: builtins, WASM/JSON assertions. Annotate edges with resolution source and form. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) -SCANNER-ANALYZERS-DENO-26-003 | TODO | NPM/Node compat adapter: map `npm:` specifiers to cached packages or compat `node_modules`, evaluate package `exports`/conditions, record node: builtin usage. 
| Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) -SCANNER-ANALYZERS-DENO-26-004 | TODO | Static analyzer for permission/capability signals (FS, net, env, process, crypto, FFI, workers). Detect dynamic-import patterns, literal fetch URLs, tasks vs declared permissions. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) -SCANNER-ANALYZERS-DENO-26-005 | TODO | Bundle/binary inspector: parse eszip bundles and `deno compile` executables (embedded eszip + snapshot) to recover module graph, config, embedded resources. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) -SCANNER-ANALYZERS-DENO-26-006 | TODO | Container adapter: traverse OCI layers for `deno`, caches, vendor directories, compiled binaries; merge module provenance with layer info. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) -SCANNER-ANALYZERS-DENO-26-007 | TODO | Produce AOC-compliant observations: entrypoints, modules, edges, permissions, workers, warnings, binaries with reason codes and contexts. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) -SCANNER-ANALYZERS-DENO-26-008 | TODO | Fixture suite + performance benchmarks (vendor, npm, FFI, workers, dynamic import, bundle/binary, cache-only, container). | Deno Analyzer Guild, QA Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) - +SCANNER-ANALYZERS-DENO-26-002 | TODO | Module graph builder: resolve static/dynamic imports using import map, `deno.lock`, vendor/, cache, npm bridge, node: builtins, WASM/JSON assertions. Annotate edges with resolution source and form. Dependencies: SCANNER-ANALYZERS-DENO-26-001. 
| Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) +SCANNER-ANALYZERS-DENO-26-003 | TODO | NPM/Node compat adapter: map `npm:` specifiers to cached packages or compat `node_modules`, evaluate package `exports`/conditions, record node: builtin usage. Dependencies: SCANNER-ANALYZERS-DENO-26-002. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) +SCANNER-ANALYZERS-DENO-26-004 | TODO | Static analyzer for permission/capability signals (FS, net, env, process, crypto, FFI, workers). Detect dynamic-import patterns, literal fetch URLs, tasks vs declared permissions. Dependencies: SCANNER-ANALYZERS-DENO-26-003. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) +SCANNER-ANALYZERS-DENO-26-005 | TODO | Bundle/binary inspector: parse eszip bundles and `deno compile` executables (embedded eszip + snapshot) to recover module graph, config, embedded resources. Dependencies: SCANNER-ANALYZERS-DENO-26-004. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) +SCANNER-ANALYZERS-DENO-26-006 | TODO | Container adapter: traverse OCI layers for `deno`, caches, vendor directories, compiled binaries; merge module provenance with layer info. Dependencies: SCANNER-ANALYZERS-DENO-26-005. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) +SCANNER-ANALYZERS-DENO-26-007 | TODO | Produce AOC-compliant observations: entrypoints, modules, edges, permissions, workers, warnings, binaries with reason codes and contexts. Dependencies: SCANNER-ANALYZERS-DENO-26-006. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) +SCANNER-ANALYZERS-DENO-26-008 | TODO | Fixture suite + performance benchmarks (vendor, npm, FFI, workers, dynamic import, bundle/binary, cache-only, container). Dependencies: SCANNER-ANALYZERS-DENO-26-007. 
| Deno Analyzer Guild, QA Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) [Scanner & Surface] 130.A) Scanner.II Depends on: Sprint 130.A - Scanner.I Summary: Scanner & Surface focus on Scanner (phase II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -SCANNER-ANALYZERS-DENO-26-009 | TODO | Optional runtime evidence hooks (loader/require shim) capturing module loads + permissions during harnessed execution with path hashing. | Deno Analyzer Guild, Signals Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) -SCANNER-ANALYZERS-DENO-26-010 | TODO | Package analyzer plug-in, add CLI (`stella deno inspect | Deno Analyzer Guild, DevOps Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) -SCANNER-ANALYZERS-DENO-26-011 | TODO | Policy signal emitter: net/fs/env/ffi/process/crypto capabilities, remote origin list, npm usage, wasm modules, dynamic-import warnings. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) +SCANNER-ANALYZERS-DENO-26-009 | TODO | Optional runtime evidence hooks (loader/require shim) capturing module loads + permissions during harnessed execution with path hashing. Dependencies: SCANNER-ANALYZERS-DENO-26-008. | Deno Analyzer Guild, Signals Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) +SCANNER-ANALYZERS-DENO-26-010 | TODO | Package analyzer plug-in, add CLI (`stella deno inspect`, `stella deno resolve`, `stella deno trace`) commands, update Offline Kit docs, ensure Worker integration. Dependencies: SCANNER-ANALYZERS-DENO-26-009. | Deno Analyzer Guild, DevOps Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) +SCANNER-ANALYZERS-DENO-26-011 | TODO | Policy signal emitter: net/fs/env/ffi/process/crypto capabilities, remote origin list, npm usage, wasm modules, dynamic-import warnings. Dependencies: SCANNER-ANALYZERS-DENO-26-010. 
| Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md) SCANNER-ANALYZERS-JAVA-21-005 | TODO | Framework config extraction: Spring Boot imports, spring.factories, application properties/yaml, Jakarta web.xml & fragments, JAX-RS/JPA/CDI/JAXB configs, logging files, Graal native-image configs. | Java Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md) -SCANNER-ANALYZERS-JAVA-21-006 | TODO | JNI/native hint scanner: detect native methods, System.load/Library literals, bundled native libs, Graal JNI configs; emit `jni-load` edges for native analyzer correlation. | Java Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md) -SCANNER-ANALYZERS-JAVA-21-007 | TODO | Signature and manifest metadata collector: verify JAR signature structure, capture signers, manifest loader attributes (Main-Class, Agent-Class, Start-Class, Class-Path). | Java Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md) -SCANNER-ANALYZERS-JAVA-21-008 | BLOCKED (2025-10-27) | Implement resolver + AOC writer: produce entrypoints (env profiles, warnings), components (jar_id + semantic ids), edges (jpms, cp, spi, reflect, jni) with reason codes/confidence. | Java Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md) -SCANNER-ANALYZERS-JAVA-21-009 | TODO | Author comprehensive fixtures (modular app, boot fat jar, war, ear, MR-jar, jlink image, JNI, reflection heavy, signed jar, microprofile) with golden outputs and perf benchmarks. | Java Analyzer Guild, QA Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md) -SCANNER-ANALYZERS-JAVA-21-010 | TODO | Optional runtime ingestion: Java agent + JFR reader capturing class load, ServiceLoader, and System.load events with path scrubbing. Emit append-only runtime edges `runtime-class`/`runtime-spi`/`runtime-load`. 
| Java Analyzer Guild, Signals Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md) -SCANNER-ANALYZERS-JAVA-21-011 | TODO | Package analyzer as restart-time plug-in (manifest/DI), update Offline Kit docs, add CLI/worker hooks for Java inspection commands. | Java Analyzer Guild, DevOps Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md) -SCANNER-ANALYZERS-LANG-11-001 | TODO | Build entrypoint resolver that maps project/publish artifacts to entrypoint identities (assembly name, MVID, TFM, RID) and environment profiles (publish mode, host kind, probing paths). Output normalized `entrypoints[]` records with deterministic IDs. | StellaOps.Scanner EPDR Guild, Language Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md) -SCANNER-ANALYZERS-LANG-11-002 | TODO | Implement static analyzer (IL + reflection heuristics) capturing AssemblyRef, ModuleRef/PInvoke, DynamicDependency, reflection literals, DI patterns, and custom AssemblyLoadContext probing hints. Emit dependency edges with reason codes and confidence. | StellaOps.Scanner EPDR Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md) -SCANNER-ANALYZERS-LANG-11-003 | TODO | Ingest optional runtime evidence (AssemblyLoad, Resolving, P/Invoke) via event listener harness; merge runtime edges with static/declared ones and attach reason codes/confidence. | StellaOps.Scanner EPDR Guild, Signals Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md) -SCANNER-ANALYZERS-LANG-11-004 | TODO | Produce normalized observation export to Scanner writer: entrypoints + dependency edges + environment profiles (AOC compliant). Wire to SBOM service entrypoint tagging. 
| StellaOps.Scanner EPDR Guild, SBOM Service Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md) -SCANNER-ANALYZERS-LANG-11-005 | TODO | Add comprehensive fixtures/benchmarks covering framework-dependent, self-contained, single-file, trimmed, NativeAOT, multi-RID scenarios; include explain traces and perf benchmarks vs previous analyzer. | StellaOps.Scanner EPDR Guild, QA Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md) - +SCANNER-ANALYZERS-JAVA-21-006 | TODO | JNI/native hint scanner: detect native methods, System.load/Library literals, bundled native libs, Graal JNI configs; emit `jni-load` edges for native analyzer correlation. Dependencies: SCANNER-ANALYZERS-JAVA-21-005. | Java Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md) +SCANNER-ANALYZERS-JAVA-21-007 | TODO | Signature and manifest metadata collector: verify JAR signature structure, capture signers, manifest loader attributes (Main-Class, Agent-Class, Start-Class, Class-Path). Dependencies: SCANNER-ANALYZERS-JAVA-21-006. | Java Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md) +SCANNER-ANALYZERS-JAVA-21-008 | BLOCKED (2025-10-27) | Implement resolver + AOC writer: produce entrypoints (env profiles, warnings), components (jar_id + semantic ids), edges (jpms, cp, spi, reflect, jni) with reason codes/confidence. Dependencies: SCANNER-ANALYZERS-JAVA-21-007. | Java Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md) +SCANNER-ANALYZERS-JAVA-21-009 | TODO | Author comprehensive fixtures (modular app, boot fat jar, war, ear, MR-jar, jlink image, JNI, reflection heavy, signed jar, microprofile) with golden outputs and perf benchmarks. Dependencies: SCANNER-ANALYZERS-JAVA-21-008. 
| Java Analyzer Guild, QA Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md) +SCANNER-ANALYZERS-JAVA-21-010 | TODO | Optional runtime ingestion: Java agent + JFR reader capturing class load, ServiceLoader, and System.load events with path scrubbing. Emit append-only runtime edges `runtime-class`/`runtime-spi`/`runtime-load`. Dependencies: SCANNER-ANALYZERS-JAVA-21-009. | Java Analyzer Guild, Signals Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md) +SCANNER-ANALYZERS-JAVA-21-011 | TODO | Package analyzer as restart-time plug-in (manifest/DI), update Offline Kit docs, add CLI/worker hooks for Java inspection commands. Dependencies: SCANNER-ANALYZERS-JAVA-21-010. | Java Analyzer Guild, DevOps Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md) +SCANNER-ANALYZERS-LANG-11-001 | TODO | Build entrypoint resolver that maps project/publish artifacts to entrypoint identities (assembly name, MVID, TFM, RID) and environment profiles (publish mode, host kind, probing paths). Output normalized `entrypoints[]` records with deterministic IDs. Dependencies: SCANNER-ANALYZERS-LANG-10-309R. | StellaOps.Scanner EPDR Guild, Language Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md) [Scanner & Surface] 130.A) Scanner.III Depends on: Sprint 130.A - Scanner.II Summary: Scanner & Surface focus on Scanner (phase III). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- +SCANNER-ANALYZERS-LANG-11-002 | TODO | Implement static analyzer (IL + reflection heuristics) capturing AssemblyRef, ModuleRef/PInvoke, DynamicDependency, reflection literals, DI patterns, and custom AssemblyLoadContext probing hints. Emit dependency edges with reason codes and confidence. Dependencies: SCANNER-ANALYZERS-LANG-11-001. 
| StellaOps.Scanner EPDR Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md) +SCANNER-ANALYZERS-LANG-11-003 | TODO | Ingest optional runtime evidence (AssemblyLoad, Resolving, P/Invoke) via event listener harness; merge runtime edges with static/declared ones and attach reason codes/confidence. Dependencies: SCANNER-ANALYZERS-LANG-11-002. | StellaOps.Scanner EPDR Guild, Signals Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md) +SCANNER-ANALYZERS-LANG-11-004 | TODO | Produce normalized observation export to Scanner writer: entrypoints + dependency edges + environment profiles (AOC compliant). Wire to SBOM service entrypoint tagging. Dependencies: SCANNER-ANALYZERS-LANG-11-003. | StellaOps.Scanner EPDR Guild, SBOM Service Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md) +SCANNER-ANALYZERS-LANG-11-005 | TODO | Add comprehensive fixtures/benchmarks covering framework-dependent, self-contained, single-file, trimmed, NativeAOT, multi-RID scenarios; include explain traces and perf benchmarks vs previous analyzer. Dependencies: SCANNER-ANALYZERS-LANG-11-004. | StellaOps.Scanner EPDR Guild, QA Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md) SCANNER-ANALYZERS-NATIVE-20-001 | TODO | Implement format detector and binary identity model supporting ELF, PE/COFF, and Mach-O (including fat slices). Capture arch, OS, build-id/UUID, interpreter metadata. | Native Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) -SCANNER-ANALYZERS-NATIVE-20-002 | TODO | Parse ELF dynamic sections: `DT_NEEDED`, `DT_RPATH`, `DT_RUNPATH`, symbol versions, interpreter, and note build-id. Emit declared dependency records with reason `elf-dtneeded` and attach version needs. 
| Native Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) -SCANNER-ANALYZERS-NATIVE-20-003 | TODO | Parse PE imports, delay-load tables, manifests/SxS metadata, and subsystem flags. Emit edges with reasons `pe-import` and `pe-delayimport`, plus SxS policy metadata. | Native Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) -SCANNER-ANALYZERS-NATIVE-20-004 | TODO | Parse Mach-O load commands (`LC_LOAD_DYLIB`, `LC_REEXPORT_DYLIB`, `LC_RPATH`, `LC_UUID`, fat headers). Handle `@rpath/@loader_path` placeholders and slice separation. | Native Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) -SCANNER-ANALYZERS-NATIVE-20-005 | TODO | Implement resolver engine modeling loader search order for ELF (rpath/runpath/cache/default), PE (SafeDll search + SxS), and Mach-O (`@rpath` expansion). Works against virtual image roots, producing explain traces. | Native Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) -SCANNER-ANALYZERS-NATIVE-20-006 | TODO | Build heuristic scanner for `dlopen`/`LoadLibrary` strings, plugin ecosystem configs, and Go/Rust static hints. Emit edges with `reason_code` (`string-dlopen`, `config-plugin`, `ecosystem-heuristic`) and confidence levels. | Native Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) -SCANNER-ANALYZERS-NATIVE-20-007 | TODO | Serialize AOC-compliant observations: entrypoints + dependency edges + environment profiles (search paths, interpreter, loader metadata). Integrate with Scanner writer API. | Native Analyzer Guild, SBOM Service Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) -SCANNER-ANALYZERS-NATIVE-20-008 | TODO | Author cross-platform fixtures (ELF dynamic/static, PE delay-load/SxS, Mach-O @rpath, plugin configs) and determinism benchmarks (<25 ms / binary, <250 MB). 
| Native Analyzer Guild, QA Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) -SCANNER-ANALYZERS-NATIVE-20-009 | TODO | Provide optional runtime capture adapters (Linux eBPF `dlopen`, Windows ETW ImageLoad, macOS dyld interpose) writing append-only runtime evidence. Include redaction/sandbox guidance. | Native Analyzer Guild, Signals Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) -SCANNER-ANALYZERS-NATIVE-20-010 | TODO | Package native analyzer as restart-time plug-in with manifest/DI registration; update Offline Kit bundle + documentation. | Native Analyzer Guild, DevOps Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) +SCANNER-ANALYZERS-NATIVE-20-002 | TODO | Parse ELF dynamic sections: `DT_NEEDED`, `DT_RPATH`, `DT_RUNPATH`, symbol versions, interpreter, and note build-id. Emit declared dependency records with reason `elf-dtneeded` and attach version needs. Dependencies: SCANNER-ANALYZERS-NATIVE-20-001. | Native Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) +SCANNER-ANALYZERS-NATIVE-20-003 | TODO | Parse PE imports, delay-load tables, manifests/SxS metadata, and subsystem flags. Emit edges with reasons `pe-import` and `pe-delayimport`, plus SxS policy metadata. Dependencies: SCANNER-ANALYZERS-NATIVE-20-002. | Native Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) +SCANNER-ANALYZERS-NATIVE-20-004 | TODO | Parse Mach-O load commands (`LC_LOAD_DYLIB`, `LC_REEXPORT_DYLIB`, `LC_RPATH`, `LC_UUID`, fat headers). Handle `@rpath/@loader_path` placeholders and slice separation. Dependencies: SCANNER-ANALYZERS-NATIVE-20-003. | Native Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) +SCANNER-ANALYZERS-NATIVE-20-005 | TODO | Implement resolver engine modeling loader search order for ELF (rpath/runpath/cache/default), PE (SafeDll search + SxS), and Mach-O (`@rpath` expansion). Works against virtual image roots, producing explain traces. 
Dependencies: SCANNER-ANALYZERS-NATIVE-20-004. | Native Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) +SCANNER-ANALYZERS-NATIVE-20-006 | TODO | Build heuristic scanner for `dlopen`/`LoadLibrary` strings, plugin ecosystem configs, and Go/Rust static hints. Emit edges with `reason_code` (`string-dlopen`, `config-plugin`, `ecosystem-heuristic`) and confidence levels. Dependencies: SCANNER-ANALYZERS-NATIVE-20-005. | Native Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) +SCANNER-ANALYZERS-NATIVE-20-007 | TODO | Serialize AOC-compliant observations: entrypoints + dependency edges + environment profiles (search paths, interpreter, loader metadata). Integrate with Scanner writer API. Dependencies: SCANNER-ANALYZERS-NATIVE-20-006. | Native Analyzer Guild, SBOM Service Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) +SCANNER-ANALYZERS-NATIVE-20-008 | TODO | Author cross-platform fixtures (ELF dynamic/static, PE delay-load/SxS, Mach-O @rpath, plugin configs) and determinism benchmarks (<25 ms / binary, <250 MB). Dependencies: SCANNER-ANALYZERS-NATIVE-20-007. | Native Analyzer Guild, QA Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) +SCANNER-ANALYZERS-NATIVE-20-009 | TODO | Provide optional runtime capture adapters (Linux eBPF `dlopen`, Windows ETW ImageLoad, macOS dyld interpose) writing append-only runtime evidence. Include redaction/sandbox guidance. Dependencies: SCANNER-ANALYZERS-NATIVE-20-008. | Native Analyzer Guild, Signals Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) +SCANNER-ANALYZERS-NATIVE-20-010 | TODO | Package native analyzer as restart-time plug-in with manifest/DI registration; update Offline Kit bundle + documentation. Dependencies: SCANNER-ANALYZERS-NATIVE-20-009. 
| Native Analyzer Guild, DevOps Guild (src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md) SCANNER-ANALYZERS-NODE-22-001 | TODO | Build input normalizer + VFS for Node projects: dirs, tgz, container layers, pnpm store, Yarn PnP zips; detect Node version targets (`.nvmrc`, `.node-version`, Dockerfile) and workspace roots deterministically. | Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) -SCANNER-ANALYZERS-NODE-22-002 | TODO | Implement entrypoint discovery (bin/main/module/exports/imports, workers, electron, shebang scripts) and condition set builder per entrypoint. | Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) -SCANNER-ANALYZERS-NODE-22-003 | TODO | Parse JS/TS sources for static `import`, `require`, `import()` and string concat cases; flag dynamic patterns with confidence levels; support source map de-bundling. | Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) -SCANNER-ANALYZERS-NODE-22-004 | TODO | Implement Node resolver engine for CJS + ESM (core modules, exports/imports maps, conditions, extension priorities, self-references) parameterised by node_version. | Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) -SCANNER-ANALYZERS-NODE-22-005 | TODO | Add package manager adapters: Yarn PnP (.pnp.data/.pnp.cjs), pnpm virtual store, npm/Yarn classic hoists; operate entirely in virtual FS. | Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) - +SCANNER-ANALYZERS-NODE-22-002 | TODO | Implement entrypoint discovery (bin/main/module/exports/imports, workers, electron, shebang scripts) and condition set builder per entrypoint. Dependencies: SCANNER-ANALYZERS-NODE-22-001. 
| Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) +SCANNER-ANALYZERS-NODE-22-003 | TODO | Parse JS/TS sources for static `import`, `require`, `import()` and string concat cases; flag dynamic patterns with confidence levels; support source map de-bundling. Dependencies: SCANNER-ANALYZERS-NODE-22-002. | Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) +SCANNER-ANALYZERS-NODE-22-004 | TODO | Implement Node resolver engine for CJS + ESM (core modules, exports/imports maps, conditions, extension priorities, self-references) parameterised by node_version. Dependencies: SCANNER-ANALYZERS-NODE-22-003. | Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) +SCANNER-ANALYZERS-NODE-22-005 | TODO | Add package manager adapters: Yarn PnP (.pnp.data/.pnp.cjs), pnpm virtual store, npm/Yarn classic hoists; operate entirely in virtual FS. Dependencies: SCANNER-ANALYZERS-NODE-22-004. | Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) [Scanner & Surface] 130.A) Scanner.IV Depends on: Sprint 130.A - Scanner.III Summary: Scanner & Surface focus on Scanner (phase IV). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -SCANNER-ANALYZERS-NODE-22-006 | TODO | Detect bundles + source maps, reconstruct module specifiers, and correlate to original paths; support dual CJS/ESM graphs with conditions. | Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) -SCANNER-ANALYZERS-NODE-22-007 | TODO | Scan for native addons (.node), WASM modules, and core capability signals (child_process, vm, worker_threads); emit hint edges and native metadata. 
| Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) -SCANNER-ANALYZERS-NODE-22-008 | TODO | Produce AOC-compliant observations: entrypoints, components (pkg/native/wasm), edges (esm-import, cjs-require, exports, json, native-addon, wasm, worker) with reason codes/confidence and resolver traces. | Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) -SCANNER-ANALYZERS-NODE-22-009 | TODO | Author fixture suite + performance benchmarks (npm, pnpm, PnP, bundle, electron, worker) with golden outputs and latency budgets. | Node Analyzer Guild, QA Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) -SCANNER-ANALYZERS-NODE-22-010 | TODO | Implement optional runtime evidence hooks (ESM loader, CJS require hook) with path scrubbing and loader ID hashing; emit runtime-* edges. | Node Analyzer Guild, Signals Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) -SCANNER-ANALYZERS-NODE-22-011 | TODO | Package updated analyzer as restart-time plug-in, expose Scanner CLI (`stella node *`) commands, refresh Offline Kit documentation. | Node Analyzer Guild, DevOps Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) -SCANNER-ANALYZERS-NODE-22-012 | TODO | Integrate container filesystem adapter (OCI layers, Dockerfile hints) and record NODE_OPTIONS/env warnings. | Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) +SCANNER-ANALYZERS-NODE-22-006 | TODO | Detect bundles + source maps, reconstruct module specifiers, and correlate to original paths; support dual CJS/ESM graphs with conditions. Dependencies: SCANNER-ANALYZERS-NODE-22-005. 
| Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) +SCANNER-ANALYZERS-NODE-22-007 | TODO | Scan for native addons (.node), WASM modules, and core capability signals (child_process, vm, worker_threads); emit hint edges and native metadata. Dependencies: SCANNER-ANALYZERS-NODE-22-006. | Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) +SCANNER-ANALYZERS-NODE-22-008 | TODO | Produce AOC-compliant observations: entrypoints, components (pkg/native/wasm), edges (esm-import, cjs-require, exports, json, native-addon, wasm, worker) with reason codes/confidence and resolver traces. Dependencies: SCANNER-ANALYZERS-NODE-22-007. | Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) +SCANNER-ANALYZERS-NODE-22-009 | TODO | Author fixture suite + performance benchmarks (npm, pnpm, PnP, bundle, electron, worker) with golden outputs and latency budgets. Dependencies: SCANNER-ANALYZERS-NODE-22-008. | Node Analyzer Guild, QA Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) +SCANNER-ANALYZERS-NODE-22-010 | TODO | Implement optional runtime evidence hooks (ESM loader, CJS require hook) with path scrubbing and loader ID hashing; emit runtime-* edges. Dependencies: SCANNER-ANALYZERS-NODE-22-009. | Node Analyzer Guild, Signals Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) +SCANNER-ANALYZERS-NODE-22-011 | TODO | Package updated analyzer as restart-time plug-in, expose Scanner CLI (`stella node *`) commands, refresh Offline Kit documentation. Dependencies: SCANNER-ANALYZERS-NODE-22-010. | Node Analyzer Guild, DevOps Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) +SCANNER-ANALYZERS-NODE-22-012 | TODO | Integrate container filesystem adapter (OCI layers, Dockerfile hints) and record NODE_OPTIONS/env warnings. Dependencies: SCANNER-ANALYZERS-NODE-22-011. 
| Node Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md) SCANNER-ANALYZERS-PHP-27-001 | TODO | Build input normalizer & VFS for PHP projects: merge source trees, composer manifests, vendor/, php.ini/conf.d, `.htaccess`, FPM configs, container layers. Detect framework/CMS fingerprints deterministically. | PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) -SCANNER-ANALYZERS-PHP-27-002 | TODO | Composer/Autoload analyzer: parse composer.json/lock/installed.json, generate package nodes, autoload edges (psr-4/0/classmap/files), bin entrypoints, composer plugins. | PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) -SCANNER-ANALYZERS-PHP-27-003 | TODO | Include/require graph builder: resolve static includes, capture dynamic include patterns, bootstrap chains, merge with autoload edges. | PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) -SCANNER-ANALYZERS-PHP-27-004 | TODO | Runtime capability scanner: detect exec/fs/net/env/serialization/crypto/database usage, stream wrappers, uploads; record evidence snippets. | PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) -SCANNER-ANALYZERS-PHP-27-005 | TODO | PHAR/Archive inspector: parse phar manifests/stubs, hash files, detect embedded vendor trees and phar:// usage. | PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) -SCANNER-ANALYZERS-PHP-27-006 | TODO | Framework/CMS surface mapper: extract routes, controllers, middleware, CLI/cron entrypoints for Laravel/Symfony/Slim/WordPress/Drupal/Magento. | PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) -SCANNER-ANALYZERS-PHP-27-007 | TODO | Container & extension detector: parse php.ini/conf.d, map extensions to .so/.dll, collect web server/FPM settings, upload limits, disable_functions. 
| PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) -SCANNER-ANALYZERS-PHP-27-008 | TODO | Produce AOC-compliant observations: entrypoints, packages, extensions, modules, edges (require/autoload), capabilities, routes, configs. | PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) - +SCANNER-ANALYZERS-PHP-27-002 | TODO | Composer/Autoload analyzer: parse composer.json/lock/installed.json, generate package nodes, autoload edges (psr-4/0/classmap/files), bin entrypoints, composer plugins. Dependencies: SCANNER-ANALYZERS-PHP-27-001. | PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) +SCANNER-ANALYZERS-PHP-27-003 | TODO | Include/require graph builder: resolve static includes, capture dynamic include patterns, bootstrap chains, merge with autoload edges. Dependencies: SCANNER-ANALYZERS-PHP-27-002. | PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) +SCANNER-ANALYZERS-PHP-27-004 | TODO | Runtime capability scanner: detect exec/fs/net/env/serialization/crypto/database usage, stream wrappers, uploads; record evidence snippets. Dependencies: SCANNER-ANALYZERS-PHP-27-003. | PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) +SCANNER-ANALYZERS-PHP-27-005 | TODO | PHAR/Archive inspector: parse phar manifests/stubs, hash files, detect embedded vendor trees and phar:// usage. Dependencies: SCANNER-ANALYZERS-PHP-27-004. | PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) +SCANNER-ANALYZERS-PHP-27-006 | TODO | Framework/CMS surface mapper: extract routes, controllers, middleware, CLI/cron entrypoints for Laravel/Symfony/Slim/WordPress/Drupal/Magento. Dependencies: SCANNER-ANALYZERS-PHP-27-005. 
| PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) +SCANNER-ANALYZERS-PHP-27-007 | TODO | Container & extension detector: parse php.ini/conf.d, map extensions to .so/.dll, collect web server/FPM settings, upload limits, disable_functions. Dependencies: SCANNER-ANALYZERS-PHP-27-006. | PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) [Scanner & Surface] 130.A) Scanner.V Depends on: Sprint 130.A - Scanner.IV Summary: Scanner & Surface focus on Scanner (phase V). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -SCANNER-ANALYZERS-PHP-27-009 | TODO | Fixture suite + performance benchmarks (Laravel, Symfony, WordPress, legacy, PHAR, container) with golden outputs. | PHP Analyzer Guild, QA Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) -SCANNER-ANALYZERS-PHP-27-010 | TODO | Optional runtime evidence hooks (if provided) to ingest audit logs or opcode cache stats with path hashing. | PHP Analyzer Guild, Signals Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) -SCANNER-ANALYZERS-PHP-27-011 | TODO | Package analyzer plug-in, add CLI (`stella php inspect | PHP Analyzer Guild, DevOps Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) -SCANNER-ANALYZERS-PHP-27-012 | TODO | Policy signal emitter: extension requirements/presence, dangerous constructs counters, stream wrapper usage, capability summaries. | PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) +SCANNER-ANALYZERS-PHP-27-009 | TODO | Fixture suite + performance benchmarks (Laravel, Symfony, WordPress, legacy, PHAR, container) with golden outputs. Dependencies: SCANNER-ANALYZERS-PHP-27-007. | PHP Analyzer Guild, QA Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) +SCANNER-ANALYZERS-PHP-27-010 | TODO | Optional runtime evidence hooks (if provided) to ingest audit logs or opcode cache stats with path hashing. 
Dependencies: SCANNER-ANALYZERS-PHP-27-009. | PHP Analyzer Guild, Signals Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) +SCANNER-ANALYZERS-PHP-27-011 | TODO | Package analyzer plug-in, add CLI (`stella php inspect`), refresh Offline Kit documentation. Dependencies: SCANNER-ANALYZERS-PHP-27-010. | PHP Analyzer Guild, DevOps Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) +SCANNER-ANALYZERS-PHP-27-012 | TODO | Policy signal emitter: extension requirements/presence, dangerous constructs counters, stream wrapper usage, capability summaries. Dependencies: SCANNER-ANALYZERS-PHP-27-011. | PHP Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md) SCANNER-ANALYZERS-PYTHON-23-001 | TODO | Build input normalizer & virtual filesystem for wheels, sdists, editable installs, zipapps, site-packages trees, and container roots. Detect Python version targets (`pyproject.toml`, `runtime.txt`, Dockerfile) + virtualenv layout deterministically. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) -SCANNER-ANALYZERS-PYTHON-23-002 | TODO | Entrypoint discovery: module `__main__`, console_scripts entry points, `scripts`, zipapp main, `manage.py`/gunicorn/celery patterns. Capture invocation context (module vs package, argv wrappers). | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) -SCANNER-ANALYZERS-PYTHON-23-003 | TODO | Static import graph builder using AST and bytecode fallback. Support `import`, `from ... import`, relative imports, `importlib.import_module`, `__import__` with literal args, `pkgutil.extend_path`. 
| Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) -SCANNER-ANALYZERS-PYTHON-23-004 | TODO | Python resolver engine (importlib semantics) handling namespace packages (PEP 420), package discovery order, `.pth` files, `sys.path` composition, zipimport, and site-packages precedence across virtualenv/container roots. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) -SCANNER-ANALYZERS-PYTHON-23-005 | TODO | Packaging adapters: pip editable (`.egg-link`), Poetry/Flit layout, Conda prefix, `.dist-info/RECORD` cross-check, container layer overlays. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) -SCANNER-ANALYZERS-PYTHON-23-006 | TODO | Detect native extensions (`*.so`, `*.pyd`), CFFI modules, ctypes loaders, embedded WASM, and runtime capability signals (subprocess, multiprocessing, ctypes, eval). | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) -SCANNER-ANALYZERS-PYTHON-23-007 | TODO | Framework/config heuristics: Django, Flask, FastAPI, Celery, AWS Lambda handlers, Gunicorn, Click/Typer CLIs, logging configs, pyproject optional dependencies. Tagged as hints only. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) -SCANNER-ANALYZERS-PYTHON-23-008 | TODO | Produce AOC-compliant observations: entrypoints, components (modules/packages/native), edges (import, namespace, dynamic-hint, native-extension) with reason codes/confidence and resolver traces. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) -SCANNER-ANALYZERS-PYTHON-23-009 | TODO | Fixture suite + perf benchmarks covering virtualenv, namespace packages, zipapp, editable installs, containers, lambda handler. 
| Python Analyzer Guild, QA Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) -SCANNER-ANALYZERS-PYTHON-23-010 | TODO | Optional runtime evidence: import hook capturing module load events with path scrubbing, optional bytecode instrumentation for `importlib` hooks, multiprocessing tracer. | Python Analyzer Guild, Signals Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) -SCANNER-ANALYZERS-PYTHON-23-011 | TODO | Package analyzer plug-in, add CLI commands (`stella python inspect | Python Analyzer Guild, DevOps Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) - +SCANNER-ANALYZERS-PYTHON-23-002 | TODO | Entrypoint discovery: module `__main__`, console_scripts entry points, `scripts`, zipapp main, `manage.py`/gunicorn/celery patterns. Capture invocation context (module vs package, argv wrappers). Dependencies: SCANNER-ANALYZERS-PYTHON-23-001. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) +SCANNER-ANALYZERS-PYTHON-23-003 | TODO | Static import graph builder using AST and bytecode fallback. Support `import`, `from ... import`, relative imports, `importlib.import_module`, `__import__` with literal args, `pkgutil.extend_path`. Dependencies: SCANNER-ANALYZERS-PYTHON-23-002. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) +SCANNER-ANALYZERS-PYTHON-23-004 | TODO | Python resolver engine (importlib semantics) handling namespace packages (PEP 420), package discovery order, `.pth` files, `sys.path` composition, zipimport, and site-packages precedence across virtualenv/container roots. Dependencies: SCANNER-ANALYZERS-PYTHON-23-003. 
| Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) +SCANNER-ANALYZERS-PYTHON-23-005 | TODO | Packaging adapters: pip editable (`.egg-link`), Poetry/Flit layout, Conda prefix, `.dist-info/RECORD` cross-check, container layer overlays. Dependencies: SCANNER-ANALYZERS-PYTHON-23-004. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) +SCANNER-ANALYZERS-PYTHON-23-006 | TODO | Detect native extensions (`*.so`, `*.pyd`), CFFI modules, ctypes loaders, embedded WASM, and runtime capability signals (subprocess, multiprocessing, ctypes, eval). Dependencies: SCANNER-ANALYZERS-PYTHON-23-005. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) +SCANNER-ANALYZERS-PYTHON-23-007 | TODO | Framework/config heuristics: Django, Flask, FastAPI, Celery, AWS Lambda handlers, Gunicorn, Click/Typer CLIs, logging configs, pyproject optional dependencies. Tagged as hints only. Dependencies: SCANNER-ANALYZERS-PYTHON-23-006. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) +SCANNER-ANALYZERS-PYTHON-23-008 | TODO | Produce AOC-compliant observations: entrypoints, components (modules/packages/native), edges (import, namespace, dynamic-hint, native-extension) with reason codes/confidence and resolver traces. Dependencies: SCANNER-ANALYZERS-PYTHON-23-007. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) +SCANNER-ANALYZERS-PYTHON-23-009 | TODO | Fixture suite + perf benchmarks covering virtualenv, namespace packages, zipapp, editable installs, containers, lambda handler. Dependencies: SCANNER-ANALYZERS-PYTHON-23-008. 
| Python Analyzer Guild, QA Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) +SCANNER-ANALYZERS-PYTHON-23-010 | TODO | Optional runtime evidence: import hook capturing module load events with path scrubbing, optional bytecode instrumentation for `importlib` hooks, multiprocessing tracer. Dependencies: SCANNER-ANALYZERS-PYTHON-23-009. | Python Analyzer Guild, Signals Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) +SCANNER-ANALYZERS-PYTHON-23-011 | TODO | Package analyzer plug-in, add CLI commands (`stella python inspect`), refresh Offline Kit documentation. Dependencies: SCANNER-ANALYZERS-PYTHON-23-010. | Python Analyzer Guild, DevOps Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) [Scanner & Surface] 130.A) Scanner.VI Depends on: Sprint 130.A - Scanner.V Summary: Scanner & Surface focus on Scanner (phase VI). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -SCANNER-ANALYZERS-PYTHON-23-012 | TODO | Container/zipapp adapter enhancements: parse OCI layers for Python runtime, detect `PYTHONPATH`/`PYTHONHOME` env, record warnings for sitecustomize/startup hooks. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) +SCANNER-ANALYZERS-PYTHON-23-012 | TODO | Container/zipapp adapter enhancements: parse OCI layers for Python runtime, detect `PYTHONPATH`/`PYTHONHOME` env, record warnings for sitecustomize/startup hooks. Dependencies: SCANNER-ANALYZERS-PYTHON-23-011. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) SCANNER-ANALYZERS-RUBY-28-001 | TODO | Build input normalizer & VFS for Ruby projects: merge source trees, Gemfile/Gemfile.lock, vendor/bundle, .gem archives, `.bundle/config`, Rack configs, containers. Detect framework/job fingerprints deterministically. 
| Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) -SCANNER-ANALYZERS-RUBY-28-002 | TODO | Gem & Bundler analyzer: parse Gemfile/Gemfile.lock, vendor specs, .gem archives, produce package nodes (PURLs), dependency edges, bin scripts, Bundler group metadata. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) -SCANNER-ANALYZERS-RUBY-28-003 | TODO | Require/autoload graph builder: resolve static/dynamic require, require_relative, load; infer Zeitwerk autoload paths and Rack boot chain. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) -SCANNER-ANALYZERS-RUBY-28-004 | TODO | Framework surface mapper: extract routes/controllers/middleware for Rails/Rack/Sinatra/Grape/Hanami; inventory jobs/schedulers (Sidekiq, Resque, ActiveJob, whenever, clockwork). | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) -SCANNER-ANALYZERS-RUBY-28-005 | TODO | Capability analyzer: detect os-exec, filesystem, network, serialization, crypto, DB usage, TLS posture, dynamic eval; record evidence snippets with file/line. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) -SCANNER-ANALYZERS-RUBY-28-006 | TODO | Rake task & scheduler analyzer: parse Rakefiles/lib/tasks, capture task names/prereqs/shell commands; parse Sidekiq/whenever/clockwork configs into schedules. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) -SCANNER-ANALYZERS-RUBY-28-007 | TODO | Container/runtime scanner: detect Ruby version, installed gems, native extensions, web server configs in OCI layers. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) -SCANNER-ANALYZERS-RUBY-28-008 | TODO | Produce AOC-compliant observations: entrypoints, packages, modules, edges (require/autoload), routes, jobs, tasks, capabilities, configs, warnings. 
| Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) -SCANNER-ANALYZERS-RUBY-28-009 | TODO | Fixture suite + performance benchmarks (Rails, Rack, Sinatra, Sidekiq, legacy, .gem, container) with golden outputs. | Ruby Analyzer Guild, QA Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) -SCANNER-ANALYZERS-RUBY-28-010 | TODO | Optional runtime evidence integration (if provided logs/metrics) with path hashing, without altering static precedence. | Ruby Analyzer Guild, Signals Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) -SCANNER-ANALYZERS-RUBY-28-011 | TODO | Package analyzer plug-in, add CLI (`stella ruby inspect | Ruby Analyzer Guild, DevOps Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) -SCANNER-ANALYZERS-RUBY-28-012 | TODO | Policy signal emitter: rubygems drift, native extension flags, dangerous constructs counts, TLS verify posture, dynamic require eval warnings. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) -SCANNER-ENTRYTRACE-18-502 | TODO | Expand chain walker with init shim/user-switch/supervisor recognition plus env/workdir accumulation and guarded edges. | EntryTrace Guild (src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md) -SCANNER-ENTRYTRACE-18-503 | TODO | Introduce target classifier + EntryPlan handoff with confidence scoring for ELF/Java/.NET/Node/Python and user/workdir context. | EntryTrace Guild (src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md) - +SCANNER-ANALYZERS-RUBY-28-002 | TODO | Gem & Bundler analyzer: parse Gemfile/Gemfile.lock, vendor specs, .gem archives, produce package nodes (PURLs), dependency edges, bin scripts, Bundler group metadata. Dependencies: SCANNER-ANALYZERS-RUBY-28-001. 
| Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) +SCANNER-ANALYZERS-RUBY-28-003 | TODO | Require/autoload graph builder: resolve static/dynamic require, require_relative, load; infer Zeitwerk autoload paths and Rack boot chain. Dependencies: SCANNER-ANALYZERS-RUBY-28-002. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) +SCANNER-ANALYZERS-RUBY-28-004 | TODO | Framework surface mapper: extract routes/controllers/middleware for Rails/Rack/Sinatra/Grape/Hanami; inventory jobs/schedulers (Sidekiq, Resque, ActiveJob, whenever, clockwork). Dependencies: SCANNER-ANALYZERS-RUBY-28-003. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) +SCANNER-ANALYZERS-RUBY-28-005 | TODO | Capability analyzer: detect os-exec, filesystem, network, serialization, crypto, DB usage, TLS posture, dynamic eval; record evidence snippets with file/line. Dependencies: SCANNER-ANALYZERS-RUBY-28-004. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) +SCANNER-ANALYZERS-RUBY-28-006 | TODO | Rake task & scheduler analyzer: parse Rakefiles/lib/tasks, capture task names/prereqs/shell commands; parse Sidekiq/whenever/clockwork configs into schedules. Dependencies: SCANNER-ANALYZERS-RUBY-28-005. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) +SCANNER-ANALYZERS-RUBY-28-007 | TODO | Container/runtime scanner: detect Ruby version, installed gems, native extensions, web server configs in OCI layers. Dependencies: SCANNER-ANALYZERS-RUBY-28-006. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) +SCANNER-ANALYZERS-RUBY-28-008 | TODO | Produce AOC-compliant observations: entrypoints, packages, modules, edges (require/autoload), routes, jobs, tasks, capabilities, configs, warnings. Dependencies: SCANNER-ANALYZERS-RUBY-28-007. 
| Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) +SCANNER-ANALYZERS-RUBY-28-009 | TODO | Fixture suite + performance benchmarks (Rails, Rack, Sinatra, Sidekiq, legacy, .gem, container) with golden outputs. Dependencies: SCANNER-ANALYZERS-RUBY-28-008. | Ruby Analyzer Guild, QA Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) +SCANNER-ANALYZERS-RUBY-28-010 | TODO | Optional runtime evidence integration (if provided logs/metrics) with path hashing, without altering static precedence. Dependencies: SCANNER-ANALYZERS-RUBY-28-009. | Ruby Analyzer Guild, Signals Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) +SCANNER-ANALYZERS-RUBY-28-011 | TODO | Package analyzer plug-in, add CLI (`stella ruby inspect`), refresh Offline Kit documentation. Dependencies: SCANNER-ANALYZERS-RUBY-28-010. | Ruby Analyzer Guild, DevOps Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) +SCANNER-ANALYZERS-RUBY-28-012 | TODO | Policy signal emitter: rubygems drift, native extension flags, dangerous constructs counts, TLS verify posture, dynamic require eval warnings. Dependencies: SCANNER-ANALYZERS-RUBY-28-011. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md) +SCANNER-ENTRYTRACE-18-502 | TODO | Expand chain walker with init shim/user-switch/supervisor recognition plus env/workdir accumulation and guarded edges. Dependencies: SCANNER-ENTRYTRACE-18-508. | EntryTrace Guild (src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md) +SCANNER-ENTRYTRACE-18-503 | TODO | Introduce target classifier + EntryPlan handoff with confidence scoring for ELF/Java/.NET/Node/Python and user/workdir context. Dependencies: SCANNER-ENTRYTRACE-18-502. | EntryTrace Guild (src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md) [Scanner & Surface] 130.A) Scanner.VII Depends on: Sprint 130.A - Scanner.VI Summary: Scanner & Surface focus on Scanner (phase VII). 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -SCANNER-ENTRYTRACE-18-504 | TODO | Emit EntryTrace AOC NDJSON (`entrytrace.entry/node/edge/target/warning/capability`) and wire CLI/service streaming outputs. | EntryTrace Guild (src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md) +SCANNER-ENTRYTRACE-18-504 | TODO | Emit EntryTrace AOC NDJSON (`entrytrace.entry/node/edge/target/warning/capability`) and wire CLI/service streaming outputs. Dependencies: SCANNER-ENTRYTRACE-18-503. | EntryTrace Guild (src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md) SCANNER-ENV-01 | TODO | Replace ad-hoc environment reads with `StellaOps.Scanner.Surface.Env` helpers for cache roots and CAS endpoints. | Scanner Worker Guild (src/Scanner/StellaOps.Scanner.Worker/TASKS.md) -SCANNER-ENV-02 | TODO | Wire Surface.Env helpers into WebService hosting (cache roots, feature flags) and document configuration. | Scanner WebService Guild, Ops Guild (src/Scanner/StellaOps.Scanner.WebService/TASKS.md) -SCANNER-ENV-03 | TODO | Adopt Surface.Env helpers for plugin configuration (cache roots, CAS endpoints, feature toggles). | BuildX Plugin Guild (src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md) +SCANNER-ENV-02 | TODO | Wire Surface.Env helpers into WebService hosting (cache roots, feature flags) and document configuration. Dependencies: SCANNER-ENV-01. | Scanner WebService Guild, Ops Guild (src/Scanner/StellaOps.Scanner.WebService/TASKS.md) +SCANNER-ENV-03 | TODO | Adopt Surface.Env helpers for plugin configuration (cache roots, CAS endpoints, feature toggles). Dependencies: SCANNER-ENV-02. | BuildX Plugin Guild (src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md) SCANNER-EVENTS-16-301 | BLOCKED (2025-10-26) | Emit orchestrator-compatible envelopes (`scanner.event.*`) and update integration tests to verify Notifier ingestion (no Redis queue coupling). 
| Scanner WebService Guild (src/Scanner/StellaOps.Scanner.WebService/TASKS.md) -SCANNER-EVENTS-16-302 | DOING (2025-10-26) | Extend orchestrator event links (report/policy/attestation) once endpoints are finalised across gateway + console. | Scanner WebService Guild (src/Scanner/StellaOps.Scanner.WebService/TASKS.md) +SCANNER-EVENTS-16-302 | DOING (2025-10-26) | Extend orchestrator event links (report/policy/attestation) once endpoints are finalised across gateway + console. Dependencies: SCANNER-EVENTS-16-301. | Scanner WebService Guild (src/Scanner/StellaOps.Scanner.WebService/TASKS.md) SCANNER-GRAPH-21-001 | TODO | Provide webhook/REST endpoint for Cartographer to request policy overlays and runtime evidence for graph nodes, ensuring determinism and tenant scoping. | Scanner WebService Guild, Cartographer Guild (src/Scanner/StellaOps.Scanner.WebService/TASKS.md) SCANNER-LNM-21-001 | TODO | Update `/reports` and `/policy/runtime` payloads to consume advisory/vex linksets, exposing source severity arrays and conflict summaries alongside effective verdicts. | Scanner WebService Guild, Policy Guild (src/Scanner/StellaOps.Scanner.WebService/TASKS.md) -SCANNER-LNM-21-002 | TODO | Add evidence endpoint for Console to fetch linkset summaries with policy overlay for a component/SBOM, including AOC references. | Scanner WebService Guild, UI Guild (src/Scanner/StellaOps.Scanner.WebService/TASKS.md) +SCANNER-LNM-21-002 | TODO | Add evidence endpoint for Console to fetch linkset summaries with policy overlay for a component/SBOM, including AOC references. Dependencies: SCANNER-LNM-21-001. | Scanner WebService Guild, UI Guild (src/Scanner/StellaOps.Scanner.WebService/TASKS.md) SCANNER-SECRETS-01 | TODO | Adopt `StellaOps.Scanner.Surface.Secrets` for registry/CAS credentials during scan execution. 
| Scanner Worker Guild, Security Guild (src/Scanner/StellaOps.Scanner.Worker/TASKS.md) -SCANNER-SECRETS-02 | TODO | Replace ad-hoc secret wiring with Surface.Secrets for report/export operations (registry and CAS tokens). | Scanner WebService Guild, Security Guild (src/Scanner/StellaOps.Scanner.WebService/TASKS.md) -SCANNER-SECRETS-03 | TODO | Use Surface.Secrets to retrieve registry credentials when interacting with CAS/referrers. | BuildX Plugin Guild, Security Guild (src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md) +SCANNER-SECRETS-02 | TODO | Replace ad-hoc secret wiring with Surface.Secrets for report/export operations (registry and CAS tokens). Dependencies: SCANNER-SECRETS-01. | Scanner WebService Guild, Security Guild (src/Scanner/StellaOps.Scanner.WebService/TASKS.md) +SCANNER-SECRETS-03 | TODO | Use Surface.Secrets to retrieve registry credentials when interacting with CAS/referrers. Dependencies: SCANNER-SECRETS-02. | BuildX Plugin Guild, Security Guild (src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md) SCANNER-SURFACE-01 | TODO | Persist Surface.FS manifests after analyzer stages, including layer CAS metadata and EntryTrace fragments. | Scanner Worker Guild (src/Scanner/StellaOps.Scanner.Worker/TASKS.md) -SCANNER-SURFACE-02 | TODO | Publish Surface.FS pointers (CAS URIs, manifests) via scan/report APIs and update attestation metadata. | Scanner WebService Guild (src/Scanner/StellaOps.Scanner.WebService/TASKS.md) -SCANNER-SURFACE-03 | TODO | Push layer manifests and entry fragments into Surface.FS during build-time SBOM generation. | BuildX Plugin Guild (src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md) - +SCANNER-SURFACE-02 | TODO | Publish Surface.FS pointers (CAS URIs, manifests) via scan/report APIs and update attestation metadata. Dependencies: SCANNER-SURFACE-01. 
| Scanner WebService Guild (src/Scanner/StellaOps.Scanner.WebService/TASKS.md) +SCANNER-SURFACE-03 | TODO | Push layer manifests and entry fragments into Surface.FS during build-time SBOM generation. Dependencies: SCANNER-SURFACE-02. | BuildX Plugin Guild (src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md) [Scanner & Surface] 130.A) Scanner.VIII Depends on: Sprint 130.A - Scanner.VII @@ -165,5 +160,4 @@ Seq | ID | Description | Status (src/Scanner/__Libraries/StellaOps.Scanner.Analy Seq | ID | Description | Status (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md) Seq | ID | Description | Status (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md) - If all tasks are done - read next sprint section - SPRINT_140_runtime_signals.md diff --git a/docs/implplan/SPRINT_140_runtime_signals.md b/docs/implplan/SPRINT_140_runtime_signals.md index 19fbd712..31fa253f 100644 --- a/docs/implplan/SPRINT_140_runtime_signals.md +++ b/docs/implplan/SPRINT_140_runtime_signals.md @@ -6,15 +6,15 @@ Summary: Runtime & Signals focus on Graph). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- GRAPH-INDEX-28-001 | TODO | Define canonical node/edge schemas, attribute dictionaries, identity rules, and seed fixtures; publish schema doc. | Graph Indexer Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) -GRAPH-INDEX-28-002 | TODO | Implement SBOM ingest consumer producing artifact/package/file nodes and edges with `valid_from/valid_to`, scope metadata, and provenance links. | Graph Indexer Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) -GRAPH-INDEX-28-003 | TODO | Project Concelier linksets into overlay tiles (`affected_by` edges, evidence refs) without mutating source observations; keep advisory aggregates in overlay store only. 
| Graph Indexer Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) -GRAPH-INDEX-28-004 | TODO | Integrate VEX statements (`vex_exempts` edges) with justification metadata and precedence markers for overlays. | Graph Indexer Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) -GRAPH-INDEX-28-005 | TODO | Hydrate policy overlays into graph (`governs_with` nodes/edges) referencing effective findings and explain hashes for sampled nodes. | Graph Indexer Guild, Policy Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) -GRAPH-INDEX-28-006 | TODO | Generate graph snapshots per SBOM with lineage (`derived_from`), adjacency manifests, and metadata for diff jobs. | Graph Indexer Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) -GRAPH-INDEX-28-007 | TODO | Implement clustering/centrality background jobs (Louvain/degree/betweenness approximations) with configurable schedules and store cluster ids on nodes. | Graph Indexer Guild, Observability Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) -GRAPH-INDEX-28-008 | TODO | Provide incremental update + backfill pipeline with change streams, retry/backoff, idempotent operations, and backlog metrics. | Graph Indexer Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) -GRAPH-INDEX-28-009 | TODO | Add unit/property/integration tests, synthetic large graph fixtures, chaos testing (missing overlays, cycles), and determinism checks across runs. | Graph Indexer Guild, QA Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) -GRAPH-INDEX-28-010 | TODO | Package deployment artifacts (Helm/Compose), offline seed bundles, and configuration docs; integrate Offline Kit. | Graph Indexer Guild, DevOps Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) +GRAPH-INDEX-28-002 | TODO | Implement SBOM ingest consumer producing artifact/package/file nodes and edges with `valid_from/valid_to`, scope metadata, and provenance links. Dependencies: GRAPH-INDEX-28-001. 
| Graph Indexer Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) +GRAPH-INDEX-28-003 | TODO | Project Concelier linksets into overlay tiles (`affected_by` edges, evidence refs) without mutating source observations; keep advisory aggregates in overlay store only. Dependencies: GRAPH-INDEX-28-002. | Graph Indexer Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) +GRAPH-INDEX-28-004 | TODO | Integrate VEX statements (`vex_exempts` edges) with justification metadata and precedence markers for overlays. Dependencies: GRAPH-INDEX-28-003. | Graph Indexer Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) +GRAPH-INDEX-28-005 | TODO | Hydrate policy overlays into graph (`governs_with` nodes/edges) referencing effective findings and explain hashes for sampled nodes. Dependencies: GRAPH-INDEX-28-004. | Graph Indexer Guild, Policy Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) +GRAPH-INDEX-28-006 | TODO | Generate graph snapshots per SBOM with lineage (`derived_from`), adjacency manifests, and metadata for diff jobs. Dependencies: GRAPH-INDEX-28-005. | Graph Indexer Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) +GRAPH-INDEX-28-007 | TODO | Implement clustering/centrality background jobs (Louvain/degree/betweenness approximations) with configurable schedules and store cluster ids on nodes. Dependencies: GRAPH-INDEX-28-006. | Graph Indexer Guild, Observability Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) +GRAPH-INDEX-28-008 | TODO | Provide incremental update + backfill pipeline with change streams, retry/backoff, idempotent operations, and backlog metrics. Dependencies: GRAPH-INDEX-28-007. | Graph Indexer Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) +GRAPH-INDEX-28-009 | TODO | Add unit/property/integration tests, synthetic large graph fixtures, chaos testing (missing overlays, cycles), and determinism checks across runs. Dependencies: GRAPH-INDEX-28-008. 
| Graph Indexer Guild, QA Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) +GRAPH-INDEX-28-010 | TODO | Package deployment artifacts (Helm/Compose), offline seed bundles, and configuration docs; integrate Offline Kit. Dependencies: GRAPH-INDEX-28-009. | Graph Indexer Guild, DevOps Guild (src/Graph/StellaOps.Graph.Indexer/TASKS.md) [Runtime & Signals] 140.B) SbomService @@ -23,20 +23,20 @@ Summary: Runtime & Signals focus on SbomService). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- SBOM-AIAI-31-001 | TODO | Provide `GET /sbom/paths?purl=...` and version timeline endpoints optimized for Advisory AI (incl. env flags, blast radius metadata). | SBOM Service Guild (src/SbomService/StellaOps.SbomService/TASKS.md) -SBOM-AIAI-31-002 | TODO | Instrument metrics for path/timeline queries (latency, cache hit rate) and surface dashboards. | SBOM Service Guild, Observability Guild (src/SbomService/StellaOps.SbomService/TASKS.md) +SBOM-AIAI-31-002 | TODO | Instrument metrics for path/timeline queries (latency, cache hit rate) and surface dashboards. Dependencies: SBOM-AIAI-31-001. | SBOM Service Guild, Observability Guild (src/SbomService/StellaOps.SbomService/TASKS.md) SBOM-CONSOLE-23-001 | TODO | Provide Console-focused SBOM catalog API (`/console/sboms`) with filters (artifact, license, scope, asset tags), pagination cursors, evaluation metadata, and immutable JSON projections for raw view drawer. Document schema + determinism guarantees. | SBOM Service Guild, Cartographer Guild (src/SbomService/StellaOps.SbomService/TASKS.md) -SBOM-CONSOLE-23-002 | TODO | Deliver component lookup endpoints powering global search and Graph overlays (component neighborhoods, license overlays, policy deltas) with caching hints and tenant enforcement. 
| SBOM Service Guild (src/SbomService/StellaOps.SbomService/TASKS.md) +SBOM-CONSOLE-23-002 | TODO | Deliver component lookup endpoints powering global search and Graph overlays (component neighborhoods, license overlays, policy deltas) with caching hints and tenant enforcement. Dependencies: SBOM-CONSOLE-23-001. | SBOM Service Guild (src/SbomService/StellaOps.SbomService/TASKS.md) SBOM-ORCH-32-001 | TODO | Register SBOM ingest/index sources with orchestrator, embed worker SDK, and emit artifact hashes + job metadata. | SBOM Service Guild (src/SbomService/StellaOps.SbomService/TASKS.md) -SBOM-ORCH-33-001 | TODO | Report backpressure metrics, honor orchestrator pause/throttle signals, and classify error outputs for sbom jobs. | SBOM Service Guild (src/SbomService/StellaOps.SbomService/TASKS.md) -SBOM-ORCH-34-001 | TODO | Implement orchestrator backfill + watermark reconciliation for SBOM ingest/index, ensuring idempotent artifact reuse. | SBOM Service Guild (src/SbomService/StellaOps.SbomService/TASKS.md) +SBOM-ORCH-33-001 | TODO | Report backpressure metrics, honor orchestrator pause/throttle signals, and classify error outputs for sbom jobs. Dependencies: SBOM-ORCH-32-001. | SBOM Service Guild (src/SbomService/StellaOps.SbomService/TASKS.md) +SBOM-ORCH-34-001 | TODO | Implement orchestrator backfill + watermark reconciliation for SBOM ingest/index, ensuring idempotent artifact reuse. Dependencies: SBOM-ORCH-33-001. | SBOM Service Guild (src/SbomService/StellaOps.SbomService/TASKS.md) SBOM-SERVICE-21-001 | BLOCKED (2025-10-27) | Publish normalized SBOM projection schema (components, relationships, scopes, entrypoints) and implement read API with pagination + tenant enforcement. | SBOM Service Guild, Cartographer Guild (src/SbomService/StellaOps.SbomService/TASKS.md) -SBOM-SERVICE-21-002 | BLOCKED (2025-10-27) | Emit change events (`sbom.version.created`) carrying digest/version metadata for Graph Indexer builds; add replay/backfill tooling. 
| SBOM Service Guild, Scheduler Guild (src/SbomService/StellaOps.SbomService/TASKS.md) -SBOM-SERVICE-21-003 | BLOCKED (2025-10-27) | Provide entrypoint/service node management API (list/update overrides) feeding Cartographer path relevance with deterministic defaults. | SBOM Service Guild (src/SbomService/StellaOps.SbomService/TASKS.md) -SBOM-SERVICE-21-004 | BLOCKED (2025-10-27) | Wire observability: metrics (`sbom_projection_seconds`, `sbom_projection_size`), traces, structured logs with tenant info; set alerts for backlog. | SBOM Service Guild, Observability Guild (src/SbomService/StellaOps.SbomService/TASKS.md) -SBOM-SERVICE-23-001 | TODO | Extend projections to include asset metadata (criticality, owner, environment, exposure flags) required by policy rules; update schema docs. | SBOM Service Guild, Policy Guild (src/SbomService/StellaOps.SbomService/TASKS.md) -SBOM-SERVICE-23-002 | TODO | Emit `sbom.asset.updated` events when metadata changes; ensure idempotent payloads and documentation. | SBOM Service Guild, Platform Events Guild (src/SbomService/StellaOps.SbomService/TASKS.md) +SBOM-SERVICE-21-002 | BLOCKED (2025-10-27) | Emit change events (`sbom.version.created`) carrying digest/version metadata for Graph Indexer builds; add replay/backfill tooling. Dependencies: SBOM-SERVICE-21-001. | SBOM Service Guild, Scheduler Guild (src/SbomService/StellaOps.SbomService/TASKS.md) +SBOM-SERVICE-21-003 | BLOCKED (2025-10-27) | Provide entrypoint/service node management API (list/update overrides) feeding Cartographer path relevance with deterministic defaults. Dependencies: SBOM-SERVICE-21-002. | SBOM Service Guild (src/SbomService/StellaOps.SbomService/TASKS.md) +SBOM-SERVICE-21-004 | BLOCKED (2025-10-27) | Wire observability: metrics (`sbom_projection_seconds`, `sbom_projection_size`), traces, structured logs with tenant info; set alerts for backlog. Dependencies: SBOM-SERVICE-21-003. 
| SBOM Service Guild, Observability Guild (src/SbomService/StellaOps.SbomService/TASKS.md) +SBOM-SERVICE-23-001 | TODO | Extend projections to include asset metadata (criticality, owner, environment, exposure flags) required by policy rules; update schema docs. Dependencies: SBOM-SERVICE-21-004. | SBOM Service Guild, Policy Guild (src/SbomService/StellaOps.SbomService/TASKS.md) +SBOM-SERVICE-23-002 | TODO | Emit `sbom.asset.updated` events when metadata changes; ensure idempotent payloads and documentation. Dependencies: SBOM-SERVICE-23-001. | SBOM Service Guild, Platform Events Guild (src/SbomService/StellaOps.SbomService/TASKS.md) SBOM-VULN-29-001 | TODO | Emit inventory evidence with `scope`, `runtime_flag`, dependency paths, and nearest safe version hints, streaming change events for resolver jobs. | SBOM Service Guild (src/SbomService/StellaOps.SbomService/TASKS.md) -SBOM-VULN-29-002 | TODO | Provide resolver feed (artifact, purl, version, paths) via queue/topic for Vuln Explorer candidate generation; ensure idempotent delivery. | SBOM Service Guild, Findings Ledger Guild (src/SbomService/StellaOps.SbomService/TASKS.md) +SBOM-VULN-29-002 | TODO | Provide resolver feed (artifact, purl, version, paths) via queue/topic for Vuln Explorer candidate generation; ensure idempotent delivery. Dependencies: SBOM-VULN-29-001. | SBOM Service Guild, Findings Ledger Guild (src/SbomService/StellaOps.SbomService/TASKS.md) [Runtime & Signals] 140.C) Signals @@ -45,8 +45,8 @@ Summary: Runtime & Signals focus on Signals). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- SIGNALS-24-003 | BLOCKED (2025-10-27) | Implement runtime facts ingestion endpoint and normalizer (process, sockets, container metadata) populating `context_facts` with AOC provenance. 
| Signals Guild, Runtime Guild (src/Signals/StellaOps.Signals/TASKS.md) -SIGNALS-24-004 | BLOCKED (2025-10-27) | Deliver reachability scoring engine producing states/scores and writing to `reachability_facts`; expose configuration for weights. | Signals Guild, Data Science (src/Signals/StellaOps.Signals/TASKS.md) -SIGNALS-24-005 | BLOCKED (2025-10-27) | Implement Redis caches (`reachability_cache:*`), invalidation on new facts, and publish `signals.fact.updated` events. | Signals Guild, Platform Events Guild (src/Signals/StellaOps.Signals/TASKS.md) +SIGNALS-24-004 | BLOCKED (2025-10-27) | Deliver reachability scoring engine producing states/scores and writing to `reachability_facts`; expose configuration for weights. Dependencies: SIGNALS-24-003. | Signals Guild, Data Science (src/Signals/StellaOps.Signals/TASKS.md) +SIGNALS-24-005 | BLOCKED (2025-10-27) | Implement Redis caches (`reachability_cache:*`), invalidation on new facts, and publish `signals.fact.updated` events. Dependencies: SIGNALS-24-004. | Signals Guild, Platform Events Guild (src/Signals/StellaOps.Signals/TASKS.md) [Runtime & Signals] 140.D) Zastava @@ -55,11 +55,11 @@ Summary: Runtime & Signals focus on Zastava). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- ZASTAVA-ENV-01 | TODO | Adopt Surface.Env helpers for cache endpoints, secret refs, and feature toggles. | Zastava Observer Guild (src/Zastava/StellaOps.Zastava.Observer/TASKS.md) -ZASTAVA-ENV-02 | TODO | Switch to Surface.Env helpers for webhook configuration (cache endpoint, secret refs, feature toggles). | Zastava Webhook Guild (src/Zastava/StellaOps.Zastava.Webhook/TASKS.md) +ZASTAVA-ENV-02 | TODO | Switch to Surface.Env helpers for webhook configuration (cache endpoint, secret refs, feature toggles). Dependencies: ZASTAVA-ENV-01. 
| Zastava Webhook Guild (src/Zastava/StellaOps.Zastava.Webhook/TASKS.md) ZASTAVA-SECRETS-01 | TODO | Retrieve CAS/attestation access via Surface.Secrets instead of inline secret stores. | Zastava Observer Guild, Security Guild (src/Zastava/StellaOps.Zastava.Observer/TASKS.md) -ZASTAVA-SECRETS-02 | TODO | Retrieve attestation verification secrets via Surface.Secrets. | Zastava Webhook Guild, Security Guild (src/Zastava/StellaOps.Zastava.Webhook/TASKS.md) +ZASTAVA-SECRETS-02 | TODO | Retrieve attestation verification secrets via Surface.Secrets. Dependencies: ZASTAVA-SECRETS-01. | Zastava Webhook Guild, Security Guild (src/Zastava/StellaOps.Zastava.Webhook/TASKS.md) ZASTAVA-SURFACE-01 | TODO | Integrate Surface.FS client for runtime drift detection (lookup cached layer hashes/entry traces). | Zastava Observer Guild (src/Zastava/StellaOps.Zastava.Observer/TASKS.md) -ZASTAVA-SURFACE-02 | TODO | Enforce Surface.FS availability during admission (deny when cache missing/stale) and embed pointer checks in webhook response. | Zastava Webhook Guild (src/Zastava/StellaOps.Zastava.Webhook/TASKS.md) +ZASTAVA-SURFACE-02 | TODO | Enforce Surface.FS availability during admission (deny when cache missing/stale) and embed pointer checks in webhook response. Dependencies: ZASTAVA-SURFACE-01. | Zastava Webhook Guild (src/Zastava/StellaOps.Zastava.Webhook/TASKS.md) If all tasks are done - read next sprint section - SPRINT_150_scheduling_automation.md diff --git a/docs/implplan/SPRINT_150_scheduling_automation.md b/docs/implplan/SPRINT_150_scheduling_automation.md index 9bf52907..6e2e8c61 100644 --- a/docs/implplan/SPRINT_150_scheduling_automation.md +++ b/docs/implplan/SPRINT_150_scheduling_automation.md @@ -6,19 +6,19 @@ Summary: Scheduling & Automation focus on Orchestrator (phase I). 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- ORCH-AIRGAP-56-001 | TODO | Enforce job descriptors to declare network intents; reject or flag any external endpoints in sealed mode before scheduling. | Orchestrator Service Guild, AirGap Policy Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-AIRGAP-56-002 | TODO | Surface sealing status and time staleness in job scheduling decisions; block runs when staleness budgets exceeded. | Orchestrator Service Guild, AirGap Controller Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-AIRGAP-57-001 | TODO | Add job type `mirror.bundle` to orchestrate bundle creation in connected environments with audit + provenance outputs. | Orchestrator Service Guild, Mirror Creator Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-AIRGAP-58-001 | TODO | Capture import/export operations as timeline/evidence entries, ensuring chain-of-custody for mirror + portable evidence jobs. | Orchestrator Service Guild, Evidence Locker Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-AIRGAP-56-002 | TODO | Surface sealing status and time staleness in job scheduling decisions; block runs when staleness budgets exceeded. Dependencies: ORCH-AIRGAP-56-001. | Orchestrator Service Guild, AirGap Controller Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-AIRGAP-57-001 | TODO | Add job type `mirror.bundle` to orchestrate bundle creation in connected environments with audit + provenance outputs. Dependencies: ORCH-AIRGAP-56-002. | Orchestrator Service Guild, Mirror Creator Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-AIRGAP-58-001 | TODO | Capture import/export operations as timeline/evidence entries, ensuring chain-of-custody for mirror + portable evidence jobs. Dependencies: ORCH-AIRGAP-57-001. 
| Orchestrator Service Guild, Evidence Locker Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) ORCH-OAS-61-001 | TODO | Document orchestrator endpoints in per-service OAS with standardized pagination, idempotency, and error envelope examples. | Orchestrator Service Guild, API Contracts Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-OAS-61-002 | TODO | Implement `GET /.well-known/openapi` in service and ensure version metadata aligns with runtime build. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-OAS-62-001 | TODO | Ensure SDK paginators and operations support orchestrator job operations; add SDK smoke tests for schedule/retry APIs. | Orchestrator Service Guild, SDK Generator Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-OAS-63-001 | TODO | Emit deprecation headers and documentation for legacy orchestrator endpoints; update notifications metadata. | Orchestrator Service Guild, API Governance Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-OAS-61-002 | TODO | Implement `GET /.well-known/openapi` in service and ensure version metadata aligns with runtime build. Dependencies: ORCH-OAS-61-001. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-OAS-62-001 | TODO | Ensure SDK paginators and operations support orchestrator job operations; add SDK smoke tests for schedule/retry APIs. Dependencies: ORCH-OAS-61-002. | Orchestrator Service Guild, SDK Generator Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-OAS-63-001 | TODO | Emit deprecation headers and documentation for legacy orchestrator endpoints; update notifications metadata. Dependencies: ORCH-OAS-62-001. 
| Orchestrator Service Guild, API Governance Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) ORCH-OBS-50-001 | TODO | Wire `StellaOps.Telemetry.Core` into orchestrator host, instrument schedulers and control APIs with trace spans, structured logs, and exemplar metrics. Ensure tenant/job metadata recorded for every span/log. | Orchestrator Service Guild, Observability Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-OBS-51-001 | TODO | Publish golden-signal metrics (dispatch latency, queue depth, failure rate), define job/tenant SLOs, and emit burn-rate alerts to collector + Notifications. Provide Grafana dashboards + alert rules. | Orchestrator Service Guild, DevOps Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-OBS-52-001 | TODO | Emit `timeline_event` objects for job lifecycle (`job.scheduled`, `job.started`, `job.completed`, `job.failed`) including trace IDs, run IDs, tenant/project, and causal metadata. Add contract tests and Kafka/NATS emitter with retries. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-OBS-53-001 | TODO | Generate job capsule inputs for evidence locker (payload digests, worker image, config hash, log manifest) and invoke locker snapshot hooks on completion/failure. Ensure redaction guard enforced. | Orchestrator Service Guild, Evidence Locker Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-OBS-54-001 | TODO | Produce DSSE attestations for orchestrator-scheduled jobs (subject = job capsule) and store references in timeline + evidence locker. Provide verification endpoint `/jobs/{id}/attestation`. | Orchestrator Service Guild, Provenance Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-OBS-55-001 | TODO | Implement incident mode hooks (sampling overrides, extended retention, additional debug spans) and automatic activation on SLO burn-rate breach. Emit activation/deactivation events to timeline + Notifier. 
| Orchestrator Service Guild, DevOps Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-OBS-51-001 | TODO | Publish golden-signal metrics (dispatch latency, queue depth, failure rate), define job/tenant SLOs, and emit burn-rate alerts to collector + Notifications. Provide Grafana dashboards + alert rules. Dependencies: ORCH-OBS-50-001. | Orchestrator Service Guild, DevOps Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-OBS-52-001 | TODO | Emit `timeline_event` objects for job lifecycle (`job.scheduled`, `job.started`, `job.completed`, `job.failed`) including trace IDs, run IDs, tenant/project, and causal metadata. Add contract tests and Kafka/NATS emitter with retries. Dependencies: ORCH-OBS-51-001. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-OBS-53-001 | TODO | Generate job capsule inputs for evidence locker (payload digests, worker image, config hash, log manifest) and invoke locker snapshot hooks on completion/failure. Ensure redaction guard enforced. Dependencies: ORCH-OBS-52-001. | Orchestrator Service Guild, Evidence Locker Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-OBS-54-001 | TODO | Produce DSSE attestations for orchestrator-scheduled jobs (subject = job capsule) and store references in timeline + evidence locker. Provide verification endpoint `/jobs/{id}/attestation`. Dependencies: ORCH-OBS-53-001. | Orchestrator Service Guild, Provenance Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-OBS-55-001 | TODO | Implement incident mode hooks (sampling overrides, extended retention, additional debug spans) and automatic activation on SLO burn-rate breach. Emit activation/deactivation events to timeline + Notifier. Dependencies: ORCH-OBS-54-001. 
| Orchestrator Service Guild, DevOps Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) ORCH-SVC-32-001 | TODO | Bootstrap service project, configuration, and Postgres schema/migrations for `sources`, `runs`, `jobs`, `dag_edges`, `artifacts`, `quotas`, `schedules`. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) @@ -27,21 +27,21 @@ Depends on: Sprint 150.A - Orchestrator.I Summary: Scheduling & Automation focus on Orchestrator (phase II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -ORCH-SVC-32-002 | TODO | Implement scheduler DAG planner + dependency resolver, job state machine, and critical-path metadata without yet issuing control actions. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-32-003 | TODO | Expose read-only REST APIs (sources, runs, jobs, DAG) with OpenAPI, validation, pagination, and tenant scoping. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-32-004 | TODO | Implement WebSocket/SSE stream for job/run updates, emit structured metrics counters/histograms, and add health probes. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-32-005 | TODO | Deliver worker claim/heartbeat/progress endpoints capturing artifact metadata/checksums and enforcing idempotency keys. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-33-001 | TODO | Enable `sources test | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-33-002 | TODO | Implement per-source/tenant adaptive token-bucket rate limiter, concurrency caps, and backpressure signals reacting to upstream 429/503. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-33-003 | TODO | Add watermark/backfill manager with event-time windows, duplicate suppression, dry-run preview endpoint, and safety validations. 
| Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-33-004 | TODO | Deliver dead-letter store, replay endpoints, and error classification surfaces with remediation hints + notification hooks. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-34-001 | TODO | Implement quota management APIs, per-tenant SLO burn-rate computation, and alert budget tracking surfaced via metrics. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-34-002 | TODO | Build audit log + immutable run ledger export with signed manifest support, including provenance chain to artifacts. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-34-003 | TODO | Execute perf/scale validation (≥10k pending jobs, dispatch P95 <150 ms) and add autoscaling hooks with health probes. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-34-004 | TODO | Package orchestrator container, Helm overlays, offline bundle seeds, provenance attestations, and compliance checklist for GA. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-35-101 | TODO | Register `export` job type with quotas/rate policies, expose telemetry, and ensure exporter workers heartbeat via orchestrator contracts. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-36-101 | TODO | Capture distribution metadata and retention timestamps for export jobs, updating dashboards and SSE payloads. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-37-101 | TODO | Enable scheduled export runs, retention pruning hooks, and failure alerting tied to export job class. 
| Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-32-002 | TODO | Implement scheduler DAG planner + dependency resolver, job state machine, and critical-path metadata without yet issuing control actions. Dependencies: ORCH-SVC-32-001. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-32-003 | TODO | Expose read-only REST APIs (sources, runs, jobs, DAG) with OpenAPI, validation, pagination, and tenant scoping. Dependencies: ORCH-SVC-32-002. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-32-004 | TODO | Implement WebSocket/SSE stream for job/run updates, emit structured metrics counters/histograms, and add health probes. Dependencies: ORCH-SVC-32-003. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-32-005 | TODO | Deliver worker claim/heartbeat/progress endpoints capturing artifact metadata/checksums and enforcing idempotency keys. Dependencies: ORCH-SVC-32-004. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-33-001 | TODO | Enable `sources test`. Dependencies: ORCH-SVC-32-005. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-33-002 | TODO | Implement per-source/tenant adaptive token-bucket rate limiter, concurrency caps, and backpressure signals reacting to upstream 429/503. Dependencies: ORCH-SVC-33-001. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-33-003 | TODO | Add watermark/backfill manager with event-time windows, duplicate suppression, dry-run preview endpoint, and safety validations. Dependencies: ORCH-SVC-33-002. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-33-004 | TODO | Deliver dead-letter store, replay endpoints, and error classification surfaces with remediation hints + notification hooks. Dependencies: ORCH-SVC-33-003. 
| Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-34-001 | TODO | Implement quota management APIs, per-tenant SLO burn-rate computation, and alert budget tracking surfaced via metrics. Dependencies: ORCH-SVC-33-004. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-34-002 | TODO | Build audit log + immutable run ledger export with signed manifest support, including provenance chain to artifacts. Dependencies: ORCH-SVC-34-001. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-34-003 | TODO | Execute perf/scale validation (≥10k pending jobs, dispatch P95 <150 ms) and add autoscaling hooks with health probes. Dependencies: ORCH-SVC-34-002. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-34-004 | TODO | Package orchestrator container, Helm overlays, offline bundle seeds, provenance attestations, and compliance checklist for GA. Dependencies: ORCH-SVC-34-003. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-35-101 | TODO | Register `export` job type with quotas/rate policies, expose telemetry, and ensure exporter workers heartbeat via orchestrator contracts. Dependencies: ORCH-SVC-34-004. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-36-101 | TODO | Capture distribution metadata and retention timestamps for export jobs, updating dashboards and SSE payloads. Dependencies: ORCH-SVC-35-101. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-37-101 | TODO | Enable scheduled export runs, retention pruning hooks, and failure alerting tied to export job class. Dependencies: ORCH-SVC-36-101. 
| Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) [Scheduling & Automation] 150.A) Orchestrator.III @@ -49,20 +49,20 @@ Depends on: Sprint 150.A - Orchestrator.II Summary: Scheduling & Automation focus on Orchestrator (phase III). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -ORCH-SVC-38-101 | TODO | Standardize event envelope (policy/export/job lifecycle) with idempotency keys, ensure export/job failure events published to notifier bus with provenance metadata. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-41-101 | TODO | Register `pack-run` job type, persist run metadata, integrate logs/artifacts collection, and expose API for Task Runner scheduling. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) -ORCH-SVC-42-101 | TODO | Stream pack run logs via SSE/WS, add manifest endpoints, enforce quotas, and emit pack run events to Notifications Studio. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-38-101 | TODO | Standardize event envelope (policy/export/job lifecycle) with idempotency keys, ensure export/job failure events published to notifier bus with provenance metadata. Dependencies: ORCH-SVC-37-101. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-41-101 | TODO | Register `pack-run` job type, persist run metadata, integrate logs/artifacts collection, and expose API for Task Runner scheduling. Dependencies: ORCH-SVC-38-101. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) +ORCH-SVC-42-101 | TODO | Stream pack run logs via SSE/WS, add manifest endpoints, enforce quotas, and emit pack run events to Notifications Studio. Dependencies: ORCH-SVC-41-101. 
| Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) ORCH-TEN-48-001 | TODO | Include `tenant_id`/`project_id` in job specs, set DB session context before processing, enforce context on all queries, and reject jobs missing tenant metadata. | Orchestrator Service Guild (src/Orchestrator/StellaOps.Orchestrator/TASKS.md) WORKER-GO-32-001 | TODO | Bootstrap Go SDK project with configuration binding, auth headers, job claim/acknowledge client, and smoke sample. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md) -WORKER-GO-32-002 | TODO | Add heartbeat/progress helpers, structured logging hooks, Prometheus metrics, and jittered retry defaults. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md) -WORKER-GO-33-001 | TODO | Implement artifact publish helpers (object storage client, checksum hashing, metadata payload) and idempotency guard. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md) -WORKER-GO-33-002 | TODO | Provide error classification/retry helper, exponential backoff controls, and structured failure reporting to orchestrator. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md) -WORKER-GO-34-001 | TODO | Add backfill range execution helpers, watermark handshake utilities, and artifact dedupe verification for backfills. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md) +WORKER-GO-32-002 | TODO | Add heartbeat/progress helpers, structured logging hooks, Prometheus metrics, and jittered retry defaults. Dependencies: WORKER-GO-32-001. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md) +WORKER-GO-33-001 | TODO | Implement artifact publish helpers (object storage client, checksum hashing, metadata payload) and idempotency guard. Dependencies: WORKER-GO-32-002. 
| Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md) +WORKER-GO-33-002 | TODO | Provide error classification/retry helper, exponential backoff controls, and structured failure reporting to orchestrator. Dependencies: WORKER-GO-33-001. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md) +WORKER-GO-34-001 | TODO | Add backfill range execution helpers, watermark handshake utilities, and artifact dedupe verification for backfills. Dependencies: WORKER-GO-33-002. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md) WORKER-PY-32-001 | TODO | Bootstrap asyncio-based Python SDK (config, auth headers, job claim/ack) plus sample worker script. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md) -WORKER-PY-32-002 | TODO | Implement heartbeat/progress helpers with structured logging, metrics exporter, and cancellation-safe retries. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md) -WORKER-PY-33-001 | TODO | Add artifact publish/idempotency helpers (object storage adapters, checksum hashing, metadata payload) for Python workers. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md) -WORKER-PY-33-002 | TODO | Provide error classification/backoff helper mapping to orchestrator codes, including jittered retries and structured failure reports. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md) -WORKER-PY-34-001 | TODO | Implement backfill range iteration, watermark handshake, and artifact dedupe verification utilities for Python workers. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md) +WORKER-PY-32-002 | TODO | Implement heartbeat/progress helpers with structured logging, metrics exporter, and cancellation-safe retries. Dependencies: WORKER-PY-32-001. 
| Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md) +WORKER-PY-33-001 | TODO | Add artifact publish/idempotency helpers (object storage adapters, checksum hashing, metadata payload) for Python workers. Dependencies: WORKER-PY-32-002. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md) +WORKER-PY-33-002 | TODO | Provide error classification/backoff helper mapping to orchestrator codes, including jittered retries and structured failure reports. Dependencies: WORKER-PY-33-001. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md) +WORKER-PY-34-001 | TODO | Implement backfill range iteration, watermark handshake, and artifact dedupe verification utilities for Python workers. Dependencies: WORKER-PY-33-002. | Worker SDK Guild (src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md) [Scheduling & Automation] 150.B) PacksRegistry @@ -71,8 +71,8 @@ Summary: Scheduling & Automation focus on PacksRegistry). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- PACKS-REG-41-001 | TODO | Implement registry service, migrations for `packs_index`, `parity_matrix`, provenance docs; support pack upload/list/get, signature verification, RBAC enforcement, and provenance manifest storage. | Packs Registry Guild (src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md) -PACKS-REG-42-001 | TODO | Add version lifecycle (promote/deprecate), tenant allowlists, provenance export, signature rotation, audit logs, and Offline Kit seed support. | Packs Registry Guild (src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md) -PACKS-REG-43-001 | TODO | Implement registry mirroring, pack signing policies, attestation integration, and compliance dashboards; integrate with Export Center. 
| Packs Registry Guild (src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md) +PACKS-REG-42-001 | TODO | Add version lifecycle (promote/deprecate), tenant allowlists, provenance export, signature rotation, audit logs, and Offline Kit seed support. Dependencies: PACKS-REG-41-001. | Packs Registry Guild (src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md) +PACKS-REG-43-001 | TODO | Implement registry mirroring, pack signing policies, attestation integration, and compliance dashboards; integrate with Export Center. Dependencies: PACKS-REG-42-001. | Packs Registry Guild (src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md) [Scheduling & Automation] 150.C) Scheduler.I @@ -81,20 +81,20 @@ Summary: Scheduling & Automation focus on Scheduler (phase I). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- SCHED-CONSOLE-23-001 | TODO | Extend runs APIs with live progress SSE endpoints (`/console/runs/{id}/stream`), queue lag summaries, diff metadata fetch, retry/cancel hooks with RBAC enforcement, and deterministic pagination for history views consumed by Console. | Scheduler WebService Guild, BE-Base Platform Guild (src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md) -SCHED-CONSOLE-27-001 | TODO | Provide policy batch simulation orchestration endpoints (`/policies/simulations` POST/GET) exposing run creation, shard status, SSE progress, cancellation, and retries with RBAC enforcement. | Scheduler WebService Guild, Policy Registry Guild (src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md) -SCHED-CONSOLE-27-002 | TODO | Emit telemetry endpoints/metrics (`policy_simulation_queue_depth`, `policy_simulation_latency`) and webhook callbacks for completion/failure consumed by Registry. 
| Scheduler WebService Guild, Observability Guild (src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md) +SCHED-CONSOLE-27-001 | TODO | Provide policy batch simulation orchestration endpoints (`/policies/simulations` POST/GET) exposing run creation, shard status, SSE progress, cancellation, and retries with RBAC enforcement. Dependencies: SCHED-CONSOLE-23-001. | Scheduler WebService Guild, Policy Registry Guild (src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md) +SCHED-CONSOLE-27-002 | TODO | Emit telemetry endpoints/metrics (`policy_simulation_queue_depth`, `policy_simulation_latency`) and webhook callbacks for completion/failure consumed by Registry. Dependencies: SCHED-CONSOLE-27-001. | Scheduler WebService Guild, Observability Guild (src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md) SCHED-IMPACT-16-303 | TODO | Snapshot/compaction + invalidation for removed images; persistence to RocksDB/Redis per architecture. | Scheduler ImpactIndex Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/TASKS.md) SCHED-SURFACE-01 | TODO | Evaluate Surface.FS pointers when planning delta scans to avoid redundant work and prioritise drift-triggered assets. | Scheduler Worker Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) SCHED-VULN-29-001 | TODO | Expose resolver job APIs (`POST /vuln/resolver/jobs`, `GET /vuln/resolver/jobs/{id}`) to trigger candidate recomputation per artifact/policy change with RBAC and rate limits. | Scheduler WebService Guild, Findings Ledger Guild (src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md) -SCHED-VULN-29-002 | TODO | Provide projector lag metrics endpoint and webhook notifications for backlog breaches consumed by DevOps dashboards. | Scheduler WebService Guild, Observability Guild (src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md) +SCHED-VULN-29-002 | TODO | Provide projector lag metrics endpoint and webhook notifications for backlog breaches consumed by DevOps dashboards. 
Dependencies: SCHED-VULN-29-001. | Scheduler WebService Guild, Observability Guild (src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md) SCHED-WEB-20-002 | BLOCKED (waiting on SCHED-WORKER-20-301) | Provide simulation trigger endpoint returning diff preview metadata and job state for UI/CLI consumption. | Scheduler WebService Guild (src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md) -SCHED-WEB-21-004 | DOING (2025-10-26) | Persist graph job lifecycle to Mongo storage and publish `scheduler.graph.job.completed@1` events + outbound webhook to Cartographer. | Scheduler WebService Guild, Scheduler Storage Guild (src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md) +SCHED-WEB-21-004 | DOING (2025-10-26) | Persist graph job lifecycle to Mongo storage and publish `scheduler.graph.job.completed@1` events + outbound webhook to Cartographer. Dependencies: SCHED-WEB-20-002. | Scheduler WebService Guild, Scheduler Storage Guild (src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md) SCHED-WORKER-21-203 | TODO | Export metrics (`graph_build_seconds`, `graph_jobs_inflight`, `overlay_lag_seconds`) and structured logs with tenant/graph identifiers. | Scheduler Worker Guild, Observability Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) -SCHED-WORKER-23-101 | TODO | Implement policy re-evaluation worker that shards assets, honours rate limits, and updates progress for Console after policy activation events. | Scheduler Worker Guild, Policy Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) -SCHED-WORKER-23-102 | TODO | Add reconciliation job ensuring re-eval completion within SLA, emitting alerts on backlog and persisting status to `policy_runs`. | Scheduler Worker Guild, Observability Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) -SCHED-WORKER-25-101 | TODO | Implement exception lifecycle worker handling auto-activation/expiry and publishing `exception.*` events with retries/backoff. 
| Scheduler Worker Guild, Policy Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) -SCHED-WORKER-25-102 | TODO | Add expiring notification job generating digests, marking `expiring` state, updating metrics/alerts. | Scheduler Worker Guild, Observability Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) -SCHED-WORKER-26-201 | TODO | Build reachability joiner worker that combines SBOM snapshots with signals, writes cached facts, and schedules updates on new events. | Scheduler Worker Guild, Signals Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) +SCHED-WORKER-23-101 | TODO | Implement policy re-evaluation worker that shards assets, honours rate limits, and updates progress for Console after policy activation events. Dependencies: SCHED-WORKER-21-203. | Scheduler Worker Guild, Policy Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) +SCHED-WORKER-23-102 | TODO | Add reconciliation job ensuring re-eval completion within SLA, emitting alerts on backlog and persisting status to `policy_runs`. Dependencies: SCHED-WORKER-23-101. | Scheduler Worker Guild, Observability Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) +SCHED-WORKER-25-101 | TODO | Implement exception lifecycle worker handling auto-activation/expiry and publishing `exception.*` events with retries/backoff. Dependencies: SCHED-WORKER-23-102. | Scheduler Worker Guild, Policy Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) +SCHED-WORKER-25-102 | TODO | Add expiring notification job generating digests, marking `expiring` state, updating metrics/alerts. Dependencies: SCHED-WORKER-25-101. | Scheduler Worker Guild, Observability Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) +SCHED-WORKER-26-201 | TODO | Build reachability joiner worker that combines SBOM snapshots with signals, writes cached facts, and schedules updates on new events. Dependencies: SCHED-WORKER-25-102. 
| Scheduler Worker Guild, Signals Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) [Scheduling & Automation] 150.C) Scheduler.II @@ -102,15 +102,15 @@ Depends on: Sprint 150.C - Scheduler.I Summary: Scheduling & Automation focus on Scheduler (phase II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -SCHED-WORKER-26-202 | TODO | Implement staleness monitor + notifier for outdated reachability facts, publishing warnings and updating dashboards. | Scheduler Worker Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) -SCHED-WORKER-27-301 | TODO | Implement policy batch simulation worker: shard SBOM inventories, invoke Policy Engine, emit partial results, handle retries/backoff, and publish progress events. | Scheduler Worker Guild, Policy Registry Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) -SCHED-WORKER-27-302 | TODO | Build reducer job aggregating shard outputs into final manifests (counts, deltas, samples) and writing to object storage with checksums; emit completion events. | Scheduler Worker Guild, Observability Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) -SCHED-WORKER-27-303 | TODO | Enforce tenant isolation, scope checks, and attestation integration for simulation jobs; secret scanning pipeline for uploaded policy sources. | Scheduler Worker Guild, Security Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) -SCHED-WORKER-29-001 | TODO | Implement resolver worker generating candidate findings from inventory + advisory evidence, respecting ecosystem version semantics and path scope; emit jobs for policy evaluation. | Scheduler Worker Guild, Findings Ledger Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) -SCHED-WORKER-29-002 | TODO | Build evaluation orchestration worker invoking Policy Engine batch eval, writing results to Findings Ledger projector queue, and handling retries/backoff. 
| Scheduler Worker Guild, Policy Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) -SCHED-WORKER-29-003 | TODO | Add monitoring for resolver/evaluation backlog, SLA breaches, and export job queue; expose metrics/alerts feeding DevOps dashboards. | Scheduler Worker Guild, Observability Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) +SCHED-WORKER-26-202 | TODO | Implement staleness monitor + notifier for outdated reachability facts, publishing warnings and updating dashboards. Dependencies: SCHED-WORKER-26-201. | Scheduler Worker Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) +SCHED-WORKER-27-301 | TODO | Implement policy batch simulation worker: shard SBOM inventories, invoke Policy Engine, emit partial results, handle retries/backoff, and publish progress events. Dependencies: SCHED-WORKER-26-202. | Scheduler Worker Guild, Policy Registry Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) +SCHED-WORKER-27-302 | TODO | Build reducer job aggregating shard outputs into final manifests (counts, deltas, samples) and writing to object storage with checksums; emit completion events. Dependencies: SCHED-WORKER-27-301. | Scheduler Worker Guild, Observability Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) +SCHED-WORKER-27-303 | TODO | Enforce tenant isolation, scope checks, and attestation integration for simulation jobs; secret scanning pipeline for uploaded policy sources. Dependencies: SCHED-WORKER-27-302. | Scheduler Worker Guild, Security Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) +SCHED-WORKER-29-001 | TODO | Implement resolver worker generating candidate findings from inventory + advisory evidence, respecting ecosystem version semantics and path scope; emit jobs for policy evaluation. Dependencies: SCHED-WORKER-27-303. 
| Scheduler Worker Guild, Findings Ledger Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) +SCHED-WORKER-29-002 | TODO | Build evaluation orchestration worker invoking Policy Engine batch eval, writing results to Findings Ledger projector queue, and handling retries/backoff. Dependencies: SCHED-WORKER-29-001. | Scheduler Worker Guild, Policy Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) +SCHED-WORKER-29-003 | TODO | Add monitoring for resolver/evaluation backlog, SLA breaches, and export job queue; expose metrics/alerts feeding DevOps dashboards. Dependencies: SCHED-WORKER-29-002. | Scheduler Worker Guild, Observability Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) SCHED-WORKER-CONSOLE-23-201 | TODO | Stream run progress events (stage status, tuples processed, SLA hints) to Redis/NATS for Console SSE, with heartbeat, dedupe, and retention policy. Publish metrics + structured logs for queue lag. | Scheduler Worker Guild, Observability Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) -SCHED-WORKER-CONSOLE-23-202 | TODO | Coordinate evidence bundle jobs (enqueue, track status, cleanup) and expose job manifests to Web gateway; ensure idempotent reruns and cancellation support. | Scheduler Worker Guild, Policy Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) +SCHED-WORKER-CONSOLE-23-202 | TODO | Coordinate evidence bundle jobs (enqueue, track status, cleanup) and expose job manifests to Web gateway; ensure idempotent reruns and cancellation support. Dependencies: SCHED-WORKER-CONSOLE-23-201. | Scheduler Worker Guild, Policy Guild (src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md) [Scheduling & Automation] 150.D) TaskRunner.I @@ -119,20 +119,20 @@ Summary: Scheduling & Automation focus on TaskRunner (phase I). 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- TASKRUN-41-001 | TODO | Bootstrap service, define migrations for `pack_runs`, `pack_run_logs`, `pack_artifacts`, implement run API (create/get/log stream), local executor, approvals pause, artifact capture, and provenance manifest generation. | Task Runner Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) -TASKRUN-42-001 | DOING (2025-10-29) | Add loops, conditionals, `maxParallel`, outputs, simulation mode, policy gate integration, and failure recovery (retry/abort) with deterministic state. | Task Runner Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) -TASKRUN-43-001 | DOING (2025-10-29) | Implement approvals workflow (resume after approval), notifications integration, remote artifact uploads, chaos resilience, secret injection, and audit logs. | Task Runner Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) +TASKRUN-42-001 | DOING (2025-10-29) | Add loops, conditionals, `maxParallel`, outputs, simulation mode, policy gate integration, and failure recovery (retry/abort) with deterministic state. Dependencies: TASKRUN-41-001. | Task Runner Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) +TASKRUN-43-001 | DOING (2025-10-29) | Implement approvals workflow (resume after approval), notifications integration, remote artifact uploads, chaos resilience, secret injection, and audit logs. Dependencies: TASKRUN-42-001. | Task Runner Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) TASKRUN-AIRGAP-56-001 | TODO | Enforce plan-time validation rejecting steps with non-allowlisted network calls in sealed mode and surface remediation errors. | Task Runner Guild, AirGap Policy Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) -TASKRUN-AIRGAP-56-002 | TODO | Add helper steps for bundle ingestion (checksum verification, staging to object store) with deterministic outputs. 
| Task Runner Guild, AirGap Importer Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) -TASKRUN-AIRGAP-57-001 | TODO | Refuse to execute plans when environment sealed=false but declared sealed install; emit advisory timeline events. | Task Runner Guild, AirGap Controller Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) -TASKRUN-AIRGAP-58-001 | TODO | Capture bundle import job transcripts, hashed inputs, and outputs into portable evidence bundles. | Task Runner Guild, Evidence Locker Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) +TASKRUN-AIRGAP-56-002 | TODO | Add helper steps for bundle ingestion (checksum verification, staging to object store) with deterministic outputs. Dependencies: TASKRUN-AIRGAP-56-001. | Task Runner Guild, AirGap Importer Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) +TASKRUN-AIRGAP-57-001 | TODO | Refuse to execute plans when environment sealed=false but declared sealed install; emit advisory timeline events. Dependencies: TASKRUN-AIRGAP-56-002. | Task Runner Guild, AirGap Controller Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) +TASKRUN-AIRGAP-58-001 | TODO | Capture bundle import job transcripts, hashed inputs, and outputs into portable evidence bundles. Dependencies: TASKRUN-AIRGAP-57-001. | Task Runner Guild, Evidence Locker Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) TASKRUN-OAS-61-001 | TODO | Document Task Runner APIs (pack runs, logs, approvals) in service OAS, including streaming response schemas and examples. | Task Runner Guild, API Contracts Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) -TASKRUN-OAS-61-002 | TODO | Expose `GET /.well-known/openapi` returning signed spec metadata, build version, and ETag. | Task Runner Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) -TASKRUN-OAS-62-001 | TODO | Provide SDK examples for pack run lifecycle; ensure SDKs offer streaming log helpers and paginator wrappers. 
| Task Runner Guild, SDK Generator Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) -TASKRUN-OAS-63-001 | TODO | Implement deprecation header support and Sunset handling for legacy pack APIs; emit notifications metadata. | Task Runner Guild, API Governance Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) +TASKRUN-OAS-61-002 | TODO | Expose `GET /.well-known/openapi` returning signed spec metadata, build version, and ETag. Dependencies: TASKRUN-OAS-61-001. | Task Runner Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) +TASKRUN-OAS-62-001 | TODO | Provide SDK examples for pack run lifecycle; ensure SDKs offer streaming log helpers and paginator wrappers. Dependencies: TASKRUN-OAS-61-002. | Task Runner Guild, SDK Generator Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) +TASKRUN-OAS-63-001 | TODO | Implement deprecation header support and Sunset handling for legacy pack APIs; emit notifications metadata. Dependencies: TASKRUN-OAS-62-001. | Task Runner Guild, API Governance Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) TASKRUN-OBS-50-001 | TODO | Adopt telemetry core in Task Runner host + worker executors, ensuring step execution spans/logs include `trace_id`, `tenant_id`, `run_id`, and scrubbed command transcripts. | Task Runner Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) -TASKRUN-OBS-51-001 | TODO | Emit metrics for step latency, retries, queue depth, sandbox resource usage; define SLOs for pack run completion and failure rate; surface burn-rate alerts to collector/Notifier. | Task Runner Guild, DevOps Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) -TASKRUN-OBS-52-001 | TODO | Produce timeline events for pack runs (`pack.started`, `pack.step.completed`, `pack.failed`) containing evidence pointers and policy gate context. Provide dedupe + retry logic. 
| Task Runner Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) -TASKRUN-OBS-53-001 | TODO | Capture step transcripts, artifact manifests, environment digests, and policy approvals into evidence locker snapshots; ensure redaction + hash chain coverage. | Task Runner Guild, Evidence Locker Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) +TASKRUN-OBS-51-001 | TODO | Emit metrics for step latency, retries, queue depth, sandbox resource usage; define SLOs for pack run completion and failure rate; surface burn-rate alerts to collector/Notifier. Dependencies: TASKRUN-OBS-50-001. | Task Runner Guild, DevOps Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) +TASKRUN-OBS-52-001 | TODO | Produce timeline events for pack runs (`pack.started`, `pack.step.completed`, `pack.failed`) containing evidence pointers and policy gate context. Provide dedupe + retry logic. Dependencies: TASKRUN-OBS-51-001. | Task Runner Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) +TASKRUN-OBS-53-001 | TODO | Capture step transcripts, artifact manifests, environment digests, and policy approvals into evidence locker snapshots; ensure redaction + hash chain coverage. Dependencies: TASKRUN-OBS-52-001. | Task Runner Guild, Evidence Locker Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) [Scheduling & Automation] 150.D) TaskRunner.II @@ -140,8 +140,8 @@ Depends on: Sprint 150.D - TaskRunner.I Summary: Scheduling & Automation focus on TaskRunner (phase II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -TASKRUN-OBS-54-001 | TODO | Generate DSSE attestations for pack runs (subjects = produced artifacts) and expose verification API/CLI integration. Store references in timeline events. | Task Runner Guild, Provenance Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) -TASKRUN-OBS-55-001 | TODO | Implement incident mode escalations (extra telemetry, debug artifact capture, retention bump) and align on automatic activation via SLO breach webhooks. 
| Task Runner Guild, DevOps Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) +TASKRUN-OBS-54-001 | TODO | Generate DSSE attestations for pack runs (subjects = produced artifacts) and expose verification API/CLI integration. Store references in timeline events. Dependencies: TASKRUN-OBS-53-001. | Task Runner Guild, Provenance Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) +TASKRUN-OBS-55-001 | TODO | Implement incident mode escalations (extra telemetry, debug artifact capture, retention bump) and align on automatic activation via SLO breach webhooks. Dependencies: TASKRUN-OBS-54-001. | Task Runner Guild, DevOps Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) TASKRUN-TEN-48-001 | TODO | Require tenant/project context for every pack run, set DB/object-store prefixes, block egress when tenant restricted, and propagate context to steps/logs. | Task Runner Guild (src/TaskRunner/StellaOps.TaskRunner/TASKS.md) diff --git a/docs/implplan/SPRINT_160_export_evidence.md b/docs/implplan/SPRINT_160_export_evidence.md index 4d9bcd4a..b9d85626 100644 --- a/docs/implplan/SPRINT_160_export_evidence.md +++ b/docs/implplan/SPRINT_160_export_evidence.md @@ -6,12 +6,12 @@ Summary: Export & Evidence focus on EvidenceLocker). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- EVID-OBS-53-001 | TODO | Bootstrap `StellaOps.Evidence.Locker` service with Postgres schema for `evidence_bundles`, `evidence_artifacts`, `evidence_holds`, tenant RLS, and object-store abstraction (WORM optional). | Evidence Locker Guild (src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md) -EVID-OBS-53-002 | TODO | Implement bundle builders for evaluation/job/export snapshots collecting inputs, outputs, env digests, run metadata. Generate Merkle tree + manifest skeletons and persist root hash. 
| Evidence Locker Guild, Orchestrator Guild (src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md) -EVID-OBS-53-003 | TODO | Expose REST APIs (`POST /evidence/snapshot`, `GET /evidence/:id`, `POST /evidence/verify`, `POST /evidence/hold/:case_id`) with audit logging, tenant enforcement, and size quotas. | Evidence Locker Guild, Security Guild (src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md) -EVID-OBS-54-001 | TODO | Attach DSSE signing and RFC3161 timestamping to bundle manifests; validate against Provenance verification library. Wire legal hold retention extension and chain-of-custody events for Timeline Indexer. | Evidence Locker Guild, Provenance Guild (src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md) -EVID-OBS-54-002 | TODO | Provide bundle download/export packaging (tgz) with checksum manifest, offline verification instructions, and sample fixture for CLI tests. | Evidence Locker Guild, DevEx/CLI Guild (src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md) -EVID-OBS-55-001 | TODO | Implement incident mode hooks increasing retention window, capturing additional debug artefacts, and emitting activation/deactivation events to Timeline Indexer + Notifier. | Evidence Locker Guild, DevOps Guild (src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md) -EVID-OBS-60-001 | TODO | Deliver portable evidence export flow for sealed environments: generate sealed bundles with checksum manifest, redacted metadata, and offline verification script. Document air-gapped import/verify procedures. | Evidence Locker Guild (src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md) +EVID-OBS-53-002 | TODO | Implement bundle builders for evaluation/job/export snapshots collecting inputs, outputs, env digests, run metadata. Generate Merkle tree + manifest skeletons and persist root hash. Dependencies: EVID-OBS-53-001. 
| Evidence Locker Guild, Orchestrator Guild (src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md) +EVID-OBS-53-003 | TODO | Expose REST APIs (`POST /evidence/snapshot`, `GET /evidence/:id`, `POST /evidence/verify`, `POST /evidence/hold/:case_id`) with audit logging, tenant enforcement, and size quotas. Dependencies: EVID-OBS-53-002. | Evidence Locker Guild, Security Guild (src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md) +EVID-OBS-54-001 | TODO | Attach DSSE signing and RFC3161 timestamping to bundle manifests; validate against Provenance verification library. Wire legal hold retention extension and chain-of-custody events for Timeline Indexer. Dependencies: EVID-OBS-53-003. | Evidence Locker Guild, Provenance Guild (src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md) +EVID-OBS-54-002 | TODO | Provide bundle download/export packaging (tgz) with checksum manifest, offline verification instructions, and sample fixture for CLI tests. Dependencies: EVID-OBS-54-001. | Evidence Locker Guild, DevEx/CLI Guild (src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md) +EVID-OBS-55-001 | TODO | Implement incident mode hooks increasing retention window, capturing additional debug artefacts, and emitting activation/deactivation events to Timeline Indexer + Notifier. Dependencies: EVID-OBS-54-002. | Evidence Locker Guild, DevOps Guild (src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md) +EVID-OBS-60-001 | TODO | Deliver portable evidence export flow for sealed environments: generate sealed bundles with checksum manifest, redacted metadata, and offline verification script. Document air-gapped import/verify procedures. Dependencies: EVID-OBS-55-001. | Evidence Locker Guild (src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md) [Export & Evidence] 160.B) ExportCenter.I @@ -20,20 +20,20 @@ Summary: Export & Evidence focus on ExportCenter (phase I). 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- DVOFF-64-001 | TODO | Implement Export Center job `devportal --offline` bundling portal HTML, specs, SDK artifacts, changelogs, and verification manifest. | DevPortal Offline Guild, Exporter Guild (src/ExportCenter/StellaOps.ExportCenter.DevPortalOffline/TASKS.md) -DVOFF-64-002 | TODO | Provide verification CLI (`stella devportal verify bundle.tgz`) ensuring integrity before import. | DevPortal Offline Guild, AirGap Controller Guild (src/ExportCenter/StellaOps.ExportCenter.DevPortalOffline/TASKS.md) +DVOFF-64-002 | TODO | Provide verification CLI (`stella devportal verify bundle.tgz`) ensuring integrity before import. Dependencies: DVOFF-64-001. | DevPortal Offline Guild, AirGap Controller Guild (src/ExportCenter/StellaOps.ExportCenter.DevPortalOffline/TASKS.md) EXPORT-AIRGAP-56-001 | TODO | Extend Export Center to build Mirror Bundles as export profiles, including advisories/VEX/policy packs manifesting DSSE/TUF metadata. | Exporter Service Guild, Mirror Creator Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-AIRGAP-56-002 | TODO | Package Bootstrap Pack (images + charts) into OCI archives with signed manifests for air-gapped deployment. | Exporter Service Guild, DevOps Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-AIRGAP-57-001 | TODO | Integrate portable evidence export mode producing sealed evidence bundles with DSSE signatures and chain-of-custody metadata. | Exporter Service Guild, Evidence Locker Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-AIRGAP-58-001 | TODO | Emit notifications and timeline events when Mirror Bundles or Bootstrap packs are ready for transfer. | Exporter Service Guild, Notifications Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-AIRGAP-56-002 | TODO | Package Bootstrap Pack (images + charts) into OCI archives with signed manifests for air-gapped deployment. Dependencies: EXPORT-AIRGAP-56-001. 
| Exporter Service Guild, DevOps Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-AIRGAP-57-001 | TODO | Integrate portable evidence export mode producing sealed evidence bundles with DSSE signatures and chain-of-custody metadata. Dependencies: EXPORT-AIRGAP-56-002. | Exporter Service Guild, Evidence Locker Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-AIRGAP-58-001 | TODO | Emit notifications and timeline events when Mirror Bundles or Bootstrap packs are ready for transfer. Dependencies: EXPORT-AIRGAP-57-001. | Exporter Service Guild, Notifications Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) EXPORT-ATTEST-74-001 | TODO | Implement export job producing attestation bundles with manifest, checksums, DSSE signature, and optional transparency log segments. | Attestation Bundle Guild, Attestor Service Guild (src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md) -EXPORT-ATTEST-74-001 | TODO | Implement attestation bundle export job via Export Center. | Exporter Service Guild, Attestation Bundle Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-ATTEST-74-002 | TODO | Integrate bundle job into CI/offline kit packaging with checksum publication. | Attestation Bundle Guild, DevOps Guild (src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md) -EXPORT-ATTEST-75-001 | TODO | Provide CLI command `stella attest bundle verify/import` for air-gap usage. | Attestation Bundle Guild, CLI Attestor Guild (src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md) -EXPORT-ATTEST-75-001 | TODO | Integrate attestation bundles into offline kit flows and CLI commands. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-ATTEST-75-002 | TODO | Document `/docs/modules/attestor/airgap.md` with bundle workflows and verification steps. 
| Attestation Bundle Guild, Docs Guild (src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md) +EXPORT-ATTEST-74-001 | TODO | Implement attestation bundle export job via Export Center. Dependencies: EXPORT-ATTEST-74-001 (StellaOps.ExportCenter.AttestationBundles). | Exporter Service Guild, Attestation Bundle Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-ATTEST-74-002 | TODO | Integrate bundle job into CI/offline kit packaging with checksum publication. Dependencies: EXPORT-ATTEST-74-001. | Attestation Bundle Guild, DevOps Guild (src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md) +EXPORT-ATTEST-75-001 | TODO | Provide CLI command `stella attest bundle verify/import` for air-gap usage. Dependencies: EXPORT-ATTEST-74-002. | Attestation Bundle Guild, CLI Attestor Guild (src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md) +EXPORT-ATTEST-75-001 | TODO | Integrate attestation bundles into offline kit flows and CLI commands. Dependencies: EXPORT-ATTEST-75-001 (StellaOps.ExportCenter.AttestationBundles). | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-ATTEST-75-002 | TODO | Document `/docs/modules/attestor/airgap.md` with bundle workflows and verification steps. Dependencies: EXPORT-ATTEST-75-001. | Attestation Bundle Guild, Docs Guild (src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md) EXPORT-OAS-61-001 | TODO | Update Exporter OAS covering profiles, runs, downloads, devportal exports with standard error envelope and examples. | Exporter Service Guild, API Contracts Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-OAS-61-002 | TODO | Provide `/.well-known/openapi` discovery endpoint with version metadata and ETag. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-OAS-62-001 | TODO | Ensure SDKs include export profile/run clients with streaming download helpers; add smoke tests.
| Exporter Service Guild, SDK Generator Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-OAS-61-002 | TODO | Provide `/.well-known/openapi` discovery endpoint with version metadata and ETag. Dependencies: EXPORT-OAS-61-001. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-OAS-62-001 | TODO | Ensure SDKs include export profile/run clients with streaming download helpers; add smoke tests. Dependencies: EXPORT-OAS-61-002. | Exporter Service Guild, SDK Generator Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) [Export & Evidence] 160.B) ExportCenter.II @@ -41,21 +41,21 @@ Depends on: Sprint 160.B - ExportCenter.I Summary: Export & Evidence focus on ExportCenter (phase II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -EXPORT-OAS-63-001 | TODO | Implement deprecation headers and notifications for legacy export endpoints. | Exporter Service Guild, API Governance Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-OAS-63-001 | TODO | Implement deprecation headers and notifications for legacy export endpoints. Dependencies: EXPORT-OAS-62-001. | Exporter Service Guild, API Governance Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) EXPORT-OBS-50-001 | TODO | Adopt telemetry core in exporter service + workers, ensuring spans/logs capture profile id, tenant, artifact counts, distribution type, and trace IDs. | Exporter Service Guild, Observability Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-OBS-51-001 | TODO | Emit metrics for export planner latency, bundle build time, distribution success rate, bundle size, and define SLOs (bundle availability P95 <90s). Add Grafana dashboards + burn-rate alerts. 
| Exporter Service Guild, DevOps Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-OBS-52-001 | TODO | Publish timeline events for export lifecycle (`export.requested`, `export.built`, `export.distributed`, `export.failed`) embedding manifest hashes and evidence refs. Provide dedupe + retry logic. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-OBS-53-001 | TODO | Push export manifests + distribution transcripts to evidence locker bundles, ensuring Merkle root alignment and DSSE pre-sign data available. | Exporter Service Guild, Evidence Locker Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-OBS-54-001 | TODO | Produce DSSE attestations for each export artifact and distribution target, expose verification API `/exports/{id}/attestation`, and integrate with CLI verify path. | Exporter Service Guild, Provenance Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-OBS-55-001 | TODO | Add incident mode enhancements (extra tracing for slow exports, additional debug logs, retention bump). Emit incident activation events to timeline + notifier. | Exporter Service Guild, DevOps Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-OBS-51-001 | TODO | Emit metrics for export planner latency, bundle build time, distribution success rate, bundle size, and define SLOs (bundle availability P95 <90s). Add Grafana dashboards + burn-rate alerts. Dependencies: EXPORT-OBS-50-001. | Exporter Service Guild, DevOps Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-OBS-52-001 | TODO | Publish timeline events for export lifecycle (`export.requested`, `export.built`, `export.distributed`, `export.failed`) embedding manifest hashes and evidence refs. Provide dedupe + retry logic. Dependencies: EXPORT-OBS-51-001. 
| Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-OBS-53-001 | TODO | Push export manifests + distribution transcripts to evidence locker bundles, ensuring Merkle root alignment and DSSE pre-sign data available. Dependencies: EXPORT-OBS-52-001. | Exporter Service Guild, Evidence Locker Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-OBS-54-001 | TODO | Produce DSSE attestations for each export artifact and distribution target, expose verification API `/exports/{id}/attestation`, and integrate with CLI verify path. Dependencies: EXPORT-OBS-53-001. | Exporter Service Guild, Provenance Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-OBS-55-001 | TODO | Add incident mode enhancements (extra tracing for slow exports, additional debug logs, retention bump). Emit incident activation events to timeline + notifier. Dependencies: EXPORT-OBS-54-001. | Exporter Service Guild, DevOps Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) EXPORT-RISK-69-001 | TODO | Add Export Center job handler `risk-bundle` with provider selection, manifest signing, and audit logging. | Exporter Service Guild, Risk Bundle Export Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-RISK-69-002 | TODO | Enable simulation report exports pulling scored data + explainability snapshots. | Exporter Service Guild, Risk Engine Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-RISK-70-001 | TODO | Integrate risk bundle builds into offline kit packaging with checksum verification. | Exporter Service Guild, DevOps Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-RISK-69-002 | TODO | Enable simulation report exports pulling scored data + explainability snapshots. Dependencies: EXPORT-RISK-69-001. 
| Exporter Service Guild, Risk Engine Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-RISK-70-001 | TODO | Integrate risk bundle builds into offline kit packaging with checksum verification. Dependencies: EXPORT-RISK-69-002. | Exporter Service Guild, DevOps Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) EXPORT-SVC-35-001 | BLOCKED (2025-10-29) | Bootstrap exporter service project, configuration, and Postgres migrations for `export_profiles`, `export_runs`, `export_inputs`, `export_distributions` with tenant scoping + tests. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-SVC-35-002 | TODO | Implement planner + scope resolver translating filters into ledger iterators and orchestrator job payloads; include deterministic sampling and validation. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-SVC-35-003 | TODO | Deliver JSON adapters (`json:raw`, `json:policy`) with canonical normalization, redaction allowlists, compression, and manifest counts. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-SVC-35-004 | TODO | Build mirror (full) adapter producing filesystem layout, indexes, manifests, and README with download-only distribution. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-SVC-35-005 | TODO | Implement manifest/provenance writer and KMS signing/attestation (detached + embedded) for bundle outputs. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-SVC-35-002 | TODO | Implement planner + scope resolver translating filters into ledger iterators and orchestrator job payloads; include deterministic sampling and validation. Dependencies: EXPORT-SVC-35-001. 
| Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-SVC-35-003 | TODO | Deliver JSON adapters (`json:raw`, `json:policy`) with canonical normalization, redaction allowlists, compression, and manifest counts. Dependencies: EXPORT-SVC-35-002. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-SVC-35-004 | TODO | Build mirror (full) adapter producing filesystem layout, indexes, manifests, and README with download-only distribution. Dependencies: EXPORT-SVC-35-003. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-SVC-35-005 | TODO | Implement manifest/provenance writer and KMS signing/attestation (detached + embedded) for bundle outputs. Dependencies: EXPORT-SVC-35-004. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) [Export & Evidence] 160.B) ExportCenter.III @@ -63,21 +63,21 @@ Depends on: Sprint 160.B - ExportCenter.II Summary: Export & Evidence focus on ExportCenter (phase III). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -EXPORT-SVC-35-006 | TODO | Expose Export API (profiles, runs, download, SSE updates) with audit logging, concurrency controls, and viewer/operator RBAC integration. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-SVC-36-001 | TODO | Implement Trivy DB adapter (core) with schema mappings, version flag gating, and validation harness. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-SVC-36-002 | TODO | Add Trivy Java DB variant with shared manifest entries and adapter regression tests. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-SVC-36-003 | TODO | Build OCI distribution engine (manifests, descriptors, annotations) with registry auth support and retries. 
| Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-SVC-36-004 | TODO | Extend planner/run lifecycle for distribution targets (OCI/object storage) with idempotent metadata updates and retention timestamps. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-SVC-37-001 | TODO | Implement mirror delta adapter with base manifest comparison, change set generation, and content-addressed reuse. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-SVC-37-002 | TODO | Add bundle encryption (age/AES-GCM), key wrapping via KMS, and verification tooling for encrypted outputs. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-SVC-37-003 | TODO | Implement export scheduling (cron/event), retention pruning, retry idempotency, and failure classification. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-SVC-37-004 | TODO | Provide verification API to stream manifests/hashes, compute hash+signature checks, and return attest status for CLI/UI. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) -EXPORT-SVC-43-001 | TODO | Integrate pack run manifests/artifacts into export bundles and CLI verification flows; expose provenance links. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-SVC-35-006 | TODO | Expose Export API (profiles, runs, download, SSE updates) with audit logging, concurrency controls, and viewer/operator RBAC integration. Dependencies: EXPORT-SVC-35-005. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-SVC-36-001 | TODO | Implement Trivy DB adapter (core) with schema mappings, version flag gating, and validation harness. Dependencies: EXPORT-SVC-35-006. 
| Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-SVC-36-002 | TODO | Add Trivy Java DB variant with shared manifest entries and adapter regression tests. Dependencies: EXPORT-SVC-36-001. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-SVC-36-003 | TODO | Build OCI distribution engine (manifests, descriptors, annotations) with registry auth support and retries. Dependencies: EXPORT-SVC-36-002. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-SVC-36-004 | TODO | Extend planner/run lifecycle for distribution targets (OCI/object storage) with idempotent metadata updates and retention timestamps. Dependencies: EXPORT-SVC-36-003. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-SVC-37-001 | TODO | Implement mirror delta adapter with base manifest comparison, change set generation, and content-addressed reuse. Dependencies: EXPORT-SVC-36-004. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-SVC-37-002 | TODO | Add bundle encryption (age/AES-GCM), key wrapping via KMS, and verification tooling for encrypted outputs. Dependencies: EXPORT-SVC-37-001. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-SVC-37-003 | TODO | Implement export scheduling (cron/event), retention pruning, retry idempotency, and failure classification. Dependencies: EXPORT-SVC-37-002. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-SVC-37-004 | TODO | Provide verification API to stream manifests/hashes, compute hash+signature checks, and return attest status for CLI/UI. Dependencies: EXPORT-SVC-37-003. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) +EXPORT-SVC-43-001 | TODO | Integrate pack run manifests/artifacts into export bundles and CLI verification flows; expose provenance links. Dependencies: EXPORT-SVC-37-004. 
| Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) EXPORT-TEN-48-001 | TODO | Prefix artifacts/manifests with tenant/project, enforce scope checks, and prevent cross-tenant exports unless explicitly whitelisted; update provenance. | Exporter Service Guild (src/ExportCenter/StellaOps.ExportCenter/TASKS.md) RISK-BUNDLE-69-001 | TODO | Implement `stella export risk-bundle` job producing tarball with provider datasets, manifests, and DSSE signatures. | Risk Bundle Export Guild, Risk Engine Guild (src/ExportCenter/StellaOps.ExportCenter.RiskBundles/TASKS.md) -RISK-BUNDLE-69-002 | TODO | Integrate bundle job into CI/offline kit pipelines with checksum publication. | Risk Bundle Export Guild, DevOps Guild (src/ExportCenter/StellaOps.ExportCenter.RiskBundles/TASKS.md) -RISK-BUNDLE-70-001 | TODO | Provide CLI `stella risk bundle verify` command to validate bundles before import. | Risk Bundle Export Guild, CLI Guild (src/ExportCenter/StellaOps.ExportCenter.RiskBundles/TASKS.md) -RISK-BUNDLE-70-002 | TODO | Publish `/docs/airgap/risk-bundles.md` detailing build/import/verification workflows. | Risk Bundle Export Guild, Docs Guild (src/ExportCenter/StellaOps.ExportCenter.RiskBundles/TASKS.md) +RISK-BUNDLE-69-002 | TODO | Integrate bundle job into CI/offline kit pipelines with checksum publication. Dependencies: RISK-BUNDLE-69-001. | Risk Bundle Export Guild, DevOps Guild (src/ExportCenter/StellaOps.ExportCenter.RiskBundles/TASKS.md) +RISK-BUNDLE-70-001 | TODO | Provide CLI `stella risk bundle verify` command to validate bundles before import. Dependencies: RISK-BUNDLE-69-002. | Risk Bundle Export Guild, CLI Guild (src/ExportCenter/StellaOps.ExportCenter.RiskBundles/TASKS.md) +RISK-BUNDLE-70-002 | TODO | Publish `/docs/airgap/risk-bundles.md` detailing build/import/verification workflows. Dependencies: RISK-BUNDLE-70-001. 
| Risk Bundle Export Guild, Docs Guild (src/ExportCenter/StellaOps.ExportCenter.RiskBundles/TASKS.md) [Export & Evidence] 160.C) TimelineIndexer @@ -86,10 +86,10 @@ Summary: Export & Evidence focus on TimelineIndexer). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- TIMELINE-OBS-52-001 | TODO | Bootstrap `StellaOps.Timeline.Indexer` service with Postgres migrations for `timeline_events`, `timeline_event_details`, `timeline_event_digests`; enable RLS scaffolding and deterministic migration scripts. | Timeline Indexer Guild (src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md) -TIMELINE-OBS-52-002 | TODO | Implement event ingestion pipeline (NATS/Redis consumers) with ordering guarantees, dedupe on `(event_id, tenant_id)`, correlation to trace IDs, and backpressure metrics. | Timeline Indexer Guild (src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md) -TIMELINE-OBS-52-003 | TODO | Expose REST/gRPC APIs for timeline queries (`GET /timeline`, `/timeline/{id}`) with filters, pagination, and tenant enforcement. Provide OpenAPI + contract tests. | Timeline Indexer Guild (src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md) -TIMELINE-OBS-52-004 | TODO | Finalize RLS policies, scope checks (`timeline:read`), and audit logging for query access. Include integration tests for cross-tenant isolation and legal hold markers. | Timeline Indexer Guild, Security Guild (src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md) -TIMELINE-OBS-53-001 | TODO | Link timeline events to evidence bundle digests + attestation subjects; expose `/timeline/{id}/evidence` endpoint returning signed manifest references. | Timeline Indexer Guild, Evidence Locker Guild (src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md) +TIMELINE-OBS-52-002 | TODO | Implement event ingestion pipeline (NATS/Redis consumers) with ordering guarantees, dedupe on `(event_id, tenant_id)`, correlation to trace IDs, and backpressure metrics. Dependencies: TIMELINE-OBS-52-001. 
| Timeline Indexer Guild (src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md) +TIMELINE-OBS-52-003 | TODO | Expose REST/gRPC APIs for timeline queries (`GET /timeline`, `/timeline/{id}`) with filters, pagination, and tenant enforcement. Provide OpenAPI + contract tests. Dependencies: TIMELINE-OBS-52-002. | Timeline Indexer Guild (src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md) +TIMELINE-OBS-52-004 | TODO | Finalize RLS policies, scope checks (`timeline:read`), and audit logging for query access. Include integration tests for cross-tenant isolation and legal hold markers. Dependencies: TIMELINE-OBS-52-003. | Timeline Indexer Guild, Security Guild (src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md) +TIMELINE-OBS-53-001 | TODO | Link timeline events to evidence bundle digests + attestation subjects; expose `/timeline/{id}/evidence` endpoint returning signed manifest references. Dependencies: TIMELINE-OBS-52-004. | Timeline Indexer Guild, Evidence Locker Guild (src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md) If all tasks are done - read next sprint section - SPRINT_170_notifications_telemetry.md diff --git a/docs/implplan/SPRINT_170_notifications_telemetry.md b/docs/implplan/SPRINT_170_notifications_telemetry.md index d2bb17be..3074ca6a 100644 --- a/docs/implplan/SPRINT_170_notifications_telemetry.md +++ b/docs/implplan/SPRINT_170_notifications_telemetry.md @@ -6,20 +6,20 @@ Summary: Notifications & Telemetry focus on Notifier (phase I). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- NOTIFY-AIRGAP-56-001 | TODO | Disable external webhook targets in sealed mode, default to enclave-safe channels (SMTP relay, syslog, file sink), and surface remediation guidance. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-AIRGAP-56-002 | TODO | Provide local notifier configurations bundled within Bootstrap Pack with deterministic secrets handling. 
| Notifications Service Guild, DevOps Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-AIRGAP-57-001 | TODO | Send staleness drift and bundle import notifications with remediation steps. | Notifications Service Guild, AirGap Time Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-AIRGAP-58-001 | TODO | Add portable evidence export completion notifications including checksum + location metadata. | Notifications Service Guild, Evidence Locker Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-AIRGAP-56-002 | TODO | Provide local notifier configurations bundled within Bootstrap Pack with deterministic secrets handling. Dependencies: NOTIFY-AIRGAP-56-001. | Notifications Service Guild, DevOps Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-AIRGAP-57-001 | TODO | Send staleness drift and bundle import notifications with remediation steps. Dependencies: NOTIFY-AIRGAP-56-002. | Notifications Service Guild, AirGap Time Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-AIRGAP-58-001 | TODO | Add portable evidence export completion notifications including checksum + location metadata. Dependencies: NOTIFY-AIRGAP-57-001. | Notifications Service Guild, Evidence Locker Guild (src/Notifier/StellaOps.Notifier/TASKS.md) NOTIFY-ATTEST-74-001 | TODO | Create notification templates for verification failures, expiring attestations, key revocations, and transparency anomalies. | Notifications Service Guild, Attestor Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-ATTEST-74-002 | TODO | Wire notifications to key rotation/revocation events and transparency witness failures. | Notifications Service Guild, KMS Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-ATTEST-74-002 | TODO | Wire notifications to key rotation/revocation events and transparency witness failures. Dependencies: NOTIFY-ATTEST-74-001. 
| Notifications Service Guild, KMS Guild (src/Notifier/StellaOps.Notifier/TASKS.md) NOTIFY-OAS-61-001 | TODO | Update notifier OAS with rules, templates, incidents, quiet hours endpoints using standard error envelope and examples. | Notifications Service Guild, API Contracts Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-OAS-61-002 | TODO | Implement `/.well-known/openapi` discovery endpoint with scope metadata. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-OAS-62-001 | TODO | Provide SDK usage examples for rule CRUD, incident ack, and quiet hours; ensure SDK smoke tests. | Notifications Service Guild, SDK Generator Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-OAS-63-001 | TODO | Emit deprecation headers and Notifications templates for retiring notifier APIs. | Notifications Service Guild, API Governance Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-OAS-61-002 | TODO | Implement `/.well-known/openapi` discovery endpoint with scope metadata. Dependencies: NOTIFY-OAS-61-001. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-OAS-62-001 | TODO | Provide SDK usage examples for rule CRUD, incident ack, and quiet hours; ensure SDK smoke tests. Dependencies: NOTIFY-OAS-61-002. | Notifications Service Guild, SDK Generator Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-OAS-63-001 | TODO | Emit deprecation headers and Notifications templates for retiring notifier APIs. Dependencies: NOTIFY-OAS-62-001. | Notifications Service Guild, API Governance Guild (src/Notifier/StellaOps.Notifier/TASKS.md) NOTIFY-OBS-51-001 | TODO | Integrate SLO evaluator webhooks into Notifier rules (burn-rate breaches, health degradations) with templates, routing, and suppression logic. Provide sample policies and ensure imposed rule propagation. 
| Notifications Service Guild, Observability Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-OBS-55-001 | TODO | Publish incident mode start/stop notifications with trace/evidence quick links, retention notes, and automatic escalation paths. Include quiet-hour overrides + legal compliance logging. | Notifications Service Guild, Ops Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-OBS-55-001 | TODO | Publish incident mode start/stop notifications with trace/evidence quick links, retention notes, and automatic escalation paths. Include quiet-hour overrides + legal compliance logging. Dependencies: NOTIFY-OBS-51-001. | Notifications Service Guild, Ops Guild (src/Notifier/StellaOps.Notifier/TASKS.md) NOTIFY-RISK-66-001 | TODO | Add notification triggers for risk severity escalation/downgrade events with profile metadata in payload. | Notifications Service Guild, Risk Engine Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-RISK-67-001 | TODO | Notify stakeholders when risk profiles are published, deprecated, or thresholds change. | Notifications Service Guild, Policy Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-RISK-68-001 | TODO | Support per-profile routing rules, quiet hours, and dedupe for risk alerts; integrate with CLI/Console preferences. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-RISK-67-001 | TODO | Notify stakeholders when risk profiles are published, deprecated, or thresholds change. Dependencies: NOTIFY-RISK-66-001. | Notifications Service Guild, Policy Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-RISK-68-001 | TODO | Support per-profile routing rules, quiet hours, and dedupe for risk alerts; integrate with CLI/Console preferences. Dependencies: NOTIFY-RISK-67-001. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) [Notifications & Telemetry] 170.A) Notifier.II @@ -28,20 +28,20 @@ Summary: Notifications & Telemetry focus on Notifier (phase II). 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- NOTIFY-SVC-37-001 | TODO | Define pack approval & policy notification contract, including OpenAPI schema, event payloads, resume token mechanics, and security guidance. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-SVC-37-002 | TODO | Implement secure ingestion endpoint, Mongo persistence (`pack_approvals`), idempotent writes, and audit trail for approval events. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-SVC-37-003 | TODO | Deliver approval/policy templates, routing predicates, and channel dispatch (email + webhook) with localization + redaction. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-SVC-37-004 | TODO | Provide acknowledgement API, Task Runner callback client, metrics for outstanding approvals, and runbook updates. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-SVC-38-002 | TODO | Implement channel adapters (email, chat webhook, generic webhook) with retry policies, health checks, and audit logging. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-SVC-38-003 | TODO | Deliver template service (versioned templates, localization scaffolding) and renderer with redaction allowlists, Markdown/HTML/JSON outputs, and provenance links. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-SVC-38-004 | TODO | Expose REST + WS APIs (rules CRUD, templates preview, incidents list, ack) with audit logging, RBAC checks, and live feed stream. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-SVC-39-001 | TODO | Implement correlation engine with pluggable key expressions/windows, throttler (token buckets), quiet hours/maintenance evaluator, and incident lifecycle. 
| Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-SVC-39-002 | TODO | Build digest generator (queries, formatting) with schedule runner and distribution via existing channels. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-SVC-39-003 | TODO | Provide simulation engine/API to dry-run rules against historical events, returning matched actions with explanations. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-SVC-39-004 | TODO | Integrate quiet hour calendars and default throttles with audit logging and operator overrides. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-SVC-40-001 | TODO | Implement escalations + on-call schedules, ack bridge, PagerDuty/OpsGenie adapters, and CLI/in-app inbox channels. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-SVC-40-002 | TODO | Add summary storm breaker notifications, localization bundles, and localization fallback handling. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-SVC-40-003 | TODO | Harden security: signed ack links (KMS), webhook HMAC/IP allowlists, tenant isolation fuzz tests, HTML sanitization. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) -NOTIFY-SVC-40-004 | TODO | Finalize observability (metrics/traces for escalations, latency), dead-letter handling, chaos tests for channel outages, and retention policies. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-SVC-37-002 | TODO | Implement secure ingestion endpoint, Mongo persistence (`pack_approvals`), idempotent writes, and audit trail for approval events. Dependencies: NOTIFY-SVC-37-001. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-SVC-37-003 | TODO | Deliver approval/policy templates, routing predicates, and channel dispatch (email + webhook) with localization + redaction. 
Dependencies: NOTIFY-SVC-37-002. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-SVC-37-004 | TODO | Provide acknowledgement API, Task Runner callback client, metrics for outstanding approvals, and runbook updates. Dependencies: NOTIFY-SVC-37-003. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-SVC-38-002 | TODO | Implement channel adapters (email, chat webhook, generic webhook) with retry policies, health checks, and audit logging. Dependencies: NOTIFY-SVC-37-004. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-SVC-38-003 | TODO | Deliver template service (versioned templates, localization scaffolding) and renderer with redaction allowlists, Markdown/HTML/JSON outputs, and provenance links. Dependencies: NOTIFY-SVC-38-002. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-SVC-38-004 | TODO | Expose REST + WS APIs (rules CRUD, templates preview, incidents list, ack) with audit logging, RBAC checks, and live feed stream. Dependencies: NOTIFY-SVC-38-003. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-SVC-39-001 | TODO | Implement correlation engine with pluggable key expressions/windows, throttler (token buckets), quiet hours/maintenance evaluator, and incident lifecycle. Dependencies: NOTIFY-SVC-38-004. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-SVC-39-002 | TODO | Build digest generator (queries, formatting) with schedule runner and distribution via existing channels. Dependencies: NOTIFY-SVC-39-001. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-SVC-39-003 | TODO | Provide simulation engine/API to dry-run rules against historical events, returning matched actions with explanations. Dependencies: NOTIFY-SVC-39-002. 
| Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-SVC-39-004 | TODO | Integrate quiet hour calendars and default throttles with audit logging and operator overrides. Dependencies: NOTIFY-SVC-39-003. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-SVC-40-001 | TODO | Implement escalations + on-call schedules, ack bridge, PagerDuty/OpsGenie adapters, and CLI/in-app inbox channels. Dependencies: NOTIFY-SVC-39-004. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-SVC-40-002 | TODO | Add summary storm breaker notifications, localization bundles, and localization fallback handling. Dependencies: NOTIFY-SVC-40-001. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-SVC-40-003 | TODO | Harden security: signed ack links (KMS), webhook HMAC/IP allowlists, tenant isolation fuzz tests, HTML sanitization. Dependencies: NOTIFY-SVC-40-002. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) +NOTIFY-SVC-40-004 | TODO | Finalize observability (metrics/traces for escalations, latency), dead-letter handling, chaos tests for channel outages, and retention policies. Dependencies: NOTIFY-SVC-40-003. | Notifications Service Guild (src/Notifier/StellaOps.Notifier/TASKS.md) [Notifications & Telemetry] 170.A) Notifier.III @@ -58,11 +58,11 @@ Summary: Notifications & Telemetry focus on Telemetry). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- TELEMETRY-OBS-50-001 | TODO | Create `StellaOps.Telemetry.Core` library with structured logging facade, OpenTelemetry configuration helpers, and deterministic bootstrap (service name/version detection, resource attributes). Publish sample usage for web/worker hosts. 
| Telemetry Core Guild (src/Telemetry/StellaOps.Telemetry.Core/TASKS.md) -TELEMETRY-OBS-50-002 | TODO | Implement context propagation middleware/adapters for HTTP, gRPC, background jobs, and CLI invocations, carrying `trace_id`, `tenant_id`, `actor`, and imposed-rule metadata. Provide test harness covering async resume scenarios. | Telemetry Core Guild (src/Telemetry/StellaOps.Telemetry.Core/TASKS.md) -TELEMETRY-OBS-51-001 | TODO | Ship metrics helpers for golden signals (histograms, counters, gauges) with exemplar support and cardinality guards. Provide Roslyn analyzer preventing unsanitised labels. | Telemetry Core Guild, Observability Guild (src/Telemetry/StellaOps.Telemetry.Core/TASKS.md) -TELEMETRY-OBS-51-002 | TODO | Implement redaction/scrubbing filters for secrets/PII enforced at logger sink, configurable per-tenant with TTL, including audit of overrides. Add determinism tests verifying stable field order and timestamp normalization. | Telemetry Core Guild, Security Guild (src/Telemetry/StellaOps.Telemetry.Core/TASKS.md) -TELEMETRY-OBS-55-001 | TODO | Provide incident mode toggle API that adjusts sampling, enables extended retention tags, and records activation trail for services. Ensure toggle honored by all hosting templates and integrates with Config/FeatureFlag providers. | Telemetry Core Guild (src/Telemetry/StellaOps.Telemetry.Core/TASKS.md) -TELEMETRY-OBS-56-001 | TODO | Add sealed-mode telemetry helpers (drift metrics, seal/unseal spans, offline exporters) and ensure hosts can disable external exporters when sealed. | Telemetry Core Guild (src/Telemetry/StellaOps.Telemetry.Core/TASKS.md) +TELEMETRY-OBS-50-002 | TODO | Implement context propagation middleware/adapters for HTTP, gRPC, background jobs, and CLI invocations, carrying `trace_id`, `tenant_id`, `actor`, and imposed-rule metadata. Provide test harness covering async resume scenarios. Dependencies: TELEMETRY-OBS-50-001. 
| Telemetry Core Guild (src/Telemetry/StellaOps.Telemetry.Core/TASKS.md) +TELEMETRY-OBS-51-001 | TODO | Ship metrics helpers for golden signals (histograms, counters, gauges) with exemplar support and cardinality guards. Provide Roslyn analyzer preventing unsanitised labels. Dependencies: TELEMETRY-OBS-50-002. | Telemetry Core Guild, Observability Guild (src/Telemetry/StellaOps.Telemetry.Core/TASKS.md) +TELEMETRY-OBS-51-002 | TODO | Implement redaction/scrubbing filters for secrets/PII enforced at logger sink, configurable per-tenant with TTL, including audit of overrides. Add determinism tests verifying stable field order and timestamp normalization. Dependencies: TELEMETRY-OBS-51-001. | Telemetry Core Guild, Security Guild (src/Telemetry/StellaOps.Telemetry.Core/TASKS.md) +TELEMETRY-OBS-55-001 | TODO | Provide incident mode toggle API that adjusts sampling, enables extended retention tags, and records activation trail for services. Ensure toggle honored by all hosting templates and integrates with Config/FeatureFlag providers. Dependencies: TELEMETRY-OBS-51-002. | Telemetry Core Guild (src/Telemetry/StellaOps.Telemetry.Core/TASKS.md) +TELEMETRY-OBS-56-001 | TODO | Add sealed-mode telemetry helpers (drift metrics, seal/unseal spans, offline exporters) and ensure hosts can disable external exporters when sealed. Dependencies: TELEMETRY-OBS-55-001. | Telemetry Core Guild (src/Telemetry/StellaOps.Telemetry.Core/TASKS.md) If all tasks are done - read next sprint section - SPRINT_180_experience_sdks.md diff --git a/docs/implplan/SPRINT_180_experience_sdks.md b/docs/implplan/SPRINT_180_experience_sdks.md index 09c94492..00ca68b1 100644 --- a/docs/implplan/SPRINT_180_experience_sdks.md +++ b/docs/implplan/SPRINT_180_experience_sdks.md @@ -6,20 +6,20 @@ Summary: Experience & SDKs focus on Cli (phase I). 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- CLI-AIAI-31-001 | TODO | Implement `stella advise summarize` command with JSON/Markdown outputs and citation display. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-AIAI-31-002 | TODO | Implement `stella advise explain` showing conflict narrative and structured rationale. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-AIAI-31-003 | TODO | Implement `stella advise remediate` generating remediation plans with `--strategy` filters and file output. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-AIAI-31-004 | TODO | Implement `stella advise batch` for summaries/conflicts/remediation with progress + multi-status responses. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-AIAI-31-002 | TODO | Implement `stella advise explain` showing conflict narrative and structured rationale. Dependencies: CLI-AIAI-31-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-AIAI-31-003 | TODO | Implement `stella advise remediate` generating remediation plans with `--strategy` filters and file output. Dependencies: CLI-AIAI-31-002. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-AIAI-31-004 | TODO | Implement `stella advise batch` for summaries/conflicts/remediation with progress + multi-status responses. Dependencies: CLI-AIAI-31-003. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-AIRGAP-56-001 | TODO | Implement `stella mirror create | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-AIRGAP-56-002 | TODO | Ensure telemetry propagation under sealed mode (no remote exporters) while preserving correlation IDs; add label `AirGapped-Phase-1`. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-AIRGAP-57-001 | TODO | Add `stella airgap import` with diff preview, bundle scope selection (`--tenant`, `--global`), audit logging, and progress reporting. 
| DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-AIRGAP-57-002 | TODO | Provide `stella airgap seal | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-AIRGAP-58-001 | TODO | Implement `stella airgap export evidence` helper for portable evidence packages, including checksum manifest and verification. | DevEx/CLI Guild, Evidence Locker Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-AIRGAP-56-002 | TODO | Ensure telemetry propagation under sealed mode (no remote exporters) while preserving correlation IDs; add label `AirGapped-Phase-1`. Dependencies: CLI-AIRGAP-56-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-AIRGAP-57-001 | TODO | Add `stella airgap import` with diff preview, bundle scope selection (`--tenant`, `--global`), audit logging, and progress reporting. Dependencies: CLI-AIRGAP-56-002. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-AIRGAP-57-002 | TODO | Provide `stella airgap seal`. Dependencies: CLI-AIRGAP-57-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-AIRGAP-58-001 | TODO | Implement `stella airgap export evidence` helper for portable evidence packages, including checksum manifest and verification. Dependencies: CLI-AIRGAP-57-002. | DevEx/CLI Guild, Evidence Locker Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-ATTEST-73-001 | TODO | Implement `stella attest sign` (payload selection, subject digest, key reference, output format) using official SDK transport. | CLI Attestor Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-ATTEST-73-002 | TODO | Implement `stella attest verify` with policy selection, explainability output, and JSON/table formatting. | CLI Attestor Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-ATTEST-74-001 | TODO | Implement `stella attest list` with filters (subject, type, issuer, scope) and pagination. | CLI Attestor Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-ATTEST-74-002 | TODO | Implement `stella attest fetch` to download envelopes and payloads to disk.
| CLI Attestor Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-ATTEST-75-001 | TODO | Implement `stella attest key create | CLI Attestor Guild, KMS Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-ATTEST-75-002 | TODO | Add support for building/verifying attestation bundles in CLI. | CLI Attestor Guild, Export Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-ATTEST-73-002 | TODO | Implement `stella attest verify` with policy selection, explainability output, and JSON/table formatting. Dependencies: CLI-ATTEST-73-001. | CLI Attestor Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-ATTEST-74-001 | TODO | Implement `stella attest list` with filters (subject, type, issuer, scope) and pagination. Dependencies: CLI-ATTEST-73-002. | CLI Attestor Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-ATTEST-74-002 | TODO | Implement `stella attest fetch` to download envelopes and payloads to disk. Dependencies: CLI-ATTEST-74-001. | CLI Attestor Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-ATTEST-75-001 | TODO | Implement `stella attest key create`. Dependencies: CLI-ATTEST-74-002. | CLI Attestor Guild, KMS Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-ATTEST-75-002 | TODO | Add support for building/verifying attestation bundles in CLI. Dependencies: CLI-ATTEST-75-001. | CLI Attestor Guild, Export Guild (src/Cli/StellaOps.Cli/TASKS.md) [Experience & SDKs] 180.A) Cli.II @@ -29,18 +29,18 @@ Task ID | State | Task description | Owners (Source) --- | --- | --- | --- CLI-CORE-41-001 | TODO | Implement CLI core features: config precedence, profiles/contexts, auth flows, output renderer (json/yaml/table), error mapping, global flags, telemetry opt-in. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-EXC-25-001 | TODO | Implement `stella exceptions list | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-EXC-25-002 | TODO | Extend `stella policy simulate` with `--with-exception`/`--without-exception` flags to preview exception impact.
| DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-EXC-25-002 | TODO | Extend `stella policy simulate` with `--with-exception`/`--without-exception` flags to preview exception impact. Dependencies: CLI-EXC-25-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-EXPORT-35-001 | BLOCKED (2025-10-29) | Implement `stella export profiles | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-EXPORT-36-001 | TODO | Add distribution commands (`stella export distribute`, `run download --resume` enhancements) and improved status polling with progress bars. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-EXPORT-37-001 | TODO | Provide scheduling (`stella export schedule`), retention, and `export verify` commands performing signature/hash validation. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-EXPORT-36-001 | TODO | Add distribution commands (`stella export distribute`, `run download --resume` enhancements) and improved status polling with progress bars. Dependencies: CLI-EXPORT-35-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-EXPORT-37-001 | TODO | Provide scheduling (`stella export schedule`), retention, and `export verify` commands performing signature/hash validation. Dependencies: CLI-EXPORT-36-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-FORENSICS-53-001 | TODO | Implement `stella forensic snapshot create --case` and `snapshot list/show` commands invoking evidence locker APIs, surfacing manifest digests, and storing local cache metadata. | DevEx/CLI Guild, Evidence Locker Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-FORENSICS-54-001 | TODO | Provide `stella forensic verify ` command validating checksums, DSSE signatures, and timeline chain-of-custody. Support JSON/pretty output and exit codes for CI. 
| DevEx/CLI Guild, Provenance Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-FORENSICS-54-002 | TODO | Implement `stella forensic attest show ` listing attestation details (signer, timestamp, subjects) and verifying signatures. | DevEx/CLI Guild, Provenance Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-FORENSICS-54-001 | TODO | Provide `stella forensic verify ` command validating checksums, DSSE signatures, and timeline chain-of-custody. Support JSON/pretty output and exit codes for CI. Dependencies: CLI-FORENSICS-53-001. | DevEx/CLI Guild, Provenance Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-FORENSICS-54-002 | TODO | Implement `stella forensic attest show ` listing attestation details (signer, timestamp, subjects) and verifying signatures. Dependencies: CLI-FORENSICS-54-001. | DevEx/CLI Guild, Provenance Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-LNM-22-001 | TODO | Implement `stella advisory obs get/linkset show/export` commands with JSON/OSV output, pagination, and conflict display; ensure `ERR_AGG_*` mapping. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-LNM-22-002 | TODO | Implement `stella vex obs get/linkset show` commands with product filters, status filters, and JSON output for CI usage. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-LNM-22-002 | TODO | Implement `stella vex obs get/linkset show` commands with product filters, status filters, and JSON output for CI usage. Dependencies: CLI-LNM-22-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-NOTIFY-38-001 | BLOCKED (2025-10-29) | Implement `stella notify rules | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-NOTIFY-39-001 | BLOCKED (2025-10-29) | Add simulation (`stella notify simulate`) and digest commands with diff output and schedule triggering, including dry-run mode. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-NOTIFY-40-001 | TODO | Provide ack token redemption workflow, escalation management, localization previews, and channel health checks. 
| DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-NOTIFY-39-001 | BLOCKED (2025-10-29) | Add simulation (`stella notify simulate`) and digest commands with diff output and schedule triggering, including dry-run mode. Dependencies: CLI-NOTIFY-38-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-NOTIFY-40-001 | TODO | Provide ack token redemption workflow, escalation management, localization previews, and channel health checks. Dependencies: CLI-NOTIFY-39-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-OBS-50-001 | TODO | Ensure CLI HTTP client propagates `traceparent` headers for all commands, prints correlation IDs on failure, and records trace IDs in verbose logs (scrubbed). | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) @@ -49,21 +49,21 @@ Depends on: Sprint 180.A - Cli.II Summary: Experience & SDKs focus on Cli (phase III). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -CLI-OBS-51-001 | TODO | Implement `stella obs top` command streaming service health metrics, SLO status, and burn-rate alerts with TUI view and JSON output. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-OBS-52-001 | TODO | Add `stella obs trace ` and `stella obs logs --from/--to` commands that correlate timeline events, logs, and evidence links with pagination + guardrails. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-OBS-55-001 | TODO | Add `stella obs incident-mode enable | DevEx/CLI Guild, DevOps Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-OBS-51-001 | TODO | Implement `stella obs top` command streaming service health metrics, SLO status, and burn-rate alerts with TUI view and JSON output. Dependencies: CLI-OBS-50-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-OBS-52-001 | TODO | Add `stella obs trace ` and `stella obs logs --from/--to` commands that correlate timeline events, logs, and evidence links with pagination + guardrails. Dependencies: CLI-OBS-51-001. 
| DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-OBS-55-001 | TODO | Add `stella obs incident-mode enable`. Dependencies: CLI-OBS-52-001. | DevEx/CLI Guild, DevOps Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-ORCH-32-001 | TODO | Implement `stella orch sources | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-ORCH-33-001 | TODO | Add action verbs (`sources test | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-ORCH-34-001 | TODO | Provide backfill wizard (`--from/--to --dry-run`), quota management (`quotas get | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-ORCH-33-001 | TODO | Add action verbs (`sources test`). Dependencies: CLI-ORCH-32-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-ORCH-34-001 | TODO | Provide backfill wizard (`--from/--to --dry-run`), quota management (`quotas get`). Dependencies: CLI-ORCH-33-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-PACKS-42-001 | TODO | Implement Task Pack commands (`pack plan/run/push/pull/verify`) with schema validation, expression sandbox, plan/simulate engine, remote execution. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-PACKS-43-001 | TODO | Deliver advanced pack features (approvals pause/resume, secret injection, localization, man pages, offline cache). | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-PACKS-43-001 | TODO | Deliver advanced pack features (approvals pause/resume, secret injection, localization, man pages, offline cache). Dependencies: CLI-PACKS-42-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-PARITY-41-001 | TODO | Deliver parity command groups (`policy`, `sbom`, `vuln`, `vex`, `advisory`, `export`, `orchestrator`) with `--explain`, deterministic outputs, and parity matrix entries. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-PARITY-41-002 | TODO | Implement `notify`, `aoc`, `auth` command groups, idempotency keys, shell completions, config docs, and parity matrix export tooling.
| DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-PARITY-41-002 | TODO | Implement `notify`, `aoc`, `auth` command groups, idempotency keys, shell completions, config docs, and parity matrix export tooling. Dependencies: CLI-PARITY-41-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-POLICY-20-001 | TODO | Add `stella policy new | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-POLICY-23-004 | TODO | Add `stella policy lint` command validating SPL files with compiler diagnostics; support JSON output. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-POLICY-23-005 | DOING (2025-10-28) | Implement `stella policy activate` with scheduling window, approval enforcement, and summary output. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-POLICY-23-006 | TODO | Provide `stella policy history` and `stella policy explain` commands to pull run history and explanation trees. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-POLICY-27-001 | TODO | Implement policy workspace commands (`stella policy init`, `edit`, `lint`, `compile`, `test`) with template selection, local cache, JSON output, and deterministic temp directories. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-POLICY-23-004 | TODO | Add `stella policy lint` command validating SPL files with compiler diagnostics; support JSON output. Dependencies: CLI-POLICY-20-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-POLICY-23-005 | DOING (2025-10-28) | Implement `stella policy activate` with scheduling window, approval enforcement, and summary output. Dependencies: CLI-POLICY-23-004. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-POLICY-23-006 | TODO | Provide `stella policy history` and `stella policy explain` commands to pull run history and explanation trees. Dependencies: CLI-POLICY-23-005. 
| DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-POLICY-27-001 | TODO | Implement policy workspace commands (`stella policy init`, `edit`, `lint`, `compile`, `test`) with template selection, local cache, JSON output, and deterministic temp directories. Dependencies: CLI-POLICY-23-006. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) [Experience & SDKs] 180.A) Cli.IV @@ -71,21 +71,21 @@ Depends on: Sprint 180.A - Cli.III Summary: Experience & SDKs focus on Cli (phase IV). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -CLI-POLICY-27-002 | TODO | Add submission/review workflow commands (`stella policy version bump`, `submit`, `review comment`, `approve`, `reject`) supporting reviewer assignment, changelog capture, and exit codes. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-POLICY-27-003 | TODO | Implement `stella policy simulate` enhancements (quick vs batch, SBOM selectors, heatmap summary, manifest download) with `--json` and Markdown report output for CI. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-POLICY-27-004 | TODO | Add lifecycle commands for publish/promote/rollback/sign (`stella policy publish --sign`, `promote --env`, `rollback`) with attestation verification and canary arguments. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-POLICY-27-005 | TODO | Update CLI reference and samples for Policy Studio including JSON schemas, exit codes, and CI snippets. | DevEx/CLI Guild, Docs Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-POLICY-27-006 | TODO | Update CLI policy profiles/help text to request the new Policy Studio scope family, surface ProblemDetails guidance for `invalid_scope`, and adjust regression tests for scope failures. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-POLICY-27-002 | TODO | Add submission/review workflow commands (`stella policy version bump`, `submit`, `review comment`, `approve`, `reject`) supporting reviewer assignment, changelog capture, and exit codes. 
Dependencies: CLI-POLICY-27-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-POLICY-27-003 | TODO | Implement `stella policy simulate` enhancements (quick vs batch, SBOM selectors, heatmap summary, manifest download) with `--json` and Markdown report output for CI. Dependencies: CLI-POLICY-27-002. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-POLICY-27-004 | TODO | Add lifecycle commands for publish/promote/rollback/sign (`stella policy publish --sign`, `promote --env`, `rollback`) with attestation verification and canary arguments. Dependencies: CLI-POLICY-27-003. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-POLICY-27-005 | TODO | Update CLI reference and samples for Policy Studio including JSON schemas, exit codes, and CI snippets. Dependencies: CLI-POLICY-27-004. | DevEx/CLI Guild, Docs Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-POLICY-27-006 | TODO | Update CLI policy profiles/help text to request the new Policy Studio scope family, surface ProblemDetails guidance for `invalid_scope`, and adjust regression tests for scope failures. Dependencies: CLI-POLICY-27-005. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-RISK-66-001 | TODO | Implement `stella risk profile list | DevEx/CLI Guild, Policy Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-RISK-66-002 | TODO | Ship `stella risk simulate` supporting SBOM/asset inputs, diff mode, and export to JSON/CSV. | DevEx/CLI Guild, Risk Engine Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-RISK-67-001 | TODO | Provide `stella risk results` with filtering, severity thresholds, explainability fetch. | DevEx/CLI Guild, Findings Ledger Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-RISK-68-001 | TODO | Add `stella risk bundle verify` and integrate with offline risk bundles. | DevEx/CLI Guild, Export Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-RISK-66-002 | TODO | Ship `stella risk simulate` supporting SBOM/asset inputs, diff mode, and export to JSON/CSV. Dependencies: CLI-RISK-66-001. 
| DevEx/CLI Guild, Risk Engine Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-RISK-67-001 | TODO | Provide `stella risk results` with filtering, severity thresholds, explainability fetch. Dependencies: CLI-RISK-66-002. | DevEx/CLI Guild, Findings Ledger Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-RISK-68-001 | TODO | Add `stella risk bundle verify` and integrate with offline risk bundles. Dependencies: CLI-RISK-67-001. | DevEx/CLI Guild, Export Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-SDK-62-001 | TODO | Replace bespoke HTTP clients with official SDK (TS/Go) for all CLI commands; ensure modular transport for air-gapped mode. | DevEx/CLI Guild, SDK Generator Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-SDK-62-002 | TODO | Update CLI error handling to surface standardized API error envelope with `error.code` and `trace_id`. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-SDK-63-001 | TODO | Expose `stella api spec download` command retrieving aggregate OAS and verifying checksum/ETag. | DevEx/CLI Guild, API Governance Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-SDK-64-001 | TODO | Add CLI subcommand `stella sdk update` to fetch latest SDK manifests/changelogs; integrate with Notifications for deprecations. | DevEx/CLI Guild, SDK Release Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-SDK-62-002 | TODO | Update CLI error handling to surface standardized API error envelope with `error.code` and `trace_id`. Dependencies: CLI-SDK-62-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-SDK-63-001 | TODO | Expose `stella api spec download` command retrieving aggregate OAS and verifying checksum/ETag. Dependencies: CLI-SDK-62-002. | DevEx/CLI Guild, API Governance Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-SDK-64-001 | TODO | Add CLI subcommand `stella sdk update` to fetch latest SDK manifests/changelogs; integrate with Notifications for deprecations. Dependencies: CLI-SDK-63-001. 
| DevEx/CLI Guild, SDK Release Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-SIG-26-001 | TODO | Implement `stella reachability upload-callgraph` and `stella reachability list/explain` commands with streaming upload, pagination, and exit codes. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-SIG-26-002 | TODO | Extend `stella policy simulate` with reachability override flags (`--reachability-state`, `--reachability-score`). | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-SIG-26-002 | TODO | Extend `stella policy simulate` with reachability override flags (`--reachability-state`, `--reachability-score`). Dependencies: CLI-SIG-26-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) [Experience & SDKs] 180.A) Cli.V @@ -94,17 +94,17 @@ Summary: Experience & SDKs focus on Cli (phase V). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- CLI-TEN-47-001 | TODO | Implement `stella login`, `whoami`, `tenants list`, persistent profiles, secure token storage, and `--tenant` override with validation. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-TEN-49-001 | TODO | Add service account token minting, delegation (`stella token delegate`), impersonation banner, and audit-friendly logging. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-TEN-49-001 | TODO | Add service account token minting, delegation (`stella token delegate`), impersonation banner, and audit-friendly logging. Dependencies: CLI-TEN-47-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-VEX-30-001 | TODO | Implement `stella vex consensus list` with filters, paging, policy selection, `--json/--csv`. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-VEX-30-002 | TODO | Implement `stella vex consensus show` displaying quorum, evidence, rationale, signature status. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-VEX-30-003 | TODO | Implement `stella vex simulate` for trust/threshold overrides with JSON diff output. 
| DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-VEX-30-004 | TODO | Implement `stella vex export` for consensus NDJSON bundles with signature verification helper. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-VEX-30-002 | TODO | Implement `stella vex consensus show` displaying quorum, evidence, rationale, signature status. Dependencies: CLI-VEX-30-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-VEX-30-003 | TODO | Implement `stella vex simulate` for trust/threshold overrides with JSON diff output. Dependencies: CLI-VEX-30-002. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-VEX-30-004 | TODO | Implement `stella vex export` for consensus NDJSON bundles with signature verification helper. Dependencies: CLI-VEX-30-003. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) CLI-VULN-29-001 | TODO | Implement `stella vuln list` with grouping, paging, filters, `--json/--csv`, and policy selection. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-VULN-29-002 | TODO | Implement `stella vuln show` displaying evidence, policy rationale, paths, ledger summary; support `--json` for automation. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-VULN-29-003 | TODO | Add workflow commands (`assign`, `comment`, `accept-risk`, `verify-fix`, `target-fix`, `reopen`) with filter selection (`--filter`) and idempotent retries. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-VULN-29-004 | TODO | Implement `stella vuln simulate` producing delta summaries and optional Markdown report for CI. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-VULN-29-005 | TODO | Add `stella vuln export` and `stella vuln bundle verify` commands to trigger/download evidence bundles and verify signatures. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) -CLI-VULN-29-006 | TODO | Update CLI docs/examples for Vulnerability Explorer with compliance checklist and CI snippets. 
| DevEx/CLI Guild, Docs Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-VULN-29-002 | TODO | Implement `stella vuln show` displaying evidence, policy rationale, paths, ledger summary; support `--json` for automation. Dependencies: CLI-VULN-29-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-VULN-29-003 | TODO | Add workflow commands (`assign`, `comment`, `accept-risk`, `verify-fix`, `target-fix`, `reopen`) with filter selection (`--filter`) and idempotent retries. Dependencies: CLI-VULN-29-002. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-VULN-29-004 | TODO | Implement `stella vuln simulate` producing delta summaries and optional Markdown report for CI. Dependencies: CLI-VULN-29-003. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-VULN-29-005 | TODO | Add `stella vuln export` and `stella vuln bundle verify` commands to trigger/download evidence bundles and verify signatures. Dependencies: CLI-VULN-29-004. | DevEx/CLI Guild (src/Cli/StellaOps.Cli/TASKS.md) +CLI-VULN-29-006 | TODO | Update CLI docs/examples for Vulnerability Explorer with compliance checklist and CI snippets. Dependencies: CLI-VULN-29-005. | DevEx/CLI Guild, Docs Guild (src/Cli/StellaOps.Cli/TASKS.md) [Experience & SDKs] 180.B) DevPortal @@ -113,11 +113,11 @@ Summary: Experience & SDKs focus on DevPortal). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- DEVPORT-62-001 | TODO | Select static site generator, integrate aggregate spec, build navigation + search scaffolding. | Developer Portal Guild (src/DevPortal/StellaOps.DevPortal.Site/TASKS.md) -DEVPORT-62-002 | TODO | Implement schema viewer, example rendering, copy-curl snippets, and version selector UI. | Developer Portal Guild (src/DevPortal/StellaOps.DevPortal.Site/TASKS.md) -DEVPORT-63-001 | TODO | Add Try-It console pointing at sandbox environment with token onboarding and scope info. 
| Developer Portal Guild, Platform Guild (src/DevPortal/StellaOps.DevPortal.Site/TASKS.md) -DEVPORT-63-002 | TODO | Embed language-specific SDK snippets and quick starts generated from tested examples. | Developer Portal Guild, SDK Generator Guild (src/DevPortal/StellaOps.DevPortal.Site/TASKS.md) -DEVPORT-64-001 | TODO | Provide offline build target bundling HTML, specs, SDK archives; ensure no external assets. | Developer Portal Guild, Export Center Guild (src/DevPortal/StellaOps.DevPortal.Site/TASKS.md) -DEVPORT-64-002 | TODO | Add automated accessibility tests, link checker, and performance budgets. | Developer Portal Guild (src/DevPortal/StellaOps.DevPortal.Site/TASKS.md) +DEVPORT-62-002 | TODO | Implement schema viewer, example rendering, copy-curl snippets, and version selector UI. Dependencies: DEVPORT-62-001. | Developer Portal Guild (src/DevPortal/StellaOps.DevPortal.Site/TASKS.md) +DEVPORT-63-001 | TODO | Add Try-It console pointing at sandbox environment with token onboarding and scope info. Dependencies: DEVPORT-62-002. | Developer Portal Guild, Platform Guild (src/DevPortal/StellaOps.DevPortal.Site/TASKS.md) +DEVPORT-63-002 | TODO | Embed language-specific SDK snippets and quick starts generated from tested examples. Dependencies: DEVPORT-63-001. | Developer Portal Guild, SDK Generator Guild (src/DevPortal/StellaOps.DevPortal.Site/TASKS.md) +DEVPORT-64-001 | TODO | Provide offline build target bundling HTML, specs, SDK archives; ensure no external assets. Dependencies: DEVPORT-63-002. | Developer Portal Guild, Export Center Guild (src/DevPortal/StellaOps.DevPortal.Site/TASKS.md) +DEVPORT-64-002 | TODO | Add automated accessibility tests, link checker, and performance budgets. Dependencies: DEVPORT-64-001. | Developer Portal Guild (src/DevPortal/StellaOps.DevPortal.Site/TASKS.md) [Experience & SDKs] 180.C) Graph @@ -126,16 +126,16 @@ Summary: Experience & SDKs focus on Graph). 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- GRAPH-API-28-001 | TODO | Define OpenAPI + JSON schema for graph search/query/paths/diff/export endpoints, including cost metadata and streaming tile schema. | Graph API Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) -GRAPH-API-28-002 | TODO | Implement `/graph/search` with multi-type index lookup, prefix/exact match, RBAC enforcement, and result ranking + caching. | Graph API Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) -GRAPH-API-28-003 | TODO | Build query planner + cost estimator for `/graph/query`, stream tiles (nodes/edges/stats) progressively, enforce budgets, provide cursor tokens. | Graph API Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) -GRAPH-API-28-004 | TODO | Implement `/graph/paths` with depth ≤6, constraint filters, heuristic shortest path search, and optional policy overlay rendering. | Graph API Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) -GRAPH-API-28-005 | TODO | Implement `/graph/diff` streaming added/removed/changed nodes/edges between SBOM snapshots; include overlay deltas and policy/VEX/advisory metadata. | Graph API Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) -GRAPH-API-28-006 | TODO | Consume Policy Engine overlay contract (`POLICY-ENGINE-30-001..003`) and surface advisory/VEX/policy overlays with caching, partial materialization, and explain trace sampling for focused nodes. | Graph API Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) -GRAPH-API-28-007 | TODO | Implement exports (`graphml`, `csv`, `ndjson`, `png`, `svg`) with async job management, checksum manifests, and streaming downloads. | Graph API Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) -GRAPH-API-28-008 | TODO | Integrate RBAC scopes (`graph:read`, `graph:query`, `graph:export`), tenant headers, audit logging, and rate limiting. 
| Graph API Guild, Authority Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) -GRAPH-API-28-009 | TODO | Instrument metrics (`graph_tile_latency_seconds`, `graph_query_budget_denied_total`, `graph_overlay_cache_hit_ratio`), structured logs, and traces per query stage; publish dashboards. | Graph API Guild, Observability Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) -GRAPH-API-28-010 | TODO | Build unit/integration/load tests with synthetic datasets (500k nodes/2M edges), fuzz query validation, verify determinism across runs. | Graph API Guild, QA Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) -GRAPH-API-28-011 | TODO | Provide deployment manifests, offline kit support, API gateway integration docs, and smoke tests. | Graph API Guild, DevOps Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) +GRAPH-API-28-002 | TODO | Implement `/graph/search` with multi-type index lookup, prefix/exact match, RBAC enforcement, and result ranking + caching. Dependencies: GRAPH-API-28-001. | Graph API Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) +GRAPH-API-28-003 | TODO | Build query planner + cost estimator for `/graph/query`, stream tiles (nodes/edges/stats) progressively, enforce budgets, provide cursor tokens. Dependencies: GRAPH-API-28-002. | Graph API Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) +GRAPH-API-28-004 | TODO | Implement `/graph/paths` with depth ≤6, constraint filters, heuristic shortest path search, and optional policy overlay rendering. Dependencies: GRAPH-API-28-003. | Graph API Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) +GRAPH-API-28-005 | TODO | Implement `/graph/diff` streaming added/removed/changed nodes/edges between SBOM snapshots; include overlay deltas and policy/VEX/advisory metadata. Dependencies: GRAPH-API-28-004. 
| Graph API Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) +GRAPH-API-28-006 | TODO | Consume Policy Engine overlay contract (`POLICY-ENGINE-30-001..003`) and surface advisory/VEX/policy overlays with caching, partial materialization, and explain trace sampling for focused nodes. Dependencies: GRAPH-API-28-005. | Graph API Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) +GRAPH-API-28-007 | TODO | Implement exports (`graphml`, `csv`, `ndjson`, `png`, `svg`) with async job management, checksum manifests, and streaming downloads. Dependencies: GRAPH-API-28-006. | Graph API Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) +GRAPH-API-28-008 | TODO | Integrate RBAC scopes (`graph:read`, `graph:query`, `graph:export`), tenant headers, audit logging, and rate limiting. Dependencies: GRAPH-API-28-007. | Graph API Guild, Authority Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) +GRAPH-API-28-009 | TODO | Instrument metrics (`graph_tile_latency_seconds`, `graph_query_budget_denied_total`, `graph_overlay_cache_hit_ratio`), structured logs, and traces per query stage; publish dashboards. Dependencies: GRAPH-API-28-008. | Graph API Guild, Observability Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) +GRAPH-API-28-010 | TODO | Build unit/integration/load tests with synthetic datasets (500k nodes/2M edges), fuzz query validation, verify determinism across runs. Dependencies: GRAPH-API-28-009. | Graph API Guild, QA Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) +GRAPH-API-28-011 | TODO | Provide deployment manifests, offline kit support, API gateway integration docs, and smoke tests. Dependencies: GRAPH-API-28-010. | Graph API Guild, DevOps Guild (src/Graph/StellaOps.Graph.Api/TASKS.md) [Experience & SDKs] 180.D) Sdk @@ -144,17 +144,17 @@ Summary: Experience & SDKs focus on Sdk). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- SDKGEN-62-001 | TODO | Choose/pin generator toolchain, set up language template pipeline, and enforce reproducible builds. 
| SDK Generator Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) -SDKGEN-62-002 | TODO | Implement shared post-processing (auth helpers, retries, pagination utilities, telemetry hooks) applied to all languages. | SDK Generator Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) -SDKGEN-63-001 | TODO | Ship TypeScript SDK alpha with ESM/CJS builds, typed errors, paginator, streaming helpers. | SDK Generator Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) -SDKGEN-63-002 | TODO | Ship Python SDK alpha (sync/async clients, type hints, upload/download helpers). | SDK Generator Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) -SDKGEN-63-003 | TODO | Ship Go SDK alpha with context-first API and streaming helpers. | SDK Generator Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) -SDKGEN-63-004 | TODO | Ship Java SDK alpha (builder pattern, HTTP client abstraction). | SDK Generator Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) -SDKGEN-64-001 | TODO | Switch CLI to consume TS or Go SDK; ensure parity. | SDK Generator Guild, CLI Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) -SDKGEN-64-002 | TODO | Integrate SDKs into Console data providers where feasible. | SDK Generator Guild, Console Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) +SDKGEN-62-002 | TODO | Implement shared post-processing (auth helpers, retries, pagination utilities, telemetry hooks) applied to all languages. Dependencies: SDKGEN-62-001. | SDK Generator Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) +SDKGEN-63-001 | TODO | Ship TypeScript SDK alpha with ESM/CJS builds, typed errors, paginator, streaming helpers. Dependencies: SDKGEN-62-002. | SDK Generator Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) +SDKGEN-63-002 | TODO | Ship Python SDK alpha (sync/async clients, type hints, upload/download helpers). Dependencies: SDKGEN-63-001. | SDK Generator Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) +SDKGEN-63-003 | TODO | Ship Go SDK alpha with context-first API and streaming helpers. 
Dependencies: SDKGEN-63-002. | SDK Generator Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) +SDKGEN-63-004 | TODO | Ship Java SDK alpha (builder pattern, HTTP client abstraction). Dependencies: SDKGEN-63-003. | SDK Generator Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) +SDKGEN-64-001 | TODO | Switch CLI to consume TS or Go SDK; ensure parity. Dependencies: SDKGEN-63-004. | SDK Generator Guild, CLI Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) +SDKGEN-64-002 | TODO | Integrate SDKs into Console data providers where feasible. Dependencies: SDKGEN-64-001. | SDK Generator Guild, Console Guild (src/Sdk/StellaOps.Sdk.Generator/TASKS.md) SDKREL-63-001 | TODO | Configure CI pipelines for npm, PyPI, Maven Central staging, and Go proxies with signing and provenance attestations. | SDK Release Guild (src/Sdk/StellaOps.Sdk.Release/TASKS.md) -SDKREL-63-002 | TODO | Integrate changelog automation pulling from OAS diffs and generator metadata. | SDK Release Guild, API Governance Guild (src/Sdk/StellaOps.Sdk.Release/TASKS.md) -SDKREL-64-001 | TODO | Hook SDK releases into Notifications Studio with scoped announcements and RSS/Atom feeds. | SDK Release Guild, Notifications Guild (src/Sdk/StellaOps.Sdk.Release/TASKS.md) -SDKREL-64-002 | TODO | Add `devportal --offline` bundle job packaging docs, specs, SDK artifacts for air-gapped users. | SDK Release Guild, Export Center Guild (src/Sdk/StellaOps.Sdk.Release/TASKS.md) +SDKREL-63-002 | TODO | Integrate changelog automation pulling from OAS diffs and generator metadata. Dependencies: SDKREL-63-001. | SDK Release Guild, API Governance Guild (src/Sdk/StellaOps.Sdk.Release/TASKS.md) +SDKREL-64-001 | TODO | Hook SDK releases into Notifications Studio with scoped announcements and RSS/Atom feeds. Dependencies: SDKREL-63-002. | SDK Release Guild, Notifications Guild (src/Sdk/StellaOps.Sdk.Release/TASKS.md) +SDKREL-64-002 | TODO | Add `devportal --offline` bundle job packaging docs, specs, SDK artifacts for air-gapped users. 
Dependencies: SDKREL-64-001. | SDK Release Guild, Export Center Guild (src/Sdk/StellaOps.Sdk.Release/TASKS.md) [Experience & SDKs] 180.E) UI.I @@ -163,19 +163,19 @@ Summary: Experience & SDKs focus on UI (phase I). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- UI-AOC-19-001 | TODO | Add Sources dashboard tiles showing AOC pass/fail, recent violation codes, and ingest throughput per tenant. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-AOC-19-002 | TODO | Implement violation drill-down view highlighting offending document fields and provenance metadata. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-AOC-19-003 | TODO | Add "Verify last 24h" action triggering AOC verifier endpoint and surfacing CLI parity guidance. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-AOC-19-002 | TODO | Implement violation drill-down view highlighting offending document fields and provenance metadata. Dependencies: UI-AOC-19-001. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-AOC-19-003 | TODO | Add "Verify last 24h" action triggering AOC verifier endpoint and surfacing CLI parity guidance. Dependencies: UI-AOC-19-002. | UI Guild (src/UI/StellaOps.UI/TASKS.md) UI-EXC-25-001 | TODO | Build Exception Center (list + kanban) with filters, sorting, workflow transitions, and audit views. | UI Guild, Governance Guild (src/UI/StellaOps.UI/TASKS.md) -UI-EXC-25-002 | TODO | Implement exception creation wizard with scope preview, justification templates, timebox guardrails. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-EXC-25-003 | TODO | Add inline exception drafting/proposing from Vulnerability Explorer and Graph detail panels with live simulation. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-EXC-25-004 | TODO | Surface exception badges, countdown timers, and explain integration across Graph/Vuln Explorer and policy views. 
| UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-EXC-25-005 | TODO | Add keyboard shortcuts (`x`,`a`,`r`) and ensure screen-reader messaging for approvals/revocations. | UI Guild, Accessibility Guild (src/UI/StellaOps.UI/TASKS.md) +UI-EXC-25-002 | TODO | Implement exception creation wizard with scope preview, justification templates, timebox guardrails. Dependencies: UI-EXC-25-001. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-EXC-25-003 | TODO | Add inline exception drafting/proposing from Vulnerability Explorer and Graph detail panels with live simulation. Dependencies: UI-EXC-25-002. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-EXC-25-004 | TODO | Surface exception badges, countdown timers, and explain integration across Graph/Vuln Explorer and policy views. Dependencies: UI-EXC-25-003. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-EXC-25-005 | TODO | Add keyboard shortcuts (`x`,`a`,`r`) and ensure screen-reader messaging for approvals/revocations. Dependencies: UI-EXC-25-004. | UI Guild, Accessibility Guild (src/UI/StellaOps.UI/TASKS.md) UI-GRAPH-21-001 | TODO | Align Graph Explorer auth configuration with new `graph:*` scopes; consume scope identifiers from shared `StellaOpsScopes` exports (via generated SDK/config) instead of hard-coded strings. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-GRAPH-24-001 | TODO | Build Graph Explorer canvas with layered/radial layouts, virtualization, zoom/pan, and scope toggles; initial render <1.5s for sample asset. | UI Guild, SBOM Service Guild (src/UI/StellaOps.UI/TASKS.md) -UI-GRAPH-24-002 | TODO | Implement overlays (Policy, Evidence, License, Exposure), simulation toggle, path view, and SBOM diff/time-travel with accessible tooltips/AOC indicators. | UI Guild, Policy Guild (src/UI/StellaOps.UI/TASKS.md) -UI-GRAPH-24-003 | TODO | Deliver filters/search panel with facets, saved views, permalinks, and share modal. 
| UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-GRAPH-24-004 | TODO | Add side panels (Details, What-if, History) with upgrade simulation integration and SBOM diff viewer. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-GRAPH-24-006 | TODO | Ensure accessibility (keyboard nav, screen reader labels, contrast), add hotkeys (`f`,`e`,`.`), and analytics instrumentation. | UI Guild, Accessibility Guild (src/UI/StellaOps.UI/TASKS.md) +UI-GRAPH-24-001 | TODO | Build Graph Explorer canvas with layered/radial layouts, virtualization, zoom/pan, and scope toggles; initial render <1.5s for sample asset. Dependencies: UI-GRAPH-21-001. | UI Guild, SBOM Service Guild (src/UI/StellaOps.UI/TASKS.md) +UI-GRAPH-24-002 | TODO | Implement overlays (Policy, Evidence, License, Exposure), simulation toggle, path view, and SBOM diff/time-travel with accessible tooltips/AOC indicators. Dependencies: UI-GRAPH-24-001. | UI Guild, Policy Guild (src/UI/StellaOps.UI/TASKS.md) +UI-GRAPH-24-003 | TODO | Deliver filters/search panel with facets, saved views, permalinks, and share modal. Dependencies: UI-GRAPH-24-002. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-GRAPH-24-004 | TODO | Add side panels (Details, What-if, History) with upgrade simulation integration and SBOM diff viewer. Dependencies: UI-GRAPH-24-003. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-GRAPH-24-006 | TODO | Ensure accessibility (keyboard nav, screen reader labels, contrast), add hotkeys (`f`,`e`,`.`), and analytics instrumentation. Dependencies: UI-GRAPH-24-004. | UI Guild, Accessibility Guild (src/UI/StellaOps.UI/TASKS.md) UI-LNM-22-001 | TODO | Build Evidence panel showing policy decision with advisory observations/linksets side-by-side, conflict badges, AOC chain, and raw doc download links. Docs `DOCS-LNM-22-005` waiting on delivered UI for screenshots + flows. | UI Guild, Policy Guild (src/UI/StellaOps.UI/TASKS.md) @@ -184,21 +184,21 @@ Depends on: Sprint 180.E - UI.I Summary: Experience & SDKs focus on UI (phase II). 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -UI-LNM-22-002 | TODO | Implement filters (source, severity bucket, conflict-only, CVSS vector presence) and pagination/lazy loading for large linksets. Docs depend on finalized filtering UX. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-LNM-22-003 | TODO | Add VEX tab with status/justification summaries, conflict indicators, and export actions. Required for `DOCS-LNM-22-005` coverage of VEX evidence tab. | UI Guild, Excititor Guild (src/UI/StellaOps.UI/TASKS.md) -UI-LNM-22-004 | TODO | Provide permalink + copy-to-clipboard for selected component/linkset/policy combination; ensure high-contrast theme support. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-LNM-22-002 | TODO | Implement filters (source, severity bucket, conflict-only, CVSS vector presence) and pagination/lazy loading for large linksets. Docs depend on finalized filtering UX. Dependencies: UI-LNM-22-001. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-LNM-22-003 | TODO | Add VEX tab with status/justification summaries, conflict indicators, and export actions. Required for `DOCS-LNM-22-005` coverage of VEX evidence tab. Dependencies: UI-LNM-22-002. | UI Guild, Excititor Guild (src/UI/StellaOps.UI/TASKS.md) +UI-LNM-22-004 | TODO | Provide permalink + copy-to-clipboard for selected component/linkset/policy combination; ensure high-contrast theme support. Dependencies: UI-LNM-22-003. | UI Guild (src/UI/StellaOps.UI/TASKS.md) UI-ORCH-32-001 | TODO | Update Console RBAC mappings to surface `Orch.Viewer`, request `orch:read` scope in token flows, and gate dashboard access/messaging accordingly. | UI Guild, Console Guild (src/UI/StellaOps.UI/TASKS.md) UI-POLICY-13-007 | TODO | Surface policy confidence metadata (band, age, quiet provenance) on preview and report views. 
| UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-POLICY-20-001 | TODO | Ship Monaco-based policy editor with DSL syntax highlighting, inline diagnostics, and compliance checklist sidebar. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-POLICY-20-002 | TODO | Build simulation panel showing before/after counts, severity deltas, and rule hit summaries with deterministic diff rendering. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-POLICY-20-003 | TODO | Implement submit/review/approve workflow with comments, approvals log, and RBAC checks aligned to new Policy Studio roles (`policy:author`/`policy:review`/`policy:approve`/`policy:operate`). | UI Guild, Product Ops (src/UI/StellaOps.UI/TASKS.md) -UI-POLICY-20-004 | TODO | Add run viewer dashboards (rule heatmap, VEX wins, suppressions) with filter/search and export. | UI Guild, Observability Guild (src/UI/StellaOps.UI/TASKS.md) -UI-POLICY-23-001 | TODO | Deliver Policy Editor workspace with pack list, revision history, and scoped metadata cards. | UI Guild, Policy Guild (src/UI/StellaOps.UI/TASKS.md) -UI-POLICY-23-002 | TODO | Implement YAML editor with schema validation, lint diagnostics, and live canonicalization preview. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-POLICY-23-003 | TODO | Build guided rule builder (source preferences, severity mapping, VEX precedence, exceptions) with preview JSON output. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-POLICY-23-004 | TODO | Add review/approval workflow UI: checklists, comments, two-person approval indicator, scope scheduling. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-POLICY-23-005 | TODO | Integrate simulator panel (SBOM/component/advisory selection), run diff vs active policy, show explain tree and overlays. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-POLICY-23-006 | TODO | Implement explain view linking to evidence overlays and exceptions; provide export to JSON/PDF. 
| UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-POLICY-20-001 | TODO | Ship Monaco-based policy editor with DSL syntax highlighting, inline diagnostics, and compliance checklist sidebar. Dependencies: UI-POLICY-13-007. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-POLICY-20-002 | TODO | Build simulation panel showing before/after counts, severity deltas, and rule hit summaries with deterministic diff rendering. Dependencies: UI-POLICY-20-001. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-POLICY-20-003 | TODO | Implement submit/review/approve workflow with comments, approvals log, and RBAC checks aligned to new Policy Studio roles (`policy:author`/`policy:review`/`policy:approve`/`policy:operate`). Dependencies: UI-POLICY-20-002. | UI Guild, Product Ops (src/UI/StellaOps.UI/TASKS.md) +UI-POLICY-20-004 | TODO | Add run viewer dashboards (rule heatmap, VEX wins, suppressions) with filter/search and export. Dependencies: UI-POLICY-20-003. | UI Guild, Observability Guild (src/UI/StellaOps.UI/TASKS.md) +UI-POLICY-23-001 | TODO | Deliver Policy Editor workspace with pack list, revision history, and scoped metadata cards. Dependencies: UI-POLICY-20-004. | UI Guild, Policy Guild (src/UI/StellaOps.UI/TASKS.md) +UI-POLICY-23-002 | TODO | Implement YAML editor with schema validation, lint diagnostics, and live canonicalization preview. Dependencies: UI-POLICY-23-001. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-POLICY-23-003 | TODO | Build guided rule builder (source preferences, severity mapping, VEX precedence, exceptions) with preview JSON output. Dependencies: UI-POLICY-23-002. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-POLICY-23-004 | TODO | Add review/approval workflow UI: checklists, comments, two-person approval indicator, scope scheduling. Dependencies: UI-POLICY-23-003. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-POLICY-23-005 | TODO | Integrate simulator panel (SBOM/component/advisory selection), run diff vs active policy, show explain tree and overlays. 
Dependencies: UI-POLICY-23-004. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-POLICY-23-006 | TODO | Implement explain view linking to evidence overlays and exceptions; provide export to JSON/PDF. Dependencies: UI-POLICY-23-005. | UI Guild (src/UI/StellaOps.UI/TASKS.md) [Experience & SDKs] 180.E) UI.III @@ -206,11 +206,11 @@ Depends on: Sprint 180.E - UI.II Summary: Experience & SDKs focus on UI (phase III). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -UI-POLICY-27-001 | TODO | Update Console policy workspace RBAC guards, scope requests, and user messaging to reflect the new Policy Studio roles/scopes (`policy:author/review/approve/operate/audit/simulate`), including Cypress auth stubs and help text. | UI Guild, Product Ops (src/UI/StellaOps.UI/TASKS.md) +UI-POLICY-27-001 | TODO | Update Console policy workspace RBAC guards, scope requests, and user messaging to reflect the new Policy Studio roles/scopes (`policy:author/review/approve/operate/audit/simulate`), including Cypress auth stubs and help text. Dependencies: UI-POLICY-23-006. | UI Guild, Product Ops (src/UI/StellaOps.UI/TASKS.md) UI-SIG-26-001 | TODO | Add reachability columns/badges to Vulnerability Explorer with filters and tooltips. | UI Guild, Signals Guild (src/UI/StellaOps.UI/TASKS.md) -UI-SIG-26-002 | TODO | Enhance “Why” drawer with call path visualization, reachability timeline, and evidence list. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-SIG-26-003 | TODO | Add reachability overlay halos/time slider to SBOM Graph along with state legend. | UI Guild (src/UI/StellaOps.UI/TASKS.md) -UI-SIG-26-004 | TODO | Build Reachability Center view showing asset coverage, missing sensors, and stale facts. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-SIG-26-002 | TODO | Enhance “Why” drawer with call path visualization, reachability timeline, and evidence list. Dependencies: UI-SIG-26-001. 
| UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-SIG-26-003 | TODO | Add reachability overlay halos/time slider to SBOM Graph along with state legend. Dependencies: UI-SIG-26-002. | UI Guild (src/UI/StellaOps.UI/TASKS.md) +UI-SIG-26-004 | TODO | Build Reachability Center view showing asset coverage, missing sensors, and stale facts. Dependencies: UI-SIG-26-003. | UI Guild (src/UI/StellaOps.UI/TASKS.md) [Experience & SDKs] 180.F) Web.I @@ -219,19 +219,19 @@ Summary: Experience & SDKs focus on Web (phase I). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- WEB-AIAI-31-001 `API routing` | TODO | Route `/advisory/ai/*` endpoints through gateway with RBAC/ABAC, rate limits, and telemetry headers. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-AIAI-31-002 `Batch orchestration` | TODO | Provide batching job handlers and streaming responses for CLI automation with retry/backoff. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-AIAI-31-003 `Telemetry & audit` | TODO | Emit metrics/logs (latency, guardrail blocks, validation failures) and forward anonymized prompt hashes to analytics. | BE-Base Platform Guild, Observability Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-AIAI-31-002 `Batch orchestration` | TODO | Provide batching job handlers and streaming responses for CLI automation with retry/backoff. Dependencies: WEB-AIAI-31-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-AIAI-31-003 `Telemetry & audit` | TODO | Emit metrics/logs (latency, guardrail blocks, validation failures) and forward anonymized prompt hashes to analytics. Dependencies: WEB-AIAI-31-002. | BE-Base Platform Guild, Observability Guild (src/Web/StellaOps.Web/TASKS.md) WEB-AOC-19-001 `Shared AOC guard primitives` | DOING (2025-10-26) | Provide `AOCForbiddenKeys`, guard middleware/interceptor hooks, and error types (`AOCError`, `AOCViolationCode`) for ingestion services. Publish sample usage + analyzer to ensure guard registered. 
| BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-AOC-19-002 `Provenance & signature helpers` | TODO | Ship `ProvenanceBuilder`, checksum utilities, and signature verification helper integrated with guard logging. Cover DSSE/CMS formats with unit tests. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-AOC-19-003 `Analyzer + test fixtures` | TODO | Author Roslyn analyzer preventing ingestion modules from writing forbidden keys without guard, and provide shared test fixtures for guard validation used by Concelier/Excititor service tests. | QA Guild, BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-AOC-19-002 `Provenance & signature helpers` | TODO | Ship `ProvenanceBuilder`, checksum utilities, and signature verification helper integrated with guard logging. Cover DSSE/CMS formats with unit tests. Dependencies: WEB-AOC-19-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-AOC-19-003 `Analyzer + test fixtures` | TODO | Author Roslyn analyzer preventing ingestion modules from writing forbidden keys without guard, and provide shared test fixtures for guard validation used by Concelier/Excititor service tests. Dependencies: WEB-AOC-19-002. | QA Guild, BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) WEB-CONSOLE-23-001 `Global posture endpoints` | TODO | Provide consolidated `/console/dashboard` and `/console/filters` APIs returning tenant-scoped aggregates (findings by severity, VEX override counts, advisory deltas, run health, policy change log). Enforce AOC labelling, deterministic ordering, and cursor-based pagination for drill-down hints. | BE-Base Platform Guild, Product Analytics Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-CONSOLE-23-002 `Live status & SSE proxy` | TODO | Expose `/console/status` polling endpoint and `/console/runs/{id}/stream` SSE/WebSocket proxy with heartbeat/backoff, queue lag metrics, and auth scope enforcement. Surface request IDs + retry headers. 
| BE-Base Platform Guild, Scheduler Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-CONSOLE-23-003 `Evidence export orchestrator` | TODO | Add `/console/exports` POST/GET routes coordinating evidence bundle creation, streaming CSV/JSON exports, checksum manifest retrieval, and signed attestation references. Ensure requests honor tenant + policy scopes and expose job tracking metadata. | BE-Base Platform Guild, Policy Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-CONSOLE-23-004 `Global search router` | TODO | Implement `/console/search` endpoint accepting CVE/GHSA/PURL/SBOM identifiers, performing fan-out queries with caching, ranking, and deterministic tie-breaking. Return typed results for Console navigation; respect result caps and latency SLOs. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-CONSOLE-23-005 `Downloads manifest API` | TODO | Serve `/console/downloads` JSON manifest (images, charts, offline bundles) sourced from signed registry metadata; include integrity hashes, release notes links, and offline instructions. Provide caching headers and documentation. | BE-Base Platform Guild, DevOps Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-CONSOLE-23-002 `Live status & SSE proxy` | TODO | Expose `/console/status` polling endpoint and `/console/runs/{id}/stream` SSE/WebSocket proxy with heartbeat/backoff, queue lag metrics, and auth scope enforcement. Surface request IDs + retry headers. Dependencies: WEB-CONSOLE-23-001. | BE-Base Platform Guild, Scheduler Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-CONSOLE-23-003 `Evidence export orchestrator` | TODO | Add `/console/exports` POST/GET routes coordinating evidence bundle creation, streaming CSV/JSON exports, checksum manifest retrieval, and signed attestation references. Ensure requests honor tenant + policy scopes and expose job tracking metadata. Dependencies: WEB-CONSOLE-23-002. 
| BE-Base Platform Guild, Policy Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-CONSOLE-23-004 `Global search router` | TODO | Implement `/console/search` endpoint accepting CVE/GHSA/PURL/SBOM identifiers, performing fan-out queries with caching, ranking, and deterministic tie-breaking. Return typed results for Console navigation; respect result caps and latency SLOs. Dependencies: WEB-CONSOLE-23-003. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-CONSOLE-23-005 `Downloads manifest API` | TODO | Serve `/console/downloads` JSON manifest (images, charts, offline bundles) sourced from signed registry metadata; include integrity hashes, release notes links, and offline instructions. Provide caching headers and documentation. Dependencies: WEB-CONSOLE-23-004. | BE-Base Platform Guild, DevOps Guild (src/Web/StellaOps.Web/TASKS.md) WEB-CONTAINERS-44-001 `Config discovery & quickstart flag` | TODO | Expose `/welcome` state, config discovery endpoint (safe values), and `QUICKSTART_MODE` handling for Console banner; add `/health/liveness`, `/health/readiness`, `/version` if missing. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-CONTAINERS-45-001 `Helm readiness support` | TODO | Ensure readiness endpoints reflect DB/queue readiness, add feature flag toggles via config map, and document NetworkPolicy ports. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-CONTAINERS-46-001 `Air-gap hardening` | TODO | Provide offline-friendly asset serving (no CDN), allow overriding object store endpoints via env, and document fallback behavior. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-CONTAINERS-45-001 `Helm readiness support` | TODO | Ensure readiness endpoints reflect DB/queue readiness, add feature flag toggles via config map, and document NetworkPolicy ports. Dependencies: WEB-CONTAINERS-44-001. 
| BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-CONTAINERS-46-001 `Air-gap hardening` | TODO | Provide offline-friendly asset serving (no CDN), allow overriding object store endpoints via env, and document fallback behavior. Dependencies: WEB-CONTAINERS-45-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) WEB-EXC-25-001 `Exceptions CRUD & workflow` | TODO | Implement `/exceptions` API (create, propose, approve, revoke, list, history) with validation, pagination, and audit logging. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) @@ -240,21 +240,21 @@ Depends on: Sprint 180.F - Web.I Summary: Experience & SDKs focus on Web (phase II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -WEB-EXC-25-002 `Policy integration surfaces` | TODO | Extend `/policy/effective` and `/policy/simulate` responses to include exception metadata and accept overrides for simulations. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-EXC-25-003 `Notifications & events` | TODO | Publish `exception.*` events, integrate with notification hooks, enforce rate limits. | BE-Base Platform Guild, Platform Events Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-EXC-25-002 `Policy integration surfaces` | TODO | Extend `/policy/effective` and `/policy/simulate` responses to include exception metadata and accept overrides for simulations. Dependencies: WEB-EXC-25-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-EXC-25-003 `Notifications & events` | TODO | Publish `exception.*` events, integrate with notification hooks, enforce rate limits. Dependencies: WEB-EXC-25-002. | BE-Base Platform Guild, Platform Events Guild (src/Web/StellaOps.Web/TASKS.md) WEB-EXPORT-35-001 `Export routing` | TODO | Surface Export Center APIs (profiles/runs/download) through gateway with tenant scoping, streaming support, and viewer/operator scope checks. 
| BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-EXPORT-36-001 `Distribution endpoints` | TODO | Add distribution routes (OCI/object storage), manifest/provenance proxies, and signed URL generation. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-EXPORT-37-001 `Scheduling & verification` | TODO | Expose scheduling, retention, encryption parameters, and verification endpoints with admin scope enforcement and audit logs. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-EXPORT-36-001 `Distribution endpoints` | TODO | Add distribution routes (OCI/object storage), manifest/provenance proxies, and signed URL generation. Dependencies: WEB-EXPORT-35-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-EXPORT-37-001 `Scheduling & verification` | TODO | Expose scheduling, retention, encryption parameters, and verification endpoints with admin scope enforcement and audit logs. Dependencies: WEB-EXPORT-36-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) WEB-GRAPH-21-001 `Graph endpoints` | BLOCKED (2025-10-27) | Add gateway routes for graph versions/viewport/node/path/diff/export endpoints with tenant enforcement, scope checks, and streaming responses; proxy Policy Engine diff toggles without inline logic. Adopt `StellaOpsScopes` constants for RBAC enforcement. | BE-Base Platform Guild, Graph Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-GRAPH-21-002 `Request validation` | BLOCKED (2025-10-27) | Implement bbox/zoom/path parameter validation, pagination tokens, and deterministic ordering; add contract tests for boundary conditions. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-GRAPH-21-003 `Error mapping & exports` | BLOCKED (2025-10-27) | Map graph service errors to `ERR_Graph_*`, support GraphML/JSONL export streaming, and document rate limits. 
| BE-Base Platform Guild, QA Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-GRAPH-21-004 `Overlay pass-through` | BLOCKED (2025-10-27) | Proxy Policy Engine overlay responses for graph endpoints while keeping gateway stateless; maintain streaming budgets and latency SLOs. | BE-Base Platform Guild, Policy Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-GRAPH-24-001 `Gateway proxy refresh` | TODO | Gateway proxy for Graph API and Policy overlays with RBAC, caching, pagination, ETags, and streaming; zero business logic. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-GRAPH-24-001 `Graph endpoints` | TODO | Implement `/graph/assets/*` endpoints (snapshots, adjacency, search) with pagination, ETags, and tenant scoping while acting as a pure proxy. | BE-Base Platform Guild, SBOM Service Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-GRAPH-24-004 `AOC enrichers` | TODO | Embed AOC summaries sourced from overlay services; ensure gateway does not compute derived severity or hints. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-GRAPH-24-004 `Telemetry aggregation` | TODO | Collect gateway metrics/logs (tile latency, proxy errors, overlay cache stats) and forward to dashboards; document sampling strategy. | BE-Base Platform Guild, Observability Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-GRAPH-21-002 `Request validation` | BLOCKED (2025-10-27) | Implement bbox/zoom/path parameter validation, pagination tokens, and deterministic ordering; add contract tests for boundary conditions. Dependencies: WEB-GRAPH-21-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-GRAPH-21-003 `Error mapping & exports` | BLOCKED (2025-10-27) | Map graph service errors to `ERR_Graph_*`, support GraphML/JSONL export streaming, and document rate limits. Dependencies: WEB-GRAPH-21-002. 
| BE-Base Platform Guild, QA Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-GRAPH-21-004 `Overlay pass-through` | BLOCKED (2025-10-27) | Proxy Policy Engine overlay responses for graph endpoints while keeping gateway stateless; maintain streaming budgets and latency SLOs. Dependencies: WEB-GRAPH-21-003. | BE-Base Platform Guild, Policy Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-GRAPH-24-001 `Gateway proxy refresh` | TODO | Gateway proxy for Graph API and Policy overlays with RBAC, caching, pagination, ETags, and streaming; zero business logic. Dependencies: WEB-GRAPH-21-004. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-GRAPH-24-002 `Graph endpoints` | TODO | Implement `/graph/assets/*` endpoints (snapshots, adjacency, search) with pagination, ETags, and tenant scoping while acting as a pure proxy. Dependencies: WEB-GRAPH-24-001. | BE-Base Platform Guild, SBOM Service Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-GRAPH-24-003 `AOC enrichers` | TODO | Embed AOC summaries sourced from overlay services; ensure gateway does not compute derived severity or hints. Dependencies: WEB-GRAPH-24-002. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-GRAPH-24-004 `Telemetry aggregation` | TODO | Collect gateway metrics/logs (tile latency, proxy errors, overlay cache stats) and forward to dashboards; document sampling strategy. Dependencies: WEB-GRAPH-24-003. | BE-Base Platform Guild, Observability Guild (src/Web/StellaOps.Web/TASKS.md) WEB-LNM-21-001 `Advisory observation endpoints` | TODO | Surface new `/advisories/*` APIs through gateway with caching, pagination, and RBAC enforcement (`advisory:read`). | BE-Base Platform Guild, Concelier WebService Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-LNM-21-002 `VEX observation endpoints` | TODO | Expose `/vex/*` read APIs with evidence routes and export handlers; map `ERR_AGG_*` codes. 
| BE-Base Platform Guild, Excititor WebService Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-LNM-21-002 `VEX observation endpoints` | TODO | Expose `/vex/*` read APIs with evidence routes and export handlers; map `ERR_AGG_*` codes. Dependencies: WEB-LNM-21-001. | BE-Base Platform Guild, Excititor WebService Guild (src/Web/StellaOps.Web/TASKS.md) [Experience & SDKs] 180.F) Web.III @@ -262,20 +262,20 @@ Depends on: Sprint 180.F - Web.II Summary: Experience & SDKs focus on Web (phase III). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -WEB-LNM-21-003 `Policy evidence aggregation` | TODO | Provide combined endpoint for Console to fetch policy result + source evidence (advisory + VEX linksets) for a component. | BE-Base Platform Guild, Policy Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-LNM-21-003 `Policy evidence aggregation` | TODO | Provide combined endpoint for Console to fetch policy result + source evidence (advisory + VEX linksets) for a component. Dependencies: WEB-LNM-21-002. | BE-Base Platform Guild, Policy Guild (src/Web/StellaOps.Web/TASKS.md) WEB-NOTIFY-38-001 `Gateway routing` | TODO | Route notifier APIs (`/notifications/*`) and WS feed through gateway with tenant scoping, viewer/operator scope enforcement, and SSE/WebSocket bridging. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-NOTIFY-39-001 `Digest & simulation endpoints` | TODO | Surface digest scheduling, quiet-hour/throttle management, and simulation APIs; ensure rate limits and audit logging. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-NOTIFY-40-001 `Escalations & localization` | TODO | Expose escalation, localization, channel health, and ack verification endpoints with admin scope enforcement and signed token validation. 
| BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-NOTIFY-39-001 `Digest & simulation endpoints` | TODO | Surface digest scheduling, quiet-hour/throttle management, and simulation APIs; ensure rate limits and audit logging. Dependencies: WEB-NOTIFY-38-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-NOTIFY-40-001 `Escalations & localization` | TODO | Expose escalation, localization, channel health, and ack verification endpoints with admin scope enforcement and signed token validation. Dependencies: WEB-NOTIFY-39-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) WEB-OAS-61-001 `Discovery endpoint` | TODO | Implement `GET /.well-known/openapi` returning gateway spec with version metadata, cache headers, and signed ETag. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-OAS-61-002 `Standard error envelope` | TODO | Migrate gateway errors to standard envelope and update examples; ensure telemetry logs include `error.code`. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-OAS-62-001 `Pagination & idempotency alignment` | TODO | Normalize all endpoints to cursor pagination, expose `Idempotency-Key` support, and document rate-limit headers. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-OAS-63-001 `Deprecation support` | TODO | Add deprecation header middleware, Sunset link emission, and observability metrics for deprecated routes. | BE-Base Platform Guild, API Governance Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-OAS-61-002 `Standard error envelope` | TODO | Migrate gateway errors to standard envelope and update examples; ensure telemetry logs include `error.code`. Dependencies: WEB-OAS-61-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-OAS-62-001 `Pagination & idempotency alignment` | TODO | Normalize all endpoints to cursor pagination, expose `Idempotency-Key` support, and document rate-limit headers. Dependencies: WEB-OAS-61-002. 
| BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-OAS-63-001 `Deprecation support` | TODO | Add deprecation header middleware, Sunset link emission, and observability metrics for deprecated routes. Dependencies: WEB-OAS-62-001. | BE-Base Platform Guild, API Governance Guild (src/Web/StellaOps.Web/TASKS.md) WEB-OBS-50-001 `Telemetry core adoption` | TODO | Integrate `StellaOps.Telemetry.Core` into gateway host, replace ad-hoc logging, ensure all routes emit trace/span IDs, tenant context, and scrubbed payload previews. | BE-Base Platform Guild, Observability Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-OBS-51-001 `Observability health endpoints` | TODO | Implement `/obs/health` and `/obs/slo` aggregations, pulling metrics from Prometheus/collector APIs, including burn-rate signals and exemplar links for Console widgets. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-OBS-52-001 `Trace & log proxies` | TODO | Deliver `/obs/trace/:id` and `/obs/logs` proxy endpoints with guardrails (time window limits, tenant scoping) forwarding to timeline indexer + log store with signed URLs. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-OBS-54-001 `Evidence & attestation bridges` | TODO | Provide `/evidence/*` and `/attestations/*` pass-through endpoints, enforce `timeline:read`, `evidence:read`, `attest:read` scopes, append provenance headers, and surface verification summaries. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-OBS-55-001 `Incident mode controls` | TODO | Add `/obs/incident-mode` API (enable/disable/status) with audit trail, sampling override, retention bump preview, and CLI/Console hooks. | BE-Base Platform Guild, Ops Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-OBS-56-001 `Sealed status surfaces` | TODO | Extend telemetry core integration to expose sealed/unsealed status APIs, drift metrics, and Console widgets without leaking sealed-mode secrets. 
| BE-Base Platform Guild, AirGap Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-OBS-51-001 `Observability health endpoints` | TODO | Implement `/obs/health` and `/obs/slo` aggregations, pulling metrics from Prometheus/collector APIs, including burn-rate signals and exemplar links for Console widgets. Dependencies: WEB-OBS-50-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-OBS-52-001 `Trace & log proxies` | TODO | Deliver `/obs/trace/:id` and `/obs/logs` proxy endpoints with guardrails (time window limits, tenant scoping) forwarding to timeline indexer + log store with signed URLs. Dependencies: WEB-OBS-51-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-OBS-54-001 `Evidence & attestation bridges` | TODO | Provide `/evidence/*` and `/attestations/*` pass-through endpoints, enforce `timeline:read`, `evidence:read`, `attest:read` scopes, append provenance headers, and surface verification summaries. Dependencies: WEB-OBS-52-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-OBS-55-001 `Incident mode controls` | TODO | Add `/obs/incident-mode` API (enable/disable/status) with audit trail, sampling override, retention bump preview, and CLI/Console hooks. Dependencies: WEB-OBS-54-001. | BE-Base Platform Guild, Ops Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-OBS-56-001 `Sealed status surfaces` | TODO | Extend telemetry core integration to expose sealed/unsealed status APIs, drift metrics, and Console widgets without leaking sealed-mode secrets. Dependencies: WEB-OBS-55-001. | BE-Base Platform Guild, AirGap Guild (src/Web/StellaOps.Web/TASKS.md) WEB-ORCH-32-001 `Read-only routing` | TODO | Expose `/orchestrator/sources | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) @@ -284,21 +284,21 @@ Depends on: Sprint 180.F - Web.III Summary: Experience & SDKs focus on Web (phase IV). 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -WEB-ORCH-33-001 `Control + backfill actions` | TODO | Add POST action routes (`pause | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-ORCH-34-001 `Quotas & telemetry` | TODO | Surface quotas/backfill APIs, queue/backpressure metrics, and error clustering routes with admin scope enforcement and audit logging. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-ORCH-33-001 `Control + backfill actions` | TODO | Add POST action routes (`pause. Dependencies: WEB-ORCH-32-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-ORCH-34-001 `Quotas & telemetry` | TODO | Surface quotas/backfill APIs, queue/backpressure metrics, and error clustering routes with admin scope enforcement and audit logging. Dependencies: WEB-ORCH-33-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) WEB-POLICY-20-001 `Policy endpoints` | TODO | Implement Policy CRUD/compile/run/simulate/findings/explain endpoints with OpenAPI, tenant scoping, and service identity enforcement. | BE-Base Platform Guild, Policy Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-POLICY-20-002 `Pagination & filters` | TODO | Add pagination, filtering, sorting, and tenant guards to listings for policies, runs, and findings; include deterministic ordering and query diagnostics. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-POLICY-20-003 `Error mapping` | TODO | Map engine errors to `ERR_POL_*` responses with consistent payloads and contract tests; expose correlation IDs in headers. | BE-Base Platform Guild, QA Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-POLICY-20-004 `Simulate rate limits` | TODO | Introduce adaptive rate limiting + quotas for simulation endpoints, expose metrics, and document retry headers. 
| Platform Reliability Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-POLICY-23-001 `Policy pack CRUD` | BLOCKED (2025-10-29) | Implement API endpoints for creating/listing/fetching policy packs and revisions (`/policy/packs`, `/policy/packs/{id}/revisions`) with pagination, RBAC, and AOC metadata exposure. (Tracked via Sprint 18.5 gateway tasks.) | BE-Base Platform Guild, Policy Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-POLICY-23-002 `Activation & scope` | BLOCKED (2025-10-29) | Add activation endpoint with scope windows, conflict checks, and optional 2-person approval integration; emit events on success. (Tracked via Sprint 18.5 gateway tasks.) | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-POLICY-23-003 `Simulation & evaluation` | TODO | Provide `/policy/simulate` and `/policy/evaluate` endpoints with streaming responses, rate limiting, and error mapping. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-POLICY-23-004 `Explain retrieval` | TODO | Expose explain history endpoints (`/policy/runs`, `/policy/runs/{id}`) including decision tree, sources consulted, and AOC chain. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-POLICY-27-001 `Policy registry proxy` | TODO | Surface Policy Registry APIs (`/policy/workspaces`, `/policy/versions`, `/policy/reviews`, `/policy/registry`) through gateway with tenant scoping, RBAC, and request validation; ensure streaming downloads for evidence bundles. | BE-Base Platform Guild, Policy Registry Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-POLICY-27-002 `Review & approval routes` | TODO | Implement review lifecycle endpoints (open, comment, approve/reject) with audit headers, comment pagination, and webhook fan-out. 
| BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-POLICY-27-003 `Simulation orchestration endpoints` | TODO | Expose quick/batch simulation endpoints with SSE progress (`/policy/simulations/{runId}/stream`), cursor-based result pagination, and manifest download routes. | BE-Base Platform Guild, Scheduler Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-POLICY-27-004 `Publish & promote controls` | TODO | Add publish/sign/promote/rollback endpoints with idempotent request IDs, canary parameters, and environment bindings; enforce scope checks and emit structured events. | BE-Base Platform Guild, Security Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-POLICY-27-005 `Policy Studio telemetry` | TODO | Instrument metrics/logs for compile latency, simulation queue depth, approval latency, promotion actions; expose aggregated dashboards and correlation IDs for Console. | BE-Base Platform Guild, Observability Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-POLICY-20-002 `Pagination & filters` | TODO | Add pagination, filtering, sorting, and tenant guards to listings for policies, runs, and findings; include deterministic ordering and query diagnostics. Dependencies: WEB-POLICY-20-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-POLICY-20-003 `Error mapping` | TODO | Map engine errors to `ERR_POL_*` responses with consistent payloads and contract tests; expose correlation IDs in headers. Dependencies: WEB-POLICY-20-002. | BE-Base Platform Guild, QA Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-POLICY-20-004 `Simulate rate limits` | TODO | Introduce adaptive rate limiting + quotas for simulation endpoints, expose metrics, and document retry headers. Dependencies: WEB-POLICY-20-003. 
| Platform Reliability Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-POLICY-23-001 `Policy pack CRUD` | BLOCKED (2025-10-29) | Implement API endpoints for creating/listing/fetching policy packs and revisions (`/policy/packs`, `/policy/packs/{id}/revisions`) with pagination, RBAC, and AOC metadata exposure. (Tracked via Sprint 18.5 gateway tasks.) Dependencies: WEB-POLICY-20-004. | BE-Base Platform Guild, Policy Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-POLICY-23-002 `Activation & scope` | BLOCKED (2025-10-29) | Add activation endpoint with scope windows, conflict checks, and optional 2-person approval integration; emit events on success. (Tracked via Sprint 18.5 gateway tasks.) Dependencies: WEB-POLICY-23-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-POLICY-23-003 `Simulation & evaluation` | TODO | Provide `/policy/simulate` and `/policy/evaluate` endpoints with streaming responses, rate limiting, and error mapping. Dependencies: WEB-POLICY-23-002. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-POLICY-23-004 `Explain retrieval` | TODO | Expose explain history endpoints (`/policy/runs`, `/policy/runs/{id}`) including decision tree, sources consulted, and AOC chain. Dependencies: WEB-POLICY-23-003. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-POLICY-27-001 `Policy registry proxy` | TODO | Surface Policy Registry APIs (`/policy/workspaces`, `/policy/versions`, `/policy/reviews`, `/policy/registry`) through gateway with tenant scoping, RBAC, and request validation; ensure streaming downloads for evidence bundles. Dependencies: WEB-POLICY-23-004. | BE-Base Platform Guild, Policy Registry Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-POLICY-27-002 `Review & approval routes` | TODO | Implement review lifecycle endpoints (open, comment, approve/reject) with audit headers, comment pagination, and webhook fan-out. Dependencies: WEB-POLICY-27-001. 
| BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-POLICY-27-003 `Simulation orchestration endpoints` | TODO | Expose quick/batch simulation endpoints with SSE progress (`/policy/simulations/{runId}/stream`), cursor-based result pagination, and manifest download routes. Dependencies: WEB-POLICY-27-002. | BE-Base Platform Guild, Scheduler Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-POLICY-27-004 `Publish & promote controls` | TODO | Add publish/sign/promote/rollback endpoints with idempotent request IDs, canary parameters, and environment bindings; enforce scope checks and emit structured events. Dependencies: WEB-POLICY-27-003. | BE-Base Platform Guild, Security Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-POLICY-27-005 `Policy Studio telemetry` | TODO | Instrument metrics/logs for compile latency, simulation queue depth, approval latency, promotion actions; expose aggregated dashboards and correlation IDs for Console. Dependencies: WEB-POLICY-27-004. | BE-Base Platform Guild, Observability Guild (src/Web/StellaOps.Web/TASKS.md) [Experience & SDKs] 180.F) Web.V @@ -307,20 +307,20 @@ Summary: Experience & SDKs focus on Web (phase V). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- WEB-RISK-66-001 `Risk API routing` | TODO | Expose risk profile/results endpoints through gateway with tenant scoping, pagination, and rate limiting. | BE-Base Platform Guild, Policy Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-RISK-66-002 `Explainability downloads` | TODO | Add signed URL handling for explanation blobs and enforce scope checks. | BE-Base Platform Guild, Risk Engine Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-RISK-67-001 `Risk status endpoint` | TODO | Provide aggregated risk stats (`/risk/status`) for Console dashboards (counts per severity, last computation). 
| BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-RISK-68-001 `Notification hooks` | TODO | Emit events on severity transitions via gateway to notifier bus with trace metadata. | BE-Base Platform Guild, Notifications Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-RISK-66-002 `Explainability downloads` | TODO | Add signed URL handling for explanation blobs and enforce scope checks. Dependencies: WEB-RISK-66-001. | BE-Base Platform Guild, Risk Engine Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-RISK-67-001 `Risk status endpoint` | TODO | Provide aggregated risk stats (`/risk/status`) for Console dashboards (counts per severity, last computation). Dependencies: WEB-RISK-66-002. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-RISK-68-001 `Notification hooks` | TODO | Emit events on severity transitions via gateway to notifier bus with trace metadata. Dependencies: WEB-RISK-67-001. | BE-Base Platform Guild, Notifications Guild (src/Web/StellaOps.Web/TASKS.md) WEB-SIG-26-001 `Signals proxy endpoints` | TODO | Surface `/signals/callgraphs`, `/signals/facts` read/write endpoints with pagination, ETags, and RBAC. | BE-Base Platform Guild, Signals Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-SIG-26-002 `Reachability joins` | TODO | Extend `/policy/effective` and `/vuln/explorer` responses to include reachability scores/states and allow filtering. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-SIG-26-003 `Simulation hooks` | TODO | Add reachability override parameters to `/policy/simulate` and related APIs for what-if analysis. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-SIG-26-002 `Reachability joins` | TODO | Extend `/policy/effective` and `/vuln/explorer` responses to include reachability scores/states and allow filtering. Dependencies: WEB-SIG-26-001. 
| BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-SIG-26-003 `Simulation hooks` | TODO | Add reachability override parameters to `/policy/simulate` and related APIs for what-if analysis. Dependencies: WEB-SIG-26-002. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) WEB-TEN-47-001 `Auth middleware` | TODO | Implement JWT verification, tenant activation from headers, scope matching, and decision audit emission for all API endpoints. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-TEN-48-001 `Tenant context propagation` | TODO | Set DB session `stella.tenant_id`, enforce tenant/project checks on persistence, prefix object storage paths, and stamp audit metadata. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-TEN-49-001 `ABAC & audit API` | TODO | Integrate optional ABAC overlay with Policy Engine, expose `/audit/decisions` API, and support service token minting endpoints. | BE-Base Platform Guild, Policy Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-TEN-48-001 `Tenant context propagation` | TODO | Set DB session `stella.tenant_id`, enforce tenant/project checks on persistence, prefix object storage paths, and stamp audit metadata. Dependencies: WEB-TEN-47-001. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-TEN-49-001 `ABAC & audit API` | TODO | Integrate optional ABAC overlay with Policy Engine, expose `/audit/decisions` API, and support service token minting endpoints. Dependencies: WEB-TEN-48-001. | BE-Base Platform Guild, Policy Guild (src/Web/StellaOps.Web/TASKS.md) WEB-VEX-30-007 `VEX consensus routing` | TODO | Route `/vex/consensus` APIs with tenant RBAC/ABAC, caching, and streaming; surface telemetry and trace IDs without gateway-side overlay logic. | BE-Base Platform Guild, VEX Lens Guild (src/Web/StellaOps.Web/TASKS.md) WEB-VULN-29-001 `Vuln API routing` | TODO | Expose `/vuln/*` endpoints via gateway with tenant scoping, RBAC/ABAC enforcement, anti-forgery headers, and request logging. 
| BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-VULN-29-002 `Ledger proxy headers` | TODO | Forward workflow actions to Findings Ledger with idempotency headers and correlation IDs; handle retries/backoff. | BE-Base Platform Guild, Findings Ledger Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-VULN-29-003 `Simulation + export routing` | TODO | Provide simulation and export orchestration routes with SSE/progress headers, signed download links, and request budgeting. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) -WEB-VULN-29-004 `Telemetry aggregation` | TODO | Emit gateway metrics/logs (latency, error rates, export duration), propagate query hashes for analytics dashboards. | BE-Base Platform Guild, Observability Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-VULN-29-002 `Ledger proxy headers` | TODO | Forward workflow actions to Findings Ledger with idempotency headers and correlation IDs; handle retries/backoff. Dependencies: WEB-VULN-29-001. | BE-Base Platform Guild, Findings Ledger Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-VULN-29-003 `Simulation + export routing` | TODO | Provide simulation and export orchestration routes with SSE/progress headers, signed download links, and request budgeting. Dependencies: WEB-VULN-29-002. | BE-Base Platform Guild (src/Web/StellaOps.Web/TASKS.md) +WEB-VULN-29-004 `Telemetry aggregation` | TODO | Emit gateway metrics/logs (latency, error rates, export duration), propagate query hashes for analytics dashboards. Dependencies: WEB-VULN-29-003. | BE-Base Platform Guild, Observability Guild (src/Web/StellaOps.Web/TASKS.md) If all tasks are done - read next sprint section - SPRINT_190_ops_offline.md diff --git a/docs/implplan/SPRINT_190_ops_offline.md b/docs/implplan/SPRINT_190_ops_offline.md index 9ed82aba..9d29ecc5 100644 --- a/docs/implplan/SPRINT_190_ops_offline.md +++ b/docs/implplan/SPRINT_190_ops_offline.md @@ -6,19 +6,19 @@ Summary: Ops & Offline focus on Ops Deployment (phase I). 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- COMPOSE-44-001 | TODO | Author `docker-compose.yml`, `.env.example`, and `quickstart.sh` with all core services + dependencies (postgres, redis, object-store, queue, otel). | Deployment Guild, DevEx Guild (ops/deployment/TASKS.md) -COMPOSE-44-002 | TODO | Implement `backup.sh` and `reset.sh` scripts with safety prompts and documentation. | Deployment Guild (ops/deployment/TASKS.md) -COMPOSE-44-003 | TODO | Package seed data container and onboarding wizard toggle (`QUICKSTART_MODE`), ensuring default creds randomized on first run. | Deployment Guild, Docs Guild (ops/deployment/TASKS.md) +COMPOSE-44-002 | TODO | Implement `backup.sh` and `reset.sh` scripts with safety prompts and documentation. Dependencies: COMPOSE-44-001. | Deployment Guild (ops/deployment/TASKS.md) +COMPOSE-44-003 | TODO | Package seed data container and onboarding wizard toggle (`QUICKSTART_MODE`), ensuring default creds randomized on first run. Dependencies: COMPOSE-44-002. | Deployment Guild, Docs Guild (ops/deployment/TASKS.md) DEPLOY-AIAI-31-001 | TODO | Provide Helm/Compose manifests, GPU toggle, scaling/runbook, and offline kit instructions for Advisory AI service + inference container. | Deployment Guild, Advisory AI Guild (ops/deployment/TASKS.md) DEPLOY-AIRGAP-46-001 | TODO | Provide instructions and scripts (`load.sh`) for importing air-gap bundle into private registry; update Offline Kit guide. | Deployment Guild, Offline Kit Guild (ops/deployment/TASKS.md) DEPLOY-CLI-41-001 | TODO | Package CLI release artifacts (tarballs per OS/arch, checksums, signatures, completions, container image) and publish distribution docs. | Deployment Guild, DevEx/CLI Guild (ops/deployment/TASKS.md) DEPLOY-COMPOSE-44-001 | TODO | Finalize Quickstart scripts (`quickstart.sh`, `backup.sh`, `reset.sh`), seed data container, and publish README with imposed rule reminder. 
| Deployment Guild (ops/deployment/TASKS.md) DEPLOY-EXPORT-35-001 | BLOCKED (2025-10-29) | Package exporter service/worker Helm overlays (download-only), document rollout/rollback, and integrate signing KMS secrets. | Deployment Guild, Exporter Service Guild (ops/deployment/TASKS.md) -DEPLOY-EXPORT-36-001 | TODO | Document OCI/object storage distribution workflows, registry credential automation, and monitoring hooks for exports. | Deployment Guild, Exporter Service Guild (ops/deployment/TASKS.md) +DEPLOY-EXPORT-36-001 | TODO | Document OCI/object storage distribution workflows, registry credential automation, and monitoring hooks for exports. Dependencies: DEPLOY-EXPORT-35-001. | Deployment Guild, Exporter Service Guild (ops/deployment/TASKS.md) DEPLOY-HELM-45-001 | TODO | Publish Helm install guide and sample values for prod/airgap; integrate with docs site build. | Deployment Guild (ops/deployment/TASKS.md) DEPLOY-NOTIFY-38-001 | BLOCKED (2025-10-29) | Package notifier API/worker Helm overlays (email/chat/webhook), secrets templates, rollout guide. | Deployment Guild, DevOps Guild (ops/deployment/TASKS.md) DEPLOY-ORCH-34-001 | TODO | Provide orchestrator Helm/Compose manifests, scaling defaults, secret templates, offline kit instructions, and GA rollout/rollback playbook. | Deployment Guild, Orchestrator Service Guild (ops/deployment/TASKS.md) DEPLOY-PACKS-42-001 | TODO | Provide deployment manifests for packs-registry and task-runner services, including Helm/Compose overlays, scaling defaults, and secret templates. | Deployment Guild, Packs Registry Guild (ops/deployment/TASKS.md) -DEPLOY-PACKS-43-001 | TODO | Ship remote Task Runner worker profiles, object storage bootstrap, approval workflow integration, and Offline Kit packaging instructions. 
| Deployment Guild, Task Runner Guild (ops/deployment/TASKS.md) +DEPLOY-PACKS-43-001 | TODO | Ship remote Task Runner worker profiles, object storage bootstrap, approval workflow integration, and Offline Kit packaging instructions. Dependencies: DEPLOY-PACKS-42-001. | Deployment Guild, Task Runner Guild (ops/deployment/TASKS.md) DEPLOY-POLICY-27-001 | TODO | Produce Helm/Compose overlays for Policy Registry + simulation workers, including Mongo migrations, object storage buckets, signing key secrets, and tenancy defaults. | Deployment Guild, Policy Registry Guild (ops/deployment/TASKS.md) @@ -27,15 +27,15 @@ Depends on: Sprint 190.A - Ops Deployment.I Summary: Ops & Offline focus on Ops Deployment (phase II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -DEPLOY-POLICY-27-002 | TODO | Document rollout/rollback playbooks for policy publish/promote (canary strategy, emergency freeze toggle, evidence retrieval) under `/docs/runbooks/policy-incident.md`. | Deployment Guild, Policy Guild (ops/deployment/TASKS.md) +DEPLOY-POLICY-27-002 | TODO | Document rollout/rollback playbooks for policy publish/promote (canary strategy, emergency freeze toggle, evidence retrieval) under `/docs/runbooks/policy-incident.md`. Dependencies: DEPLOY-POLICY-27-001. | Deployment Guild, Policy Guild (ops/deployment/TASKS.md) DEPLOY-VEX-30-001 | TODO | Provide Helm/Compose overlays, scaling defaults, and offline kit instructions for VEX Lens service. | Deployment Guild, VEX Lens Guild (ops/deployment/TASKS.md) -DEPLOY-VEX-30-002 | TODO | Package Issuer Directory deployment manifests, backups, and security hardening guidance. | Deployment Guild, Issuer Directory Guild (ops/deployment/TASKS.md) +DEPLOY-VEX-30-002 | TODO | Package Issuer Directory deployment manifests, backups, and security hardening guidance. Dependencies: DEPLOY-VEX-30-001. 
| Deployment Guild, Issuer Directory Guild (ops/deployment/TASKS.md) DEPLOY-VULN-29-001 | TODO | Produce Helm/Compose overlays for Findings Ledger + projector, including DB migrations, Merkle anchor jobs, and scaling guidance. | Deployment Guild, Findings Ledger Guild (ops/deployment/TASKS.md) -DEPLOY-VULN-29-002 | TODO | Package `stella-vuln-explorer-api` deployment manifests, health checks, autoscaling policies, and offline kit instructions with signed images. | Deployment Guild, Vuln Explorer API Guild (ops/deployment/TASKS.md) +DEPLOY-VULN-29-002 | TODO | Package `stella-vuln-explorer-api` deployment manifests, health checks, autoscaling policies, and offline kit instructions with signed images. Dependencies: DEPLOY-VULN-29-001. | Deployment Guild, Vuln Explorer API Guild (ops/deployment/TASKS.md) DOWNLOADS-CONSOLE-23-001 | TODO | Maintain signed downloads manifest pipeline (images, Helm, offline bundles), publish JSON under `deploy/downloads/manifest.json`, and document sync cadence for Console + docs parity. | Deployment Guild, DevOps Guild (ops/deployment/TASKS.md) HELM-45-001 | TODO | Scaffold `deploy/helm/stella` chart with values, component toggles, and pinned image digests for all services; include migration Job templates. | Deployment Guild (ops/deployment/TASKS.md) -HELM-45-002 | TODO | Add TLS/Ingress, NetworkPolicy, PodSecurityContexts, Secrets integration (external secrets), and document security posture. | Deployment Guild, Security Guild (ops/deployment/TASKS.md) -HELM-45-003 | TODO | Implement HPA, PDB, readiness gates, Prometheus scraping annotations, OTel configuration hooks, and upgrade hooks. | Deployment Guild, Observability Guild (ops/deployment/TASKS.md) +HELM-45-002 | TODO | Add TLS/Ingress, NetworkPolicy, PodSecurityContexts, Secrets integration (external secrets), and document security posture. Dependencies: HELM-45-001. 
| Deployment Guild, Security Guild (ops/deployment/TASKS.md) +HELM-45-003 | TODO | Implement HPA, PDB, readiness gates, Prometheus scraping annotations, OTel configuration hooks, and upgrade hooks. Dependencies: HELM-45-002. | Deployment Guild, Observability Guild (ops/deployment/TASKS.md) [Ops & Offline] 190.B) Ops Devops.I @@ -45,19 +45,19 @@ Task ID | State | Task description | Owners (Source) --- | --- | --- | --- DEVOPS-AIAI-31-001 | TODO | Stand up CI pipelines, inference monitoring, privacy logging review, and perf dashboards for Advisory AI (summaries/conflicts/remediation). | DevOps Guild, Advisory AI Guild (ops/devops/TASKS.md) DEVOPS-AIRGAP-56-001 | TODO | Ship deny-all egress policies for Kubernetes (NetworkPolicy/eBPF) and docker-compose firewall rules; provide verification script for sealed mode. | DevOps Guild (ops/devops/TASKS.md) -DEVOPS-AIRGAP-56-002 | TODO | Provide import tooling for bundle staging: checksum validation, offline object-store loader scripts, removable media guidance. | DevOps Guild, AirGap Importer Guild (ops/devops/TASKS.md) -DEVOPS-AIRGAP-56-003 | TODO | Build Bootstrap Pack pipeline bundling images/charts, generating checksums, and publishing manifest for offline transfer. | DevOps Guild, Container Distribution Guild (ops/devops/TASKS.md) -DEVOPS-AIRGAP-57-001 | TODO | Automate Mirror Bundle creation jobs with dual-control approvals, artifact signing, and checksum publication. | DevOps Guild, Mirror Creator Guild (ops/devops/TASKS.md) -DEVOPS-AIRGAP-57-002 | TODO | Configure sealed-mode CI tests that run services with sealed flag and ensure no egress occurs (iptables + mock DNS). | DevOps Guild, Authority Guild (ops/devops/TASKS.md) -DEVOPS-AIRGAP-58-001 | TODO | Provide local SMTP/syslog container templates and health checks for sealed environments; integrate into Bootstrap Pack. 
| DevOps Guild, Notifications Guild (ops/devops/TASKS.md) -DEVOPS-AIRGAP-58-002 | TODO | Ship sealed-mode observability stack (Prometheus/Grafana/Tempo/Loki) pre-configured with offline dashboards and no remote exporters. | DevOps Guild, Observability Guild (ops/devops/TASKS.md) +DEVOPS-AIRGAP-56-002 | TODO | Provide import tooling for bundle staging: checksum validation, offline object-store loader scripts, removable media guidance. Dependencies: DEVOPS-AIRGAP-56-001. | DevOps Guild, AirGap Importer Guild (ops/devops/TASKS.md) +DEVOPS-AIRGAP-56-003 | TODO | Build Bootstrap Pack pipeline bundling images/charts, generating checksums, and publishing manifest for offline transfer. Dependencies: DEVOPS-AIRGAP-56-002. | DevOps Guild, Container Distribution Guild (ops/devops/TASKS.md) +DEVOPS-AIRGAP-57-001 | TODO | Automate Mirror Bundle creation jobs with dual-control approvals, artifact signing, and checksum publication. Dependencies: DEVOPS-AIRGAP-56-003. | DevOps Guild, Mirror Creator Guild (ops/devops/TASKS.md) +DEVOPS-AIRGAP-57-002 | TODO | Configure sealed-mode CI tests that run services with sealed flag and ensure no egress occurs (iptables + mock DNS). Dependencies: DEVOPS-AIRGAP-57-001. | DevOps Guild, Authority Guild (ops/devops/TASKS.md) +DEVOPS-AIRGAP-58-001 | TODO | Provide local SMTP/syslog container templates and health checks for sealed environments; integrate into Bootstrap Pack. Dependencies: DEVOPS-AIRGAP-57-002. | DevOps Guild, Notifications Guild (ops/devops/TASKS.md) +DEVOPS-AIRGAP-58-002 | TODO | Ship sealed-mode observability stack (Prometheus/Grafana/Tempo/Loki) pre-configured with offline dashboards and no remote exporters. Dependencies: DEVOPS-AIRGAP-58-001. | DevOps Guild, Observability Guild (ops/devops/TASKS.md) DEVOPS-AOC-19-001 | BLOCKED (2025-10-26) | Integrate the AOC Roslyn analyzer and guard tests into CI, failing builds when ingestion projects attempt banned writes. 
| DevOps Guild, Platform Guild (ops/devops/TASKS.md) -DEVOPS-AOC-19-002 | BLOCKED (2025-10-26) | Add pipeline stage executing `stella aoc verify --since` against seeded Mongo snapshots for Concelier + Excititor, publishing violation report artefacts. | DevOps Guild (ops/devops/TASKS.md) -DEVOPS-AOC-19-003 | BLOCKED (2025-10-26) | Enforce unit test coverage thresholds for AOC guard suites and ensure coverage exported to dashboards. | DevOps Guild, QA Guild (ops/devops/TASKS.md) -DEVOPS-AOC-19-101 | TODO (2025-10-28) | Draft supersedes backfill rollout (freeze window, dry-run steps, rollback) once advisory_raw idempotency index passes staging verification. | DevOps Guild, Concelier Storage Guild (ops/devops/TASKS.md) +DEVOPS-AOC-19-002 | BLOCKED (2025-10-26) | Add pipeline stage executing `stella aoc verify --since` against seeded Mongo snapshots for Concelier + Excititor, publishing violation report artefacts. Dependencies: DEVOPS-AOC-19-001. | DevOps Guild (ops/devops/TASKS.md) +DEVOPS-AOC-19-003 | BLOCKED (2025-10-26) | Enforce unit test coverage thresholds for AOC guard suites and ensure coverage exported to dashboards. Dependencies: DEVOPS-AOC-19-002. | DevOps Guild, QA Guild (ops/devops/TASKS.md) +DEVOPS-AOC-19-101 | TODO (2025-10-28) | Draft supersedes backfill rollout (freeze window, dry-run steps, rollback) once advisory_raw idempotency index passes staging verification. Dependencies: DEVOPS-AOC-19-003. | DevOps Guild, Concelier Storage Guild (ops/devops/TASKS.md) DEVOPS-ATTEST-73-001 | TODO | Provision CI pipelines for attestor service (lint/test/security scan, seed data) and manage secrets for KMS drivers. | DevOps Guild, Attestor Service Guild (ops/devops/TASKS.md) -DEVOPS-ATTEST-73-002 | TODO | Establish secure storage for signing keys (vault integration, rotation schedule) and audit logging. | DevOps Guild, KMS Guild (ops/devops/TASKS.md) -DEVOPS-ATTEST-74-001 | TODO | Deploy transparency log witness infrastructure and monitoring. 
| DevOps Guild, Transparency Guild (ops/devops/TASKS.md) +DEVOPS-ATTEST-73-002 | TODO | Establish secure storage for signing keys (vault integration, rotation schedule) and audit logging. Dependencies: DEVOPS-ATTEST-73-001. | DevOps Guild, KMS Guild (ops/devops/TASKS.md) +DEVOPS-ATTEST-74-001 | TODO | Deploy transparency log witness infrastructure and monitoring. Dependencies: DEVOPS-ATTEST-73-002. | DevOps Guild, Transparency Guild (ops/devops/TASKS.md) [Ops & Offline] 190.B) Ops Devops.II @@ -65,20 +65,20 @@ Depends on: Sprint 190.B - Ops Devops.I Summary: Ops & Offline focus on Ops Devops (phase II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -DEVOPS-ATTEST-74-002 | TODO | Integrate attestation bundle builds into release/offline pipelines with checksum verification. | DevOps Guild, Export Attestation Guild (ops/devops/TASKS.md) -DEVOPS-ATTEST-75-001 | TODO | Add dashboards/alerts for signing latency, verification failures, key rotation events. | DevOps Guild, Observability Guild (ops/devops/TASKS.md) +DEVOPS-ATTEST-74-002 | TODO | Integrate attestation bundle builds into release/offline pipelines with checksum verification. Dependencies: DEVOPS-ATTEST-74-001. | DevOps Guild, Export Attestation Guild (ops/devops/TASKS.md) +DEVOPS-ATTEST-75-001 | TODO | Add dashboards/alerts for signing latency, verification failures, key rotation events. Dependencies: DEVOPS-ATTEST-74-002. | DevOps Guild, Observability Guild (ops/devops/TASKS.md) DEVOPS-CLI-41-001 | TODO | Establish CLI build pipeline (multi-platform binaries, SBOM, checksums), parity matrix CI enforcement, and release artifact signing. | DevOps Guild, DevEx/CLI Guild (ops/devops/TASKS.md) -DEVOPS-CLI-42-001 | TODO | Add CLI golden output tests, parity diff automation, pack run CI harness, and artifact cache for remote mode. 
| DevOps Guild (ops/devops/TASKS.md) -DEVOPS-CLI-43-001 | DOING (2025-10-27) | Finalize multi-platform release automation, SBOM signing, parity gate enforcement, and Task Pack chaos tests. | DevOps Guild (ops/devops/TASKS.md) -DEVOPS-CLI-43-002 | TODO | Implement Task Pack chaos smoke in CI (random failure injection, resume, sealed-mode toggle) and publish evidence bundles for review. | DevOps Guild, Task Runner Guild (ops/devops/TASKS.md) -DEVOPS-CLI-43-003 | TODO | Integrate CLI golden output/parity diff automation into release gating; export parity report artifact consumed by Console Downloads workspace. | DevOps Guild, DevEx/CLI Guild (ops/devops/TASKS.md) +DEVOPS-CLI-42-001 | TODO | Add CLI golden output tests, parity diff automation, pack run CI harness, and artifact cache for remote mode. Dependencies: DEVOPS-CLI-41-001. | DevOps Guild (ops/devops/TASKS.md) +DEVOPS-CLI-43-001 | DOING (2025-10-27) | Finalize multi-platform release automation, SBOM signing, parity gate enforcement, and Task Pack chaos tests. Dependencies: DEVOPS-CLI-42-001. | DevOps Guild (ops/devops/TASKS.md) +DEVOPS-CLI-43-002 | TODO | Implement Task Pack chaos smoke in CI (random failure injection, resume, sealed-mode toggle) and publish evidence bundles for review. Dependencies: DEVOPS-CLI-43-001. | DevOps Guild, Task Runner Guild (ops/devops/TASKS.md) +DEVOPS-CLI-43-003 | TODO | Integrate CLI golden output/parity diff automation into release gating; export parity report artifact consumed by Console Downloads workspace. Dependencies: DEVOPS-CLI-43-002. | DevOps Guild, DevEx/CLI Guild (ops/devops/TASKS.md) DEVOPS-CONSOLE-23-001 | BLOCKED (2025-10-26) | Add console CI workflow (pnpm cache, lint, type-check, unit, Storybook a11y, Playwright, Lighthouse) with offline runners and artifact retention for screenshots/reports. 
| DevOps Guild, Console Guild (ops/devops/TASKS.md) -DEVOPS-CONSOLE-23-002 | TODO | Produce `stella-console` container build + Helm chart overlays with deterministic digests, SBOM/provenance artefacts, and offline bundle packaging scripts. | DevOps Guild, Console Guild (ops/devops/TASKS.md) +DEVOPS-CONSOLE-23-002 | TODO | Produce `stella-console` container build + Helm chart overlays with deterministic digests, SBOM/provenance artefacts, and offline bundle packaging scripts. Dependencies: DEVOPS-CONSOLE-23-001. | DevOps Guild, Console Guild (ops/devops/TASKS.md) DEVOPS-CONTAINERS-44-001 | TODO | Automate multi-arch image builds with buildx, SBOM generation, cosign signing, and signature verification in CI. | DevOps Guild (ops/devops/TASKS.md) -DEVOPS-CONTAINERS-45-001 | TODO | Add Compose and Helm smoke tests (fresh VM + kind cluster) to CI; publish test artifacts and logs. | DevOps Guild (ops/devops/TASKS.md) -DEVOPS-CONTAINERS-46-001 | TODO | Build air-gap bundle generator (`src/Tools/make-airgap-bundle.sh`), produce signed bundle, and verify in CI using private registry. | DevOps Guild (ops/devops/TASKS.md) +DEVOPS-CONTAINERS-45-001 | TODO | Add Compose and Helm smoke tests (fresh VM + kind cluster) to CI; publish test artifacts and logs. Dependencies: DEVOPS-CONTAINERS-44-001. | DevOps Guild (ops/devops/TASKS.md) +DEVOPS-CONTAINERS-46-001 | TODO | Build air-gap bundle generator (`src/Tools/make-airgap-bundle.sh`), produce signed bundle, and verify in CI using private registry. Dependencies: DEVOPS-CONTAINERS-45-001. | DevOps Guild (ops/devops/TASKS.md) DEVOPS-DEVPORT-63-001 | TODO | Automate developer portal build pipeline with caching, link & accessibility checks, performance budgets. | DevOps Guild, Developer Portal Guild (ops/devops/TASKS.md) -DEVOPS-DEVPORT-64-001 | TODO | Schedule `devportal --offline` nightly builds with checksum validation and artifact retention policies. 
| DevOps Guild, DevPortal Offline Guild (ops/devops/TASKS.md) +DEVOPS-DEVPORT-64-001 | TODO | Schedule `devportal --offline` nightly builds with checksum validation and artifact retention policies. Dependencies: DEVOPS-DEVPORT-63-001. | DevOps Guild, DevPortal Offline Guild (ops/devops/TASKS.md) DEVOPS-EXPORT-35-001 | BLOCKED (2025-10-29) | Establish exporter CI pipeline (lint/test/perf smoke), configure object storage fixtures, seed Grafana dashboards, and document bootstrap steps. | DevOps Guild, Exporter Service Guild (ops/devops/TASKS.md) @@ -87,21 +87,21 @@ Depends on: Sprint 190.B - Ops Devops.II Summary: Ops & Offline focus on Ops Devops (phase III). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -DEVOPS-EXPORT-36-001 | TODO | Integrate Trivy compatibility validation, cosign signature checks, `trivy module db import` smoke tests, OCI distribution verification, and throughput/error dashboards. | DevOps Guild, Exporter Service Guild (ops/devops/TASKS.md) -DEVOPS-EXPORT-37-001 | TODO | Finalize exporter monitoring (failure alerts, verify metrics, retention jobs) and chaos/latency tests ahead of GA. | DevOps Guild, Exporter Service Guild (ops/devops/TASKS.md) +DEVOPS-EXPORT-36-001 | TODO | Integrate Trivy compatibility validation, cosign signature checks, `trivy module db import` smoke tests, OCI distribution verification, and throughput/error dashboards. Dependencies: DEVOPS-EXPORT-35-001. | DevOps Guild, Exporter Service Guild (ops/devops/TASKS.md) +DEVOPS-EXPORT-37-001 | TODO | Finalize exporter monitoring (failure alerts, verify metrics, retention jobs) and chaos/latency tests ahead of GA. Dependencies: DEVOPS-EXPORT-36-001. | DevOps Guild, Exporter Service Guild (ops/devops/TASKS.md) DEVOPS-GRAPH-24-001 | TODO | Load test graph index/adjacency APIs with 40k-node assets; capture perf dashboards and alert thresholds. 
| DevOps Guild, SBOM Service Guild (ops/devops/TASKS.md) -DEVOPS-GRAPH-24-002 | TODO | Integrate synthetic UI perf runs (Playwright/WebGL metrics) for Graph/Vuln explorers; fail builds on regression. | DevOps Guild, UI Guild (ops/devops/TASKS.md) -DEVOPS-GRAPH-24-003 | TODO | Implement smoke job for simulation endpoints ensuring we stay within SLA (<3s upgrade) and log results. | DevOps Guild (ops/devops/TASKS.md) +DEVOPS-GRAPH-24-002 | TODO | Integrate synthetic UI perf runs (Playwright/WebGL metrics) for Graph/Vuln explorers; fail builds on regression. Dependencies: DEVOPS-GRAPH-24-001. | DevOps Guild, UI Guild (ops/devops/TASKS.md) +DEVOPS-GRAPH-24-003 | TODO | Implement smoke job for simulation endpoints ensuring we stay within SLA (<3s upgrade) and log results. Dependencies: DEVOPS-GRAPH-24-002. | DevOps Guild (ops/devops/TASKS.md) DEVOPS-LNM-22-001 | BLOCKED (2025-10-27) | Run migration/backfill pipelines for advisory observations/linksets in staging, validate counts/conflicts, and automate deployment steps. Awaiting storage backfill tooling. | DevOps Guild, Concelier Guild (ops/devops/TASKS.md) -DEVOPS-LNM-22-002 | BLOCKED (2025-10-27) | Execute VEX observation/linkset backfill with monitoring; ensure NATS/Redis events integrated; document ops runbook. Blocked until Excititor storage migration lands. | DevOps Guild, Excititor Guild (ops/devops/TASKS.md) -DEVOPS-LNM-22-003 | TODO | Add CI/monitoring coverage for new metrics (`advisory_observations_total`, `linksets_total`, etc.) and alerts on ingest-to-API SLA breaches. | DevOps Guild, Observability Guild (ops/devops/TASKS.md) +DEVOPS-LNM-22-002 | BLOCKED (2025-10-27) | Execute VEX observation/linkset backfill with monitoring; ensure NATS/Redis events integrated; document ops runbook. Blocked until Excititor storage migration lands. Dependencies: DEVOPS-LNM-22-001. 
| DevOps Guild, Excititor Guild (ops/devops/TASKS.md) +DEVOPS-LNM-22-003 | TODO | Add CI/monitoring coverage for new metrics (`advisory_observations_total`, `linksets_total`, etc.) and alerts on ingest-to-API SLA breaches. Dependencies: DEVOPS-LNM-22-002. | DevOps Guild, Observability Guild (ops/devops/TASKS.md) DEVOPS-OAS-61-001 | TODO | Add CI stages for OpenAPI linting, validation, and compatibility diff; enforce gating on PRs. | DevOps Guild, API Contracts Guild (ops/devops/TASKS.md) -DEVOPS-OAS-61-002 | TODO | Integrate mock server + contract test suite into PR and nightly workflows; publish artifacts. | DevOps Guild, Contract Testing Guild (ops/devops/TASKS.md) +DEVOPS-OAS-61-002 | TODO | Integrate mock server + contract test suite into PR and nightly workflows; publish artifacts. Dependencies: DEVOPS-OAS-61-001. | DevOps Guild, Contract Testing Guild (ops/devops/TASKS.md) DEVOPS-OBS-50-002 | DOING (2025-10-26) | Stand up multi-tenant storage backends (Prometheus, Tempo/Jaeger, Loki) with retention policies, tenant isolation, and redaction guard rails. Integrate with Authority scopes for read paths. | DevOps Guild, Security Guild (ops/devops/TASKS.md) -DEVOPS-OBS-51-001 | TODO | Implement SLO evaluator service (burn rate calculators, webhook emitters), Grafana dashboards, and alert routing to Notifier. Provide Terraform/Helm automation. | DevOps Guild, Observability Guild (ops/devops/TASKS.md) -DEVOPS-OBS-52-001 | TODO | Configure streaming pipeline (NATS/Redis/Kafka) with retention, partitioning, and backpressure tuning for timeline events; add CI validation of schema + rate caps. | DevOps Guild, Timeline Indexer Guild (ops/devops/TASKS.md) -DEVOPS-OBS-53-001 | TODO | Provision object storage with WORM/retention options (S3 Object Lock / MinIO immutability), legal hold automation, and backup/restore scripts for evidence locker. 
| DevOps Guild, Evidence Locker Guild (ops/devops/TASKS.md) -DEVOPS-OBS-54-001 | TODO | Manage provenance signing infrastructure (KMS keys, rotation schedule, timestamp authority integration) and integrate verification jobs into CI. | DevOps Guild, Security Guild (ops/devops/TASKS.md) +DEVOPS-OBS-51-001 | TODO | Implement SLO evaluator service (burn rate calculators, webhook emitters), Grafana dashboards, and alert routing to Notifier. Provide Terraform/Helm automation. Dependencies: DEVOPS-OBS-50-002. | DevOps Guild, Observability Guild (ops/devops/TASKS.md) +DEVOPS-OBS-52-001 | TODO | Configure streaming pipeline (NATS/Redis/Kafka) with retention, partitioning, and backpressure tuning for timeline events; add CI validation of schema + rate caps. Dependencies: DEVOPS-OBS-51-001. | DevOps Guild, Timeline Indexer Guild (ops/devops/TASKS.md) +DEVOPS-OBS-53-001 | TODO | Provision object storage with WORM/retention options (S3 Object Lock / MinIO immutability), legal hold automation, and backup/restore scripts for evidence locker. Dependencies: DEVOPS-OBS-52-001. | DevOps Guild, Evidence Locker Guild (ops/devops/TASKS.md) +DEVOPS-OBS-54-001 | TODO | Manage provenance signing infrastructure (KMS keys, rotation schedule, timestamp authority integration) and integrate verification jobs into CI. Dependencies: DEVOPS-OBS-53-001. | DevOps Guild, Security Guild (ops/devops/TASKS.md) [Ops & Offline] 190.B) Ops Devops.IV @@ -109,21 +109,21 @@ Depends on: Sprint 190.B - Ops Devops.III Summary: Ops & Offline focus on Ops Devops (phase IV). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -DEVOPS-OBS-55-001 | TODO | Implement incident mode automation: feature flag service, auto-activation via SLO burn-rate, retention override management, and post-incident reset job. 
| DevOps Guild, Ops Guild (ops/devops/TASKS.md) +DEVOPS-OBS-55-001 | TODO | Implement incident mode automation: feature flag service, auto-activation via SLO burn-rate, retention override management, and post-incident reset job. Dependencies: DEVOPS-OBS-54-001. | DevOps Guild, Ops Guild (ops/devops/TASKS.md) DEVOPS-ORCH-32-001 | TODO | Provision orchestrator Postgres/message-bus infrastructure, add CI smoke deploy, seed Grafana dashboards (queue depth, inflight jobs), and document bootstrap. | DevOps Guild, Orchestrator Service Guild (ops/devops/TASKS.md) -DEVOPS-ORCH-33-001 | TODO | Publish Grafana dashboards/alerts for rate limiter, backpressure, error clustering, and DLQ depth; integrate with on-call rotations. | DevOps Guild, Observability Guild (ops/devops/TASKS.md) -DEVOPS-ORCH-34-001 | TODO | Harden production monitoring (synthetic probes, burn-rate alerts, replay smoke), document incident response, and prep GA readiness checklist. | DevOps Guild, Orchestrator Service Guild (ops/devops/TASKS.md) +DEVOPS-ORCH-33-001 | TODO | Publish Grafana dashboards/alerts for rate limiter, backpressure, error clustering, and DLQ depth; integrate with on-call rotations. Dependencies: DEVOPS-ORCH-32-001. | DevOps Guild, Observability Guild (ops/devops/TASKS.md) +DEVOPS-ORCH-34-001 | TODO | Harden production monitoring (synthetic probes, burn-rate alerts, replay smoke), document incident response, and prep GA readiness checklist. Dependencies: DEVOPS-ORCH-33-001. | DevOps Guild, Orchestrator Service Guild (ops/devops/TASKS.md) DEVOPS-POLICY-27-001 | TODO | Add CI pipeline stages to run `stella policy lint | DevOps Guild, DevEx/CLI Guild (ops/devops/TASKS.md) -DEVOPS-POLICY-27-002 | TODO | Provide optional batch simulation CI job (staging inventory) that triggers Registry run, polls results, and posts markdown summary to PR; enforce drift thresholds. 
| DevOps Guild, Policy Registry Guild (ops/devops/TASKS.md) -DEVOPS-POLICY-27-003 | TODO | Manage signing key material for policy publish pipeline (OIDC workload identity + cosign), rotate keys, and document verification steps; integrate attestation verification stage. | DevOps Guild, Security Guild (ops/devops/TASKS.md) -DEVOPS-POLICY-27-004 | TODO | Create dashboards/alerts for policy compile latency, simulation queue depth, approval latency, and promotion outcomes; integrate with on-call playbooks. | DevOps Guild, Observability Guild (ops/devops/TASKS.md) +DEVOPS-POLICY-27-002 | TODO | Provide optional batch simulation CI job (staging inventory) that triggers Registry run, polls results, and posts markdown summary to PR; enforce drift thresholds. Dependencies: DEVOPS-POLICY-27-001. | DevOps Guild, Policy Registry Guild (ops/devops/TASKS.md) +DEVOPS-POLICY-27-003 | TODO | Manage signing key material for policy publish pipeline (OIDC workload identity + cosign), rotate keys, and document verification steps; integrate attestation verification stage. Dependencies: DEVOPS-POLICY-27-002. | DevOps Guild, Security Guild (ops/devops/TASKS.md) +DEVOPS-POLICY-27-004 | TODO | Create dashboards/alerts for policy compile latency, simulation queue depth, approval latency, and promotion outcomes; integrate with on-call playbooks. Dependencies: DEVOPS-POLICY-27-003. | DevOps Guild, Observability Guild (ops/devops/TASKS.md) DEVOPS-REL-17-004 | BLOCKED (2025-10-26) | Ensure release workflow publishes `out/release/debug` (build-id tree + manifest) and fails when symbols are missing. | DevOps Guild (ops/devops/TASKS.md) DEVOPS-RULES-33-001 | REVIEW (2025-10-30) | Contracts & Rules anchor:
• Gateway proxies only; Policy Engine composes overlays/simulations.
• AOC ingestion cannot merge; only lossless canonicalization.
• One graph platform: Graph Indexer + Graph API. Cartographer retired. | DevOps Guild, Platform Leads (ops/devops/TASKS.md) DEVOPS-SDK-63-001 | TODO | Provision registry credentials, signing keys, and secure storage for SDK publishing pipelines. | DevOps Guild, SDK Release Guild (ops/devops/TASKS.md) DEVOPS-SIG-26-001 | TODO | Provision CI/CD pipelines, Helm/Compose manifests for Signals service, including artifact storage and Redis dependencies. | DevOps Guild, Signals Guild (ops/devops/TASKS.md) -DEVOPS-SIG-26-002 | TODO | Create dashboards/alerts for reachability scoring latency, cache hit rates, sensor staleness. | DevOps Guild, Observability Guild (ops/devops/TASKS.md) +DEVOPS-SIG-26-002 | TODO | Create dashboards/alerts for reachability scoring latency, cache hit rates, sensor staleness. Dependencies: DEVOPS-SIG-26-001. | DevOps Guild, Observability Guild (ops/devops/TASKS.md) DEVOPS-TEN-47-001 | TODO | Add JWKS cache monitoring, signature verification regression tests, and token expiration chaos tests to CI. | DevOps Guild (ops/devops/TASKS.md) -DEVOPS-TEN-48-001 | TODO | Build integration tests to assert RLS enforcement, tenant-prefixed object storage, and audit event emission; set up lint to prevent raw SQL bypass. | DevOps Guild (ops/devops/TASKS.md) +DEVOPS-TEN-48-001 | TODO | Build integration tests to assert RLS enforcement, tenant-prefixed object storage, and audit event emission; set up lint to prevent raw SQL bypass. Dependencies: DEVOPS-TEN-47-001. | DevOps Guild (ops/devops/TASKS.md) [Ops & Offline] 190.B) Ops Devops.V @@ -131,17 +131,17 @@ Depends on: Sprint 190.B - Ops Devops.IV Summary: Ops & Offline focus on Ops Devops (phase V). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -DEVOPS-TEN-49-001 | TODO | Deploy audit pipeline, scope usage metrics, JWKS outage chaos tests, and tenant load/perf benchmarks. 
| DevOps Guild (ops/devops/TASKS.md) +DEVOPS-TEN-49-001 | TODO | Deploy audit pipeline, scope usage metrics, JWKS outage chaos tests, and tenant load/perf benchmarks. Dependencies: DEVOPS-TEN-48-001. | DevOps Guild (ops/devops/TASKS.md) DEVOPS-VEX-30-001 | TODO | Provision CI, load tests, dashboards, alerts for VEX Lens and Issuer Directory (compute latency, disputed totals, signature verification rates). | DevOps Guild, VEX Lens Guild (ops/devops/TASKS.md) DEVOPS-VULN-29-001 | TODO | Provision CI jobs for ledger projector (replay, determinism), set up backups, monitor Merkle anchoring, and automate verification. | DevOps Guild, Findings Ledger Guild (ops/devops/TASKS.md) -DEVOPS-VULN-29-002 | TODO | Configure load/perf tests (5M findings/tenant), query budget enforcement, API SLO dashboards, and alerts for `vuln_list_latency` and `projection_lag`. | DevOps Guild, Vuln Explorer API Guild (ops/devops/TASKS.md) -DEVOPS-VULN-29-003 | TODO | Instrument analytics pipeline for Vuln Explorer (telemetry ingestion, query hashes), ensure compliance with privacy/PII guardrails, and update observability docs. | DevOps Guild, Console Guild (ops/devops/TASKS.md) +DEVOPS-VULN-29-002 | TODO | Configure load/perf tests (5M findings/tenant), query budget enforcement, API SLO dashboards, and alerts for `vuln_list_latency` and `projection_lag`. Dependencies: DEVOPS-VULN-29-001. | DevOps Guild, Vuln Explorer API Guild (ops/devops/TASKS.md) +DEVOPS-VULN-29-003 | TODO | Instrument analytics pipeline for Vuln Explorer (telemetry ingestion, query hashes), ensure compliance with privacy/PII guardrails, and update observability docs. Dependencies: DEVOPS-VULN-29-002. | DevOps Guild, Console Guild (ops/devops/TASKS.md) DOCKER-44-001 | TODO | Author multi-stage Dockerfiles for all core services (API, Console, Orchestrator, Task Runner, Conseiller, Excitator, Policy, Notify, Export, AI) with non-root users, read-only file systems, and health scripts. 
| DevOps Guild, Service Owners (ops/devops/TASKS.md) -DOCKER-44-002 | TODO | Generate SBOMs and cosign attestations for each image and integrate verification into CI. | DevOps Guild (ops/devops/TASKS.md) -DOCKER-44-003 | TODO | Implement `/health/liveness`, `/health/readiness`, `/version`, `/metrics`, and ensure capability endpoint returns `merge=false` for Conseiller/Excitator. | DevOps Guild (ops/devops/TASKS.md) +DOCKER-44-002 | TODO | Generate SBOMs and cosign attestations for each image and integrate verification into CI. Dependencies: DOCKER-44-001. | DevOps Guild (ops/devops/TASKS.md) +DOCKER-44-003 | TODO | Implement `/health/liveness`, `/health/readiness`, `/version`, `/metrics`, and ensure capability endpoint returns `merge=false` for Conseiller/Excitator. Dependencies: DOCKER-44-002. | DevOps Guild (ops/devops/TASKS.md) OPS-ENV-01 | TODO | Update deployment manifests (Helm/Compose) and configuration docs to include Surface.Env variables for Scanner and Zastava services. | DevOps Guild, Scanner Guild (ops/devops/TASKS.md) OPS-SECRETS-01 | TODO | Define secret provisioning workflow (Kubernetes, Compose, Offline Kit) for Surface.Secrets references and update runbooks. | DevOps Guild, Security Guild (ops/devops/TASKS.md) -OPS-SECRETS-02 | TODO | Embed Surface.Secrets material (encrypted bundles, manifests) into offline kit packaging scripts. | DevOps Guild, Offline Kit Guild (ops/devops/TASKS.md) +OPS-SECRETS-02 | TODO | Embed Surface.Secrets material (encrypted bundles, manifests) into offline kit packaging scripts. Dependencies: OPS-SECRETS-01. | DevOps Guild, Offline Kit Guild (ops/devops/TASKS.md) [Ops & Offline] 190.C) Ops Offline Kit @@ -151,11 +151,11 @@ Task ID | State | Task description | Owners (Source) --- | --- | --- | --- CLI-PACKS-43-002 | TODO | Bundle Task Pack samples, registry mirror seeds, Task Runner configs, and CLI binaries with checksums into Offline Kit. 
| Offline Kit Guild, Packs Registry Guild (ops/offline-kit/TASKS.md) DEVOPS-OFFLINE-17-004 | BLOCKED (2025-10-26) | Execute `mirror_debug_store.py` after the next release pipeline emits `out/release/debug`, verify manifest hashes, and archive `metadata/debug-store.json` with the kit. | Offline Kit Guild, DevOps Guild (ops/offline-kit/TASKS.md) -DEVOPS-OFFLINE-34-006 | TODO | Bundle orchestrator service container, worker SDK samples, Postgres snapshot, and dashboards into Offline Kit with manifest/signature updates. | Offline Kit Guild, Orchestrator Service Guild (ops/offline-kit/TASKS.md) -DEVOPS-OFFLINE-37-001 | TODO | Export Center offline bundles + verification tooling (mirror artefacts, verification CLI, manifest/signature refresh, air-gap import script). | Offline Kit Guild, Exporter Service Guild (ops/offline-kit/TASKS.md) -DEVOPS-OFFLINE-37-002 | TODO | Notifier offline packs (sample configs, template/digest packs, dry-run harness) with integrity checks and operator docs. | Offline Kit Guild, Notifications Service Guild (ops/offline-kit/TASKS.md) +DEVOPS-OFFLINE-34-006 | TODO | Bundle orchestrator service container, worker SDK samples, Postgres snapshot, and dashboards into Offline Kit with manifest/signature updates. Dependencies: DEVOPS-OFFLINE-17-004. | Offline Kit Guild, Orchestrator Service Guild (ops/offline-kit/TASKS.md) +DEVOPS-OFFLINE-37-001 | TODO | Export Center offline bundles + verification tooling (mirror artefacts, verification CLI, manifest/signature refresh, air-gap import script). Dependencies: DEVOPS-OFFLINE-34-006. | Offline Kit Guild, Exporter Service Guild (ops/offline-kit/TASKS.md) +DEVOPS-OFFLINE-37-002 | TODO | Notifier offline packs (sample configs, template/digest packs, dry-run harness) with integrity checks and operator docs. Dependencies: DEVOPS-OFFLINE-37-001. 
| Offline Kit Guild, Notifications Service Guild (ops/offline-kit/TASKS.md) OFFLINE-CONTAINERS-46-001 | TODO | Include container air-gap bundle, verification docs, and mirrored registry instructions inside Offline Kit. | Offline Kit Guild, Deployment Guild (ops/offline-kit/TASKS.md) -OPS-SECRETS-02 | TODO | Add Surface.Secrets bundles (encrypted creds, manifests) to Offline Kit packaging plus verification script. | Offline Kit Guild, DevOps Guild (ops/offline-kit/TASKS.md) +OPS-SECRETS-02 | TODO | Add Surface.Secrets bundles (encrypted creds, manifests) to Offline Kit packaging plus verification script. Dependencies: OPS-SECRETS-01. | Offline Kit Guild, DevOps Guild (ops/offline-kit/TASKS.md) [Ops & Offline] 190.D) Samples @@ -164,9 +164,9 @@ Summary: Ops & Offline focus on Samples). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- SAMPLES-GRAPH-24-003 | TODO | Generate large-scale SBOM graph fixture (≈40k nodes) with policy overlay snapshot for performance/perf regression suites. | Samples Guild, SBOM Service Guild (samples/TASKS.md) -SAMPLES-GRAPH-24-004 | TODO | Create vulnerability explorer JSON/CSV fixtures capturing conflicting evidence and policy outputs for UI/CLI automated tests. | Samples Guild, UI Guild (samples/TASKS.md) +SAMPLES-GRAPH-24-004 | TODO | Create vulnerability explorer JSON/CSV fixtures capturing conflicting evidence and policy outputs for UI/CLI automated tests. Dependencies: SAMPLES-GRAPH-24-003. | Samples Guild, UI Guild (samples/TASKS.md) SAMPLES-LNM-22-001 | BLOCKED (2025-10-27) | Create advisory observation/linkset fixtures (NVD, GHSA, OSV disagreements) for API/CLI/UI tests with documented conflicts. Waiting on finalized schema/linkset outputs. | Samples Guild, Concelier Guild (samples/TASKS.md) -SAMPLES-LNM-22-002 | BLOCKED (2025-10-27) | Produce VEX observation/linkset fixtures demonstrating status conflicts and path relevance; include raw blobs. Pending Excititor observation/linkset implementation. 
| Samples Guild, Excititor Guild (samples/TASKS.md) +SAMPLES-LNM-22-002 | BLOCKED (2025-10-27) | Produce VEX observation/linkset fixtures demonstrating status conflicts and path relevance; include raw blobs. Pending Excititor observation/linkset implementation. Dependencies: SAMPLES-LNM-22-001. | Samples Guild, Excititor Guild (samples/TASKS.md) [Ops & Offline] 190.E) AirGap @@ -175,20 +175,20 @@ Summary: Ops & Offline focus on AirGap). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- AIRGAP-CTL-56-001 | TODO | Implement `airgap_state` persistence, seal/unseal state machine, and Authority scope checks (`airgap:seal`, `airgap:status:read`). | AirGap Controller Guild (src/AirGap/StellaOps.AirGap.Controller/TASKS.md) -AIRGAP-CTL-56-002 | TODO | Expose `GET /system/airgap/status`, `POST /system/airgap/seal`, integrate policy hash validation, and return staleness/time anchor placeholders. | AirGap Controller Guild, DevOps Guild (src/AirGap/StellaOps.AirGap.Controller/TASKS.md) -AIRGAP-CTL-57-001 | TODO | Add startup diagnostics that block application run when sealed flag set but egress policies missing; emit audit + telemetry. | AirGap Controller Guild (src/AirGap/StellaOps.AirGap.Controller/TASKS.md) -AIRGAP-CTL-57-002 | TODO | Instrument seal/unseal events with trace/log fields and timeline emission (`airgap.sealed`, `airgap.unsealed`). | AirGap Controller Guild, Observability Guild (src/AirGap/StellaOps.AirGap.Controller/TASKS.md) -AIRGAP-CTL-58-001 | TODO | Persist time anchor metadata, compute drift seconds, and surface staleness budgets in status API. | AirGap Controller Guild, AirGap Time Guild (src/AirGap/StellaOps.AirGap.Controller/TASKS.md) +AIRGAP-CTL-56-002 | TODO | Expose `GET /system/airgap/status`, `POST /system/airgap/seal`, integrate policy hash validation, and return staleness/time anchor placeholders. Dependencies: AIRGAP-CTL-56-001. 
| AirGap Controller Guild, DevOps Guild (src/AirGap/StellaOps.AirGap.Controller/TASKS.md) +AIRGAP-CTL-57-001 | TODO | Add startup diagnostics that block application run when sealed flag set but egress policies missing; emit audit + telemetry. Dependencies: AIRGAP-CTL-56-002. | AirGap Controller Guild (src/AirGap/StellaOps.AirGap.Controller/TASKS.md) +AIRGAP-CTL-57-002 | TODO | Instrument seal/unseal events with trace/log fields and timeline emission (`airgap.sealed`, `airgap.unsealed`). Dependencies: AIRGAP-CTL-57-001. | AirGap Controller Guild, Observability Guild (src/AirGap/StellaOps.AirGap.Controller/TASKS.md) +AIRGAP-CTL-58-001 | TODO | Persist time anchor metadata, compute drift seconds, and surface staleness budgets in status API. Dependencies: AIRGAP-CTL-57-002. | AirGap Controller Guild, AirGap Time Guild (src/AirGap/StellaOps.AirGap.Controller/TASKS.md) AIRGAP-IMP-56-001 | TODO | Implement DSSE verification helpers, TUF metadata parser (`root.json`, `snapshot.json`, `timestamp.json`), and Merkle root calculator. | AirGap Importer Guild (src/AirGap/StellaOps.AirGap.Importer/TASKS.md) -AIRGAP-IMP-56-002 | TODO | Introduce root rotation policy validation (dual approval) and signer trust store management. | AirGap Importer Guild, Security Guild (src/AirGap/StellaOps.AirGap.Importer/TASKS.md) -AIRGAP-IMP-57-001 | TODO | Write `bundle_catalog` and `bundle_items` repositories with RLS + deterministic migrations. | AirGap Importer Guild (src/AirGap/StellaOps.AirGap.Importer/TASKS.md) -AIRGAP-IMP-57-002 | TODO | Implement object-store loader storing artifacts under tenant/global mirror paths with Zstandard decompression and checksum validation. | AirGap Importer Guild, DevOps Guild (src/AirGap/StellaOps.AirGap.Importer/TASKS.md) -AIRGAP-IMP-58-001 | TODO | Implement API (`POST /airgap/import`, `/airgap/verify`) and CLI commands wiring verification + catalog updates, including diff preview. 
| AirGap Importer Guild, CLI Guild (src/AirGap/StellaOps.AirGap.Importer/TASKS.md) -AIRGAP-IMP-58-002 | TODO | Emit timeline events (`airgap.import.started | AirGap Importer Guild, Observability Guild (src/AirGap/StellaOps.AirGap.Importer/TASKS.md) +AIRGAP-IMP-56-002 | TODO | Introduce root rotation policy validation (dual approval) and signer trust store management. Dependencies: AIRGAP-IMP-56-001. | AirGap Importer Guild, Security Guild (src/AirGap/StellaOps.AirGap.Importer/TASKS.md) +AIRGAP-IMP-57-001 | TODO | Write `bundle_catalog` and `bundle_items` repositories with RLS + deterministic migrations. Dependencies: AIRGAP-IMP-56-002. | AirGap Importer Guild (src/AirGap/StellaOps.AirGap.Importer/TASKS.md) +AIRGAP-IMP-57-002 | TODO | Implement object-store loader storing artifacts under tenant/global mirror paths with Zstandard decompression and checksum validation. Dependencies: AIRGAP-IMP-57-001. | AirGap Importer Guild, DevOps Guild (src/AirGap/StellaOps.AirGap.Importer/TASKS.md) +AIRGAP-IMP-58-001 | TODO | Implement API (`POST /airgap/import`, `/airgap/verify`) and CLI commands wiring verification + catalog updates, including diff preview. Dependencies: AIRGAP-IMP-57-002. | AirGap Importer Guild, CLI Guild (src/AirGap/StellaOps.AirGap.Importer/TASKS.md) +AIRGAP-IMP-58-002 | TODO | Emit timeline events (`airgap.import.started`). Dependencies: AIRGAP-IMP-58-001. | AirGap Importer Guild, Observability Guild (src/AirGap/StellaOps.AirGap.Importer/TASKS.md) AIRGAP-TIME-57-001 | TODO | Implement signed time token parser (Roughtime/RFC3161), verify signatures against bundle trust roots, and expose normalized anchor representation. | AirGap Time Guild (src/AirGap/StellaOps.AirGap.Time/TASKS.md) -AIRGAP-TIME-57-002 | TODO | Add telemetry counters for time anchors (`airgap_time_anchor_age_seconds`) and alerts for approaching thresholds. 
| AirGap Time Guild, Observability Guild (src/AirGap/StellaOps.AirGap.Time/TASKS.md) -AIRGAP-TIME-58-001 | TODO | Persist drift baseline, compute per-content staleness (advisories, VEX, policy) based on bundle metadata, and surface through controller status API. | AirGap Time Guild (src/AirGap/StellaOps.AirGap.Time/TASKS.md) -AIRGAP-TIME-58-002 | TODO | Emit notifications and timeline events when staleness budgets breached or approaching. | AirGap Time Guild, Notifications Guild (src/AirGap/StellaOps.AirGap.Time/TASKS.md) +AIRGAP-TIME-57-002 | TODO | Add telemetry counters for time anchors (`airgap_time_anchor_age_seconds`) and alerts for approaching thresholds. Dependencies: AIRGAP-TIME-57-001. | AirGap Time Guild, Observability Guild (src/AirGap/StellaOps.AirGap.Time/TASKS.md) +AIRGAP-TIME-58-001 | TODO | Persist drift baseline, compute per-content staleness (advisories, VEX, policy) based on bundle metadata, and surface through controller status API. Dependencies: AIRGAP-TIME-57-002. | AirGap Time Guild (src/AirGap/StellaOps.AirGap.Time/TASKS.md) +AIRGAP-TIME-58-002 | TODO | Emit notifications and timeline events when staleness budgets breached or approaching. Dependencies: AIRGAP-TIME-58-001. | AirGap Time Guild, Notifications Guild (src/AirGap/StellaOps.AirGap.Time/TASKS.md) [Ops & Offline] 190.F) Api @@ -197,16 +197,16 @@ Summary: Ops & Offline focus on Api). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- APIGOV-61-001 | TODO | Configure spectral/linters with Stella rules; add CI job failing on violations. | API Governance Guild (src/Api/StellaOps.Api.Governance/TASKS.md) -APIGOV-61-002 | TODO | Implement example coverage checker ensuring every operation has at least one request/response example. | API Governance Guild (src/Api/StellaOps.Api.Governance/TASKS.md) -APIGOV-62-001 | TODO | Build compatibility diff tool producing additive/breaking reports comparing prior release. 
| API Governance Guild (src/Api/StellaOps.Api.Governance/TASKS.md) -APIGOV-62-002 | TODO | Automate changelog generation and publish signed artifacts to `src/Sdk/StellaOps.Sdk.Release` pipeline. | API Governance Guild, DevOps Guild (src/Api/StellaOps.Api.Governance/TASKS.md) -APIGOV-63-001 | TODO | Integrate deprecation metadata into Notification Studio templates for API sunset events. | API Governance Guild, Notifications Guild (src/Api/StellaOps.Api.Governance/TASKS.md) +APIGOV-61-002 | TODO | Implement example coverage checker ensuring every operation has at least one request/response example. Dependencies: APIGOV-61-001. | API Governance Guild (src/Api/StellaOps.Api.Governance/TASKS.md) +APIGOV-62-001 | TODO | Build compatibility diff tool producing additive/breaking reports comparing prior release. Dependencies: APIGOV-61-002. | API Governance Guild (src/Api/StellaOps.Api.Governance/TASKS.md) +APIGOV-62-002 | TODO | Automate changelog generation and publish signed artifacts to `src/Sdk/StellaOps.Sdk.Release` pipeline. Dependencies: APIGOV-62-001. | API Governance Guild, DevOps Guild (src/Api/StellaOps.Api.Governance/TASKS.md) +APIGOV-63-001 | TODO | Integrate deprecation metadata into Notification Studio templates for API sunset events. Dependencies: APIGOV-62-002. | API Governance Guild, Notifications Guild (src/Api/StellaOps.Api.Governance/TASKS.md) OAS-61-001 | TODO | Scaffold per-service OpenAPI 3.1 files with shared components, info blocks, and initial path stubs. | API Contracts Guild (src/Api/StellaOps.Api.OpenApi/TASKS.md) -OAS-61-002 | TODO | Implement aggregate composer (`stella.yaml`) resolving `$ref`s and merging shared components; wire into CI. | API Contracts Guild, DevOps Guild (src/Api/StellaOps.Api.OpenApi/TASKS.md) -OAS-62-001 | TODO | Populate request/response examples for top 50 endpoints, including standard error envelope. 
| API Contracts Guild, Service Guilds (src/Api/StellaOps.Api.OpenApi/TASKS.md) -OAS-62-002 | TODO | Add custom lint rules enforcing pagination, idempotency headers, naming conventions, and example coverage. | API Contracts Guild (src/Api/StellaOps.Api.OpenApi/TASKS.md) -OAS-63-001 | TODO | Implement compatibility diff tooling comparing previous release specs; classify breaking vs additive changes. | API Contracts Guild (src/Api/StellaOps.Api.OpenApi/TASKS.md) -OAS-63-002 | TODO | Add `/.well-known/openapi` discovery endpoint schema metadata (extensions, version info). | API Contracts Guild, Gateway Guild (src/Api/StellaOps.Api.OpenApi/TASKS.md) +OAS-61-002 | TODO | Implement aggregate composer (`stella.yaml`) resolving `$ref`s and merging shared components; wire into CI. Dependencies: OAS-61-001. | API Contracts Guild, DevOps Guild (src/Api/StellaOps.Api.OpenApi/TASKS.md) +OAS-62-001 | TODO | Populate request/response examples for top 50 endpoints, including standard error envelope. Dependencies: OAS-61-002. | API Contracts Guild, Service Guilds (src/Api/StellaOps.Api.OpenApi/TASKS.md) +OAS-62-002 | TODO | Add custom lint rules enforcing pagination, idempotency headers, naming conventions, and example coverage. Dependencies: OAS-62-001. | API Contracts Guild (src/Api/StellaOps.Api.OpenApi/TASKS.md) +OAS-63-001 | TODO | Implement compatibility diff tooling comparing previous release specs; classify breaking vs additive changes. Dependencies: OAS-62-002. | API Contracts Guild (src/Api/StellaOps.Api.OpenApi/TASKS.md) +OAS-63-002 | TODO | Add `/.well-known/openapi` discovery endpoint schema metadata (extensions, version info). Dependencies: OAS-63-001. | API Contracts Guild, Gateway Guild (src/Api/StellaOps.Api.OpenApi/TASKS.md) [Ops & Offline] 190.G) Bench @@ -215,12 +215,12 @@ Summary: Ops & Offline focus on Bench). 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- BENCH-GRAPH-21-001 | BLOCKED (2025-10-27) | Build graph viewport/path benchmark harness (50k/100k nodes) measuring Graph API/Indexer latency, memory, and tile cache hit rates. *(Executed within Sprint 28 Graph program).* | Bench Guild, Graph Platform Guild (src/Bench/StellaOps.Bench/TASKS.md) -BENCH-GRAPH-21-002 | BLOCKED (2025-10-27) | Add headless UI load benchmark (Playwright) for graph canvas interactions to track render times and FPS budgets. *(Executed within Sprint 28 Graph program).* | Bench Guild, UI Guild (src/Bench/StellaOps.Bench/TASKS.md) -BENCH-GRAPH-24-002 | TODO | Implement UI interaction benchmarks (filter/zoom/table operations) citing p95 latency; integrate with perf dashboards. | Bench Guild, UI Guild (src/Bench/StellaOps.Bench/TASKS.md) +BENCH-GRAPH-21-002 | BLOCKED (2025-10-27) | Add headless UI load benchmark (Playwright) for graph canvas interactions to track render times and FPS budgets. *(Executed within Sprint 28 Graph program).* Dependencies: BENCH-GRAPH-21-001. | Bench Guild, UI Guild (src/Bench/StellaOps.Bench/TASKS.md) +BENCH-GRAPH-24-002 | TODO | Implement UI interaction benchmarks (filter/zoom/table operations) citing p95 latency; integrate with perf dashboards. Dependencies: BENCH-GRAPH-21-002. | Bench Guild, UI Guild (src/Bench/StellaOps.Bench/TASKS.md) BENCH-IMPACT-16-001 | TODO | ImpactIndex throughput bench (resolve 10k productKeys) + RAM profile. | Bench Guild, Scheduler Team (src/Bench/StellaOps.Bench/TASKS.md) BENCH-POLICY-20-002 | TODO | Add incremental run benchmark measuring delta evaluation vs full; capture SLA compliance. | Bench Guild, Policy Guild, Scheduler Guild (src/Bench/StellaOps.Bench/TASKS.md) BENCH-SIG-26-001 | TODO | Develop benchmark for reachability scoring pipeline (facts/sec, latency, memory) using synthetic callgraphs/runtime batches. 
| Bench Guild, Signals Guild (src/Bench/StellaOps.Bench/TASKS.md) -BENCH-SIG-26-002 | TODO | Measure policy evaluation overhead with reachability cache hot/cold; ensure ≤8 ms p95 added latency. | Bench Guild, Policy Guild (src/Bench/StellaOps.Bench/TASKS.md) +BENCH-SIG-26-002 | TODO | Measure policy evaluation overhead with reachability cache hot/cold; ensure ≤8 ms p95 added latency. Dependencies: BENCH-SIG-26-001. | Bench Guild, Policy Guild (src/Bench/StellaOps.Bench/TASKS.md) [Ops & Offline] 190.H) Provenance @@ -229,9 +229,9 @@ Summary: Ops & Offline focus on Provenance). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- PROV-OBS-53-001 | TODO | Implement DSSE/SLSA `BuildDefinition` + `BuildMetadata` models with canonical JSON serializer, Merkle digest helpers, and deterministic hashing tests. Publish sample statements for orchestrator/job/export subjects. | Provenance Guild (src/Provenance/StellaOps.Provenance.Attestation/TASKS.md) -PROV-OBS-53-002 | TODO | Build signer abstraction (cosign/KMS/offline) with key rotation hooks, audit logging, and policy enforcement (required claims). Provide unit tests using fake signer + real cosign fixture. | Provenance Guild, Security Guild (src/Provenance/StellaOps.Provenance.Attestation/TASKS.md) -PROV-OBS-54-001 | TODO | Deliver verification library that validates DSSE signatures, Merkle roots, and timeline chain-of-custody, exposing reusable CLI/service APIs. Include negative-case fixtures and offline timestamp verification. | Provenance Guild, Evidence Locker Guild (src/Provenance/StellaOps.Provenance.Attestation/TASKS.md) -PROV-OBS-54-002 | TODO | Generate .NET global tool for local verification + embed command helpers for CLI `stella forensic verify`. Provide deterministic packaging and offline kit instructions. 
| Provenance Guild, DevEx/CLI Guild (src/Provenance/StellaOps.Provenance.Attestation/TASKS.md) +PROV-OBS-53-002 | TODO | Build signer abstraction (cosign/KMS/offline) with key rotation hooks, audit logging, and policy enforcement (required claims). Provide unit tests using fake signer + real cosign fixture. Dependencies: PROV-OBS-53-001. | Provenance Guild, Security Guild (src/Provenance/StellaOps.Provenance.Attestation/TASKS.md) +PROV-OBS-54-001 | TODO | Deliver verification library that validates DSSE signatures, Merkle roots, and timeline chain-of-custody, exposing reusable CLI/service APIs. Include negative-case fixtures and offline timestamp verification. Dependencies: PROV-OBS-53-002. | Provenance Guild, Evidence Locker Guild (src/Provenance/StellaOps.Provenance.Attestation/TASKS.md) +PROV-OBS-54-002 | TODO | Generate .NET global tool for local verification + embed command helpers for CLI `stella forensic verify`. Provide deterministic packaging and offline kit instructions. Dependencies: PROV-OBS-54-001. | Provenance Guild, DevEx/CLI Guild (src/Provenance/StellaOps.Provenance.Attestation/TASKS.md) If all tasks are done - read next sprint section - SPRINT_200_documentation_process.md diff --git a/docs/implplan/SPRINT_200_documentation_process.md b/docs/implplan/SPRINT_200_documentation_process.md index 62dd8a58..0d914ba2 100644 --- a/docs/implplan/SPRINT_200_documentation_process.md +++ b/docs/implplan/SPRINT_200_documentation_process.md @@ -6,20 +6,20 @@ Summary: Documentation & Process focus on Docs Tasks (phase Md.I). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- DOCS-AIAI-31-001 | TODO | Publish `/docs/advisory-ai/overview.md` covering capabilities, guardrails, RBAC. | Docs Guild, Advisory AI Guild (docs/TASKS.md) -DOCS-AIAI-31-002 | TODO | Author `/docs/advisory-ai/architecture.md` detailing RAG pipeline, deterministics, caching, model options. 
| Docs Guild, Advisory AI Guild (docs/TASKS.md) -DOCS-AIAI-31-003 | TODO | Write `/docs/advisory-ai/api.md` describing endpoints, schemas, errors, rate limits. | Docs Guild, Advisory AI Guild (docs/TASKS.md) -DOCS-AIAI-31-004 | TODO | Create `/docs/advisory-ai/console.md` with screenshots, a11y notes, copy-as-ticket instructions. | Docs Guild, Console Guild (docs/TASKS.md) -DOCS-AIAI-31-005 | TODO | Publish `/docs/advisory-ai/cli.md` covering commands, exit codes, scripting patterns. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) -DOCS-AIAI-31-006 | TODO | Update `/docs/policy/assistant-parameters.md` covering temperature, token limits, ranking weights, TTLs. | Docs Guild, Policy Guild (docs/TASKS.md) -DOCS-AIAI-31-007 | TODO | Write `/docs/security/assistant-guardrails.md` detailing redaction, injection defense, logging. | Docs Guild, Security Guild (docs/TASKS.md) -DOCS-AIAI-31-008 | TODO | Publish `/docs/sbom/remediation-heuristics.md` (feasibility scoring, blast radius). | Docs Guild, SBOM Service Guild (docs/TASKS.md) -DOCS-AIAI-31-009 | TODO | Create `/docs/runbooks/assistant-ops.md` for warmup, cache priming, model outages, scaling. | Docs Guild, DevOps Guild (docs/TASKS.md) +DOCS-AIAI-31-002 | TODO | Author `/docs/advisory-ai/architecture.md` detailing RAG pipeline, deterministics, caching, model options. Dependencies: DOCS-AIAI-31-001. | Docs Guild, Advisory AI Guild (docs/TASKS.md) +DOCS-AIAI-31-003 | TODO | Write `/docs/advisory-ai/api.md` describing endpoints, schemas, errors, rate limits. Dependencies: DOCS-AIAI-31-002. | Docs Guild, Advisory AI Guild (docs/TASKS.md) +DOCS-AIAI-31-004 | TODO | Create `/docs/advisory-ai/console.md` with screenshots, a11y notes, copy-as-ticket instructions. Dependencies: DOCS-AIAI-31-003. | Docs Guild, Console Guild (docs/TASKS.md) +DOCS-AIAI-31-005 | TODO | Publish `/docs/advisory-ai/cli.md` covering commands, exit codes, scripting patterns. Dependencies: DOCS-AIAI-31-004. 
| Docs Guild, DevEx/CLI Guild (docs/TASKS.md) +DOCS-AIAI-31-006 | TODO | Update `/docs/policy/assistant-parameters.md` covering temperature, token limits, ranking weights, TTLs. Dependencies: DOCS-AIAI-31-005. | Docs Guild, Policy Guild (docs/TASKS.md) +DOCS-AIAI-31-007 | TODO | Write `/docs/security/assistant-guardrails.md` detailing redaction, injection defense, logging. Dependencies: DOCS-AIAI-31-006. | Docs Guild, Security Guild (docs/TASKS.md) +DOCS-AIAI-31-008 | TODO | Publish `/docs/sbom/remediation-heuristics.md` (feasibility scoring, blast radius). Dependencies: DOCS-AIAI-31-007. | Docs Guild, SBOM Service Guild (docs/TASKS.md) +DOCS-AIAI-31-009 | TODO | Create `/docs/runbooks/assistant-ops.md` for warmup, cache priming, model outages, scaling. Dependencies: DOCS-AIAI-31-008. | Docs Guild, DevOps Guild (docs/TASKS.md) DOCS-AIRGAP-56-001 | TODO | Publish `/docs/airgap/overview.md` outlining modes, lifecycle, responsibilities, and imposed rule banner. | Docs Guild, AirGap Controller Guild (docs/TASKS.md) -DOCS-AIRGAP-56-002 | TODO | Author `/docs/airgap/sealing-and-egress.md` covering network policies, EgressPolicy facade usage, and verification steps. | Docs Guild, DevOps Guild (docs/TASKS.md) -DOCS-AIRGAP-56-003 | TODO | Create `/docs/airgap/mirror-bundles.md` describing bundle format, DSSE/TUF/Merkle validation, creation/import workflows. | Docs Guild, Exporter Guild (docs/TASKS.md) -DOCS-AIRGAP-56-004 | TODO | Publish `/docs/airgap/bootstrap.md` detailing Bootstrap Pack creation, validation, and install procedures. | Docs Guild, Deployment Guild (docs/TASKS.md) -DOCS-AIRGAP-57-001 | TODO | Write `/docs/airgap/staleness-and-time.md` explaining time anchors, drift policies, staleness budgets, and UI indicators. | Docs Guild, AirGap Time Guild (docs/TASKS.md) -DOCS-AIRGAP-57-002 | TODO | Publish `/docs/console/airgap.md` covering sealed badge, import wizard, staleness dashboards. 
| Docs Guild, Console Guild (docs/TASKS.md) +DOCS-AIRGAP-56-002 | TODO | Author `/docs/airgap/sealing-and-egress.md` covering network policies, EgressPolicy facade usage, and verification steps. Dependencies: DOCS-AIRGAP-56-001. | Docs Guild, DevOps Guild (docs/TASKS.md) +DOCS-AIRGAP-56-003 | TODO | Create `/docs/airgap/mirror-bundles.md` describing bundle format, DSSE/TUF/Merkle validation, creation/import workflows. Dependencies: DOCS-AIRGAP-56-002. | Docs Guild, Exporter Guild (docs/TASKS.md) +DOCS-AIRGAP-56-004 | TODO | Publish `/docs/airgap/bootstrap.md` detailing Bootstrap Pack creation, validation, and install procedures. Dependencies: DOCS-AIRGAP-56-003. | Docs Guild, Deployment Guild (docs/TASKS.md) +DOCS-AIRGAP-57-001 | TODO | Write `/docs/airgap/staleness-and-time.md` explaining time anchors, drift policies, staleness budgets, and UI indicators. Dependencies: DOCS-AIRGAP-56-004. | Docs Guild, AirGap Time Guild (docs/TASKS.md) +DOCS-AIRGAP-57-002 | TODO | Publish `/docs/console/airgap.md` covering sealed badge, import wizard, staleness dashboards. Dependencies: DOCS-AIRGAP-57-001. | Docs Guild, Console Guild (docs/TASKS.md) [Documentation & Process] 200.A) Docs Tasks.Md.II @@ -27,21 +27,21 @@ Depends on: Sprint 200.A - Docs Tasks.Md.I Summary: Documentation & Process focus on Docs Tasks (phase Md.II). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -DOCS-AIRGAP-57-003 | TODO | Publish `/docs/modules/cli/guides/airgap.md` documenting commands, examples, exit codes. | Docs Guild, CLI Guild (docs/TASKS.md) -DOCS-AIRGAP-57-004 | TODO | Create `/docs/airgap/operations.md` with runbooks for imports, failure recovery, and auditing. | Docs Guild, Ops Guild (docs/TASKS.md) -DOCS-AIRGAP-58-001 | TODO | Provide `/docs/airgap/degradation-matrix.md` enumerating feature availability, fallbacks, remediation. 
| Docs Guild, Product Guild (docs/TASKS.md) -DOCS-AIRGAP-58-002 | TODO | Update `/docs/security/trust-and-signing.md` with DSSE/TUF roots, rotation, and signed time tokens. | Docs Guild, Security Guild (docs/TASKS.md) -DOCS-AIRGAP-58-003 | TODO | Publish `/docs/dev/airgap-contracts.md` describing EgressPolicy usage, sealed-mode tests, linting. | Docs Guild, DevEx Guild (docs/TASKS.md) -DOCS-AIRGAP-58-004 | TODO | Document `/docs/airgap/portable-evidence.md` for exporting/importing portable evidence bundles across enclaves. | Docs Guild, Evidence Locker Guild (docs/TASKS.md) +DOCS-AIRGAP-57-003 | TODO | Publish `/docs/modules/cli/guides/airgap.md` documenting commands, examples, exit codes. Dependencies: DOCS-AIRGAP-57-002. | Docs Guild, CLI Guild (docs/TASKS.md) +DOCS-AIRGAP-57-004 | TODO | Create `/docs/airgap/operations.md` with runbooks for imports, failure recovery, and auditing. Dependencies: DOCS-AIRGAP-57-003. | Docs Guild, Ops Guild (docs/TASKS.md) +DOCS-AIRGAP-58-001 | TODO | Provide `/docs/airgap/degradation-matrix.md` enumerating feature availability, fallbacks, remediation. Dependencies: DOCS-AIRGAP-57-004. | Docs Guild, Product Guild (docs/TASKS.md) +DOCS-AIRGAP-58-002 | TODO | Update `/docs/security/trust-and-signing.md` with DSSE/TUF roots, rotation, and signed time tokens. Dependencies: DOCS-AIRGAP-58-001. | Docs Guild, Security Guild (docs/TASKS.md) +DOCS-AIRGAP-58-003 | TODO | Publish `/docs/dev/airgap-contracts.md` describing EgressPolicy usage, sealed-mode tests, linting. Dependencies: DOCS-AIRGAP-58-002. | Docs Guild, DevEx Guild (docs/TASKS.md) +DOCS-AIRGAP-58-004 | TODO | Document `/docs/airgap/portable-evidence.md` for exporting/importing portable evidence bundles across enclaves. Dependencies: DOCS-AIRGAP-58-003. | Docs Guild, Evidence Locker Guild (docs/TASKS.md) DOCS-AIRGAP-DEVPORT-64-001 | TODO | Create `/docs/airgap/devportal-offline.md` describing offline bundle usage and verification. 
| Docs Guild, DevPortal Offline Guild (docs/TASKS.md) DOCS-ATTEST-73-001 | TODO | Publish `/docs/modules/attestor/overview.md` with imposed rule banner. | Docs Guild, Attestor Service Guild (docs/TASKS.md) -DOCS-ATTEST-73-002 | TODO | Write `/docs/modules/attestor/payloads.md` with schemas/examples. | Docs Guild, Attestation Payloads Guild (docs/TASKS.md) -DOCS-ATTEST-73-003 | TODO | Publish `/docs/modules/attestor/policies.md` covering verification policies. | Docs Guild, Policy Guild (docs/TASKS.md) -DOCS-ATTEST-73-004 | TODO | Add `/docs/modules/attestor/workflows.md` detailing ingest, verify, bulk operations. | Docs Guild, Attestor Service Guild (docs/TASKS.md) -DOCS-ATTEST-74-001 | TODO | Publish `/docs/modules/attestor/keys-and-issuers.md`. | Docs Guild, KMS Guild (docs/TASKS.md) -DOCS-ATTEST-74-002 | TODO | Document `/docs/modules/attestor/transparency.md` with witness usage/offline validation. | Docs Guild, Transparency Guild (docs/TASKS.md) -DOCS-ATTEST-74-003 | TODO | Write `/docs/console/attestor-ui.md` with screenshots/workflows. | Docs Guild, Attestor Console Guild (docs/TASKS.md) -DOCS-ATTEST-74-004 | TODO | Publish `/docs/modules/cli/guides/attest.md` covering CLI usage. | Docs Guild, CLI Attestor Guild (docs/TASKS.md) +DOCS-ATTEST-73-002 | TODO | Write `/docs/modules/attestor/payloads.md` with schemas/examples. Dependencies: DOCS-ATTEST-73-001. | Docs Guild, Attestation Payloads Guild (docs/TASKS.md) +DOCS-ATTEST-73-003 | TODO | Publish `/docs/modules/attestor/policies.md` covering verification policies. Dependencies: DOCS-ATTEST-73-002. | Docs Guild, Policy Guild (docs/TASKS.md) +DOCS-ATTEST-73-004 | TODO | Add `/docs/modules/attestor/workflows.md` detailing ingest, verify, bulk operations. Dependencies: DOCS-ATTEST-73-003. | Docs Guild, Attestor Service Guild (docs/TASKS.md) +DOCS-ATTEST-74-001 | TODO | Publish `/docs/modules/attestor/keys-and-issuers.md`. Dependencies: DOCS-ATTEST-73-004. 
| Docs Guild, KMS Guild (docs/TASKS.md) +DOCS-ATTEST-74-002 | TODO | Document `/docs/modules/attestor/transparency.md` with witness usage/offline validation. Dependencies: DOCS-ATTEST-74-001. | Docs Guild, Transparency Guild (docs/TASKS.md) +DOCS-ATTEST-74-003 | TODO | Write `/docs/console/attestor-ui.md` with screenshots/workflows. Dependencies: DOCS-ATTEST-74-002. | Docs Guild, Attestor Console Guild (docs/TASKS.md) +DOCS-ATTEST-74-004 | TODO | Publish `/docs/modules/cli/guides/attest.md` covering CLI usage. Dependencies: DOCS-ATTEST-74-003. | Docs Guild, CLI Attestor Guild (docs/TASKS.md) [Documentation & Process] 200.A) Docs Tasks.Md.III @@ -49,21 +49,21 @@ Depends on: Sprint 200.A - Docs Tasks.Md.II Summary: Documentation & Process focus on Docs Tasks (phase Md.III). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -DOCS-ATTEST-75-001 | TODO | Add `/docs/modules/attestor/airgap.md` for attestation bundles. | Docs Guild, Export Attestation Guild (docs/TASKS.md) -DOCS-ATTEST-75-002 | TODO | Update `/docs/security/aoc-invariants.md` with attestation invariants. | Docs Guild, Security Guild (docs/TASKS.md) +DOCS-ATTEST-75-001 | TODO | Add `/docs/modules/attestor/airgap.md` for attestation bundles. Dependencies: DOCS-ATTEST-74-004. | Docs Guild, Export Attestation Guild (docs/TASKS.md) +DOCS-ATTEST-75-002 | TODO | Update `/docs/security/aoc-invariants.md` with attestation invariants. Dependencies: DOCS-ATTEST-75-001. | Docs Guild, Security Guild (docs/TASKS.md) DOCS-CLI-41-001 | TODO | Publish `/docs/modules/cli/guides/overview.md`, `/docs/modules/cli/guides/configuration.md`, `/docs/modules/cli/guides/output-and-exit-codes.md` with imposed rule statements. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) -DOCS-CLI-42-001 | TODO | Publish `/docs/modules/cli/guides/parity-matrix.md` and command guides under `/docs/modules/cli/guides/commands/*.md` (policy, sbom, vuln, vex, advisory, export, orchestrator, notify, aoc, auth). 
| Docs Guild (docs/TASKS.md) +DOCS-CLI-42-001 | TODO | Publish `/docs/modules/cli/guides/parity-matrix.md` and command guides under `/docs/modules/cli/guides/commands/*.md` (policy, sbom, vuln, vex, advisory, export, orchestrator, notify, aoc, auth). Dependencies: DOCS-CLI-41-001. | Docs Guild (docs/TASKS.md) DOCS-CLI-FORENSICS-53-001 | TODO | Publish `/docs/modules/cli/guides/forensics.md` for snapshot/verify/attest commands with sample outputs, imposed rule banner, and offline workflows. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) DOCS-CLI-OBS-52-001 | TODO | Create `/docs/modules/cli/guides/observability.md` detailing `stella obs` commands, examples, exit codes, imposed rule banner, and scripting tips. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) DOCS-CONSOLE-OBS-52-001 | TODO | Document `/docs/console/observability.md` showcasing Observability Hub widgets, trace/log search, imposed rule banner, and accessibility tips. | Docs Guild, Console Guild (docs/TASKS.md) -DOCS-CONSOLE-OBS-52-002 | TODO | Publish `/docs/console/forensics.md` covering timeline explorer, evidence viewer, attestation verifier, imposed rule banner, and troubleshooting. | Docs Guild, Console Guild (docs/TASKS.md) +DOCS-CONSOLE-OBS-52-002 | TODO | Publish `/docs/console/forensics.md` covering timeline explorer, evidence viewer, attestation verifier, imposed rule banner, and troubleshooting. Dependencies: DOCS-CONSOLE-OBS-52-001. | Docs Guild, Console Guild (docs/TASKS.md) DOCS-CONTRIB-62-001 | TODO | Publish `/docs/contributing/api-contracts.md` detailing how to edit OAS, lint rules, compatibility checks. | Docs Guild, API Governance Guild (docs/TASKS.md) DOCS-DEVPORT-62-001 | TODO | Document `/docs/devportal/publishing.md` for build pipeline, offline bundle steps. | Docs Guild, Developer Portal Guild (docs/TASKS.md) DOCS-EXC-25-001 | TODO | Author `/docs/governance/exceptions.md` covering lifecycle, scope patterns, examples, compliance checklist. 
| Docs Guild, Governance Guild (docs/TASKS.md) -DOCS-EXC-25-002 | TODO | Publish `/docs/governance/approvals-and-routing.md` detailing roles, routing matrix, MFA rules, audit trails. | Docs Guild, Authority Core (docs/TASKS.md) -DOCS-EXC-25-003 | TODO | Create `/docs/api/exceptions.md` with endpoints, payloads, errors, idempotency notes. | Docs Guild, BE-Base Platform Guild (docs/TASKS.md) -DOCS-EXC-25-005 | TODO | Write `/docs/ui/exception-center.md` with UI walkthrough, badges, accessibility, shortcuts. | Docs Guild, UI Guild (docs/TASKS.md) -DOCS-EXC-25-006 | TODO | Update `/docs/modules/cli/guides/exceptions.md` covering command usage and exit codes. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) +DOCS-EXC-25-002 | TODO | Publish `/docs/governance/approvals-and-routing.md` detailing roles, routing matrix, MFA rules, audit trails. Dependencies: DOCS-EXC-25-001. | Docs Guild, Authority Core (docs/TASKS.md) +DOCS-EXC-25-003 | TODO | Create `/docs/api/exceptions.md` with endpoints, payloads, errors, idempotency notes. Dependencies: DOCS-EXC-25-002. | Docs Guild, BE-Base Platform Guild (docs/TASKS.md) +DOCS-EXC-25-005 | TODO | Write `/docs/ui/exception-center.md` with UI walkthrough, badges, accessibility, shortcuts. Dependencies: DOCS-EXC-25-003. | Docs Guild, UI Guild (docs/TASKS.md) +DOCS-EXC-25-006 | TODO | Update `/docs/modules/cli/guides/exceptions.md` covering command usage and exit codes. Dependencies: DOCS-EXC-25-005. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) [Documentation & Process] 200.A) Docs Tasks.Md.IV @@ -71,21 +71,21 @@ Depends on: Sprint 200.A - Docs Tasks.Md.III Summary: Documentation & Process focus on Docs Tasks (phase Md.IV). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -DOCS-EXC-25-007 | TODO | Publish `/docs/migration/exception-governance.md` describing cutover from legacy suppressions, notifications, rollback. 
| Docs Guild, DevOps Guild (docs/TASKS.md) +DOCS-EXC-25-007 | TODO | Publish `/docs/migration/exception-governance.md` describing cutover from legacy suppressions, notifications, rollback. Dependencies: DOCS-EXC-25-006. | Docs Guild, DevOps Guild (docs/TASKS.md) DOCS-EXPORT-37-004 | TODO | Publish `/docs/security/export-hardening.md` outlining RBAC, tenancy, encryption, redaction, restating imposed rule. | Docs Guild (docs/TASKS.md) -DOCS-EXPORT-37-005 | TODO | Validate Export Center docs against live Trivy/mirror bundles once implementation lands; refresh examples and CLI snippets accordingly. | Docs Guild, Exporter Service Guild (docs/TASKS.md) -DOCS-EXPORT-37-101 | TODO | Refresh CLI verification sections once `stella export verify` lands (flags, exit codes, samples). | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) -DOCS-EXPORT-37-102 | TODO | Embed export dashboards/alerts references into provenance/runbook docs after Grafana work ships. | Docs Guild, DevOps Guild (docs/TASKS.md) +DOCS-EXPORT-37-005 | TODO | Validate Export Center docs against live Trivy/mirror bundles once implementation lands; refresh examples and CLI snippets accordingly. Dependencies: DOCS-EXPORT-37-004. | Docs Guild, Exporter Service Guild (docs/TASKS.md) +DOCS-EXPORT-37-101 | TODO | Refresh CLI verification sections once `stella export verify` lands (flags, exit codes, samples). Dependencies: DOCS-EXPORT-37-005. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) +DOCS-EXPORT-37-102 | TODO | Embed export dashboards/alerts references into provenance/runbook docs after Grafana work ships. Dependencies: DOCS-EXPORT-37-101. | Docs Guild, DevOps Guild (docs/TASKS.md) DOCS-FORENSICS-53-001 | TODO | Publish `/docs/forensics/evidence-locker.md` describing bundle formats, WORM options, retention, legal hold, and imposed rule banner. 
| Docs Guild, Evidence Locker Guild (docs/TASKS.md) -DOCS-FORENSICS-53-002 | TODO | Release `/docs/forensics/provenance-attestation.md` covering DSSE schema, signing process, verification workflow, and imposed rule banner. | Docs Guild, Provenance Guild (docs/TASKS.md) -DOCS-FORENSICS-53-003 | TODO | Publish `/docs/forensics/timeline.md` with schema, event kinds, filters, query examples, and imposed rule banner. | Docs Guild, Timeline Indexer Guild (docs/TASKS.md) +DOCS-FORENSICS-53-002 | TODO | Release `/docs/forensics/provenance-attestation.md` covering DSSE schema, signing process, verification workflow, and imposed rule banner. Dependencies: DOCS-FORENSICS-53-001. | Docs Guild, Provenance Guild (docs/TASKS.md) +DOCS-FORENSICS-53-003 | TODO | Publish `/docs/forensics/timeline.md` with schema, event kinds, filters, query examples, and imposed rule banner. Dependencies: DOCS-FORENSICS-53-002. | Docs Guild, Timeline Indexer Guild (docs/TASKS.md) DOCS-GRAPH-24-001 | TODO | Author `/docs/ui/sbom-graph-explorer.md` detailing overlays, filters, saved views, accessibility, and AOC visibility. | Docs Guild, UI Guild (docs/TASKS.md) -DOCS-GRAPH-24-002 | TODO | Publish `/docs/ui/vulnerability-explorer.md` covering table usage, grouping, fix suggestions, Why drawer. | Docs Guild, UI Guild (docs/TASKS.md) -DOCS-GRAPH-24-003 | TODO | Create `/docs/modules/graph/architecture-index.md` describing data model, ingestion pipeline, caches, events. | Docs Guild, SBOM Service Guild (docs/TASKS.md) -DOCS-GRAPH-24-004 | TODO | Document `/docs/api/graph.md` and `/docs/api/vuln.md` avec endpoints, parameters, errors, RBAC. | Docs Guild, BE-Base Platform Guild (docs/TASKS.md) -DOCS-GRAPH-24-005 | TODO | Update `/docs/modules/cli/guides/graph-and-vuln.md` covering new CLI commands, exit codes, scripting. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) -DOCS-GRAPH-24-006 | TODO | Write `/docs/policy/ui-integration.md` explaining overlays, cache usage, simulator contracts. 
| Docs Guild, Policy Guild (docs/TASKS.md) -DOCS-GRAPH-24-007 | TODO | Produce `/docs/migration/graph-parity.md` with rollout plan, parity checks, fallback guidance. | Docs Guild, DevOps Guild (docs/TASKS.md) +DOCS-GRAPH-24-002 | TODO | Publish `/docs/ui/vulnerability-explorer.md` covering table usage, grouping, fix suggestions, Why drawer. Dependencies: DOCS-GRAPH-24-001. | Docs Guild, UI Guild (docs/TASKS.md) +DOCS-GRAPH-24-003 | TODO | Create `/docs/modules/graph/architecture-index.md` describing data model, ingestion pipeline, caches, events. Dependencies: DOCS-GRAPH-24-002. | Docs Guild, SBOM Service Guild (docs/TASKS.md) +DOCS-GRAPH-24-004 | TODO | Document `/docs/api/graph.md` and `/docs/api/vuln.md` with endpoints, parameters, errors, RBAC. Dependencies: DOCS-GRAPH-24-003. | Docs Guild, BE-Base Platform Guild (docs/TASKS.md) +DOCS-GRAPH-24-005 | TODO | Update `/docs/modules/cli/guides/graph-and-vuln.md` covering new CLI commands, exit codes, scripting. Dependencies: DOCS-GRAPH-24-004. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) +DOCS-GRAPH-24-006 | TODO | Write `/docs/policy/ui-integration.md` explaining overlays, cache usage, simulator contracts. Dependencies: DOCS-GRAPH-24-005. | Docs Guild, Policy Guild (docs/TASKS.md) +DOCS-GRAPH-24-007 | TODO | Produce `/docs/migration/graph-parity.md` with rollout plan, parity checks, fallback guidance. Dependencies: DOCS-GRAPH-24-006. | Docs Guild, DevOps Guild (docs/TASKS.md) [Documentation & Process] 200.A) Docs Tasks.Md.V @@ -94,20 +94,20 @@ Summary: Documentation & Process focus on Docs Tasks (phase Md.V). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- DOCS-INSTALL-44-001 | TODO | Publish `/docs/install/overview.md` and `/docs/install/compose-quickstart.md` with imposed rule line and copy-ready commands. 
| Docs Guild, Deployment Guild (docs/TASKS.md) -DOCS-INSTALL-45-001 | TODO | Publish `/docs/install/helm-prod.md` and `/docs/install/configuration-reference.md` with values tables and imposed rule reminder. | Docs Guild, Deployment Guild (docs/TASKS.md) -DOCS-INSTALL-46-001 | TODO | Publish `/docs/install/airgap.md`, `/docs/security/supply-chain.md`, `/docs/operations/health-and-readiness.md`, `/docs/release/image-catalog.md`, `/docs/console/onboarding.md` (each with imposed rule). | Docs Guild, Security Guild (docs/TASKS.md) -DOCS-INSTALL-50-001 | TODO | Add `/docs/install/telemetry-stack.md` with collector deployment, exporter options, offline kit notes, and imposed rule banner. | Docs Guild, DevOps Guild (docs/TASKS.md) +DOCS-INSTALL-45-001 | TODO | Publish `/docs/install/helm-prod.md` and `/docs/install/configuration-reference.md` with values tables and imposed rule reminder. Dependencies: DOCS-INSTALL-44-001. | Docs Guild, Deployment Guild (docs/TASKS.md) +DOCS-INSTALL-46-001 | TODO | Publish `/docs/install/airgap.md`, `/docs/security/supply-chain.md`, `/docs/operations/health-and-readiness.md`, `/docs/release/image-catalog.md`, `/docs/console/onboarding.md` (each with imposed rule). Dependencies: DOCS-INSTALL-45-001. | Docs Guild, Security Guild (docs/TASKS.md) +DOCS-INSTALL-50-001 | TODO | Add `/docs/install/telemetry-stack.md` with collector deployment, exporter options, offline kit notes, and imposed rule banner. Dependencies: DOCS-INSTALL-46-001. | Docs Guild, DevOps Guild (docs/TASKS.md) DOCS-LNM-22-001 | BLOCKED (2025-10-27) | Author `/docs/advisories/aggregation.md` covering observation vs linkset, conflict handling, AOC requirements, and reviewer checklist. | Docs Guild, Concelier Guild (docs/TASKS.md) -DOCS-LNM-22-002 | BLOCKED (2025-10-27) | Publish `/docs/vex/aggregation.md` describing VEX observation/linkset model, product matching, conflicts. 
| Docs Guild, Excititor Guild (docs/TASKS.md) -DOCS-LNM-22-003 | BLOCKED (2025-10-27) | Update `/docs/api/advisories.md` and `/docs/api/vex.md` for new endpoints, parameters, errors, exports. | Docs Guild, BE-Base Platform Guild (docs/TASKS.md) -DOCS-LNM-22-004 | TODO | Create `/docs/policy/effective-severity.md` detailing severity selection strategies from multiple sources. | Docs Guild, Policy Guild (docs/TASKS.md) -DOCS-LNM-22-005 | BLOCKED (2025-10-27) | Document `/docs/ui/evidence-panel.md` with screenshots, conflict badges, accessibility guidance. | Docs Guild, UI Guild (docs/TASKS.md) -DOCS-LNM-22-007 | TODO | Publish `/docs/observability/aggregation.md` with metrics/traces/logs/SLOs. | Docs Guild, Observability Guild (docs/TASKS.md) -DOCS-LNM-22-008 | TODO | Write `/docs/migration/no-merge.md` describing migration plan, backfill steps, rollback, feature flags. | Docs Guild, DevOps Guild (docs/TASKS.md) +DOCS-LNM-22-002 | BLOCKED (2025-10-27) | Publish `/docs/vex/aggregation.md` describing VEX observation/linkset model, product matching, conflicts. Dependencies: DOCS-LNM-22-001. | Docs Guild, Excititor Guild (docs/TASKS.md) +DOCS-LNM-22-003 | BLOCKED (2025-10-27) | Update `/docs/api/advisories.md` and `/docs/api/vex.md` for new endpoints, parameters, errors, exports. Dependencies: DOCS-LNM-22-002. | Docs Guild, BE-Base Platform Guild (docs/TASKS.md) +DOCS-LNM-22-004 | TODO | Create `/docs/policy/effective-severity.md` detailing severity selection strategies from multiple sources. Dependencies: DOCS-LNM-22-003. | Docs Guild, Policy Guild (docs/TASKS.md) +DOCS-LNM-22-005 | BLOCKED (2025-10-27) | Document `/docs/ui/evidence-panel.md` with screenshots, conflict badges, accessibility guidance. Dependencies: DOCS-LNM-22-004. | Docs Guild, UI Guild (docs/TASKS.md) +DOCS-LNM-22-007 | TODO | Publish `/docs/observability/aggregation.md` with metrics/traces/logs/SLOs. Dependencies: DOCS-LNM-22-005. 
| Docs Guild, Observability Guild (docs/TASKS.md) +DOCS-LNM-22-008 | TODO | Write `/docs/migration/no-merge.md` describing migration plan, backfill steps, rollback, feature flags. Dependencies: DOCS-LNM-22-007. | Docs Guild, DevOps Guild (docs/TASKS.md) DOCS-NOTIFY-40-001 | TODO | Publish `/docs/notifications/channels.md`, `/docs/notifications/escalations.md`, `/docs/notifications/api.md`, `/docs/operations/notifier-runbook.md`, `/docs/security/notifications-hardening.md`; each ends with imposed rule line. | Docs Guild, Security Guild (docs/TASKS.md) DOCS-OAS-61-001 | TODO | Publish `/docs/api/overview.md` covering auth, tenancy, pagination, idempotency, rate limits with banner. | Docs Guild, API Contracts Guild (docs/TASKS.md) -DOCS-OAS-61-002 | TODO | Author `/docs/api/conventions.md` capturing naming, errors, filters, sorting, examples. | Docs Guild, API Governance Guild (docs/TASKS.md) -DOCS-OAS-61-003 | TODO | Publish `/docs/api/versioning.md` describing SemVer, deprecation headers, migration playbooks. | Docs Guild, API Governance Guild (docs/TASKS.md) +DOCS-OAS-61-002 | TODO | Author `/docs/api/conventions.md` capturing naming, errors, filters, sorting, examples. Dependencies: DOCS-OAS-61-001. | Docs Guild, API Governance Guild (docs/TASKS.md) +DOCS-OAS-61-003 | TODO | Publish `/docs/api/versioning.md` describing SemVer, deprecation headers, migration playbooks. Dependencies: DOCS-OAS-61-002. | Docs Guild, API Governance Guild (docs/TASKS.md) [Documentation & Process] 200.A) Docs Tasks.Md.VI @@ -115,21 +115,21 @@ Depends on: Sprint 200.A - Docs Tasks.Md.V Summary: Documentation & Process focus on Docs Tasks (phase Md.VI). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -DOCS-OAS-62-001 | TODO | Stand up `/docs/api/reference/` auto-generated site; integrate with portal nav. | Docs Guild, Developer Portal Guild (docs/TASKS.md) +DOCS-OAS-62-001 | TODO | Stand up `/docs/api/reference/` auto-generated site; integrate with portal nav. 
Dependencies: DOCS-OAS-61-003. | Docs Guild, Developer Portal Guild (docs/TASKS.md) DOCS-OBS-50-002 | TODO | Author `/docs/observability/telemetry-standards.md` detailing common fields, scrubbing policy, sampling defaults, and redaction override procedure. | Docs Guild, Security Guild (docs/TASKS.md) -DOCS-OBS-50-003 | TODO | Create `/docs/observability/logging.md` covering structured log schema, dos/don'ts, tenant isolation, and copyable examples. | Docs Guild, Observability Guild (docs/TASKS.md) -DOCS-OBS-50-004 | TODO | Draft `/docs/observability/tracing.md` explaining context propagation, async linking, CLI header usage, and sampling strategies. | Docs Guild, Observability Guild (docs/TASKS.md) -DOCS-OBS-51-001 | TODO | Publish `/docs/observability/metrics-and-slos.md` cataloging metrics, SLO targets, burn rate policies, and alert runbooks. | Docs Guild, DevOps Guild (docs/TASKS.md) +DOCS-OBS-50-003 | TODO | Create `/docs/observability/logging.md` covering structured log schema, dos/don'ts, tenant isolation, and copyable examples. Dependencies: DOCS-OBS-50-002. | Docs Guild, Observability Guild (docs/TASKS.md) +DOCS-OBS-50-004 | TODO | Draft `/docs/observability/tracing.md` explaining context propagation, async linking, CLI header usage, and sampling strategies. Dependencies: DOCS-OBS-50-003. | Docs Guild, Observability Guild (docs/TASKS.md) +DOCS-OBS-51-001 | TODO | Publish `/docs/observability/metrics-and-slos.md` cataloging metrics, SLO targets, burn rate policies, and alert runbooks. Dependencies: DOCS-OBS-50-004. | Docs Guild, DevOps Guild (docs/TASKS.md) DOCS-ORCH-32-001 | TODO | Author `/docs/orchestrator/overview.md` covering mission, roles, AOC alignment, governance, with imposed rule reminder. | Docs Guild (docs/TASKS.md) -DOCS-ORCH-32-002 | TODO | Author `/docs/orchestrator/architecture.md` detailing scheduler, DAGs, rate limits, data model, message bus, storage layout, restating imposed rule. 
| Docs Guild (docs/TASKS.md) -DOCS-ORCH-33-001 | TODO | Publish `/docs/orchestrator/api.md` (REST/WebSocket endpoints, payloads, error codes) with imposed rule note. | Docs Guild (docs/TASKS.md) -DOCS-ORCH-33-002 | TODO | Publish `/docs/orchestrator/console.md` covering screens, a11y, live updates, control actions, reiterating imposed rule. | Docs Guild (docs/TASKS.md) -DOCS-ORCH-33-003 | TODO | Publish `/docs/orchestrator/cli.md` documenting commands, options, exit codes, streaming output, offline usage, and imposed rule. | Docs Guild (docs/TASKS.md) -DOCS-ORCH-34-001 | TODO | Author `/docs/orchestrator/run-ledger.md` covering ledger schema, provenance chain, audit workflows, with imposed rule reminder. | Docs Guild (docs/TASKS.md) -DOCS-ORCH-34-002 | TODO | Update `/docs/security/secrets-handling.md` for orchestrator KMS refs, redaction badges, operator hygiene, reiterating imposed rule. | Docs Guild (docs/TASKS.md) -DOCS-ORCH-34-003 | TODO | Publish `/docs/operations/orchestrator-runbook.md` (incident playbook, backfill guide, circuit breakers, throttling) with imposed rule statement. | Docs Guild (docs/TASKS.md) -DOCS-ORCH-34-004 | TODO | Document `/docs/schemas/artifacts.md` describing artifact kinds, schema versions, hashing, storage layout, restating imposed rule. | Docs Guild (docs/TASKS.md) -DOCS-ORCH-34-005 | TODO | Author `/docs/slo/orchestrator-slo.md` defining SLOs, burn alerts, measurement, and reiterating imposed rule. | Docs Guild (docs/TASKS.md) +DOCS-ORCH-32-002 | TODO | Author `/docs/orchestrator/architecture.md` detailing scheduler, DAGs, rate limits, data model, message bus, storage layout, restating imposed rule. Dependencies: DOCS-ORCH-32-001. | Docs Guild (docs/TASKS.md) +DOCS-ORCH-33-001 | TODO | Publish `/docs/orchestrator/api.md` (REST/WebSocket endpoints, payloads, error codes) with imposed rule note. Dependencies: DOCS-ORCH-32-002. 
| Docs Guild (docs/TASKS.md) +DOCS-ORCH-33-002 | TODO | Publish `/docs/orchestrator/console.md` covering screens, a11y, live updates, control actions, reiterating imposed rule. Dependencies: DOCS-ORCH-33-001. | Docs Guild (docs/TASKS.md) +DOCS-ORCH-33-003 | TODO | Publish `/docs/orchestrator/cli.md` documenting commands, options, exit codes, streaming output, offline usage, and imposed rule. Dependencies: DOCS-ORCH-33-002. | Docs Guild (docs/TASKS.md) +DOCS-ORCH-34-001 | TODO | Author `/docs/orchestrator/run-ledger.md` covering ledger schema, provenance chain, audit workflows, with imposed rule reminder. Dependencies: DOCS-ORCH-33-003. | Docs Guild (docs/TASKS.md) +DOCS-ORCH-34-002 | TODO | Update `/docs/security/secrets-handling.md` for orchestrator KMS refs, redaction badges, operator hygiene, reiterating imposed rule. Dependencies: DOCS-ORCH-34-001. | Docs Guild (docs/TASKS.md) +DOCS-ORCH-34-003 | TODO | Publish `/docs/operations/orchestrator-runbook.md` (incident playbook, backfill guide, circuit breakers, throttling) with imposed rule statement. Dependencies: DOCS-ORCH-34-002. | Docs Guild (docs/TASKS.md) +DOCS-ORCH-34-004 | TODO | Document `/docs/schemas/artifacts.md` describing artifact kinds, schema versions, hashing, storage layout, restating imposed rule. Dependencies: DOCS-ORCH-34-003. | Docs Guild (docs/TASKS.md) +DOCS-ORCH-34-005 | TODO | Author `/docs/slo/orchestrator-slo.md` defining SLOs, burn alerts, measurement, and reiterating imposed rule. Dependencies: DOCS-ORCH-34-004. | Docs Guild (docs/TASKS.md) [Documentation & Process] 200.A) Docs Tasks.Md.VII @@ -138,20 +138,20 @@ Summary: Documentation & Process focus on Docs Tasks (phase Md.VII). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- DOCS-POLICY-23-001 | TODO | Author `/docs/policy/overview.md` describing SPL philosophy, layering, and glossary with reviewer checklist. 
| Docs Guild, Policy Guild (docs/TASKS.md) -DOCS-POLICY-23-002 | TODO | Write `/docs/policy/spl-v1.md` (language reference, JSON Schema, examples). | Docs Guild, Policy Guild (docs/TASKS.md) -DOCS-POLICY-23-003 | TODO | Produce `/docs/policy/runtime.md` covering compiler, evaluator, caching, events, SLOs. | Docs Guild, Policy Guild (docs/TASKS.md) -DOCS-POLICY-23-004 | TODO | Document `/docs/policy/editor.md` (UI walkthrough, validation, simulation, approvals). | Docs Guild, UI Guild (docs/TASKS.md) -DOCS-POLICY-23-005 | TODO | Publish `/docs/policy/governance.md` (roles, scopes, approvals, signing, exceptions). | Docs Guild, Security Guild (docs/TASKS.md) -DOCS-POLICY-23-006 | TODO | Update `/docs/api/policy.md` with new endpoints, schemas, errors, pagination. | Docs Guild, BE-Base Platform Guild (docs/TASKS.md) -DOCS-POLICY-23-007 | TODO | Update `/docs/modules/cli/guides/policy.md` for lint/simulate/activate/history commands, exit codes. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) -DOCS-POLICY-23-008 | TODO | Refresh `/docs/modules/policy/architecture.md` with data model, sequence diagrams, event flows. | Docs Guild, Architecture Guild (docs/TASKS.md) -DOCS-POLICY-23-009 | TODO | Create `/docs/migration/policy-parity.md` covering dual-run parity plan and rollback. | Docs Guild, DevOps Guild (docs/TASKS.md) -DOCS-POLICY-23-010 | TODO | Write `/docs/ui/explainers.md` showing explain trees, evidence overlays, interpretation guidance. | Docs Guild, UI Guild (docs/TASKS.md) -DOCS-POLICY-27-001 | BLOCKED (2025-10-27) | Publish `/docs/policy/studio-overview.md` covering lifecycle, roles, glossary, and compliance checklist. | Docs Guild, Policy Guild (docs/TASKS.md) -DOCS-POLICY-27-002 | BLOCKED (2025-10-27) | Write `/docs/policy/authoring.md` detailing workspace templates, snippets, lint rules, IDE shortcuts, and best practices. 
| Docs Guild, Console Guild (docs/TASKS.md) -DOCS-POLICY-27-003 | BLOCKED (2025-10-27) | Document `/docs/policy/versioning-and-publishing.md` (semver rules, attestations, rollback) with compliance checklist. | Docs Guild, Policy Registry Guild (docs/TASKS.md) -DOCS-POLICY-27-004 | BLOCKED (2025-10-27) | Write `/docs/policy/simulation.md` covering quick vs batch sim, thresholds, evidence bundles, CLI examples. | Docs Guild, Scheduler Guild (docs/TASKS.md) -DOCS-POLICY-27-005 | BLOCKED (2025-10-27) | Publish `/docs/policy/review-and-approval.md` with approver requirements, comments, webhooks, audit trail guidance. | Docs Guild, Product Ops (docs/TASKS.md) +DOCS-POLICY-23-002 | TODO | Write `/docs/policy/spl-v1.md` (language reference, JSON Schema, examples). Dependencies: DOCS-POLICY-23-001. | Docs Guild, Policy Guild (docs/TASKS.md) +DOCS-POLICY-23-003 | TODO | Produce `/docs/policy/runtime.md` covering compiler, evaluator, caching, events, SLOs. Dependencies: DOCS-POLICY-23-002. | Docs Guild, Policy Guild (docs/TASKS.md) +DOCS-POLICY-23-004 | TODO | Document `/docs/policy/editor.md` (UI walkthrough, validation, simulation, approvals). Dependencies: DOCS-POLICY-23-003. | Docs Guild, UI Guild (docs/TASKS.md) +DOCS-POLICY-23-005 | TODO | Publish `/docs/policy/governance.md` (roles, scopes, approvals, signing, exceptions). Dependencies: DOCS-POLICY-23-004. | Docs Guild, Security Guild (docs/TASKS.md) +DOCS-POLICY-23-006 | TODO | Update `/docs/api/policy.md` with new endpoints, schemas, errors, pagination. Dependencies: DOCS-POLICY-23-005. | Docs Guild, BE-Base Platform Guild (docs/TASKS.md) +DOCS-POLICY-23-007 | TODO | Update `/docs/modules/cli/guides/policy.md` for lint/simulate/activate/history commands, exit codes. Dependencies: DOCS-POLICY-23-006. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) +DOCS-POLICY-23-008 | TODO | Refresh `/docs/modules/policy/architecture.md` with data model, sequence diagrams, event flows. Dependencies: DOCS-POLICY-23-007. 
| Docs Guild, Architecture Guild (docs/TASKS.md) +DOCS-POLICY-23-009 | TODO | Create `/docs/migration/policy-parity.md` covering dual-run parity plan and rollback. Dependencies: DOCS-POLICY-23-008. | Docs Guild, DevOps Guild (docs/TASKS.md) +DOCS-POLICY-23-010 | TODO | Write `/docs/ui/explainers.md` showing explain trees, evidence overlays, interpretation guidance. Dependencies: DOCS-POLICY-23-009. | Docs Guild, UI Guild (docs/TASKS.md) +DOCS-POLICY-27-001 | BLOCKED (2025-10-27) | Publish `/docs/policy/studio-overview.md` covering lifecycle, roles, glossary, and compliance checklist. Dependencies: DOCS-POLICY-23-010. | Docs Guild, Policy Guild (docs/TASKS.md) +DOCS-POLICY-27-002 | BLOCKED (2025-10-27) | Write `/docs/policy/authoring.md` detailing workspace templates, snippets, lint rules, IDE shortcuts, and best practices. Dependencies: DOCS-POLICY-27-001. | Docs Guild, Console Guild (docs/TASKS.md) +DOCS-POLICY-27-003 | BLOCKED (2025-10-27) | Document `/docs/policy/versioning-and-publishing.md` (semver rules, attestations, rollback) with compliance checklist. Dependencies: DOCS-POLICY-27-002. | Docs Guild, Policy Registry Guild (docs/TASKS.md) +DOCS-POLICY-27-004 | BLOCKED (2025-10-27) | Write `/docs/policy/simulation.md` covering quick vs batch sim, thresholds, evidence bundles, CLI examples. Dependencies: DOCS-POLICY-27-003. | Docs Guild, Scheduler Guild (docs/TASKS.md) +DOCS-POLICY-27-005 | BLOCKED (2025-10-27) | Publish `/docs/policy/review-and-approval.md` with approver requirements, comments, webhooks, audit trail guidance. Dependencies: DOCS-POLICY-27-004. | Docs Guild, Product Ops (docs/TASKS.md) [Documentation & Process] 200.A) Docs Tasks.Md.VIII @@ -159,21 +159,21 @@ Depends on: Sprint 200.A - Docs Tasks.Md.VII Summary: Documentation & Process focus on Docs Tasks (phase Md.VIII). 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -DOCS-POLICY-27-006 | BLOCKED (2025-10-27) | Author `/docs/policy/promotion.md` covering environments, canary, rollback, and monitoring steps. | Docs Guild, Policy Guild (docs/TASKS.md) -DOCS-POLICY-27-007 | BLOCKED (2025-10-27) | Update `/docs/policy/cli.md` with new commands, JSON schemas, CI usage, and compliance checklist. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) -DOCS-POLICY-27-008 | BLOCKED (2025-10-27) | Publish `/docs/policy/api.md` describing Registry endpoints, request/response schemas, errors, and feature flags. | Docs Guild, Policy Registry Guild (docs/TASKS.md) -DOCS-POLICY-27-009 | BLOCKED (2025-10-27) | Create `/docs/security/policy-attestations.md` covering signing, verification, key rotation, and compliance checklist. | Docs Guild, Security Guild (docs/TASKS.md) -DOCS-POLICY-27-010 | BLOCKED (2025-10-27) | Author `/docs/modules/policy/registry-architecture.md` (service design, schemas, queues, failure modes) with diagrams and checklist. | Docs Guild, Architecture Guild (docs/TASKS.md) -DOCS-POLICY-27-011 | BLOCKED (2025-10-27) | Publish `/docs/observability/policy-telemetry.md` with metrics/log tables, dashboards, alerts, and compliance checklist. | Docs Guild, Observability Guild (docs/TASKS.md) -DOCS-POLICY-27-012 | BLOCKED (2025-10-27) | Write `/docs/runbooks/policy-incident.md` detailing rollback, freeze, forensic steps, notifications. | Docs Guild, Ops Guild (docs/TASKS.md) -DOCS-POLICY-27-013 | BLOCKED (2025-10-27) | Update `/docs/examples/policy-templates.md` with new templates, snippets, and sample policies. | Docs Guild, Policy Guild (docs/TASKS.md) -DOCS-POLICY-27-014 | BLOCKED (2025-10-27) | Refresh `/docs/aoc/aoc-guardrails.md` to include Studio-specific guardrails and validation scenarios. 
| Docs Guild, Policy Registry Guild (docs/TASKS.md) +DOCS-POLICY-27-006 | BLOCKED (2025-10-27) | Author `/docs/policy/promotion.md` covering environments, canary, rollback, and monitoring steps. Dependencies: DOCS-POLICY-27-005. | Docs Guild, Policy Guild (docs/TASKS.md) +DOCS-POLICY-27-007 | BLOCKED (2025-10-27) | Update `/docs/policy/cli.md` with new commands, JSON schemas, CI usage, and compliance checklist. Dependencies: DOCS-POLICY-27-006. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) +DOCS-POLICY-27-008 | BLOCKED (2025-10-27) | Publish `/docs/policy/api.md` describing Registry endpoints, request/response schemas, errors, and feature flags. Dependencies: DOCS-POLICY-27-007. | Docs Guild, Policy Registry Guild (docs/TASKS.md) +DOCS-POLICY-27-009 | BLOCKED (2025-10-27) | Create `/docs/security/policy-attestations.md` covering signing, verification, key rotation, and compliance checklist. Dependencies: DOCS-POLICY-27-008. | Docs Guild, Security Guild (docs/TASKS.md) +DOCS-POLICY-27-010 | BLOCKED (2025-10-27) | Author `/docs/modules/policy/registry-architecture.md` (service design, schemas, queues, failure modes) with diagrams and checklist. Dependencies: DOCS-POLICY-27-009. | Docs Guild, Architecture Guild (docs/TASKS.md) +DOCS-POLICY-27-011 | BLOCKED (2025-10-27) | Publish `/docs/observability/policy-telemetry.md` with metrics/log tables, dashboards, alerts, and compliance checklist. Dependencies: DOCS-POLICY-27-010. | Docs Guild, Observability Guild (docs/TASKS.md) +DOCS-POLICY-27-012 | BLOCKED (2025-10-27) | Write `/docs/runbooks/policy-incident.md` detailing rollback, freeze, forensic steps, notifications. Dependencies: DOCS-POLICY-27-011. | Docs Guild, Ops Guild (docs/TASKS.md) +DOCS-POLICY-27-013 | BLOCKED (2025-10-27) | Update `/docs/examples/policy-templates.md` with new templates, snippets, and sample policies. Dependencies: DOCS-POLICY-27-012. 
| Docs Guild, Policy Guild (docs/TASKS.md) +DOCS-POLICY-27-014 | BLOCKED (2025-10-27) | Refresh `/docs/aoc/aoc-guardrails.md` to include Studio-specific guardrails and validation scenarios. Dependencies: DOCS-POLICY-27-013. | Docs Guild, Policy Registry Guild (docs/TASKS.md) DOCS-RISK-66-001 | TODO | Publish `/docs/risk/overview.md` covering concepts and glossary. | Docs Guild, Risk Profile Schema Guild (docs/TASKS.md) -DOCS-RISK-66-002 | TODO | Author `/docs/risk/profiles.md` (authoring, versioning, scope). | Docs Guild, Policy Guild (docs/TASKS.md) -DOCS-RISK-66-003 | TODO | Publish `/docs/risk/factors.md` cataloging signals, transforms, reducers, TTLs. | Docs Guild, Risk Engine Guild (docs/TASKS.md) -DOCS-RISK-66-004 | TODO | Create `/docs/risk/formulas.md` detailing math, normalization, gating, severity. | Docs Guild, Risk Engine Guild (docs/TASKS.md) -DOCS-RISK-67-001 | TODO | Publish `/docs/risk/explainability.md` showing artifact schema and UI screenshots. | Docs Guild, Risk Engine Guild (docs/TASKS.md) -DOCS-RISK-67-002 | TODO | Produce `/docs/risk/api.md` with endpoint reference/examples. | Docs Guild, API Guild (docs/TASKS.md) +DOCS-RISK-66-002 | TODO | Author `/docs/risk/profiles.md` (authoring, versioning, scope). Dependencies: DOCS-RISK-66-001. | Docs Guild, Policy Guild (docs/TASKS.md) +DOCS-RISK-66-003 | TODO | Publish `/docs/risk/factors.md` cataloging signals, transforms, reducers, TTLs. Dependencies: DOCS-RISK-66-002. | Docs Guild, Risk Engine Guild (docs/TASKS.md) +DOCS-RISK-66-004 | TODO | Create `/docs/risk/formulas.md` detailing math, normalization, gating, severity. Dependencies: DOCS-RISK-66-003. | Docs Guild, Risk Engine Guild (docs/TASKS.md) +DOCS-RISK-67-001 | TODO | Publish `/docs/risk/explainability.md` showing artifact schema and UI screenshots. Dependencies: DOCS-RISK-66-004. | Docs Guild, Risk Engine Guild (docs/TASKS.md) +DOCS-RISK-67-002 | TODO | Produce `/docs/risk/api.md` with endpoint reference/examples. 
Dependencies: DOCS-RISK-67-001. | Docs Guild, API Guild (docs/TASKS.md) [Documentation & Process] 200.A) Docs Tasks.Md.IX @@ -181,21 +181,21 @@ Depends on: Sprint 200.A - Docs Tasks.Md.VIII Summary: Documentation & Process focus on Docs Tasks (phase Md.IX). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -DOCS-RISK-67-003 | TODO | Document `/docs/console/risk-ui.md` for authoring, simulation, dashboards. | Docs Guild, Console Guild (docs/TASKS.md) -DOCS-RISK-67-004 | TODO | Publish `/docs/modules/cli/guides/risk.md` covering CLI workflows. | Docs Guild, CLI Guild (docs/TASKS.md) -DOCS-RISK-68-001 | TODO | Add `/docs/airgap/risk-bundles.md` for offline factor bundles. | Docs Guild, Export Guild (docs/TASKS.md) -DOCS-RISK-68-002 | TODO | Update `/docs/security/aoc-invariants.md` with risk scoring provenance guarantees. | Docs Guild, Security Guild (docs/TASKS.md) +DOCS-RISK-67-003 | TODO | Document `/docs/console/risk-ui.md` for authoring, simulation, dashboards. Dependencies: DOCS-RISK-67-002. | Docs Guild, Console Guild (docs/TASKS.md) +DOCS-RISK-67-004 | TODO | Publish `/docs/modules/cli/guides/risk.md` covering CLI workflows. Dependencies: DOCS-RISK-67-003. | Docs Guild, CLI Guild (docs/TASKS.md) +DOCS-RISK-68-001 | TODO | Add `/docs/airgap/risk-bundles.md` for offline factor bundles. Dependencies: DOCS-RISK-67-004. | Docs Guild, Export Guild (docs/TASKS.md) +DOCS-RISK-68-002 | TODO | Update `/docs/security/aoc-invariants.md` with risk scoring provenance guarantees. Dependencies: DOCS-RISK-68-001. | Docs Guild, Security Guild (docs/TASKS.md) DOCS-RUNBOOK-55-001 | TODO | Author `/docs/runbooks/incidents.md` describing incident mode activation, escalation steps, retention impact, verification checklist, and imposed rule banner. | Docs Guild, Ops Guild (docs/TASKS.md) DOCS-SDK-62-001 | TODO | Publish `/docs/sdks/overview.md` plus language guides (`typescript.md`, `python.md`, `go.md`, `java.md`). 
| Docs Guild, SDK Generator Guild (docs/TASKS.md) DOCS-SEC-62-001 | TODO | Update `/docs/security/auth-scopes.md` with OAuth2/PAT scopes, tenancy header usage. | Docs Guild, Authority Core (docs/TASKS.md) DOCS-SEC-OBS-50-001 | TODO | Update `/docs/security/redaction-and-privacy.md` to cover telemetry privacy controls, tenant opt-in debug, and imposed rule reminder. | Docs Guild, Security Guild (docs/TASKS.md) DOCS-SIG-26-001 | TODO | Write `/docs/signals/reachability.md` covering states, scores, provenance, retention. | Docs Guild, Signals Guild (docs/TASKS.md) -DOCS-SIG-26-002 | TODO | Publish `/docs/signals/callgraph-formats.md` with schemas and validation errors. | Docs Guild, Signals Guild (docs/TASKS.md) -DOCS-SIG-26-003 | TODO | Create `/docs/signals/runtime-facts.md` detailing agent capabilities, privacy safeguards, opt-in flags. | Docs Guild, Runtime Guild (docs/TASKS.md) -DOCS-SIG-26-004 | TODO | Document `/docs/policy/signals-weighting.md` for SPL predicates and weighting strategies. | Docs Guild, Policy Guild (docs/TASKS.md) -DOCS-SIG-26-005 | TODO | Draft `/docs/ui/reachability-overlays.md` with badges, timelines, shortcuts. | Docs Guild, UI Guild (docs/TASKS.md) -DOCS-SIG-26-006 | TODO | Update `/docs/modules/cli/guides/reachability.md` for new commands and automation recipes. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) -DOCS-SIG-26-007 | TODO | Publish `/docs/api/signals.md` covering endpoints, payloads, ETags, errors. | Docs Guild, BE-Base Platform Guild (docs/TASKS.md) +DOCS-SIG-26-002 | TODO | Publish `/docs/signals/callgraph-formats.md` with schemas and validation errors. Dependencies: DOCS-SIG-26-001. | Docs Guild, Signals Guild (docs/TASKS.md) +DOCS-SIG-26-003 | TODO | Create `/docs/signals/runtime-facts.md` detailing agent capabilities, privacy safeguards, opt-in flags. Dependencies: DOCS-SIG-26-002. 
| Docs Guild, Runtime Guild (docs/TASKS.md) +DOCS-SIG-26-004 | TODO | Document `/docs/policy/signals-weighting.md` for SPL predicates and weighting strategies. Dependencies: DOCS-SIG-26-003. | Docs Guild, Policy Guild (docs/TASKS.md) +DOCS-SIG-26-005 | TODO | Draft `/docs/ui/reachability-overlays.md` with badges, timelines, shortcuts. Dependencies: DOCS-SIG-26-004. | Docs Guild, UI Guild (docs/TASKS.md) +DOCS-SIG-26-006 | TODO | Update `/docs/modules/cli/guides/reachability.md` for new commands and automation recipes. Dependencies: DOCS-SIG-26-005. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) +DOCS-SIG-26-007 | TODO | Publish `/docs/api/signals.md` covering endpoints, payloads, ETags, errors. Dependencies: DOCS-SIG-26-006. | Docs Guild, BE-Base Platform Guild (docs/TASKS.md) [Documentation & Process] 200.A) Docs Tasks.Md.X @@ -203,21 +203,21 @@ Depends on: Sprint 200.A - Docs Tasks.Md.IX Summary: Documentation & Process focus on Docs Tasks (phase Md.X). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -DOCS-SIG-26-008 | TODO | Write `/docs/migration/enable-reachability.md` guiding rollout, fallbacks, monitoring. | Docs Guild, DevOps Guild (docs/TASKS.md) +DOCS-SIG-26-008 | TODO | Write `/docs/migration/enable-reachability.md` guiding rollout, fallbacks, monitoring. Dependencies: DOCS-SIG-26-007. | Docs Guild, DevOps Guild (docs/TASKS.md) DOCS-SURFACE-01 | TODO | Create `/docs/modules/scanner/scanner-engine.md` covering Surface.FS/Env/Secrets workflow between Scanner, Zastava, Scheduler, and Ops. | Docs Guild, Scanner Guild, Zastava Guild (docs/TASKS.md) DOCS-TEN-47-001 | TODO | Publish `/docs/security/tenancy-overview.md` and `/docs/security/scopes-and-roles.md` outlining scope grammar, tenant model, imposed rule reminder. | Docs Guild, Authority Core (docs/TASKS.md) -DOCS-TEN-48-001 | TODO | Publish `/docs/operations/multi-tenancy.md`, `/docs/operations/rls-and-data-isolation.md`, `/docs/console/admin-tenants.md`. 
| Docs Guild, Platform Ops (docs/TASKS.md) -DOCS-TEN-49-001 | TODO | Publish `/docs/modules/cli/guides/authentication.md`, `/docs/api/authentication.md`, `/docs/policy/examples/abac-overlays.md`, update `/docs/install/configuration-reference.md` with new env vars, all ending with imposed rule line. | Docs & DevEx Guilds (docs/TASKS.md) +DOCS-TEN-48-001 | TODO | Publish `/docs/operations/multi-tenancy.md`, `/docs/operations/rls-and-data-isolation.md`, `/docs/console/admin-tenants.md`. Dependencies: DOCS-TEN-47-001. | Docs Guild, Platform Ops (docs/TASKS.md) +DOCS-TEN-49-001 | TODO | Publish `/docs/modules/cli/guides/authentication.md`, `/docs/api/authentication.md`, `/docs/policy/examples/abac-overlays.md`, update `/docs/install/configuration-reference.md` with new env vars, all ending with imposed rule line. Dependencies: DOCS-TEN-48-001. | Docs & DevEx Guilds (docs/TASKS.md) DOCS-TEST-62-001 | TODO | Author `/docs/testing/contract-testing.md` covering mock server, replay tests, golden fixtures. | Docs Guild, Contract Testing Guild (docs/TASKS.md) DOCS-VEX-30-001 | TODO | Publish `/docs/vex/consensus-overview.md` describing purpose, scope, AOC guarantees. | Docs Guild, VEX Lens Guild (docs/TASKS.md) -DOCS-VEX-30-002 | TODO | Author `/docs/vex/consensus-algorithm.md` covering normalization, weighting, thresholds, examples. | Docs Guild, VEX Lens Guild (docs/TASKS.md) -DOCS-VEX-30-003 | TODO | Document `/docs/vex/issuer-directory.md` (issuer management, keys, trust overrides, audit). | Docs Guild, Issuer Directory Guild (docs/TASKS.md) -DOCS-VEX-30-004 | TODO | Publish `/docs/vex/consensus-api.md` with endpoint specs, query params, rate limits. | Docs Guild, VEX Lens Guild (docs/TASKS.md) -DOCS-VEX-30-005 | TODO | Write `/docs/vex/consensus-console.md` covering UI workflows, filters, conflicts, accessibility. | Docs Guild, Console Guild (docs/TASKS.md) -DOCS-VEX-30-006 | TODO | Add `/docs/policy/vex-trust-model.md` detailing policy knobs, thresholds, simulation. 
| Docs Guild, Policy Guild (docs/TASKS.md) -DOCS-VEX-30-007 | TODO | Publish `/docs/sbom/vex-mapping.md` (CPE→purl strategy, edge cases, overrides). | Docs Guild, SBOM Service Guild (docs/TASKS.md) -DOCS-VEX-30-008 | TODO | Deliver `/docs/security/vex-signatures.md` (verification flow, key rotation, audit). | Docs Guild, Security Guild (docs/TASKS.md) -DOCS-VEX-30-009 | TODO | Create `/docs/runbooks/vex-ops.md` for recompute storms, mapping failures, signature errors. | Docs Guild, DevOps Guild (docs/TASKS.md) +DOCS-VEX-30-002 | TODO | Author `/docs/vex/consensus-algorithm.md` covering normalization, weighting, thresholds, examples. Dependencies: DOCS-VEX-30-001. | Docs Guild, VEX Lens Guild (docs/TASKS.md) +DOCS-VEX-30-003 | TODO | Document `/docs/vex/issuer-directory.md` (issuer management, keys, trust overrides, audit). Dependencies: DOCS-VEX-30-002. | Docs Guild, Issuer Directory Guild (docs/TASKS.md) +DOCS-VEX-30-004 | TODO | Publish `/docs/vex/consensus-api.md` with endpoint specs, query params, rate limits. Dependencies: DOCS-VEX-30-003. | Docs Guild, VEX Lens Guild (docs/TASKS.md) +DOCS-VEX-30-005 | TODO | Write `/docs/vex/consensus-console.md` covering UI workflows, filters, conflicts, accessibility. Dependencies: DOCS-VEX-30-004. | Docs Guild, Console Guild (docs/TASKS.md) +DOCS-VEX-30-006 | TODO | Add `/docs/policy/vex-trust-model.md` detailing policy knobs, thresholds, simulation. Dependencies: DOCS-VEX-30-005. | Docs Guild, Policy Guild (docs/TASKS.md) +DOCS-VEX-30-007 | TODO | Publish `/docs/sbom/vex-mapping.md` (CPE→purl strategy, edge cases, overrides). Dependencies: DOCS-VEX-30-006. | Docs Guild, SBOM Service Guild (docs/TASKS.md) +DOCS-VEX-30-008 | TODO | Deliver `/docs/security/vex-signatures.md` (verification flow, key rotation, audit). Dependencies: DOCS-VEX-30-007. | Docs Guild, Security Guild (docs/TASKS.md) +DOCS-VEX-30-009 | TODO | Create `/docs/runbooks/vex-ops.md` for recompute storms, mapping failures, signature errors. 
Dependencies: DOCS-VEX-30-008. | Docs Guild, DevOps Guild (docs/TASKS.md) [Documentation & Process] 200.A) Docs Tasks.Md.XI @@ -226,18 +226,18 @@ Summary: Documentation & Process focus on Docs Tasks (phase Md.XI). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- DOCS-VULN-29-001 | TODO | Publish `/docs/vuln/explorer-overview.md` covering domain model, identities, AOC guarantees, workflow summary. | Docs Guild, Vuln Explorer Guild (docs/TASKS.md) -DOCS-VULN-29-002 | TODO | Write `/docs/vuln/explorer-using-console.md` with workflows, screenshots, keyboard shortcuts, saved views, deep links. | Docs Guild, Console Guild (docs/TASKS.md) -DOCS-VULN-29-003 | TODO | Author `/docs/vuln/explorer-api.md` (endpoints, query schema, grouping, errors, rate limits). | Docs Guild, Vuln Explorer API Guild (docs/TASKS.md) -DOCS-VULN-29-004 | TODO | Publish `/docs/vuln/explorer-cli.md` with command reference, samples, exit codes, CI snippets. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) -DOCS-VULN-29-005 | TODO | Write `/docs/vuln/findings-ledger.md` detailing event schema, hashing, Merkle roots, replay tooling. | Docs Guild, Findings Ledger Guild (docs/TASKS.md) -DOCS-VULN-29-006 | TODO | Update `/docs/policy/vuln-determinations.md` for new rationale, signals, simulation semantics. | Docs Guild, Policy Guild (docs/TASKS.md) -DOCS-VULN-29-007 | TODO | Publish `/docs/vex/explorer-integration.md` covering CSAF mapping, suppression precedence, status semantics. | Docs Guild, Excititor Guild (docs/TASKS.md) -DOCS-VULN-29-008 | TODO | Publish `/docs/advisories/explorer-integration.md` covering key normalization, withdrawn handling, provenance. | Docs Guild, Concelier Guild (docs/TASKS.md) -DOCS-VULN-29-009 | TODO | Author `/docs/sbom/vuln-resolution.md` detailing version semantics, scope, paths, safe version hints. 
| Docs Guild, SBOM Service Guild (docs/TASKS.md) -DOCS-VULN-29-010 | TODO | Publish `/docs/observability/vuln-telemetry.md` (metrics, logs, tracing, dashboards, SLOs). | Docs Guild, Observability Guild (docs/TASKS.md) -DOCS-VULN-29-011 | TODO | Create `/docs/security/vuln-rbac.md` for roles, ABAC policies, attachment encryption, CSRF. | Docs Guild, Security Guild (docs/TASKS.md) -DOCS-VULN-29-012 | TODO | Write `/docs/runbooks/vuln-ops.md` (projector lag, resolver storms, export failures, policy activation). | Docs Guild, Ops Guild (docs/TASKS.md) -DOCS-VULN-29-013 | TODO | Update `/docs/install/containers.md` with Findings Ledger & Vuln Explorer API images, manifests, resource sizing, health checks. | Docs Guild, Deployment Guild (docs/TASKS.md) +DOCS-VULN-29-002 | TODO | Write `/docs/vuln/explorer-using-console.md` with workflows, screenshots, keyboard shortcuts, saved views, deep links. Dependencies: DOCS-VULN-29-001. | Docs Guild, Console Guild (docs/TASKS.md) +DOCS-VULN-29-003 | TODO | Author `/docs/vuln/explorer-api.md` (endpoints, query schema, grouping, errors, rate limits). Dependencies: DOCS-VULN-29-002. | Docs Guild, Vuln Explorer API Guild (docs/TASKS.md) +DOCS-VULN-29-004 | TODO | Publish `/docs/vuln/explorer-cli.md` with command reference, samples, exit codes, CI snippets. Dependencies: DOCS-VULN-29-003. | Docs Guild, DevEx/CLI Guild (docs/TASKS.md) +DOCS-VULN-29-005 | TODO | Write `/docs/vuln/findings-ledger.md` detailing event schema, hashing, Merkle roots, replay tooling. Dependencies: DOCS-VULN-29-004. | Docs Guild, Findings Ledger Guild (docs/TASKS.md) +DOCS-VULN-29-006 | TODO | Update `/docs/policy/vuln-determinations.md` for new rationale, signals, simulation semantics. Dependencies: DOCS-VULN-29-005. | Docs Guild, Policy Guild (docs/TASKS.md) +DOCS-VULN-29-007 | TODO | Publish `/docs/vex/explorer-integration.md` covering CSAF mapping, suppression precedence, status semantics. Dependencies: DOCS-VULN-29-006. 
| Docs Guild, Excititor Guild (docs/TASKS.md) +DOCS-VULN-29-008 | TODO | Publish `/docs/advisories/explorer-integration.md` covering key normalization, withdrawn handling, provenance. Dependencies: DOCS-VULN-29-007. | Docs Guild, Concelier Guild (docs/TASKS.md) +DOCS-VULN-29-009 | TODO | Author `/docs/sbom/vuln-resolution.md` detailing version semantics, scope, paths, safe version hints. Dependencies: DOCS-VULN-29-008. | Docs Guild, SBOM Service Guild (docs/TASKS.md) +DOCS-VULN-29-010 | TODO | Publish `/docs/observability/vuln-telemetry.md` (metrics, logs, tracing, dashboards, SLOs). Dependencies: DOCS-VULN-29-009. | Docs Guild, Observability Guild (docs/TASKS.md) +DOCS-VULN-29-011 | TODO | Create `/docs/security/vuln-rbac.md` for roles, ABAC policies, attachment encryption, CSRF. Dependencies: DOCS-VULN-29-010. | Docs Guild, Security Guild (docs/TASKS.md) +DOCS-VULN-29-012 | TODO | Write `/docs/runbooks/vuln-ops.md` (projector lag, resolver storms, export failures, policy activation). Dependencies: DOCS-VULN-29-011. | Docs Guild, Ops Guild (docs/TASKS.md) +DOCS-VULN-29-013 | TODO | Update `/docs/install/containers.md` with Findings Ledger & Vuln Explorer API images, manifests, resource sizing, health checks. Dependencies: DOCS-VULN-29-012. | Docs Guild, Deployment Guild (docs/TASKS.md) [Documentation & Process] 200.B) Docs Modules Advisory Ai diff --git a/docs/modules/attestor/README.md b/docs/modules/attestor/README.md index 89a1e2de..ceebaa03 100644 --- a/docs/modules/attestor/README.md +++ b/docs/modules/attestor/README.md @@ -19,16 +19,22 @@ Attestor converts signed DSSE evidence from the Signer into transparency-log pro - `StellaOps.PolicyEvaluation@1`, `StellaOps.RiskProfileEvidence@1` All predicates capture subjects, issuer metadata, policy context, materials, optional witnesses, and versioned schemas. Unsupported predicates return `422 predicate_unsupported`. 
-## Trust & envelope model -- DSSE envelopes are canonicalised, hashed, and stored alongside the Rekor UUID, index, and proof. -- Signature modes span keyless (Fulcio), keyful (KMS/HSM), and hardware-backed (FIDO2). Multiple signatures are supported per envelope. -- Proofs include Merkle inclusion path, checkpoint metadata, optional witness endorsements, and cached verification verdicts. -- CAS/object storage retains envelopes + provenance for later replay; Rekor backends may be primary plus mirrors. - -## UI, CLI, and SDK workflows -- **Console:** Evidence browser, verification reports, chain-of-custody graph, issuer/key management, attestation workbench, and bulk verification flows. -- **CLI / SDK:** `stella attest sign|verify|list|fetch|key` commands plus language SDKs to integrate build pipelines and offline verification scripts. -- **Policy Studio:** Verification policies author required predicate types, issuers, witness requirements, and freshness windows; simulations show enforcement impact. +## Trust & envelope model +- DSSE envelopes are canonicalised, hashed, and stored alongside the Rekor UUID, index, and proof. +- Signature modes span keyless (Fulcio), keyful (KMS/HSM), and hardware-backed (FIDO2). Multiple signatures are supported per envelope. +- Proofs include Merkle inclusion path, checkpoint metadata, optional witness endorsements, and cached verification verdicts. +- CAS/object storage retains envelopes + provenance for later replay; Rekor backends may be primary plus mirrors. + +## Security hardening +- `attestor.write`, `attestor.verify`, and `attestor.read` scopes are enforced per endpoint; verify/list flows accept read/verify scopes while submissions remain write-only. +- JSON content-type is mandatory; malformed content returns `415 unsupported_media_type`. +- DSSE payloads are capped at 2 MiB (configurable), certificate chains at six entries, and each envelope may carry up to six signatures to contain parsing abuse. 
+- All verification/list APIs share the token-bucket rate limiter (`quotas.perCaller`) in addition to the existing submission limiter. + +## UI, CLI, and SDK workflows +- **Console:** Evidence browser, verification reports, chain-of-custody graph, issuer/key management, attestation workbench, and bulk verification flows. +- **CLI / SDK:** `stella attest sign|verify|list|fetch|key` commands plus language SDKs to integrate build pipelines and offline verification scripts. +- **Policy Studio:** Verification policies author required predicate types, issuers, witness requirements, and freshness windows; simulations show enforcement impact. ## Storage, offline & air-gap posture - MongoDB stores entry metadata, dedupe keys, and audit events; object storage optionally archives DSSE bundles. diff --git a/docs/modules/attestor/architecture.md b/docs/modules/attestor/architecture.md index 7fed77e7..ac57f3fd 100644 --- a/docs/modules/attestor/architecture.md +++ b/docs/modules/attestor/architecture.md @@ -45,17 +45,23 @@ Trust boundary: **Only the Signer** is allowed to call submission endpoints; enf - `StellaOps.BuildProvenance@1` - `StellaOps.SBOMAttestation@1` - `StellaOps.ScanResults@1` -- `StellaOps.PolicyEvaluation@1` -- `StellaOps.VEXAttestation@1` -- `StellaOps.RiskProfileEvidence@1` +- `StellaOps.PolicyEvaluation@1` +- `StellaOps.VEXAttestation@1` +- `StellaOps.RiskProfileEvidence@1` + +Each predicate embeds subject digests, issuer metadata, policy context, materials, and optional transparency hints. Unsupported predicates return `422 predicate_unsupported`. + +> **Golden fixtures:** Deterministic JSON statements for each predicate live in `src/Attestor/StellaOps.Attestor.Types/samples`. They are kept stable by the `StellaOps.Attestor.Types.Tests` project so downstream docs and contracts can rely on them without drifting. -Each predicate embeds subject digests, issuer metadata, policy context, materials, and optional transparency hints. 
Unsupported predicates return `422 predicate_unsupported`. - -### Envelope & signature model -- DSSE envelopes canonicalised (stable JSON ordering) prior to hashing. -- Signature modes: keyless (Fulcio cert chain), keyful (KMS/HSM), hardware (FIDO2/WebAuthn). Multiple signatures allowed. -- Rekor entry stores bundle hash, certificate chain, and optional witness endorsements. -- Archive CAS retains original envelope plus metadata for offline verification. +### Envelope & signature model +- DSSE envelopes canonicalised (stable JSON ordering) prior to hashing. +- Signature modes: keyless (Fulcio cert chain), keyful (KMS/HSM), hardware (FIDO2/WebAuthn). Multiple signatures allowed. +- Rekor entry stores bundle hash, certificate chain, and optional witness endorsements. +- Archive CAS retains original envelope plus metadata for offline verification. +- Envelope serializer emits **compact** (canonical, minified) and **expanded** (annotated, indented) JSON variants off the same canonical byte stream so hashing stays deterministic while humans get context. +- Payload handling supports **optional compression** (`gzip`, `brotli`) with compression metadata recorded in the expanded view and digesting always performed over the uncompressed bytes. +- Expanded envelopes surface **detached payload references** (URI, digest, media type, size) so large artifacts can live in CAS/object storage while the canonical payload remains embedded for verification. +- Payload previews auto-render JSON or UTF-8 text in the expanded output to simplify triage in air-gapped and offline review flows. ### Verification pipeline overview 1. Fetch envelope (from request, cache, or storage) and validate DSSE structure. @@ -151,11 +157,53 @@ Indexes: ## 4) APIs -### 4.1 Submission - -`POST /api/v1/rekor/entries` *(mTLS + OpTok required)* - -* **Body**: as above. 
+### 4.1 Signing + +`POST /api/v1/attestations:sign` *(mTLS + OpTok required)* + +* **Purpose**: Deterministically wrap Stella Ops payloads in DSSE envelopes before Rekor submission. Reuses the submission rate limiter and honours caller tenancy/audience scopes. +* **Body**: + + ```json + { + "keyId": "signing-key-id", + "payloadType": "application/vnd.in-toto+json", + "payload": "", + "mode": "keyless|keyful|kms", + "certificateChain": ["-----BEGIN CERTIFICATE-----..."], + "artifact": { + "sha256": "", + "kind": "sbom|report|vex-export", + "imageDigest": "sha256:...", + "subjectUri": "oci://..." + }, + "logPreference": "primary|mirror|both", + "archive": true + } + ``` + +* **Behaviour**: + * Resolve the signing key from `attestor.signing.keys[]` (includes algorithm, provider, and optional KMS version). + * Compute DSSE pre‑authentication encoding, sign with the resolved provider (default EC, BouncyCastle Ed25519, or File‑KMS ES256), and add static + request certificate chains. + * Canonicalise the resulting bundle, derive `bundleSha256`, and mirror the request meta shape used by `/api/v1/rekor/entries`. + * Emit `attestor.sign_total{result,algorithm,provider}` and `attestor.sign_latency_seconds{algorithm,provider}` metrics and append an audit row (`action=sign`). +* **Response 200**: + + ```json + { + "bundle": { "dsse": { "payloadType": "...", "payload": "...", "signatures": [{ "keyid": "signing-key-id", "sig": "..." }] }, "certificateChain": ["..."], "mode": "kms" }, + "meta": { "artifact": { "sha256": "...", "kind": "sbom" }, "bundleSha256": "...", "logPreference": "primary", "archive": true }, + "key": { "keyId": "signing-key-id", "algorithm": "ES256", "mode": "kms", "provider": "kms", "signedAt": "2025-11-01T12:34:56Z" } + } + ``` + +* **Errors**: `400 key_not_found`, `400 payload_missing|payload_invalid_base64|artifact_sha_missing`, `400 mode_not_allowed`, `403 client_certificate_required`, `401 invalid_token`, `500 signing_failed`. 
+ +### 4.2 Submission + +`POST /api/v1/rekor/entries` *(mTLS + OpTok required)* + +* **Body**: as above. * **Behavior**: * Verify caller (mTLS + OpTok). @@ -178,16 +226,16 @@ Indexes: "status": "included" } ``` -* **Errors**: `401 invalid_token`, `403 not_signer|chain_untrusted`, `409 duplicate_bundle` (with existing `uuid`), `502 rekor_unavailable`, `504 proof_timeout`. - -### 4.2 Proof retrieval - -`GET /api/v1/rekor/entries/{uuid}` +* **Errors**: `401 invalid_token`, `403 not_signer|chain_untrusted`, `409 duplicate_bundle` (with existing `uuid`), `502 rekor_unavailable`, `504 proof_timeout`. + +### 4.3 Proof retrieval + +`GET /api/v1/rekor/entries/{uuid}` * Returns `entries` row (refreshes proof from Rekor if stale/missing). * Accepts `?refresh=true` to force backend query. -### 4.3 Verification (third‑party or internal) +### 4.4 Verification (third‑party or internal) `POST /api/v1/rekor/verify` @@ -202,17 +250,28 @@ Indexes: 1. **Bundle signature** → cert chain to Fulcio/KMS roots configured. 2. **Inclusion proof** → recompute leaf hash; verify Merkle path against checkpoint root. 3. Optionally verify **checkpoint** against local trust anchors (if Rekor signs checkpoints). - 4. Confirm **subject.digest** matches caller‑provided hash (when given). + 4. Confirm **subject.digest** matches caller‑provided hash (when given). + 5. Fetch **transparency witness** statement when enabled; cache results and downgrade status to WARN when endorsements are missing or mismatched. -* **Response**: - - ```json - { "ok": true, "uuid": "…", "index": 123, "logURL": "…", "checkedAt": "…" } - ``` - -### 4.4 Batch submission (optional) - -`POST /api/v1/rekor/batch` accepts an array of submission objects; processes with per‑item results. 
+* **Response**: + + ```json + { "ok": true, "uuid": "…", "index": 123, "logURL": "…", "checkedAt": "…" } + ``` + +### 4.5 Bulk verification + +`POST /api/v1/rekor/verify:bulk` enqueues a verification job containing up to `quotas.bulk.maxItemsPerJob` items. Each item mirrors the single verification payload (uuid | artifactSha256 | subject+envelopeId, optional policyVersion/refreshProof). The handler persists a MongoDB job document (`bulk_jobs` collection) and returns `202 Accepted` with a job descriptor and polling URL. + +`GET /api/v1/rekor/verify:bulk/{jobId}` returns progress and per-item results (subject/uuid, status, issues, cached verification report if available). Jobs are tenant- and subject-scoped; only the initiating principal can read their progress. + +**Worker path:** `BulkVerificationWorker` claims queued jobs (`status=queued → running`), executes items sequentially through the cached verification service, updates progress counters, and records metrics: + +- `attestor.bulk_jobs_total{status}` – completed/failed jobs +- `attestor.bulk_job_duration_seconds{status}` – job runtime +- `attestor.bulk_items_total{status}` – per-item outcomes (`succeeded`, `verification_failed`, `exception`) + +The worker honours `bulkVerification.itemDelayMilliseconds` for throttling and reschedules persistence conflicts with optimistic version checks. Results hydrate the verification cache; failed items record the error reason without aborting the overall job. --- @@ -244,8 +303,10 @@ Indexes: * `subject.digest.sha256` values must be present and well‑formed (hex). * **No public submission** path. **Never** accept bundles from untrusted clients. * **Client certificate allowlists**: optional `security.mtls.allowedSubjects` / `allowedThumbprints` tighten peer identity checks beyond CA pinning. -* **Rate limits**: token-bucket per caller derived from `quotas.perCaller` (QPS/burst) returns `429` + `Retry-After` when exceeded. 
-* **Redaction**: Attestor never logs secret material; DSSE payloads **should** be public by design (SBOMs/reports). If customers require redaction, enforce policy at Signer (predicate minimization) **before** Attestor. +* **Rate limits**: token-bucket per caller derived from `quotas.perCaller` (QPS/burst) returns `429` + `Retry-After` when exceeded. +* **Scope enforcement**: API separates `attestor.write`, `attestor.verify`, and `attestor.read` policies; verification/list endpoints accept read or verify scopes while submission endpoints remain write-only. +* **Request hygiene**: JSON content-type is mandatory (415 returned otherwise); DSSE payloads are capped (default 2 MiB), certificate chains limited to six entries, and signatures to six per envelope to mitigate parsing abuse. +* **Redaction**: Attestor never logs secret material; DSSE payloads **should** be public by design (SBOMs/reports). If customers require redaction, enforce policy at Signer (predicate minimization) **before** Attestor. --- @@ -268,24 +329,32 @@ Indexes: ## 8) Observability & audit -**Metrics** (Prometheus): - -* `attestor.submit_total{result,backend}` -* `attestor.submit_latency_seconds{backend}` -* `attestor.proof_fetch_total{result}` -* `attestor.verify_total{result}` -* `attestor.dedupe_hits_total` -* `attestor.errors_total{type}` - -**Correlation**: - -* HTTP callers may supply `X-Correlation-Id`; Attestor will echo the header and push `CorrelationId` into the log scope for cross-service tracing. - -**Tracing**: - -* Spans: `validate`, `rekor.submit`, `rekor.poll`, `persist`, `archive`, `verify`. 
- -**Audit**: +**Metrics** (Prometheus): + +* `attestor.sign_total{result,algorithm,provider}` +* `attestor.sign_latency_seconds{algorithm,provider}` +* `attestor.submit_total{result,backend}` +* `attestor.submit_latency_seconds{backend}` +* `attestor.proof_fetch_total{subject,issuer,policy,result,attestor.log.backend}` +* `attestor.verify_total{subject,issuer,policy,result}` +* `attestor.verify_latency_seconds{subject,issuer,policy,result}` +* `attestor.dedupe_hits_total` +* `attestor.errors_total{type}` + +SLO guardrails: + +* `attestor.verify_latency_seconds` P95 ≤ 2 s per policy. +* `attestor.verify_total{result="failed"}` ≤ 1 % of `attestor.verify_total` over 30 min rolling windows. + +**Correlation**: + +* HTTP callers may supply `X-Correlation-Id`; Attestor will echo the header and push `CorrelationId` into the log scope for cross-service tracing. + +**Tracing**: + +* Spans: `attestor.sign`, `validate`, `rekor.submit`, `rekor.poll`, `persist`, `archive`, `attestor.verify`, `attestor.verify.refresh_proof`. + +**Audit**: * Immutable `audit` rows (ts, caller, action, hashes, uuid, index, backend, result, latency). 
@@ -296,20 +365,45 @@ Indexes: ```yaml attestor: listen: "https://0.0.0.0:8444" - security: - mtls: - caBundle: /etc/ssl/signer-ca.pem - requireClientCert: true - authority: - issuer: "https://authority.internal" - jwksUrl: "https://authority.internal/jwks" - requireSenderConstraint: "dpop" # or "mtls" - signerIdentity: - mode: ["keyless","kms"] - fulcioRoots: ["/etc/fulcio/root.pem"] - allowedSANs: ["urn:stellaops:signer"] - kmsKeys: ["kms://cluster-kms/stellaops-signer"] - rekor: + security: + mtls: + caBundle: /etc/ssl/signer-ca.pem + requireClientCert: true + authority: + issuer: "https://authority.internal" + jwksUrl: "https://authority.internal/jwks" + requireSenderConstraint: "dpop" # or "mtls" + signerIdentity: + mode: ["keyless","kms"] + fulcioRoots: ["/etc/fulcio/root.pem"] + allowedSANs: ["urn:stellaops:signer"] + kmsKeys: ["kms://cluster-kms/stellaops-signer"] + submissionLimits: + maxPayloadBytes: 2097152 + maxCertificateChainEntries: 6 + maxSignatures: 6 + signing: + preferredProviders: ["kms","bouncycastle.ed25519","default"] + kms: + enabled: true + rootPath: "/var/lib/stellaops/kms" + password: "${ATTESTOR_KMS_PASSWORD}" + keys: + - keyId: "kms-primary" + algorithm: ES256 + mode: kms + provider: "kms" + providerKeyId: "kms-primary" + kmsVersionId: "v1" + - keyId: "ed25519-offline" + algorithm: Ed25519 + mode: keyful + provider: "bouncycastle.ed25519" + materialFormat: base64 + materialPath: "/etc/stellaops/keys/ed25519.key" + certificateChain: + - "-----BEGIN CERTIFICATE-----...-----END CERTIFICATE-----" + rekor: primary: url: "https://rekor-v2.internal" proofTimeoutMs: 15000 @@ -328,13 +422,20 @@ attestor: objectLock: "governance" redis: url: "redis://redis:6379/2" - quotas: - perCaller: - qps: 50 - burst: 100 -``` - ---- + quotas: + perCaller: + qps: 50 + burst: 100 +``` + +**Notes:** + +* `signing.preferredProviders` defines the resolution order when multiple providers support the requested algorithm. Omit to fall back to registration order. 
+* File-backed KMS (`signing.kms`) is required when at least one key uses `mode: kms`; the password should be injected via secret store or environment. +* For keyful providers, supply inline `material` or `materialPath` plus `materialFormat` (`pem` (default), `base64`, or `hex`). KMS keys ignore these fields and require `kmsVersionId`. +* `certificateChain` entries are appended to returned bundles so offline verifiers do not need to dereference external stores. + +--- ## 10) End‑to‑end sequences diff --git a/docs/modules/attestor/payloads.md b/docs/modules/attestor/payloads.md new file mode 100644 index 00000000..2d0517b5 --- /dev/null +++ b/docs/modules/attestor/payloads.md @@ -0,0 +1,48 @@ +# Attestor Payload Reference + +StellaOps evidence predicates must remain reproducible, explainable, and portable across online and fully air-gapped deployments. This guide lists each predicate type, indicates where the canonical JSON Schema lives, highlights the producing service, and links to the matching golden samples. 
+ +## Quick Reference + +| Type ID | Predicate URI | Schema file | Produced by | Primary consumers | +| --- | --- | --- | --- | --- | +| StellaOps.BuildProvenance@1 | https://schemas.stella-ops.org/attestations/build-provenance@1 | src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-build-provenance.v1.schema.json | Build pipelines, Scanner SBOM bake stage | Attestor, Export Center, Policy Engine | +| StellaOps.SBOMAttestation@1 | https://schemas.stella-ops.org/attestations/sbom-attestation@1 | src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-sbom-attestation.v1.schema.json | Scanner.Worker SBOM composer | Policy Engine, CLI, Export Center | +| StellaOps.ScanResults@1 | https://schemas.stella-ops.org/attestations/scan-results@1 | src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-scan-results.v1.schema.json | Scanner.Worker analyzers | Policy Engine, CLI, Orchestrator | +| StellaOps.PolicyEvaluation@1 | https://schemas.stella-ops.org/attestations/policy-evaluation@1 | src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-policy-evaluation.v1.schema.json | Policy Engine explain pipeline | CLI, Notify, Export Center | +| StellaOps.VEXAttestation@1 | https://schemas.stella-ops.org/attestations/vex-attestation@1 | src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-vex-attestation.v1.schema.json | Excititor consensus service | Policy Engine, CLI, Console | +| StellaOps.RiskProfileEvidence@1 | https://schemas.stella-ops.org/attestations/risk-profile@1 | src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-risk-profile.v1.schema.json | Policy Engine risk pipeline | Console, Notify, Export Center | +| StellaOps.CustomEvidence@1 | https://schemas.stella-ops.org/attestations/custom-evidence@1 | src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-custom-evidence.v1.schema.json | CLI custom evidence workflows and partner integrations | Policy Engine (policy hooks), Export Center | + +Golden JSON fixtures that double as contract tests live under 
`src/Attestor/StellaOps.Attestor.Types/fixtures/v1/<type>.sample.json`. TypeScript and Go clients consume the generated sources in `src/Attestor/StellaOps.Attestor.Types/generated/ts` and `src/Attestor/StellaOps.Attestor.Types/generated/go`.
+
+## Envelope Conventions
+
+- DSSE envelopes are signed over canonical JSON (sorted keys, UTF-8, no insignificant whitespace).
+- The `subject` array must include at least one SHA-256 digest and may attach annotations such as `oci.reference` or `stellaops.asset`.
+- `predicateType` uses the URI shown in the table; `predicate.typeId` mirrors the short identifier.
+- `predicate.schemaVersion` follows semantic versioning. Consumers reject mismatched major versions.
+- Optional `metadata` and `materials` sections follow the in-toto Statement format to maximise provenance portability.
+
+## Predicate Highlights
+
+- **StellaOps.BuildProvenance@1** records builder identity, config source, materials, reproducibility flags, and the resulting artifact digests. Outputs must match the DSSE subject.
+- **StellaOps.SBOMAttestation@1** links an artifact digest to a CycloneDX 1.6 or SPDX 3.0.0 document, tracking inventory counts and the generator metadata. Component graph hashes reference CAS entries emitted by Scanner.Worker.
+- **StellaOps.ScanResults@1** captures deterministic findings from OS, language, and native analyzers. It reports summary counts, per-finding metadata (PURL, severity, exploitability), and the layer digests inspected.
+- **StellaOps.PolicyEvaluation@1** documents lattice-based policy outcomes, including decision traces and evidence digests consumed during evaluation.
+- **StellaOps.VEXAttestation@1** mirrors OpenVEX-aligned statements with justification, scope narrowing (package coordinates or component IDs), and issue timestamps.
+- **StellaOps.RiskProfileEvidence@1** summarises exploitability, ticketing load, runtime coverage, and maturity for downstream dashboards.
+- **StellaOps.CustomEvidence@1** allows regulated tenants to attach organisation-specific payloads referenced by a CAS-hosted schema while preserving provenance and retention controls. + +## Validation and Tooling + +- Run `npm install` once, then `npm run docs:attestor:validate` to validate JSON fixtures against their schemas, execute the generated TypeScript tests (`npm test`), and run `go test ./...` for the Go SDK. The command fails fast when any schema, fixture, or generated SDK drifts. +- Regenerate schemas and SDKs after edits with `dotnet run --project src/Attestor/StellaOps.Attestor.Types/Tools/StellaOps.Attestor.Types.Generator`. +- Offline Kit builds (`ops/devops/offline-kit/`) mirror schemas, fixtures, and SDK bundles so air-gapped operators can run the same validation stack. + +## Related Material + +- `docs/modules/attestor/architecture.md` — service topology, Rekor integration, caching model. +- `docs/modules/platform/architecture-overview.md` — cross-module data flows and tenant boundaries. +- `docs/ingestion/aggregation-only-contract.md` — guardrails for advisory feeds consumed by policy evaluation. +- `src/Attestor/StellaOps.Attestor.Types/samples/README.md` — directory map for the golden evidence set referenced here. diff --git a/docs/modules/attestor/ttl-validation.md b/docs/modules/attestor/ttl-validation.md new file mode 100644 index 00000000..77a6fac1 --- /dev/null +++ b/docs/modules/attestor/ttl-validation.md @@ -0,0 +1,41 @@ +# Attestor TTL Validation Runbook + +> **Purpose:** confirm MongoDB TTL indexes and Redis expirations for the attestation dedupe store behave as expected on a production-like stack. + +## Prerequisites +- Docker Desktop or compatible daemon with the Compose plugin enabled. +- Local ports `27017` and `6379` free. +- `dotnet` SDK 10.0 preview (same as repo toolchain). +- Network access to pull `mongo:7` and `redis:7` images. + +## Quickstart +1. 
From the repo root export any required proxy settings, then run
+   ```bash
+   scripts/run-attestor-ttl-validation.sh
+   ```
+   The helper script:
+   - Spins up `mongo:7` and `redis:7` containers.
+   - Sets `ATTESTOR_LIVE_MONGO_URI` / `ATTESTOR_LIVE_REDIS_URI`.
+   - Executes the live TTL test suite (`Category=LiveTTL`) in `StellaOps.Attestor.Tests`.
+   - Tears the stack down automatically.
+
+2. Capture the test output (`ttl-validation-<timestamp>.log`) and attach it to the sprint evidence folder (`docs/modules/attestor/evidence/`).
+
+## Result handling
+- **Success:** Tests complete in ~3–4 minutes with `Total tests: 2, Passed: 2`. Store the log and note the run in `SPRINT_100_identity_signing.md` under ATTESTOR-72-003.
+- **Failure:** Preserve:
+  - `docker compose logs` for both services.
+  - `mongosh` output of `db.dedupe.getIndexes()` and sample documents.
+  - `redis-cli --raw ttl attestor:ttl:live:bundle:<hash>`.
+  File an incident in the Attestor Guild channel and link the captured artifacts.
+
+## Manual verification (optional)
+If the helper script cannot be used:
+1. Start MongoDB and Redis manually with equivalent configuration.
+2. Set `ATTESTOR_LIVE_MONGO_URI` and `ATTESTOR_LIVE_REDIS_URI`.
+3. Run `dotnet test src/Attestor/StellaOps.Attestor.sln --no-build --filter "Category=LiveTTL"`.
+4. Follow the evidence handling steps above.
+
+## Ownership
+- Primary: Attestor Service Guild.
+- Partner: QA Guild (observes TTL metrics, confirms evidence archiving).
diff --git a/docs/modules/attestor/workflows.md b/docs/modules/attestor/workflows.md
new file mode 100644
index 00000000..9ba48c9d
--- /dev/null
+++ b/docs/modules/attestor/workflows.md
@@ -0,0 +1,247 @@
+# Attestor Verification Workflows
+
+> How StellaOps turns DSSE bundles into verifiable evidence, how the verification API reports outcomes, and how explainability signals surface in UI/CLI flows.
+ +> ⚠️ **2025-11-01 coordination note:** `StellaOps.Attestor.WebService` is failing to compile until downstream fixes land (`Contracts/AttestationBundleContracts.cs` null-coalescing update and scope/token variables restored in `Program.cs`). Verification flows ship in infrastructure/tests, but the WebService hand-off stays blocked — track via `ATTESTOR-73-002` (see Attestor task board). + +## 1. Verification flow (API and service contract) + +- **Entry point.** `POST /api/v1/rekor/verify` deserialises to `AttestorVerificationRequest`. +- **Resolution order.** The service tries `uuid`, then canonicalised `bundle`, then `artifactSha256`. At least one selector must be present (`invalid_query` otherwise). +- **Optional proof refresh.** `refreshProof=true` forces a Rekor lookup before returning. Proofs are cached in Mongo. +- **Signature replay.** Supplying `bundle` lets the service recompute the canonical hash and re-run signature checks; omitting the bundle skips those steps but still validates Merkle proofs and cached policy decisions. +- **Auth scopes.** Endpoints demand `attestor.verify` (write scope is also accepted); read-only detail/list APIs require `attestor.read` at minimum. + +### 1.1 Request properties + +| Field | Type | Required | Purpose | +|-------|------|----------|---------| +| `uuid` | string | optional | Rekor V2 UUID to verify and (optionally) refresh. | +| `bundle` | object | optional | DSSE envelope (same shape as submission) for signature re-verification. | +| `artifactSha256` | string | optional | Resolve the most recent entry for an attestable artefact digest. | +| `subject` | string | optional | Logical subject identifier used for cache/telemetry tagging; defaults to the stored artifact digest. | +| `envelopeId` | string | optional | Stable identifier for the DSSE bundle (typically the canonical hash); enables cache lookups. 
| +| `policyVersion` | string | optional | Policy digest/version driving verification; feeds cache keys and observability dimensions. | +| `refreshProof` | bool | optional (default `false`) | Pull the current inclusion proof and checkpoint from Rekor before evaluating. | + +All selectors are mutually compatible; if more than one is set the service uses the first match (`uuid` → `bundle` → `artifactSha256`). + +### 1.2 Response schema (`AttestorVerificationResult`) + +| Field | Type | Description | +|-------|------|-------------| +| `ok` | bool | `true` when the entry status is `included` **and** no issues were recorded. | +| `uuid` | string | Rekor UUID that satisfied the query. Useful for follow-up fetches. | +| `index` | number (int64) | Rekor log index, when supplied by the backend. | +| `logUrl` | string | Fully-qualified Rekor entry URL for operators and auditors. | +| `status` | string | Transparency-log status seen in Mongo (`included`, `pending`, `failed`, …). | +| `checkedAt` | string (ISO-8601 UTC) | Timestamp emitted when the response is created. | +| `issues` | array[string] | Machine-readable explainability codes. Empty when `ok=true`. | + +> **Note:** `checkedAt` is recomputed each call; cache hits do not recycle previous timestamps. + +### 1.3 Success criteria + +`ok=true` requires: + +1. Entry exists and status equals `included`. +2. Canonical DSSE hash matches the stored bundle hash. +3. Signature re-verification (when a bundle is supplied) succeeds. +4. Inclusion proof validates against the cached or refreshed checkpoint. + +Any deviation records at least one issue and flips `ok` to `false`. Consumers **must** inspect `issues` rather than inferring from `status` alone. + +## 2. Verification report schema + +`AttestorVerificationResult` carries the flattened summary shown above. 
When callers request the detailed report (`GET /api/v1/rekor/entries/{uuid}?refresh=true` or via SDK) they receive a `VerificationReport` shaped as follows: + +```json +{ + "overallStatus": "pass", + "succeeded": true, + "policy": { ... }, + "issuer": { ... }, + "freshness": { ... }, + "signatures": { ... }, + "transparency": { ... }, + "issues": [ "bundle_hash_mismatch" ] +} +``` + +| Field | Type | Description | +|-------|------|-------------| +| `overallStatus` | string (`pass`, `warn`, `fail`, `skipped`) | Aggregated verdict derived from the individual section statuses. | +| `succeeded` | bool | Convenience flag; `true` when `overallStatus ∈ {pass, warn}`. | +| `policy` | object | Results from policy evaluation (see below). | +| `issuer` | object | Identity/result of the signing entity. | +| `freshness` | object | Age analysis relative to policy settings. | +| `signatures` | object | Signature validation summary. | +| `transparency` | object | Inclusion proof / checkpoint evaluation summary. | +| `issues` | array[string] | De-duplicated set drawn from the sections; order is deterministic and stable. | + +### 2.1 `policy` + +| Field | Description | +|-------|-------------| +| `status` | Section verdict (`pass`, `warn`, `fail`, `skipped`). | +| `policyId` / `policyVersion` | DSL identifier and revision used for evaluation. | +| `verdict` | Policy outcome (`allow`, `challenge`, `deny`, etc.). | +| `issues` | Policy-specific explainability codes (e.g., `policy_rule_blocked`). | +| `attributes` | Key/value map emitted by the policy for downstream observability (e.g., applicable rules, matched waivers). | + +### 2.2 `issuer` + +| Field | Description | +|-------|-------------| +| `status` | Result of issuer validation. | +| `mode` | Signing mode detected (`keyless`, `kms`, `unknown`). | +| `issuer` | Distinguished name / issuer URI recorded during signing. | +| `subjectAlternativeName` | SAN pulled from the Fulcio certificate (keyless) or recorded KMS identity. 
| +| `keyId` | Logical key identifier associated with the signature. | +| `issues` | Issuer-specific issues (e.g., `issuer_trust_root_mismatch`, `signer_mode_unsupported:kid`). | + +### 2.3 `freshness` + +| Field | Description | +|-------|-------------| +| `status` | `fail` when the attestation exceeds `verification.freshnessMaxAgeMinutes`; `warn` when only the warning threshold is hit. | +| `createdAt` | Timestamp embedded in the attestation metadata. | +| `evaluatedAt` | Server-side timestamp used for age calculations. | +| `age` | ISO8601 duration of `evaluatedAt - createdAt`. | +| `maxAge` | Policy-driven ceiling (null when unchecked). | +| `issues` | `freshness_max_age_exceeded`, `freshness_warning`, etc. | + +### 2.4 `signatures` + +| Field | Description | +|-------|-------------| +| `status` | Signature validation verdict. | +| `bundleProvided` | `true` when canonical DSSE bytes were supplied. | +| `totalSignatures` | Count observed in the DSSE envelope. | +| `verifiedSignatures` | Number of signatures that validated against trusted keys. | +| `requiredSignatures` | Policy / configuration minimum enforced. | +| `issues` | Signature codes such as `bundle_payload_invalid_base64`, `signature_invalid`, `signer_mode_unknown`. | + +### 2.5 `transparency` + +| Field | Description | +|-------|-------------| +| `status` | Inclusion proof / checkpoint verdict. | +| `proofPresent` | Whether a proof document was available. | +| `checkpointPresent` | Indicates the Rekor checkpoint existed and parsed. | +| `inclusionPathPresent` | `true` when the Merkle path array contained nodes. | +| `issues` | Merkle/rekor codes (`proof_missing`, `proof_leafhash_mismatch`, `checkpoint_missing`, `proof_root_mismatch`). | + +### 2.6 Issue catalogue (non-exhaustive) + +| Code | Trigger | Notes | +|------|---------|-------| +| `bundle_hash_mismatch` | Canonical DSSE hash differs from stored value. | Often indicates tampering or inconsistent canonicalisation. 
| +| `bundle_payload_invalid_base64` | DSSE payload cannot be base64-decoded. | Validate producer pipeline; the attestation is unusable. | +| `signature_invalid` | At least one signature failed cryptographic verification. | Consider checking key rotation / revocation status. | +| `signer_mode_unknown` / `signer_mode_unsupported:` | Signing mode not configured for this installation. | Update `attestorOptions.security.signerIdentity.mode`. | +| `issuer_trust_root_mismatch` | Certificate chain does not terminate in configured Fulcio/KMS roots. | Check Fulcio bundle / KMS configuration. | +| `freshness_max_age_exceeded` | Attestation older than permitted maximum. | Regenerate attestation or extend policy window. | +| `proof_missing` | No inclusion proof stored or supplied. | When running offline, import bundles with proofs or allow warn-level policies. | +| `proof_root_mismatch` | Rebuilt Merkle root differs from checkpoint. | Proof may be stale or log compromised; escalate. | +| `checkpoint_missing` | No Rekor checkpoint available. | Configure `RequireCheckpoint=false` to downgrade severity. | + +Downstream consumers (UI, CLI, policy studio) should render human-readable messages but must retain the exact issue codes for automation and audit replay. + +## 3. Explainability signals + +1. **Canonicalisation.** The service replays DSSE canonicalisation to derive `bundleSha256`. Failures surface as `bundle_hash_mismatch` or decoding errors. +2. **Signature checks.** Mode-aware handling: + - `kms` (HMAC) compares against configured shared secrets. + - `keyless` rebuilds the certificate chain, enforces Fulcio roots, SAN allow-lists, and verifies with the leaf certificate. + - Unknown modes emit `signer_mode_unknown` / `signer_mode_unsupported:`. +3. **Proof acquisition.** When `refreshProof` is requested the Rekor backend may contribute a textual issue (`Proof refresh failed: …`) without stopping evaluation. +4. 
**Merkle validation.** Structured helper ensures leaf hash, path orientation, and checkpoint root are consistent; each validation failure has a discrete issue code.
+5. **Observability.** The meter `attestor.verify_total` increments with `result=ok|failed`; structured logs and traces carry the same `issues` vector for UI/CLI drill-down.
+
+All issues are appended in detection order to simplify chronological replay in the Console’s chain-of-custody view.
+
+### 3.1 Issue catalogue
+
+| Code | Trigger | Operator guidance |
+|------|---------|-------------------|
+| `bundle_hash_mismatch` | Canonicalised DSSE hash differs from stored bundle hash. | Re-download artefact; investigate tampering or submission races. |
+| `bundle_payload_invalid_base64` | Payload could not be base64-decoded. | Ensure bundle transport preserved payload; capture original DSSE for forensics. |
+| `signature_invalid_kms` | HMAC verification failed for `mode=kms`. | Confirm shared secret alignment with Signer; rotate keys if drift detected. |
+| `signer_mode_unknown` | Entry lacks signer mode metadata and bundle omitted it. | Re-ingest bundle or inspect submission pipeline metadata. |
+| `signer_mode_unsupported:<mode>` | Signer mode is unsupported by the verifier. | Add support or block unsupported issuers in policy. |
+| `kms_key_missing` | No configured KMS secrets to verify `mode=kms`. | Populate `security:signerIdentity:kmsKeys` in Attestor config before retry. |
+| `signature_invalid_base64` | One or more signatures were not valid base64. | Bundle corruption; capture raw payload and re-submit. |
+| `certificate_chain_missing` | `mode=keyless` bundle lacked any certificates. | Ensure Signer attaches Fulcio chain; review submission pipeline. |
+| `certificate_chain_invalid` | Certificates could not be parsed. | Fetch original DSSE bundle for repair; confirm certificate encoding. |
+| `certificate_chain_untrusted[:detail]` | Chain failed custom-root validation.
| Import correct Fulcio roots or investigate potential impersonation. | +| `certificate_san_untrusted` | Leaf SAN not in configured allow-list. | Update allow-list or revoke offending issuer. | +| `signature_invalid` | No signature validated with supplied public keys. | Treat as tampering; trigger incident response. | +| `proof_missing` | No Merkle proof stored for the entry. | Re-run with `refreshProof=true`; check Rekor availability. | +| `bundle_hash_decode_failed` | Stored bundle hash could not be decoded. | Verify Mongo record integrity; re-enqueue submission if necessary. | +| `proof_inclusion_missing` | Inclusion section absent from proof. | Retry proof refresh; inspect Rekor health. | +| `proof_leafhash_decode_failed` | Leaf hash malformed. | Replay submission; inspect Rekor data corruption. | +| `proof_leafhash_mismatch` | Leaf hash differs from canonical bundle hash. | Raises tamper alert; reconcile Rekor entry vs stored bundle. | +| `proof_path_decode_failed` | Inclusion path entry malformed. | Same action as above; likely Rekor data corruption. | +| `proof_path_orientation_missing` | Inclusion path lacks left/right marker. | File Rekor bug; fallback to mirror log if configured. | +| `checkpoint_missing` | Proof lacks checkpoint metadata. | Retry refresh; ensure Rekor is configured to return checkpoints. | +| `checkpoint_root_decode_failed` | Checkpoint root hash malformed. | Investigate Rekor/mirror integrity before trusting log. | +| `proof_root_mismatch` | Computed root hash != checkpoint root. | Critical alert; assume inclusion proof compromised. | +| `Proof refresh failed: …` | Rekor fetch threw an exception. | Message includes upstream error; surface alongside telemetry for debugging. | + +Future explainability flags must follow the same pattern: short, lowercase codes with optional suffix payload (`code:detail`). + +## 4. 
Worked examples + +### 4.1 Successful verification + +```json +{ + "ok": true, + "uuid": "0192fdb4-a82b-7f90-b894-6fd1dd918b85", + "index": 73421, + "logUrl": "https://rekor.stellaops.test/api/v2/log/entries/0192fdb4a82b7f90b8946fd1dd918b85", + "status": "included", + "checkedAt": "2025-11-01T17:06:52.182394Z", + "issues": [] +} +``` + +This mirrors the happy-path asserted in `AttestorVerificationServiceTests.VerifyAsync_ReturnsOk_ForExistingUuid`, which replays the entire submission→verification loop. + +### 4.2 Tampered bundle + +```json +{ + "ok": false, + "uuid": "0192fdb4-a82b-7f90-b894-6fd1dd918b85", + "index": 73421, + "logUrl": "https://rekor.stellaops.test/api/v2/log/entries/0192fdb4a82b7f90b8946fd1dd918b85", + "status": "included", + "checkedAt": "2025-11-01T17:09:05.443218Z", + "issues": [ + "bundle_hash_mismatch", + "signature_invalid" + ] +} +``` + +Derived from `AttestorVerificationServiceTests.VerifyAsync_FlagsTamperedBundle`, which flips the DSSE payload and expects both issues to surface. CLI and Console consumers should display these codes verbatim and provide remediation tips from the table above. + +## 5. Validating the documentation + +- Run `dotnet test src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests` to exercise the scenarios behind the examples. +- API integrators can `curl` the verify endpoint and compare responses with the JSON above. +- UI/CLI teams should ensure explainability tooltips and runbooks reference the same issue catalogue. + +Keeping the documentation aligned with the test suite guarantees explainability remains deterministic and audit-friendly. + +## 6. 
Offline bundles & air-gapped verification + +Stella Ops Attestor now supports packaging attestations for sealed environments and rehydrating them without calling Rekor: + +- **Export bundles.** `POST /api/v1/attestations:export` accepts either a list of Rekor UUIDs or filter criteria (`subject`, `type`, `issuer`, `scope`, `createdAfter|Before`, `limit`, `continuationToken`) and returns an `attestor.bundle.v1` document. Each item contains the attestation entry, canonical DSSE payload (base64), optional proof payload, and metadata. Responses include a `continuationToken` so callers can page through large result sets (limits default to 100 and are capped at 200). JSON content is required and requests are gated by the `attestor.read` scope. +- **Import bundles.** `POST /api/v1/attestations:import` ingests the bundle document, upserts attestation metadata, and restores the canonical DSSE/proof into the configured archive store. The S3 archive integration must be enabled; the response reports how many entries were imported versus updated, any skipped items, and issue codes (`bundle_payload_invalid_base64`, `bundle_hash_mismatch`, `archive_disabled`, …). +- **Offline verification.** When replaying verification without log connectivity, submit the DSSE bundle and set `offline=true` on `POST /api/v1/rekor/verify`. The service reuses imported proofs when present and surfaces deterministic explainability codes (`proof_missing`, `proof_inclusion_missing`, …) instead of attempting Rekor fetches. + +Tests `AttestorBundleServiceTests.ExportAsync_AppliesFiltersAndContinuation`, `AttestationBundleEndpointsTests`, `AttestorVerificationServiceTests.VerifyAsync_OfflineSkipsProofRefreshWhenMissing`, and `AttestorVerificationServiceTests.VerifyAsync_OfflineUsesImportedProof` exercise the exporter/importer, API contracts, and the offline verification path with and without witness data. 
diff --git a/docs/modules/authority/architecture.md b/docs/modules/authority/architecture.md index f580d4f4..bb206cf2 100644 --- a/docs/modules/authority/architecture.md +++ b/docs/modules/authority/architecture.md @@ -1,445 +1,452 @@ # component_architecture_authority.md — **Stella Ops Authority** (2025Q4) > Consolidates identity and tenancy requirements documented across the AOC, Policy, and Platform guides, along with the dedicated Authority implementation plan. - -> **Scope.** Implementation‑ready architecture for **Stella Ops Authority**: the on‑prem **OIDC/OAuth2** service that issues **short‑lived, sender‑constrained operational tokens (OpToks)** to first‑party services and tools. Covers protocols (DPoP & mTLS binding), token shapes, endpoints, storage, rotation, HA, RBAC, audit, and testing. This component is the trust anchor for *who* is calling inside a Stella Ops installation. (Entitlement is proven separately by **PoE** from the cloud Licensing Service; Authority does not issue PoE.) - ---- - -## 0) Mission & boundaries - -**Mission.** Provide **fast, local, verifiable** authentication for Stella Ops microservices and tools by minting **very short‑lived** OAuth2/OIDC tokens that are **sender‑constrained** (DPoP or mTLS‑bound). Support RBAC scopes, multi‑tenant claims, and deterministic validation for APIs (Scanner, Signer, Attestor, Excititor, Concelier, UI, CLI, Zastava). - -**Boundaries.** - -* Authority **does not** validate entitlements/licensing. That’s enforced by **Signer** using **PoE** with the cloud Licensing Service. -* Authority tokens are **operational only** (2–5 min TTL) and must not be embedded in long‑lived artifacts or stored in SBOMs. -* Authority is **stateless for validation** (JWT) and **optional introspection** for services that prefer online checks. 
- ---- - -## 1) Protocols & cryptography - -* **OIDC Discovery**: `/.well-known/openid-configuration` -* **OAuth2** grant types: - - * **Client Credentials** (service↔service, with mTLS or private_key_jwt) - * **Device Code** (CLI login on headless agents; optional) - * **Authorization Code + PKCE** (browser login for UI; optional) -* **Sender constraint options** (choose per caller or per audience): - - * **DPoP** (Demonstration of Proof‑of‑Possession): proof JWT on each HTTP request, bound to the access token via `cnf.jkt`. - * **OAuth 2.0 mTLS** (certificate‑bound tokens): token bound to client certificate thumbprint via `cnf.x5t#S256`. -* **Signing algorithms**: **EdDSA (Ed25519)** preferred; fallback **ES256 (P‑256)**. Rotation is supported via **kid** in JWKS. -* **Token format**: **JWT** access tokens (compact), optionally opaque reference tokens for services that insist on introspection. -* **Clock skew tolerance**: ±60 s; issue `nbf`, `iat`, `exp` accordingly. - ---- - -## 2) Token model - -### 2.1 Access token (OpTok) — short‑lived (120–300 s) - -**Registered claims** - -``` -iss = https://authority. -sub = -aud = -exp = (<= 300 s from iat) -iat = -nbf = iat - 30 -jti = -scope = "scanner.scan scanner.export signer.sign ..." -``` - -**Sender‑constraint (`cnf`)** - -* **DPoP**: - - ```json - "cnf": { "jkt": "" } - ``` -* **mTLS**: - - ```json - "cnf": { "x5t#S256": "" } - ``` - -**Install/tenant context (custom claims)** - -``` -tid = // multi-tenant -inst = // unique installation -roles = [ "svc.scanner", "svc.signer", "ui.admin", ... ] -plan? = // optional hint for UIs; not used for enforcement -``` - -> **Note**: Do **not** copy PoE claims into OpTok; OpTok ≠ entitlement. Only **Signer** checks PoE. - -### 2.2 Refresh tokens (optional) - -* Default **disabled**. If enabled (for UI interactive logins), pair with **DPoP‑bound** refresh tokens or **mTLS** client sessions; short TTL (≤ 8 h), rotating on use (replay‑safe). 
- -### 2.3 ID tokens (optional) - -* Issued for UI/browser OIDC flows (Authorization Code + PKCE); not used for service auth. - ---- - -## 3) Endpoints & flows - -### 3.1 OIDC discovery & keys - -* `GET /.well-known/openid-configuration` → endpoints, algs, jwks_uri -* `GET /jwks` → JSON Web Key Set (rotating, at least 2 active keys during transition) - -### 3.2 Token issuance - -* `POST /oauth/token` - - * **Client Credentials** (service→service): - + +> **Scope.** Implementation‑ready architecture for **Stella Ops Authority**: the on‑prem **OIDC/OAuth2** service that issues **short‑lived, sender‑constrained operational tokens (OpToks)** to first‑party services and tools. Covers protocols (DPoP & mTLS binding), token shapes, endpoints, storage, rotation, HA, RBAC, audit, and testing. This component is the trust anchor for *who* is calling inside a Stella Ops installation. (Entitlement is proven separately by **PoE** from the cloud Licensing Service; Authority does not issue PoE.) + +--- + +## 0) Mission & boundaries + +**Mission.** Provide **fast, local, verifiable** authentication for Stella Ops microservices and tools by minting **very short‑lived** OAuth2/OIDC tokens that are **sender‑constrained** (DPoP or mTLS‑bound). Support RBAC scopes, multi‑tenant claims, and deterministic validation for APIs (Scanner, Signer, Attestor, Excititor, Concelier, UI, CLI, Zastava). + +**Boundaries.** + +* Authority **does not** validate entitlements/licensing. That’s enforced by **Signer** using **PoE** with the cloud Licensing Service. +* Authority tokens are **operational only** (2–5 min TTL) and must not be embedded in long‑lived artifacts or stored in SBOMs. +* Authority is **stateless for validation** (JWT) and **optional introspection** for services that prefer online checks. 
+ +--- + +## 1) Protocols & cryptography + +* **OIDC Discovery**: `/.well-known/openid-configuration` +* **OAuth2** grant types: + + * **Client Credentials** (service↔service, with mTLS or private_key_jwt) + * **Device Code** (CLI login on headless agents; optional) + * **Authorization Code + PKCE** (browser login for UI; optional) +* **Sender constraint options** (choose per caller or per audience): + + * **DPoP** (Demonstration of Proof‑of‑Possession): proof JWT on each HTTP request, bound to the access token via `cnf.jkt`. + * **OAuth 2.0 mTLS** (certificate‑bound tokens): token bound to client certificate thumbprint via `cnf.x5t#S256`. +* **Signing algorithms**: **EdDSA (Ed25519)** preferred; fallback **ES256 (P‑256)**. Rotation is supported via **kid** in JWKS. +* **Token format**: **JWT** access tokens (compact), optionally opaque reference tokens for services that insist on introspection. +* **Clock skew tolerance**: ±60 s; issue `nbf`, `iat`, `exp` accordingly. + +--- + +## 2) Token model + +* **Incident mode tokens** require the `obs:incident` scope, a human-supplied `incident_reason`, and remain valid only while `auth_time` stays within a five-minute freshness window. Resource servers enforce the same window and persist `incident.reason`, `incident.auth_time`, and the fresh-auth verdict in `authority.resource.authorize` events. Authority exposes `/authority/audit/incident` so auditors can review recent activations. + + +### 2.1 Access token (OpTok) — short‑lived (120–300 s) + +**Registered claims** + +``` +iss = https://authority. +sub = +aud = +exp = (<= 300 s from iat) +iat = +nbf = iat - 30 +jti = +scope = "scanner.scan scanner.export signer.sign ..." 
+``` + +**Sender‑constraint (`cnf`)** + +* **DPoP**: + + ```json + "cnf": { "jkt": "" } + ``` +* **mTLS**: + + ```json + "cnf": { "x5t#S256": "" } + ``` + +**Install/tenant context (custom claims)** + +``` +tid = // multi-tenant +inst = // unique installation +roles = [ "svc.scanner", "svc.signer", "ui.admin", ... ] +plan? = // optional hint for UIs; not used for enforcement +``` + +> **Note**: Do **not** copy PoE claims into OpTok; OpTok ≠ entitlement. Only **Signer** checks PoE. + +### 2.2 Refresh tokens (optional) + +* Default **disabled**. If enabled (for UI interactive logins), pair with **DPoP‑bound** refresh tokens or **mTLS** client sessions; short TTL (≤ 8 h), rotating on use (replay‑safe). + +### 2.3 ID tokens (optional) + +* Issued for UI/browser OIDC flows (Authorization Code + PKCE); not used for service auth. + +--- + +## 3) Endpoints & flows + +### 3.1 OIDC discovery & keys + +* `GET /.well-known/openid-configuration` → endpoints, algs, jwks_uri +* `GET /jwks` → JSON Web Key Set (rotating, at least 2 active keys during transition) + +### 3.2 Token issuance + +* `POST /token` + +> Legacy aliases under `/oauth/token` are deprecated as of 1 November 2025 and now emit `Deprecation/Sunset/Warning` headers. See [`docs/api/authority-legacy-auth-endpoints.md`](../../api/authority-legacy-auth-endpoints.md) for timelines and migration guidance. + + * **Client Credentials** (service→service): + * **mTLS**: mutual TLS + `client_id` → bound token (`cnf.x5t#S256`) * `security.senderConstraints.mtls.enforceForAudiences` forces the mTLS path when requested `aud`/`resource` values intersect high-value audiences (defaults include `signer`). Authority rejects clients attempting to use DPoP/basic secrets for these audiences. * Stored `certificateBindings` are authoritative: thumbprint, subject, issuer, serial number, and SAN values are matched against the presented certificate, with rotation grace applied to activation windows. 
Failures surface deterministic error codes (e.g. `certificate_binding_subject_mismatch`). * **private_key_jwt**: JWT‑based client auth + **DPoP** header (preferred for tools and CLI) - * **Device Code** (CLI): `POST /oauth/device/code` + `POST /oauth/token` poll - * **Authorization Code + PKCE** (UI): standard - -**DPoP handshake (example)** - -1. Client prepares **JWK** (ephemeral keypair). -2. Client sends **DPoP proof** header with fields: - - ``` - htm=POST - htu=https://authority.../oauth/token - iat= - jti= - ``` - - signed with the DPoP private key; header carries JWK. -3. Authority validates proof; issues access token with `cnf.jkt=`. -4. Client uses the same DPoP key to sign **every subsequent API request** to services (Signer, Scanner, …). - -**mTLS flow** - -* Mutual TLS at the connection; Authority extracts client cert, validates chain; token carries `cnf.x5t#S256`. - -### 3.3 Introspection & revocation (optional) - -* `POST /oauth/introspect` → `{ active, sub, scope, aud, exp, cnf, ... }` -* `POST /oauth/revoke` → revokes refresh tokens or opaque access tokens. -* **Replay prevention**: maintain **DPoP `jti` cache** (TTL ≤ 10 min) to reject duplicate proofs when services supply DPoP nonces (Signer requires nonce for high‑value operations). - -### 3.4 UserInfo (optional for UI) - -* `GET /userinfo` (ID token context). - ---- - -## 4) Audiences, scopes & RBAC - -### 4.1 Audiences - -* `signer` — only the **Signer** service should accept tokens with `aud=signer`. -* `attestor`, `scanner`, `concelier`, `excititor`, `ui`, `zastava` similarly. - -Services **must** verify `aud` and **sender constraint** (DPoP/mTLS) per their policy. 
- -### 4.2 Core scopes - -| Scope | Service | Operation | -| ---------------------------------- | ------------------ | -------------------------- | -| `signer.sign` | Signer | Request DSSE signing | -| `attestor.write` | Attestor | Submit Rekor entries | -| `scanner.scan` | Scanner.WebService | Submit scan jobs | -| `scanner.export` | Scanner.WebService | Export SBOMs | -| `scanner.read` | Scanner.WebService | Read catalog/SBOMs | -| `vex.read` / `vex.admin` | Excititor | Query/operate | -| `concelier.read` / `concelier.export` | Concelier | Query/exports | -| `ui.read` / `ui.admin` | UI | View/admin | -| `zastava.emit` / `zastava.enforce` | Scanner/Zastava | Runtime events / admission | - -**Roles → scopes mapping** is configured centrally (Authority policy) and pushed during token issuance. - ---- - -## 5) Storage & state - -* **Configuration DB** (PostgreSQL/MySQL): clients, audiences, role→scope maps, tenant/installation registry, device code grants, persistent consents (if any). -* **Cache** (Redis): - - * DPoP **jti** replay cache (short TTL) - * **Nonce** store (per resource server, if they demand nonce) - * Device code pollers, rate limiting buckets -* **JWKS**: key material in HSM/KMS or encrypted at rest; JWKS served from memory. - ---- - -## 6) Key management & rotation - -* Maintain **at least 2 signing keys** active during rotation; tokens carry `kid`. -* Prefer **Ed25519** for compact tokens; maintain **ES256** fallback for FIPS contexts. -* Rotation cadence: 30–90 days; emergency rotation supported. -* Publish new JWKS **before** issuing tokens with the new `kid` to avoid cold‑start validation misses. -* Keep **old keys** available **at least** for max token TTL + 5 minutes. - ---- - -## 7) HA & performance - -* **Stateless issuance** (except device codes/refresh) → scale horizontally behind a load‑balancer. -* **DB** only for client metadata and optional flows; token checks are JWT‑local; introspection endpoints hit cache/DB minimally. 
-* **Targets**: - - * Token issuance P95 ≤ **20 ms** under warm cache. - * DPoP proof validation ≤ **1 ms** extra per request at resource servers (Signer/Scanner). - * 99.9% uptime; HPA on CPU/latency. - ---- - -## 8) Security posture - -* **Strict TLS** (1.3 preferred); HSTS; modern cipher suites. -* **mTLS** enabled where required (Signer/Attestor paths). -* **Replay protection**: DPoP `jti` cache, nonce support for **Signer** (add `DPoP-Nonce` header on 401; clients re‑sign). -* **Rate limits** per client & per IP; exponential backoff on failures. -* **Secrets**: clients use **private_key_jwt** or **mTLS**; never basic secrets over the wire. -* **CSP/CSRF** hardening on UI flows; `SameSite=Lax` cookies; PKCE enforced. -* **Logs** redact `Authorization` and DPoP proofs; store `sub`, `aud`, `scopes`, `inst`, `tid`, `cnf` thumbprints, not full keys. - ---- - -## 9) Multi‑tenancy & installations - -* **Tenant (`tid`)** and **Installation (`inst`)** registries define which audiences/scopes a client can request. -* Cross‑tenant isolation enforced at issuance (disallow rogue `aud`), and resource servers **must** check that `tid` matches their configured tenant. - ---- - -## 10) Admin & operations APIs - -All under `/admin` (mTLS + `authority.admin` scope). - -``` -POST /admin/clients # create/update client (confidential/public) -POST /admin/audiences # register audience resource URIs -POST /admin/roles # define role→scope mappings -POST /admin/tenants # create tenant/install entries -POST /admin/keys/rotate # rotate signing key (zero-downtime) -GET /admin/metrics # Prometheus exposition (token issue rates, errors) -GET /admin/healthz|readyz # health/readiness -``` - -Declared client `audiences` flow through to the issued JWT `aud` claim and the token request's `resource` indicators. 
Authority relies on this metadata to enforce DPoP nonce challenges for `signer`, `attestor`, and other high-value services without requiring clients to repeat the audience parameter on every request. - ---- - -## 11) Integration hard lines (what resource servers must enforce) - -Every Stella Ops service that consumes Authority tokens **must**: - -1. Verify JWT signature (`kid` in JWKS), `iss`, `aud`, `exp`, `nbf`. -2. Enforce **sender‑constraint**: - - * **DPoP**: validate DPoP proof (`htu`, `htm`, `iat`, `jti`) and match `cnf.jkt`; cache `jti` for replay defense; honor nonce challenges. - * **mTLS**: match presented client cert thumbprint to token `cnf.x5t#S256`. -3. Check **scopes**; optionally map to internal roles. -4. Check **tenant** (`tid`) and **installation** (`inst`) as appropriate. -5. For **Signer** only: require **both** OpTok and **PoE** in the request (enforced by Signer, not Authority). - ---- - -## 12) Error surfaces & UX - -* Token endpoint errors follow OAuth2 (`invalid_client`, `invalid_grant`, `invalid_scope`, `unauthorized_client`). -* Resource servers use RFC 6750 style (`WWW-Authenticate: DPoP error="invalid_token", error_description="…", dpop_nonce="…" `). -* For DPoP nonce challenges, clients retry with the server‑supplied nonce once. - ---- - -## 13) Observability & audit - -* **Metrics**: - - * `authority.tokens_issued_total{grant,aud}` - * `authority.dpop_validations_total{result}` - * `authority.mtls_bindings_total{result}` - * `authority.jwks_rotations_total` - * `authority.errors_total{type}` -* **Audit log** (immutable sink): token issuance (`sub`, `aud`, `scopes`, `tid`, `inst`, `cnf thumbprint`, `jti`), revocations, admin changes. -* **Tracing**: token flows, DB reads, JWKS cache. 
- ---- - -## 14) Configuration (YAML) - -```yaml -authority: - issuer: "https://authority.internal" - signing: - enabled: true - activeKeyId: "authority-signing-2025" - keyPath: "../certificates/authority-signing-2025.pem" - algorithm: "ES256" - keySource: "file" - security: - rateLimiting: - token: - enabled: true - permitLimit: 30 - window: "00:01:00" - queueLimit: 0 - authorize: - enabled: true - permitLimit: 60 - window: "00:01:00" - queueLimit: 10 - internal: - enabled: false - permitLimit: 5 - window: "00:01:00" - queueLimit: 0 - senderConstraints: - dpop: - enabled: true - allowedAlgorithms: [ "ES256", "ES384" ] - proofLifetime: "00:02:00" - allowedClockSkew: "00:00:30" - replayWindow: "00:05:00" - nonce: - enabled: true - ttl: "00:10:00" - maxIssuancePerMinute: 120 - store: "redis" - redisConnectionString: "redis://authority-redis:6379?ssl=false" - requiredAudiences: - - "signer" - - "attestor" - mtls: - enabled: true - requireChainValidation: true - rotationGrace: "00:15:00" - enforceForAudiences: - - "signer" - allowedSanTypes: - - "dns" - - "uri" - allowedCertificateAuthorities: - - "/etc/ssl/mtls/clients-ca.pem" - clients: - - clientId: scanner-web - grantTypes: [ "client_credentials" ] - audiences: [ "scanner" ] - auth: { type: "private_key_jwt", jwkFile: "/secrets/scanner-web.jwk" } - senderConstraint: "dpop" - scopes: [ "scanner.scan", "scanner.export", "scanner.read" ] - - clientId: signer - grantTypes: [ "client_credentials" ] - audiences: [ "signer" ] - auth: { type: "mtls" } - senderConstraint: "mtls" - scopes: [ "signer.sign" ] - - clientId: notify-web-dev - grantTypes: [ "client_credentials" ] - audiences: [ "notify.dev" ] - auth: { type: "client_secret", secretFile: "/secrets/notify-web-dev.secret" } - senderConstraint: "dpop" - scopes: [ "notify.read", "notify.admin" ] - - clientId: notify-web - grantTypes: [ "client_credentials" ] - audiences: [ "notify" ] - auth: { type: "client_secret", secretFile: "/secrets/notify-web.secret" } - 
senderConstraint: "dpop" - scopes: [ "notify.read", "notify.admin" ] -``` - ---- - -## 15) Testing matrix - -* **JWT validation**: wrong `aud`, expired `exp`, skewed `nbf`, stale `kid`. -* **DPoP**: invalid `htu`/`htm`, replayed `jti`, stale `iat`, wrong `jkt`, nonce dance. -* **mTLS**: wrong client cert, wrong CA, thumbprint mismatch. -* **RBAC**: scope enforcement per audience; over‑privileged client denied. -* **Rotation**: JWKS rotation while load‑testing; zero‑downtime verification. -* **HA**: kill one Authority instance; verify issuance continues; JWKS served by peers. -* **Performance**: 1k token issuance/sec on 2 cores with Redis enabled for jti caching. - ---- - -## 16) Threat model & mitigations (summary) - -| Threat | Vector | Mitigation | -| ------------------- | ---------------- | ------------------------------------------------------------------------------------------ | -| Token theft | Copy of JWT | **Short TTL**, **sender‑constraint** (DPoP/mTLS); replay blocked by `jti` cache and nonces | -| Replay across hosts | Reuse DPoP proof | Enforce `htu`/`htm`, `iat` freshness, `jti` uniqueness; services may require **nonce** | -| Impersonation | Fake client | mTLS or `private_key_jwt` with pinned JWK; client registration & rotation | -| Key compromise | Signing key leak | HSM/KMS storage, key rotation, audit; emergency key revoke path; narrow token TTL | -| Cross‑tenant abuse | Scope elevation | Enforce `aud`, `tid`, `inst` at issuance and resource servers | -| Downgrade to bearer | Strip DPoP | Resource servers require DPoP/mTLS based on `aud`; reject bearer without `cnf` | - ---- - -## 17) Deployment & HA - -* **Stateless** microservice, containerized; run ≥ 2 replicas behind LB. -* **DB**: HA Postgres (or MySQL) for clients/roles; **Redis** for device codes, DPoP nonces/jtis. -* **Secrets**: mount client JWKs via K8s Secrets/HashiCorp Vault; signing keys via KMS. -* **Backups**: DB daily; Redis not critical (ephemeral). 
-* **Disaster recovery**: export/import of client registry; JWKS rehydrate from KMS. -* **Compliance**: TLS audit; penetration testing for OIDC flows. - ---- - -## 18) Implementation notes - -* Reference stack: **.NET 10** + **OpenIddict 6** (or IdentityServer if licensed) with custom DPoP validator and mTLS binding middleware. -* Keep the DPoP/JTI cache pluggable; allow Redis/Memcached. -* Provide **client SDKs** for C# and Go: DPoP key mgmt, proof generation, nonce handling, token refresh helper. - ---- - -## 19) Quick reference — wire examples - -**Access token (payload excerpt)** - -```json -{ - "iss": "https://authority.internal", - "sub": "scanner-web", - "aud": "signer", - "exp": 1760668800, - "iat": 1760668620, - "nbf": 1760668620, - "jti": "9d9c3f01-6e1a-49f1-8f77-9b7e6f7e3c50", - "scope": "signer.sign", - "tid": "tenant-01", - "inst": "install-7A2B", - "cnf": { "jkt": "KcVb2V...base64url..." } -} -``` - -**DPoP proof header fields (for POST /sign/dsse)** - -```json -{ - "htu": "https://signer.internal/sign/dsse", - "htm": "POST", - "iat": 1760668620, - "jti": "4b1c9b3c-8a95-4c58-8a92-9c6cfb4a6a0b" -} -``` - -Signer validates that `hash(JWK)` in the proof matches `cnf.jkt` in the token. - ---- - -## 20) Rollout plan - -1. **MVP**: Client Credentials (private_key_jwt + DPoP), JWKS, short OpToks, per‑audience scopes. -2. **Add**: mTLS‑bound tokens for Signer/Attestor; device code for CLI; optional introspection. -3. **Hardening**: DPoP nonce support; full audit pipeline; HA tuning. -4. **UX**: Tenant/installation admin UI; role→scope editors; client bootstrap wizards. + * **Device Code** (CLI): `POST /oauth/device/code` + `POST /oauth/token` poll + * **Authorization Code + PKCE** (UI): standard + +**DPoP handshake (example)** + +1. Client prepares **JWK** (ephemeral keypair). +2. Client sends **DPoP proof** header with fields: + + ``` + htm=POST + htu=https://authority.../token + iat= + jti= + ``` + + signed with the DPoP private key; header carries JWK. +3. 
Authority validates proof; issues access token with `cnf.jkt=`. +4. Client uses the same DPoP key to sign **every subsequent API request** to services (Signer, Scanner, …). + +**mTLS flow** + +* Mutual TLS at the connection; Authority extracts client cert, validates chain; token carries `cnf.x5t#S256`. + +### 3.3 Introspection & revocation (optional) + +* `POST /introspect` → `{ active, sub, scope, aud, exp, cnf, ... }` +* `POST /revoke` → revokes refresh tokens or opaque access tokens. + +> Requests targeting the legacy `/oauth/{introspect|revoke}` paths receive deprecation headers and are scheduled for removal after 1 May 2026. +* **Replay prevention**: maintain **DPoP `jti` cache** (TTL ≤ 10 min) to reject duplicate proofs when services supply DPoP nonces (Signer requires nonce for high‑value operations). + +### 3.4 UserInfo (optional for UI) + +* `GET /userinfo` (ID token context). + +--- + +## 4) Audiences, scopes & RBAC + +### 4.1 Audiences + +* `signer` — only the **Signer** service should accept tokens with `aud=signer`. +* `attestor`, `scanner`, `concelier`, `excititor`, `ui`, `zastava` similarly. + +Services **must** verify `aud` and **sender constraint** (DPoP/mTLS) per their policy. 
+ +### 4.2 Core scopes + +| Scope | Service | Operation | +| ---------------------------------- | ------------------ | -------------------------- | +| `signer.sign` | Signer | Request DSSE signing | +| `attestor.write` | Attestor | Submit Rekor entries | +| `scanner.scan` | Scanner.WebService | Submit scan jobs | +| `scanner.export` | Scanner.WebService | Export SBOMs | +| `scanner.read` | Scanner.WebService | Read catalog/SBOMs | +| `vex.read` / `vex.admin` | Excititor | Query/operate | +| `concelier.read` / `concelier.export` | Concelier | Query/exports | +| `ui.read` / `ui.admin` | UI | View/admin | +| `zastava.emit` / `zastava.enforce` | Scanner/Zastava | Runtime events / admission | + +**Roles → scopes mapping** is configured centrally (Authority policy) and pushed during token issuance. + +--- + +## 5) Storage & state + +* **Configuration DB** (PostgreSQL/MySQL): clients, audiences, role→scope maps, tenant/installation registry, device code grants, persistent consents (if any). +* **Cache** (Redis): + + * DPoP **jti** replay cache (short TTL) + * **Nonce** store (per resource server, if they demand nonce) + * Device code pollers, rate limiting buckets +* **JWKS**: key material in HSM/KMS or encrypted at rest; JWKS served from memory. + +--- + +## 6) Key management & rotation + +* Maintain **at least 2 signing keys** active during rotation; tokens carry `kid`. +* Prefer **Ed25519** for compact tokens; maintain **ES256** fallback for FIPS contexts. +* Rotation cadence: 30–90 days; emergency rotation supported. +* Publish new JWKS **before** issuing tokens with the new `kid` to avoid cold‑start validation misses. +* Keep **old keys** available **at least** for max token TTL + 5 minutes. + +--- + +## 7) HA & performance + +* **Stateless issuance** (except device codes/refresh) → scale horizontally behind a load‑balancer. +* **DB** only for client metadata and optional flows; token checks are JWT‑local; introspection endpoints hit cache/DB minimally. 
+* **Targets**: + + * Token issuance P95 ≤ **20 ms** under warm cache. + * DPoP proof validation ≤ **1 ms** extra per request at resource servers (Signer/Scanner). + * 99.9% uptime; HPA on CPU/latency. + +--- + +## 8) Security posture + +* **Strict TLS** (1.3 preferred); HSTS; modern cipher suites. +* **mTLS** enabled where required (Signer/Attestor paths). +* **Replay protection**: DPoP `jti` cache, nonce support for **Signer** (add `DPoP-Nonce` header on 401; clients re‑sign). +* **Rate limits** per client & per IP; exponential backoff on failures. +* **Secrets**: clients use **private_key_jwt** or **mTLS**; never basic secrets over the wire. +* **CSP/CSRF** hardening on UI flows; `SameSite=Lax` cookies; PKCE enforced. +* **Logs** redact `Authorization` and DPoP proofs; store `sub`, `aud`, `scopes`, `inst`, `tid`, `cnf` thumbprints, not full keys. + +--- + +## 9) Multi‑tenancy & installations + +* **Tenant (`tid`)** and **Installation (`inst`)** registries define which audiences/scopes a client can request. +* Cross‑tenant isolation enforced at issuance (disallow rogue `aud`), and resource servers **must** check that `tid` matches their configured tenant. + +--- + +## 10) Admin & operations APIs + +All under `/admin` (mTLS + `authority.admin` scope). + +``` +POST /admin/clients # create/update client (confidential/public) +POST /admin/audiences # register audience resource URIs +POST /admin/roles # define role→scope mappings +POST /admin/tenants # create tenant/install entries +POST /admin/keys/rotate # rotate signing key (zero-downtime) +GET /admin/metrics # Prometheus exposition (token issue rates, errors) +GET /admin/healthz|readyz # health/readiness +``` + +Declared client `audiences` flow through to the issued JWT `aud` claim and the token request's `resource` indicators. 
Authority relies on this metadata to enforce DPoP nonce challenges for `signer`, `attestor`, and other high-value services without requiring clients to repeat the audience parameter on every request. + +--- + +## 11) Integration hard lines (what resource servers must enforce) + +Every Stella Ops service that consumes Authority tokens **must**: + +1. Verify JWT signature (`kid` in JWKS), `iss`, `aud`, `exp`, `nbf`. +2. Enforce **sender‑constraint**: + + * **DPoP**: validate DPoP proof (`htu`, `htm`, `iat`, `jti`) and match `cnf.jkt`; cache `jti` for replay defense; honor nonce challenges. + * **mTLS**: match presented client cert thumbprint to token `cnf.x5t#S256`. +3. Check **scopes**; optionally map to internal roles. +4. Check **tenant** (`tid`) and **installation** (`inst`) as appropriate. +5. For **Signer** only: require **both** OpTok and **PoE** in the request (enforced by Signer, not Authority). + +--- + +## 12) Error surfaces & UX + +* Token endpoint errors follow OAuth2 (`invalid_client`, `invalid_grant`, `invalid_scope`, `unauthorized_client`). +* Resource servers use RFC 6750 style (`WWW-Authenticate: DPoP error="invalid_token", error_description="…", dpop_nonce="…" `). +* For DPoP nonce challenges, clients retry with the server‑supplied nonce once. + +--- + +## 13) Observability & audit + +* **Metrics**: + + * `authority.tokens_issued_total{grant,aud}` + * `authority.dpop_validations_total{result}` + * `authority.mtls_bindings_total{result}` + * `authority.jwks_rotations_total` + * `authority.errors_total{type}` +* **Audit log** (immutable sink): token issuance (`sub`, `aud`, `scopes`, `tid`, `inst`, `cnf thumbprint`, `jti`), revocations, admin changes. +* **Tracing**: token flows, DB reads, JWKS cache. 
+ +--- + +## 14) Configuration (YAML) + +```yaml +authority: + issuer: "https://authority.internal" + signing: + enabled: true + activeKeyId: "authority-signing-2025" + keyPath: "../certificates/authority-signing-2025.pem" + algorithm: "ES256" + keySource: "file" + security: + rateLimiting: + token: + enabled: true + permitLimit: 30 + window: "00:01:00" + queueLimit: 0 + authorize: + enabled: true + permitLimit: 60 + window: "00:01:00" + queueLimit: 10 + internal: + enabled: false + permitLimit: 5 + window: "00:01:00" + queueLimit: 0 + senderConstraints: + dpop: + enabled: true + allowedAlgorithms: [ "ES256", "ES384" ] + proofLifetime: "00:02:00" + allowedClockSkew: "00:00:30" + replayWindow: "00:05:00" + nonce: + enabled: true + ttl: "00:10:00" + maxIssuancePerMinute: 120 + store: "redis" + redisConnectionString: "redis://authority-redis:6379?ssl=false" + requiredAudiences: + - "signer" + - "attestor" + mtls: + enabled: true + requireChainValidation: true + rotationGrace: "00:15:00" + enforceForAudiences: + - "signer" + allowedSanTypes: + - "dns" + - "uri" + allowedCertificateAuthorities: + - "/etc/ssl/mtls/clients-ca.pem" + clients: + - clientId: scanner-web + grantTypes: [ "client_credentials" ] + audiences: [ "scanner" ] + auth: { type: "private_key_jwt", jwkFile: "/secrets/scanner-web.jwk" } + senderConstraint: "dpop" + scopes: [ "scanner.scan", "scanner.export", "scanner.read" ] + - clientId: signer + grantTypes: [ "client_credentials" ] + audiences: [ "signer" ] + auth: { type: "mtls" } + senderConstraint: "mtls" + scopes: [ "signer.sign" ] + - clientId: notify-web-dev + grantTypes: [ "client_credentials" ] + audiences: [ "notify.dev" ] + auth: { type: "client_secret", secretFile: "/secrets/notify-web-dev.secret" } + senderConstraint: "dpop" + scopes: [ "notify.viewer", "notify.operator", "notify.admin" ] + - clientId: notify-web + grantTypes: [ "client_credentials" ] + audiences: [ "notify" ] + auth: { type: "client_secret", secretFile: 
"/secrets/notify-web.secret" } + senderConstraint: "dpop" + scopes: [ "notify.viewer", "notify.operator" ] +``` + +--- + +## 15) Testing matrix + +* **JWT validation**: wrong `aud`, expired `exp`, skewed `nbf`, stale `kid`. +* **DPoP**: invalid `htu`/`htm`, replayed `jti`, stale `iat`, wrong `jkt`, nonce dance. +* **mTLS**: wrong client cert, wrong CA, thumbprint mismatch. +* **RBAC**: scope enforcement per audience; over‑privileged client denied. +* **Rotation**: JWKS rotation while load‑testing; zero‑downtime verification. +* **HA**: kill one Authority instance; verify issuance continues; JWKS served by peers. +* **Performance**: 1k token issuance/sec on 2 cores with Redis enabled for jti caching. + +--- + +## 16) Threat model & mitigations (summary) + +| Threat | Vector | Mitigation | +| ------------------- | ---------------- | ------------------------------------------------------------------------------------------ | +| Token theft | Copy of JWT | **Short TTL**, **sender‑constraint** (DPoP/mTLS); replay blocked by `jti` cache and nonces | +| Replay across hosts | Reuse DPoP proof | Enforce `htu`/`htm`, `iat` freshness, `jti` uniqueness; services may require **nonce** | +| Impersonation | Fake client | mTLS or `private_key_jwt` with pinned JWK; client registration & rotation | +| Key compromise | Signing key leak | HSM/KMS storage, key rotation, audit; emergency key revoke path; narrow token TTL | +| Cross‑tenant abuse | Scope elevation | Enforce `aud`, `tid`, `inst` at issuance and resource servers | +| Downgrade to bearer | Strip DPoP | Resource servers require DPoP/mTLS based on `aud`; reject bearer without `cnf` | + +--- + +## 17) Deployment & HA + +* **Stateless** microservice, containerized; run ≥ 2 replicas behind LB. +* **DB**: HA Postgres (or MySQL) for clients/roles; **Redis** for device codes, DPoP nonces/jtis. +* **Secrets**: mount client JWKs via K8s Secrets/HashiCorp Vault; signing keys via KMS. 
+* **Backups**: DB daily; Redis not critical (ephemeral). +* **Disaster recovery**: export/import of client registry; JWKS rehydrate from KMS. +* **Compliance**: TLS audit; penetration testing for OIDC flows. + +--- + +## 18) Implementation notes + +* Reference stack: **.NET 10** + **OpenIddict 6** (or IdentityServer if licensed) with custom DPoP validator and mTLS binding middleware. +* Keep the DPoP/JTI cache pluggable; allow Redis/Memcached. +* Provide **client SDKs** for C# and Go: DPoP key mgmt, proof generation, nonce handling, token refresh helper. + +--- + +## 19) Quick reference — wire examples + +**Access token (payload excerpt)** + +```json +{ + "iss": "https://authority.internal", + "sub": "scanner-web", + "aud": "signer", + "exp": 1760668800, + "iat": 1760668620, + "nbf": 1760668620, + "jti": "9d9c3f01-6e1a-49f1-8f77-9b7e6f7e3c50", + "scope": "signer.sign", + "tid": "tenant-01", + "inst": "install-7A2B", + "cnf": { "jkt": "KcVb2V...base64url..." } +} +``` + +**DPoP proof header fields (for POST /sign/dsse)** + +```json +{ + "htu": "https://signer.internal/sign/dsse", + "htm": "POST", + "iat": 1760668620, + "jti": "4b1c9b3c-8a95-4c58-8a92-9c6cfb4a6a0b" +} +``` + +Signer validates that `hash(JWK)` in the proof matches `cnf.jkt` in the token. + +--- + +## 20) Rollout plan + +1. **MVP**: Client Credentials (private_key_jwt + DPoP), JWKS, short OpToks, per‑audience scopes. +2. **Add**: mTLS‑bound tokens for Signer/Attestor; device code for CLI; optional introspection. +3. **Hardening**: DPoP nonce support; full audit pipeline; HA tuning. +4. **UX**: Tenant/installation admin UI; role→scope editors; client bootstrap wizards. 
diff --git a/docs/modules/cli/architecture.md b/docs/modules/cli/architecture.md index 0902fc32..f82c3e09 100644 --- a/docs/modules/cli/architecture.md +++ b/docs/modules/cli/architecture.md @@ -145,14 +145,14 @@ Both subcommands honour offline-first expectations (no network access) and norma ### 3.3 Multi‑audience & scopes -* CLI requests **audiences** as needed per verb: - - * `scanner` for scan/export/report/diff - * `signer` (indirect; usually backend calls Signer) - * `attestor` for verify - * `concelier`/`excititor` for admin verbs - -CLI rejects verbs if required scopes are missing. +* CLI requests **audiences** as needed per verb: + + * `scanner` for scan/export/report/diff + * `signer` (indirect; usually backend calls Signer) + * `attestor` for verify (requires `attestor.verify` scope; read-only verbs fall back to `attestor.read`) + * `concelier`/`excititor` for admin verbs + +CLI rejects verbs if required scopes are missing. --- diff --git a/docs/modules/cli/guides/cli-reference.md b/docs/modules/cli/guides/cli-reference.md index 769ca2ca..05a831b2 100644 --- a/docs/modules/cli/guides/cli-reference.md +++ b/docs/modules/cli/guides/cli-reference.md @@ -1,316 +1,318 @@ -# CLI AOC Commands Reference - -> **Audience:** DevEx engineers, operators, and CI authors integrating the `stella` CLI with Aggregation-Only Contract (AOC) workflows. -> **Scope:** Command synopsis, options, exit codes, and offline considerations for `stella sources ingest --dry-run` and `stella aoc verify` as introduced in Sprint 19. - +# CLI AOC Commands Reference + +> **Audience:** DevEx engineers, operators, and CI authors integrating the `stella` CLI with Aggregation-Only Contract (AOC) workflows. +> **Scope:** Command synopsis, options, exit codes, and offline considerations for `stella sources ingest --dry-run` and `stella aoc verify` as introduced in Sprint 19. 
+ Both commands are designed to enforce the AOC guardrails documented in the [aggregation-only reference](../../../ingestion/aggregation-only-contract.md) and the [architecture overview](../architecture.md). They consume Authority-issued tokens with tenant scopes and never mutate ingestion stores. - ---- - -## 1 · Prerequisites - -- CLI version: `stella` ≥ 0.19.0 (AOC feature gate enabled). -- Required scopes (DPoP-bound): - - `advisory:read` for Concelier sources. - - `vex:read` for Excititor sources (optional but required for VEX checks). - - `aoc:verify` to invoke guard verification endpoints. - - `tenant:select` if your deployment uses tenant switching. -- Connectivity: direct access to Concelier/Excititor APIs or Offline Kit snapshot (see § 4). -- Environment: set `STELLA_AUTHORITY_URL`, `STELLA_TENANT`, and export a valid OpTok via `stella auth login` or existing token cache. - ---- - -## 2 · `stella sources ingest --dry-run` - -### 2.1 Synopsis - -```bash -stella sources ingest --dry-run \ - --source \ - --input \ - [--tenant ] \ - [--format json|table] \ - [--no-color] \ - [--output ] -``` - -### 2.2 Description - -Previews an ingestion write without touching MongoDB. The command loads an upstream advisory or VEX document, computes the would-write payload, runs it through the `AOCWriteGuard`, and reports any forbidden fields, provenance gaps, or idempotency issues. Use it during connector development, CI validation, or while triaging incidents. - -### 2.3 Options - -| Option | Description | -|--------|-------------| -| `--source ` | Logical source name (`redhat`, `ubuntu`, `osv`, etc.). Mirrors connector configuration. | -| `--input ` | Path to local CSAF/OSV/VEX file or HTTPS URI. CLI normalises transport (gzip/base64) before guard evaluation. | -| `--tenant ` | Overrides default tenant for multi-tenant deployments. Mandatory when `STELLA_TENANT` is not set. | -| `--format json|table` | Output format. 
`table` (default) prints summary with highlighted violations; `json` emits machine-readable report (see below). | -| `--no-color` | Disables ANSI colour output for CI logs. | -| `--output ` | Writes the JSON report to file while still printing human-readable summary to stdout. | - -### 2.4 Output schema (JSON) - -```json -{ - "source": "redhat", - "tenant": "default", - "guardVersion": "1.0.0", - "status": "ok", - "document": { - "contentHash": "sha256:…", - "supersedes": null, - "provenance": { - "signature": { "format": "pgp", "present": true } - } - }, - "violations": [] -} -``` - -When violations exist, `status` becomes `error` and `violations` contains entries with `code` (`ERR_AOC_00x`), a short `message`, and JSON Pointer `path` values indicating offending fields. - -### 2.5 Exit codes - -| Exit code | Meaning | -|-----------|---------| -| `0` | Guard passed; would-write payload is AOC compliant. | -| `11` | `ERR_AOC_001` – Forbidden field (`severity`, `cvss`, etc.) detected. | -| `12` | `ERR_AOC_002` – Merge attempt (multiple upstream sources fused). | -| `13` | `ERR_AOC_003` – Idempotency violation (duplicate without supersedes). | -| `14` | `ERR_AOC_004` – Missing provenance fields. | -| `15` | `ERR_AOC_005` – Signature/checksum mismatch. | -| `16` | `ERR_AOC_006` – Effective findings present (Policy-only data). | -| `17` | `ERR_AOC_007` – Unknown top-level fields / schema violation. | -| `70` | Transport error (network, auth, malformed input). | - -> Exit codes map directly to the `ERR_AOC_00x` table for scripting consistency. Multiple violations yield the highest-priority code (e.g., 11 takes precedence over 14). 
- -### 2.6 Examples - -Dry-run a local CSAF file: - -```bash -stella sources ingest --dry-run \ - --source redhat \ - --input ./fixtures/redhat/RHSA-2025-1234.json -``` - -Stream from HTTPS and emit JSON for CI: - -```bash -stella sources ingest --dry-run \ - --source osv \ - --input https://osv.dev/vulnerability/GHSA-aaaa-bbbb \ - --format json \ - --output artifacts/osv-dry-run.json - -cat artifacts/osv-dry-run.json | jq '.violations' -``` - -### 2.7 Offline notes - -When operating in sealed/offline mode: - -- Use `--input` paths pointing to Offline Kit snapshots (`offline-kit/advisories/*.json`). -- Provide `--tenant` explicitly if the offline bundle contains multiple tenants. -- The command does not attempt network access when given a file path. -- Store reports with `--output` to include in transfer packages for policy review. - ---- - -## 3 · `stella aoc verify` - -### 3.1 Synopsis - -```bash -stella aoc verify \ - [--since ] \ - [--limit ] \ - [--sources ] \ - [--codes ] \ - [--format table|json] \ - [--export ] \ - [--tenant ] \ - [--no-color] -``` - -### 3.2 Description - -Replays the AOC guard against stored raw documents. By default it checks all advisories and VEX statements ingested in the last 24 hours for the active tenant, reporting totals, top violation codes, and sample documents. Use it in CI pipelines, scheduled verifications, or during incident response. - -### 3.3 Options - -| Option | Description | -|--------|-------------| -| `--since ` | Verification window. Accepts ISO 8601 timestamp (`2025-10-25T12:00:00Z`) or duration (`48h`, `7d`). Defaults to `24h`. | -| `--limit ` | Maximum number of violations to display (per code). `0` means show all. Defaults to `20`. | -| `--sources ` | Comma-separated list of sources (`redhat,ubuntu,osv`). Filters both advisories and VEX entries. | -| `--codes ` | Restricts output to specific `ERR_AOC_00x` codes. Useful for regression tracking. 
| -| `--format table|json` | `table` (default) prints summary plus top violations; `json` outputs machine-readable report identical to the `/aoc/verify` API. | -| `--export ` | Writes the JSON report to disk (useful for audits/offline uploads). | -| `--tenant ` | Overrides tenant context. Required for cross-tenant verifications when run by platform operators. | -| `--no-color` | Disables ANSI colours. | - -`table` mode prints a summary showing the active tenant, evaluated window, counts of checked advisories/VEX statements, the active limit, total writes/violations, and whether the page was truncated. Status is colour-coded as `ok`, `violations`, or `truncated`. When violations exist the detail table lists the code, total occurrences, first sample document (`source` + `documentId` + `contentHash`), and JSON pointer path. - -### 3.4 Report structure (JSON) - -```json -{ - "tenant": "default", - "window": { - "from": "2025-10-25T12:00:00Z", - "to": "2025-10-26T12:00:00Z" - }, - "checked": { - "advisories": 482, - "vex": 75 - }, - "violations": [ - { - "code": "ERR_AOC_001", - "count": 2, - "examples": [ - { - "source": "redhat", - "documentId": "advisory_raw:redhat:RHSA-2025:1", - "contentHash": "sha256:…", - "path": "/content/raw/cvss" - } - ] - } - ], - "metrics": { - "ingestion_write_total": 557, - "aoc_violation_total": 2 - }, - "truncated": false -} -``` - -### 3.5 Exit codes - -| Exit code | Meaning | -|-----------|---------| -| `0` | Verification succeeded with zero violations. | -| `11…17` | Same mapping as § 2.5 when violations are detected. Highest-priority code returned. | -| `18` | Verification ran but results truncated (limit reached) – treat as warning; rerun with higher `--limit`. | -| `70` | Transport/authentication error. | -| `71` | CLI misconfiguration (missing tenant, invalid `--since`, etc.). 
| - -### 3.6 Examples - -Daily verification across all sources: - -```bash -stella aoc verify --since 24h --format table -``` - -CI pipeline focusing on errant sources and exporting evidence: - -```bash -stella aoc verify \ - --sources redhat,ubuntu \ - --codes ERR_AOC_001,ERR_AOC_004 \ - --format json \ - --limit 100 \ - --export artifacts/aoc-verify.json - -jq '.violations[] | {code, count}' artifacts/aoc-verify.json -``` - -Air-gapped verification using Offline Kit snapshot (example script): - -```bash -stella aoc verify \ - --since 7d \ - --format json \ - --export /mnt/offline/aoc-verify-$(date +%F).json - -sha256sum /mnt/offline/aoc-verify-*.json > /mnt/offline/checksums.txt -``` - -### 3.7 Automation tips - -- Schedule with `cron` or platform scheduler and fail the job when exit code ≥ 11. -- Pair with `stella sources ingest --dry-run` for pre-flight validation before re-enabling a paused source. -- Push JSON exports to observability pipelines for historical tracking of violation counts. - -### 3.8 Offline notes - -- Works against Offline Kit Mongo snapshots when CLI is pointed at the local API gateway included in the bundle. -- When fully disconnected, run against exported `aoc verify` reports generated on production and replay them using `--format json --export` (automation recipe above). -- Include verification output in compliance packages alongside Offline Kit manifests. - ---- - -## 4 · Global exit-code reference - -| Code | Summary | -|------|---------| -| `0` | Success / no violations. | -| `11` | `ERR_AOC_001` – Forbidden field present. | -| `12` | `ERR_AOC_002` – Merge attempt detected. | -| `13` | `ERR_AOC_003` – Idempotency violation. | -| `14` | `ERR_AOC_004` – Missing provenance/signature metadata. | -| `15` | `ERR_AOC_005` – Signature/checksum mismatch. | -| `16` | `ERR_AOC_006` – Effective findings in ingestion payload. | -| `17` | `ERR_AOC_007` – Schema violation / unknown fields. | -| `18` | Partial verification (limit reached). 
| -| `70` | Transport or HTTP failure. | -| `71` | CLI usage error (invalid arguments, missing tenant). | - -Use these codes in CI to map outcomes to build statuses or alert severities. - ---- - -## 4 · `stella vuln observations` (Overlay paging) - -`stella vuln observations` lists raw advisory observations for downstream overlays (Graph Explorer, Policy simulations, Console). Large tenants can now page through results deterministically. - -| Option | Description | -|--------|-------------| -| `--limit ` | Caps the number of observations returned in a single call. Defaults to `200`; values above `500` are clamped server-side. | -| `--cursor ` | Opaque continuation token produced by the previous page (`nextCursor` in JSON output). Pass it back to resume iteration. | - -Additional notes: - -- Table mode prints a hint when `hasMore` is `true`: - `[yellow]More observations available. Continue with --cursor [/]`. -- JSON mode returns `nextCursor` and `hasMore` alongside the observation list so automation can loop until `hasMore` is `false`. -- Supplying a non-positive limit falls back to the default (`200`). Invalid/expired cursors yield `400 Bad Request`; restart without `--cursor` to begin a fresh iteration. - ---- - -## 5 · Related references - + +--- + +## 1 · Prerequisites + +- CLI version: `stella` ≥ 0.19.0 (AOC feature gate enabled). +- Required scopes (DPoP-bound): + - `advisory:read` for Concelier sources. + - `vex:read` for Excititor sources (optional but required for VEX checks). + - `aoc:verify` to invoke guard verification endpoints. + - `tenant:select` if your deployment uses tenant switching. +- Connectivity: direct access to Concelier/Excititor APIs or Offline Kit snapshot (see § 4). +- Environment: set `STELLA_AUTHORITY_URL`, `STELLA_TENANT`, and export a valid OpTok via `stella auth login` or existing token cache. 
+ +--- + +## 2 · `stella sources ingest --dry-run` + +### 2.1 Synopsis + +```bash +stella sources ingest --dry-run \ + --source \ + --input \ + [--tenant ] \ + [--format json|table] \ + [--no-color] \ + [--output ] +``` + +### 2.2 Description + +Previews an ingestion write without touching MongoDB. The command loads an upstream advisory or VEX document, computes the would-write payload, runs it through the `AOCWriteGuard`, and reports any forbidden fields, provenance gaps, or idempotency issues. Use it during connector development, CI validation, or while triaging incidents. + +### 2.3 Options + +| Option | Description | +|--------|-------------| +| `--source ` | Logical source name (`redhat`, `ubuntu`, `osv`, etc.). Mirrors connector configuration. | +| `--input ` | Path to local CSAF/OSV/VEX file or HTTPS URI. CLI normalises transport (gzip/base64) before guard evaluation. | +| `--tenant ` | Overrides default tenant for multi-tenant deployments. Mandatory when `STELLA_TENANT` is not set. | +| `--format json|table` | Output format. `table` (default) prints summary with highlighted violations; `json` emits machine-readable report (see below). | +| `--no-color` | Disables ANSI colour output for CI logs. | +| `--output ` | Writes the JSON report to file while still printing human-readable summary to stdout. | + +### 2.4 Output schema (JSON) + +```json +{ + "source": "redhat", + "tenant": "default", + "guardVersion": "1.0.0", + "status": "ok", + "document": { + "contentHash": "sha256:…", + "supersedes": null, + "provenance": { + "signature": { "format": "pgp", "present": true } + } + }, + "violations": [] +} +``` + +When violations exist, `status` becomes `error` and `violations` contains entries with `code` (`ERR_AOC_00x`), a short `message`, and JSON Pointer `path` values indicating offending fields. + +### 2.5 Exit codes + +| Exit code | Meaning | +|-----------|---------| +| `0` | Guard passed; would-write payload is AOC compliant. 
| +| `11` | `ERR_AOC_001` – Forbidden field (`severity`, `cvss`, etc.) detected. | +| `12` | `ERR_AOC_002` – Merge attempt (multiple upstream sources fused). | +| `13` | `ERR_AOC_003` – Idempotency violation (duplicate without supersedes). | +| `14` | `ERR_AOC_004` – Missing provenance fields. | +| `15` | `ERR_AOC_005` – Signature/checksum mismatch. | +| `16` | `ERR_AOC_006` – Effective findings present (Policy-only data). | +| `17` | `ERR_AOC_007` – Unknown top-level fields / schema violation. | +| `70` | Transport error (network, auth, malformed input). | + +> Exit codes map directly to the `ERR_AOC_00x` table for scripting consistency. Multiple violations yield the highest-priority code (e.g., 11 takes precedence over 14). + +### 2.6 Examples + +Dry-run a local CSAF file: + +```bash +stella sources ingest --dry-run \ + --source redhat \ + --input ./fixtures/redhat/RHSA-2025-1234.json +``` + +Stream from HTTPS and emit JSON for CI: + +```bash +stella sources ingest --dry-run \ + --source osv \ + --input https://osv.dev/vulnerability/GHSA-aaaa-bbbb \ + --format json \ + --output artifacts/osv-dry-run.json + +cat artifacts/osv-dry-run.json | jq '.violations' +``` + +### 2.7 Offline notes + +When operating in sealed/offline mode: + +- Use `--input` paths pointing to Offline Kit snapshots (`offline-kit/advisories/*.json`). +- Provide `--tenant` explicitly if the offline bundle contains multiple tenants. +- The command does not attempt network access when given a file path. +- Store reports with `--output` to include in transfer packages for policy review. + +--- + +## 3 · `stella aoc verify` + +### 3.1 Synopsis + +```bash +stella aoc verify \ + [--since ] \ + [--limit ] \ + [--sources ] \ + [--codes ] \ + [--format table|json] \ + [--export ] \ + [--tenant ] \ + [--no-color] +``` + +### 3.2 Description + +Replays the AOC guard against stored raw documents. 
By default it checks all advisories and VEX statements ingested in the last 24 hours for the active tenant, reporting totals, top violation codes, and sample documents. Use it in CI pipelines, scheduled verifications, or during incident response. + +### 3.3 Options + +| Option | Description | +|--------|-------------| +| `--since ` | Verification window. Accepts ISO 8601 timestamp (`2025-10-25T12:00:00Z`) or duration (`48h`, `7d`). Defaults to `24h`. | +| `--limit ` | Maximum number of violations to display (per code). `0` means show all. Defaults to `20`. | +| `--sources ` | Comma-separated list of sources (`redhat,ubuntu,osv`). Filters both advisories and VEX entries. | +| `--codes ` | Restricts output to specific `ERR_AOC_00x` codes. Useful for regression tracking. | +| `--format table|json` | `table` (default) prints summary plus top violations; `json` outputs machine-readable report identical to the `/aoc/verify` API. | +| `--export ` | Writes the JSON report to disk (useful for audits/offline uploads). | +| `--tenant ` | Overrides tenant context. Required for cross-tenant verifications when run by platform operators. | +| `--no-color` | Disables ANSI colours. | + +`table` mode prints a summary showing the active tenant, evaluated window, counts of checked advisories/VEX statements, the active limit, total writes/violations, and whether the page was truncated. Status is colour-coded as `ok`, `violations`, or `truncated`. When violations exist the detail table lists the code, total occurrences, first sample document (`source` + `documentId` + `contentHash`), and JSON pointer path. 
+ +### 3.4 Report structure (JSON) + +```json +{ + "tenant": "default", + "window": { + "from": "2025-10-25T12:00:00Z", + "to": "2025-10-26T12:00:00Z" + }, + "checked": { + "advisories": 482, + "vex": 75 + }, + "violations": [ + { + "code": "ERR_AOC_001", + "count": 2, + "examples": [ + { + "source": "redhat", + "documentId": "advisory_raw:redhat:RHSA-2025:1", + "contentHash": "sha256:…", + "path": "/content/raw/cvss" + } + ] + } + ], + "metrics": { + "ingestion_write_total": 557, + "aoc_violation_total": 2 + }, + "truncated": false +} +``` + +### 3.5 Exit codes + +| Exit code | Meaning | +|-----------|---------| +| `0` | Verification succeeded with zero violations. | +| `11…17` | Same mapping as § 2.5 when violations are detected. Highest-priority code returned. | +| `18` | Verification ran but results truncated (limit reached) – treat as warning; rerun with higher `--limit`. | +| `70` | Transport/authentication error. | +| `71` | CLI misconfiguration (missing tenant, invalid `--since`, etc.). | + +### 3.6 Examples + +Daily verification across all sources: + +```bash +stella aoc verify --since 24h --format table +``` + +CI pipeline focusing on errant sources and exporting evidence: + +```bash +stella aoc verify \ + --sources redhat,ubuntu \ + --codes ERR_AOC_001,ERR_AOC_004 \ + --format json \ + --limit 100 \ + --export artifacts/aoc-verify.json + +jq '.violations[] | {code, count}' artifacts/aoc-verify.json +``` + +Air-gapped verification using Offline Kit snapshot (example script): + +```bash +stella aoc verify \ + --since 7d \ + --format json \ + --export /mnt/offline/aoc-verify-$(date +%F).json + +sha256sum /mnt/offline/aoc-verify-*.json > /mnt/offline/checksums.txt +``` + +### 3.7 Automation tips + +- Schedule with `cron` or platform scheduler and fail the job when exit code ≥ 11. +- Pair with `stella sources ingest --dry-run` for pre-flight validation before re-enabling a paused source. 
+- Push JSON exports to observability pipelines for historical tracking of violation counts. + +### 3.8 Offline notes + +- Works against Offline Kit Mongo snapshots when CLI is pointed at the local API gateway included in the bundle. +- When fully disconnected, run against exported `aoc verify` reports generated on production and replay them using `--format json --export` (automation recipe above). +- Include verification output in compliance packages alongside Offline Kit manifests. + +--- + +## 4 · Global exit-code reference + +| Code | Summary | +|------|---------| +| `0` | Success / no violations. | +| `11` | `ERR_AOC_001` – Forbidden field present. | +| `12` | `ERR_AOC_002` – Merge attempt detected. | +| `13` | `ERR_AOC_003` – Idempotency violation. | +| `14` | `ERR_AOC_004` – Missing provenance/signature metadata. | +| `15` | `ERR_AOC_005` – Signature/checksum mismatch. | +| `16` | `ERR_AOC_006` – Effective findings in ingestion payload. | +| `17` | `ERR_AOC_007` – Schema violation / unknown fields. | +| `18` | Partial verification (limit reached). | +| `70` | Transport or HTTP failure. | +| `71` | CLI usage error (invalid arguments, missing tenant). | + +Use these codes in CI to map outcomes to build statuses or alert severities. + +--- + +## 5 · `stella vuln observations` (Overlay paging) + +`stella vuln observations` lists raw advisory observations for downstream overlays (Graph Explorer, Policy simulations, Console). Large tenants can now page through results deterministically. + +| Option | Description | +|--------|-------------| +| `--limit ` | Caps the number of observations returned in a single call. Defaults to `200`; values above `500` are clamped server-side. | +| `--cursor ` | Opaque continuation token produced by the previous page (`nextCursor` in JSON output). Pass it back to resume iteration. | + +Additional notes: + +- Table mode prints a hint when `hasMore` is `true`: + `[yellow]More observations available. Continue with --cursor [/]`. 
+- JSON mode returns `nextCursor` and `hasMore` alongside the observation list so automation can loop until `hasMore` is `false`. +- Supplying a non-positive limit falls back to the default (`200`). Invalid/expired cursors yield `400 Bad Request`; restart without `--cursor` to begin a fresh iteration. + +--- + +## 6 · Related references + - [Aggregation-Only Contract reference](../../../ingestion/aggregation-only-contract.md) -- [Architecture overview](../../platform/architecture-overview.md) +- [Architecture overview](../../platform/architecture-overview.md) - [Console AOC dashboard](../../../ui/console.md) -- [Authority scopes](../../authority/architecture.md) - ---- - -## 6 · Compliance checklist - -- [ ] Usage documented for both table and JSON formats. -- [ ] Exit-code mapping matches `ERR_AOC_00x` definitions and automation guidance. -- [ ] Offline/air-gap workflow captured for both commands. -- [ ] References to AOC architecture and console docs included. -- [ ] Examples validated against current CLI syntax (update post-implementation). -- [ ] Docs guild screenshot/narrative placeholder logged for release notes (pending CLI team capture). - ---- - -*Last updated: 2025-10-29 (Sprint 24).* - -## 13. Authority configuration quick reference - -| Setting | Purpose | How to set | -|---------|---------|------------| -| `StellaOps:Authority:OperatorReason` | Incident/change description recorded with `orch:operate` tokens. | CLI flag `--Authority:OperatorReason=...` or env `STELLAOPS_ORCH_REASON`. | -| `StellaOps:Authority:OperatorTicket` | Change/incident ticket reference paired with orchestrator control actions. | CLI flag `--Authority:OperatorTicket=...` or env `STELLAOPS_ORCH_TICKET`. | - -> Tokens requesting `orch:operate` will fail with `invalid_request` unless both values are present. Choose concise strings (≤256 chars for reason, ≤128 chars for ticket) and avoid sensitive data. 
- \ No newline at end of file +- [Authority scopes](../../authority/architecture.md) + +--- + +## 7 · Compliance checklist + +- [ ] Usage documented for both table and JSON formats. +- [ ] Exit-code mapping matches `ERR_AOC_00x` definitions and automation guidance. +- [ ] Offline/air-gap workflow captured for both commands. +- [ ] References to AOC architecture and console docs included. +- [ ] Examples validated against current CLI syntax (update post-implementation). +- [ ] Docs guild screenshot/narrative placeholder logged for release notes (pending CLI team capture). + +--- + +*Last updated: 2025-10-29 (Sprint 24).* + +## 13. Authority configuration quick reference + +| Setting | Purpose | How to set | +|---------|---------|------------| +| `StellaOps:Authority:OperatorReason` | Incident/change description recorded with `orch:operate` tokens. | CLI flag `--Authority:OperatorReason=...` or env `STELLAOPS_ORCH_REASON`. | +| `StellaOps:Authority:OperatorTicket` | Change/incident ticket reference paired with orchestrator control actions. | CLI flag `--Authority:OperatorTicket=...` or env `STELLAOPS_ORCH_TICKET`. | +| `StellaOps:Authority:QuotaReason` | Required justification recorded with `orch:quota` tokens. | CLI flag `--Authority:QuotaReason=...` or env `STELLAOPS_ORCH_QUOTA_REASON`. | +| `StellaOps:Authority:QuotaTicket` | Optional change ticket/reference accompanying quota adjustments. | CLI flag `--Authority:QuotaTicket=...` or env `STELLAOPS_ORCH_QUOTA_TICKET`. | + +> Tokens requesting `orch:operate` fail with `invalid_request` unless both operator values are present. `orch:quota` tokens require `quota_reason` (≤256 chars) and accept an optional `quota_ticket` (≤128 chars). Avoid embedding secrets in either field. 
+ diff --git a/docs/modules/excititor/architecture.md b/docs/modules/excititor/architecture.md index 4ddf6b9b..ab351f74 100644 --- a/docs/modules/excititor/architecture.md +++ b/docs/modules/excititor/architecture.md @@ -107,7 +107,9 @@ Excititor derives `vex_normalized` tuples (without making decisions) for downstr } ``` -These tuples allow VEX Lens to compute deterministic consensus without re-parsing heavy upstream documents. +These tuples allow VEX Lens to compute deterministic consensus without re-parsing heavy upstream documents. + +Excititor workers now hydrate signature metadata with issuer trust data retrieved from the Issuer Directory service. The worker-side IssuerDirectoryClient performs tenant-aware lookups (including global fallbacks) and caches responses offline so attestation verification exposes an effective trust weight alongside the cryptographic details captured on ingest. ### 1.4 AI-ready citations diff --git a/docs/modules/issuer-directory/architecture.md b/docs/modules/issuer-directory/architecture.md new file mode 100644 index 00000000..93d5be13 --- /dev/null +++ b/docs/modules/issuer-directory/architecture.md @@ -0,0 +1,95 @@ +# Issuer Directory Architecture + +> **Status:** Initial service scaffold (Sprint 100 – Identity & Signing) + +## 1. Purpose + +Issuer Directory centralises trusted VEX/CSAF publisher metadata so downstream services (VEX Lens, Excititor, Policy Engine) can resolve issuer identity, active keys, and trust weights. The initial milestone delivers tenant-scoped CRUD APIs with audit logging plus bootstrap import for CSAF publishers. + +## 2. 
Runtime Topology + +- **Service name:** `stellaops/issuer-directory` +- **Framework:** ASP.NET Core minimal APIs (`net10.0`) +- **Persistence:** MongoDB (`issuer-directory.issuers`, `issuer-directory.issuer_keys`, `issuer-directory.issuer_audit`) +- **AuthZ:** StellaOps resource server scopes (`issuer-directory:read`, `issuer-directory:write`, `issuer-directory:admin`) +- **Audit:** Every create/update/delete emits an audit record with actor, reason, and context. +- **Bootstrap:** On startup, the service imports `data/csaf-publishers.json` into the global tenant (`@global`) and records a `seeded` audit the first time each publisher is added. +- **Key lifecycle:** API validates Ed25519 public keys, X.509 certificates, and DSSE public keys, enforces future expiries, deduplicates fingerprints, and records audit entries for create/rotate/revoke actions. + +``` +Clients ──> Authority (DPoP/JWT) ──> IssuerDirectory WebService ──> MongoDB + │ + └─> Audit sink (Mongo) +``` + +## 3. Configuration + +Configuration is resolved via `IssuerDirectoryWebServiceOptions` (section name `IssuerDirectory`). The default YAML sample lives at `etc/issuer-directory.yaml.sample` and exposes: + +```yaml +IssuerDirectory: + telemetry: + minimumLogLevel: Information + authority: + enabled: true + issuer: https://authority.example.com/realms/stellaops + requireHttpsMetadata: true + audiences: + - stellaops-platform + readScope: issuer-directory:read + writeScope: issuer-directory:write + adminScope: issuer-directory:admin + tenantHeader: X-StellaOps-Tenant + seedCsafPublishers: true + csafSeedPath: data/csaf-publishers.json + Mongo: + connectionString: mongodb://localhost:27017 + database: issuer-directory + issuersCollection: issuers + issuerKeysCollection: issuer_keys + auditCollection: issuer_audit +``` + +## 4. 
API Surface (v0) + +| Method | Route | Scope | Description | +|--------|-------|-------|-------------| +| `GET` | `/issuer-directory/issuers` | `issuer-directory:read` | List tenant issuers (optionally include global seeds). | +| `GET` | `/issuer-directory/issuers/{id}` | `issuer-directory:read` | Fetch a single issuer by identifier. | +| `POST` | `/issuer-directory/issuers` | `issuer-directory:write` | Create a tenant issuer. Requires `X-StellaOps-Tenant` header and optional `X-StellaOps-Reason`. | +| `PUT` | `/issuer-directory/issuers/{id}` | `issuer-directory:write` | Update issuer metadata/endpoints/tags. | +| `DELETE` | `/issuer-directory/issuers/{id}` | `issuer-directory:admin` | Delete issuer (records audit). | +| `GET` | `/issuer-directory/issuers/{id}/keys` | `issuer-directory:read` | List issuer keys (tenant + optional `@global` seeds). | +| `POST` | `/issuer-directory/issuers/{id}/keys` | `issuer-directory:write` | Add a signing key (validates format, deduplicates fingerprint, audits). | +| `POST` | `/issuer-directory/issuers/{id}/keys/{keyId}/rotate` | `issuer-directory:write` | Retire an active key and create a replacement atomically. | +| `DELETE` | `/issuer-directory/issuers/{id}/keys/{keyId}` | `issuer-directory:admin` | Revoke a key (status → revoked, audit logged). | + +Payloads follow the contract in `Contracts/IssuerDtos.cs` and align with domain types (`IssuerRecord`, `IssuerMetadata`, `IssuerEndpoint`). + +## 5. Dependencies & Reuse + +- `StellaOps.IssuerDirectory.Core` — domain model (`IssuerRecord`, `IssuerKeyRecord`) + application services. +- `StellaOps.IssuerDirectory.Infrastructure` — MongoDB persistence, audit sink, seed loader. +- `StellaOps.IssuerDirectory.WebService` — minimal API host, authentication wiring. +- Shared libraries: `StellaOps.Configuration`, `StellaOps.Auth.ServerIntegration`. + +## 6. 
Testing + +- Unit coverage for issuer CRUD (`IssuerDirectoryServiceTests`) and key lifecycle (`IssuerKeyServiceTests`) in `StellaOps.IssuerDirectory.Core.Tests`. +- Test infrastructure leverages `FakeTimeProvider` for deterministic timestamps and in-memory fakes for repository + audit sink. + +## 7. Observability + +- **Metrics.** `issuer_directory_changes_total` (labels: `tenant`, `issuer`, `action`) tracks issuer create/update/delete events; `issuer_directory_key_operations_total` (labels: `tenant`, `issuer`, `operation`, `key_type`) covers key create/rotate/revoke flows; `issuer_directory_key_validation_failures_total` (labels: `tenant`, `issuer`, `reason`) captures validation/verification failures. The WebService exports these via OpenTelemetry (`StellaOps.IssuerDirectory` meter). +- **Logs.** Service-level `ILogger` instrumentation records structured entries for issuer CRUD, key lifecycle operations, and validation failures; audit logs remain the authoritative trail. + +## 8. Roadmap (next milestones) + +1. **Key management APIs (ISSUER-30-002)** — manage signing keys, enforce expiry, integrate with KMS. +2. **Trust weight overrides (ISSUER-30-003)** — expose policy-friendly trust weighting with audit trails. +3. **SDK integration (ISSUER-30-004)** — supply cached issuer metadata to VEX Lens and Excititor clients. +4. **Observability & Ops (ISSUER-30-005/006)** — metrics, dashboards, deployment automation, offline kit. 
+ +--- + +*Document owner: Issuer Directory Guild* diff --git a/docs/modules/notify/architecture.md b/docs/modules/notify/architecture.md index 9376f303..40322973 100644 --- a/docs/modules/notify/architecture.md +++ b/docs/modules/notify/architecture.md @@ -313,17 +313,28 @@ Internal tooling can hit `/internal/notify//normalize` to upgrade legacy * `GET /deliveries/{id}` → detail (redacted body + metadata) * `POST /deliveries/{id}/retry` → force retry (admin, future sprint) -* **Admin** - - * `GET /stats` (per tenant counts, last hour/day) - * `GET /healthz|readyz` (liveness) - * `POST /locks/acquire` | `POST /locks/release` – worker coordination primitives (short TTL). - * `POST /digests` | `GET /digests/{actionKey}` | `DELETE /digests/{actionKey}` – manage open digest windows. - * `POST /audit` | `GET /audit?since=&limit=` – append/query structured audit trail entries. - -**Ingestion**: workers do **not** expose public ingestion; they **subscribe** to the internal bus. (Optional `/events/test` for integration testing, admin‑only.) - ---- +* **Admin** + + * `GET /stats` (per tenant counts, last hour/day) + * `GET /healthz|readyz` (liveness) + * `POST /locks/acquire` | `POST /locks/release` – worker coordination primitives (short TTL). + * `POST /digests` | `GET /digests/{actionKey}` | `DELETE /digests/{actionKey}` – manage open digest windows. + * `POST /audit` | `GET /audit?since=&limit=` – append/query structured audit trail entries. + +### 8.1 Ack tokens & escalation workflows + +To support one-click acknowledgements from chat/email, the Notify WebService mints **DSSE ack tokens** via Authority: + +* `POST /notify/ack-tokens/issue` → returns a DSSE envelope (payload type `application/vnd.stellaops.notify-ack-token+json`) describing the tenant, notification/delivery ids, channel, webhook URL, nonce, permitted actions, and TTL. Requires `notify.operator`; requesting escalation requires the caller to hold `notify.escalate` (and `notify.admin` when configured). 
Issuance enforces the Authority-side webhook allowlist (`notifications.webhooks.allowedHosts`) before minting tokens. +* `POST /notify/ack-tokens/verify` → verifies the DSSE signature, enforces expiry/tenant/action constraints, and emits audit events (`notify.ack.verified`, `notify.ack.escalated`). Scope: `notify.operator` (+`notify.escalate` for escalation). +* `POST /notify/ack-tokens/rotate` → rotates the signing key used for ack tokens, requires `notify.admin`, and emits `notify.ack.key_rotated`/`notify.ack.key_rotation_failed` audit events. Operators must supply the new key material (file/KMS/etc. depending on `notifications.ackTokens.keySource`); Authority updates JWKS entries with `use: "notify-ack"` and retires the previous key. +* `POST /internal/notifications/ack-tokens/rotate` → legacy bootstrap path (API-key protected) retained for air-gapped initial provisioning; it forwards to the same rotation pipeline as the public endpoint. + +Authority signs ack tokens using keys configured under `notifications.ackTokens`. Public JWKS responses expose these keys with `use: "notify-ack"` and `status: active|retired`, enabling offline verification by the worker/UI/CLI. + +**Ingestion**: workers do **not** expose public ingestion; they **subscribe** to the internal bus. (Optional `/events/test` for integration testing, admin-only.) + +--- ## 9) Delivery pipeline (worker) diff --git a/docs/modules/orchestrator/architecture.md b/docs/modules/orchestrator/architecture.md index 23ee2037..1284b13f 100644 --- a/docs/modules/orchestrator/architecture.md +++ b/docs/modules/orchestrator/architecture.md @@ -19,9 +19,10 @@ ## 3) Rate-limit & quota governance -- Quotas defined per tenant/profile (`maxActive`, `maxPerHour`, `burst`). Stored in `quotas` and enforced before leasing. -- Dynamic throttles allow ops to pause specific sources (`pauseSource`, `resumeSource`) or reduce concurrency. 
-- Circuit breakers automatically pause job types when failure rate > configured threshold; incidents generated via Notify and Observability stack. +- Quotas defined per tenant/profile (`maxActive`, `maxPerHour`, `burst`). Stored in `quotas` and enforced before leasing. +- Dynamic throttles allow ops to pause specific sources (`pauseSource`, `resumeSource`) or reduce concurrency. +- Circuit breakers automatically pause job types when failure rate > configured threshold; incidents generated via Notify and Observability stack. +- Control plane quota updates require Authority scope `orch:quota` (issued via `Orch.Admin` role). Token requests include `quota_reason` (mandatory) and optional `quota_ticket`; Authority persists both values for audit replay. ## 4) APIs diff --git a/docs/modules/scanner/design/surface-fs.md b/docs/modules/scanner/design/surface-fs.md index 86697374..e8df4a91 100644 --- a/docs/modules/scanner/design/surface-fs.md +++ b/docs/modules/scanner/design/surface-fs.md @@ -73,7 +73,8 @@ Surface.FS library for .NET hosts provides: - `ISurfaceManifestWriter` / `ISurfaceManifestReader` interfaces. - Content-addressed path builder (`SurfacePathBuilder`). - Tenant namespace isolation and bucket configuration (via Surface.Env). -- Local cache management (using `SCANNER_SURFACE_CACHE_ROOT` and quota). +- Local cache abstraction `ISurfaceCache` with default `FileSurfaceCache` implementation (uses `Surface:Cache:Root` / `SCANNER_SURFACE_CACHE_ROOT`, enforces quotas, serialises writes with per-key semaphores). +- `SurfaceCacheKey` helper that normalises cache entries as `{namespace}/{tenant}/{sha256}`. EntryTrace graphs use the `entrytrace.graph` namespace so Worker/WebService/CLI can share cached results deterministically. - Metrics: `surface_manifest_put_seconds`, `surface_manifest_cache_hit_total`, etc. ## 5. Retention & Eviction @@ -97,6 +98,10 @@ offline/surface/ Import script calls `PutManifest` for each manifest, verifying digests. 
This enables Zastava and Scheduler running offline to consume cached data without re-scanning. +### 6.1 EntryTrace Cache Usage + +Scanner.Worker serialises EntryTrace graphs into Surface.FS using `SurfaceCacheKey(namespace: "entrytrace.graph", tenant, sha256(options|env|entrypoint))`. At runtime the worker checks the cache before invoking analyzers; cache hits bypass parsing and feed the result store/attestor pipeline directly. The same namespace is consumed by WebService and CLI to retrieve cached graphs for reporting. + ## 7. Security & Tenancy - Tenant ID is mandatory; Surface.Validation enforces match with Authority token. diff --git a/docs/modules/scanner/design/surface-secrets.md b/docs/modules/scanner/design/surface-secrets.md index 3dbf019e..e727b269 100644 --- a/docs/modules/scanner/design/surface-secrets.md +++ b/docs/modules/scanner/design/surface-secrets.md @@ -52,7 +52,18 @@ public sealed record SurfaceSecretRequest ### 3.2 Secret Handle -`SurfaceSecretHandle` exposes typed accessors (`AsCredentials()`, `AsTlsCertificate()`) and ensures sensitive data is cleared when disposed. +`SurfaceSecretHandle` exposes typed accessors (`AsBytes()`, `AsCredentials()`, `AsTlsCertificate()`) and ensures sensitive data is cleared when disposed. Consumers that expect string material attempt UTF-8 decoding first and, if decoding fails, fall back to returning a base64 representation rather than dropping binary content. + +### 3.3 Environment & Config References + +Runtime configuration can reference secrets using the URI scheme `secret://{secretType}/{name?}`. Example: + +``` +SCANNER_ENTRYTRACE_ENV__0=API_TOKEN=secret://registry/primary +SCANNER_ENTRYTRACE_ENV__1=TLS_CERT=secret://tls/edge-gateway +``` + +During scan execution, Scanner.Worker resolves each placeholder via `ISurfaceSecretProvider` before invoking analyzers, replacing the environment variable with the resolved value (base64 when non-text). 
Missing secrets raise `SurfaceSecretNotFoundException` and are surfaced as warnings without hard-failing the scan. ## 4. Configuration diff --git a/docs/modules/scanner/design/surface-validation.md b/docs/modules/scanner/design/surface-validation.md index 4eb96a7c..0022c3ba 100644 --- a/docs/modules/scanner/design/surface-validation.md +++ b/docs/modules/scanner/design/surface-validation.md @@ -13,32 +13,36 @@ Surface.Validation provides a shared validator framework to ensure all surface c ```csharp public interface ISurfaceValidator { - ValueTask ValidateAsync(SurfaceValidationContext context, CancellationToken ct = default); + ValueTask ValidateAsync(SurfaceValidationContext context, CancellationToken ct = default); } -public sealed record SurfaceValidationContext -( - SurfaceEnvironmentSettings Environment, +public sealed record SurfaceValidationContext( IServiceProvider Services, - string ComponentName -); + string ComponentName, + SurfaceEnvironmentSettings Environment, + IReadOnlyDictionary Properties) +{ + public static SurfaceValidationContext Create( + IServiceProvider services, + string componentName, + SurfaceEnvironmentSettings environment, + IReadOnlyDictionary? properties = null); +} -public sealed record ValidationResult -( - bool IsSuccess, - IReadOnlyCollection Issues -); +public interface ISurfaceValidatorRunner +{ + ValueTask RunAllAsync(SurfaceValidationContext context, CancellationToken ct = default); + ValueTask EnsureAsync(SurfaceValidationContext context, CancellationToken ct = default); +} -public sealed record SurfaceValidationIssue -( +public sealed record SurfaceValidationIssue( string Code, string Message, SurfaceValidationSeverity Severity, - string? Hint = null -); + string? Hint = null); ``` -Validators register with DI (`services.AddSurfaceValidation()`). Hosts call `ISurfaceValidatorRunner.RunAllAsync()` during startup and periodically (optional) to re-check configuration. 
+`Properties` carries optional context-specific metadata (e.g., `jobId`, `imageDigest`, cache paths) so validators can tailor diagnostics without pulling additional services. Validators register with DI (`services.AddSurfaceValidation()`). Hosts call `ISurfaceValidatorRunner.RunAllAsync()` during startup and before workload execution to capture misconfiguration early; `EnsureAsync()` rethrows when `Surface:Validation:ThrowOnFailure=true`. ## 3. Built-in Validators @@ -76,6 +80,7 @@ Validators can access DI services (e.g., HttpClient, Authority token provider) t ## 6. Integration Guidelines - **Scanner Worker/WebService**: fail startup if any error-level issue occurs; log warnings but continue running. +- **Scanner EntryTrace**: execute `RunAllAsync` for each scan job with properties `{imageDigest, jobId, configPath, rootPath}`. If the result contains errors, skip analysis and log the issue summary instead of failing the entire scan. - **Zastava Webhook**: treat validation errors as fatal (webhook should not enforce policies when surface preconditions fail). Display validation error summary in `/readyz` response to aid debugging. - **Analysers**: call `SurfaceValidation.Ensure()` before executing heavy work to catch misconfiguration during integration tests. diff --git a/docs/modules/scanner/operations/entrypoint-static-analysis.md b/docs/modules/scanner/operations/entrypoint-static-analysis.md index c5128aae..dc4f62d9 100644 --- a/docs/modules/scanner/operations/entrypoint-static-analysis.md +++ b/docs/modules/scanner/operations/entrypoint-static-analysis.md @@ -1,11 +1,89 @@ -# Entry-Point Static Analysis - -This guide captures the static half of Stella Ops’ entry-point detection pipeline: how we turn image metadata and filesystem contents into a resolved binary, an execution chain, and a confidence score. - -## 1) Loading OCI images - -### 1.1 Supported inputs -- Registry references (`repo:tag@sha256:digest`) using the existing content store. 
+# Entry-Point Static Analysis + +This guide captures the static half of Stella Ops’ entry-point detection pipeline: how we turn image metadata and filesystem contents into a resolved binary, an execution chain, and a confidence score. + +## 0) Implementation snapshot — Sprint 130.A (2025-11-02) + +The `StellaOps.Scanner.EntryTrace` stack (analyzer + worker + surfaces) currently provides: + +- **OCI config + layered FS context**: `EntryTraceImageContextFactory` normalises environment (`PATH` fallback), user, and working directory while `LayeredRootFileSystem` handles whiteouts, symlinks, and bounded byte reads (`TryReadBytes`) so ELF/PE probing stays offline friendly. +- **Wrapper-aware exec expansion**: the analyzer unwraps init/user-switch/environment/supervisor wrappers (`tini`, `dumb-init`, `gosu`, `su-exec`, `chpst`, `env`, `supervisord`, `s6-supervise`, `runsv*`) and records guard metadata plus environment/user deltas on nodes and edges. +- **Script + interpreter resolution**: POSIX shell parsing (AST-driven) covers `source`, `run-parts`, `exec`, and supervisor service directories, with Windows `cmd /c` support. Python `-m`, Node script, and Java `-jar` lookups add evidence when targets are located. +- **Terminal classification & scoring**: `ClassifyTerminal` fingerprints ELF (`PT_INTERP`, Go build ID, Rust notes), PE/CLR, and JAR manifests, pairs them with shebang/runtime heuristics (`python`, `node`, `java`, `.NET`, `php-fpm`, `nginx`, `ruby`), and emits `EntryTracePlan/EntryTraceTerminal` records capped at 95-point confidence. +- **NDJSON + capability stream**: `EntryTraceNdjsonWriter` produces deterministic `entrytrace.entry/node/edge/target/warning/capability` lines consumed by AOC, CLI, and policy surfaces. +- **Runtime reconciliation**: `ProcFileSystemSnapshot` + `ProcGraphBuilder` replay `/proc`, `EntryTraceRuntimeReconciler` merges runtime terminals with static predictions, and diagnostics note matches/mismatches. 
+- **Surface integration**: Scanner Worker caches graphs (`SurfaceCache`), persists `EntryTraceResult` via the shared store, exposes NDJSON + graph through `ScanAnalysisKeys`, and the WebService/CLI (`scan entrytrace`) return the stored result. + +Open follow-ups tracked for this wave: + +- **SCANNER-ENTRYTRACE-18-507** – fallback candidate discovery (Docker history, `/etc/services/**`, `/usr/local/bin/*-entrypoint`) when ENTRYPOINT/CMD are empty. +- **SCANNER-ENTRYTRACE-18-508** – broaden wrapper catalogue (package/tool runners such as `bundle exec`, `npm`, `yarn node`, `docker-php-entrypoint`, `pipenv`, `poetry run`). +- **ENTRYTRACE-SURFACE-01** (DOING) / **ENTRYTRACE-SURFACE-02** (TODO) – finish wiring Surface.Validation/FS/Secrets to gate prerequisites and remove direct env/secret reads. + +_Sections §4–§7 below capture the long-term reduction design; features not yet implemented are explicitly noted in the task board._ + +### Probing the analyzer today + +1. **Load the image config** + ```csharp + using var stream = File.OpenRead("config.json"); + var config = OciImageConfigLoader.Load(stream); + ``` +2. **Create a layered filesystem** from extracted layer directories or tar archives: + ```csharp + var fs = LayeredRootFileSystem.FromArchives(layers); + ``` +3. **Build the image context** (normalises env, PATH, user, working dir): + ```csharp + var imageCtx = EntryTraceImageContextFactory.Create( + config, fs, new EntryTraceAnalyzerOptions(), imageDigest, scanId); + ``` +4. **Resolve the entry trace**: + ```csharp + var analyzer = serviceProvider.GetRequiredService(); + var graph = await analyzer.ResolveAsync(imageCtx.Entrypoint, imageCtx.Context, cancellationToken); + ``` +5. **Inspect results** – `graph.Terminals` lists classified candidates (path, runtime, confidence, evidence), `graph.Nodes/Edges` capture the explainable chain, and `graph.Diagnostics` highlight unresolved steps. Emit metrics/telemetry via `EntryTraceMetrics`. +6. 
**Serialize if needed** – pass the graph through `EntryTraceNdjsonWriter.Serialize` to obtain deterministic NDJSON lines; the helper already computes capability summaries. + +For ad-hoc investigation, snapshotting `EntryTraceResult` keeps graph and NDJSON aligned. Avoid ad-hoc JSON writers to maintain ordering guarantees. + +#### Probing through Scanner.Worker + +EntryTrace runs automatically inside the worker when these metadata keys exist on the lease: + +| Key | Purpose | +| --- | --- | +| `ScanMetadataKeys.ImageConfigPath` (default `scanner.analyzers.entrytrace.configMetadataKey`) | Absolute path to the OCI `config.json`. | +| `ScanMetadataKeys.LayerDirectories` or `ScanMetadataKeys.LayerArchives` | Semicolon-delimited list of extracted layer folders or tar archives. | +| `ScanMetadataKeys.RuntimeProcRoot` *(optional)* | Path to a captured `/proc` tree for runtime reconciliation (air-gapped runs can mount a snapshot). | + +Worker output lands in `context.Analysis` (`EntryTraceGraph`, `EntryTraceNdjson`) and is persisted via `IEntryTraceResultStore`. Ensure Surface Validation prerequisites pass before dispatching the analyzer. + +#### Probing via WebService & CLI + +- **REST**: `GET /api/scans/{scanId}/entrytrace` returns `EntryTraceResponse` (`graph + ndjson + metadata`). Requires scan ownership/authz. +- **CLI**: `stella scan entrytrace [--ndjson] [--verbose]` renders a confidence-sorted terminal table, diagnostics, and optionally the NDJSON payload. + +Both surfaces consume the persisted result; rerunning the worker updates the stored document atomically. + +### NDJSON reference + +`EntryTraceNdjsonWriter.Serialize` emits newline-delimited JSON in the following order so AOC consumers can stream without buffering: + +- `entrytrace.entry` — scan metadata (scan id, image digest, outcome, counts). +- `entrytrace.node` — every node in the graph with arguments, interpreter, evidence, and metadata. 
+- `entrytrace.edge` — directed relationships between nodes with optional wrapper metadata. +- `entrytrace.target` — resolved terminal programmes (`EntryTracePlan`), including runtime, confidence, arguments, environment, and evidence. +- `entrytrace.warning` — diagnostics (severity, reason, span, related path). +- `entrytrace.capability` — aggregated wrapper capabilities discovered during traversal. + +Every line ends with a newline and is emitted in deterministic order (IDs ascending, keys lexicographically sorted) so downstream tooling can hash or diff outputs reproducibly. + +## 1) Loading OCI images + +### 1.1 Supported inputs +- Registry references (`repo:tag@sha256:digest`) using the existing content store. - Local OCI/Docker v2 archives (`docker save` tarball, OCI layout directory with `index.json` + `blobs/sha256/*`). ### 1.2 Normalised model @@ -53,14 +131,18 @@ Compose the runtime argv as `Entrypoint ++ Cmd`, honouring shell-form vs exec-fo - For non-ELF/PE files: read first line; interpret `#!interpreter args`. - Replace `argv[0]` with the interpreter, prepend shebang args, append script path per kernel semantics. -### 3.3 Binary probes -- Identify ELF via magic `\x7FELF`, parse `.interp`, `.dynamic`, linked libs, `.note.go.buildid`, DWARF producer. -- Identify PE (Windows) and detect .NET single-file bundles via CLI header. -- Record features for runtime scoring (Go vs Rust vs glibc vs musl). +### 3.3 Binary probes +- Identify ELF via magic `\x7FELF`, parse `.interp`, `.dynamic`, linked libs, `.note.go.buildid`, DWARF producer, `.rustc` notes, and musl/glibc fingerprints. +- Identify PE (Windows) and detect .NET single-file bundles via CLI header / metadata tables; capture ready-to-run vs IL-only markers. +- Inspect archives (JAR/WAR/EAR) for `META-INF/MANIFEST.MF` `Main-Class`/`Main-Module` and signed entries. 
+- Detect PHP-FPM / nginx launchers (`php-fpm`, `apache2-foreground`, `nginx -g 'daemon off;'`) via binary names + nearby config (php.ini, nginx.conf). +- Record evidence tuples for runtime scoring (interpreter, build ID, runtime note) so downstream components can explain the classification. -## 4) Wrapper catalogue - -Collapse known wrappers before analysing the target command: +## 4) Wrapper catalogue + +> _Roadmap note_: extended package/tool runners land with **SCANNER-ENTRYTRACE-18-508**; today the catalogue covers init/user-switch/environment/supervisor wrappers listed above. + +Collapse known wrappers before analysing the target command: - Init shims: `tini`, `dumb-init`, `s6-svscan`, `runit`, `supervisord`. - Privilege droppers: `gosu`, `su-exec`, `chpst`. @@ -111,12 +193,31 @@ Use a simple logistic model with feature contributions captured for the evidence Persist per-feature evidence strings so UI/CLI users can see **why** the scanner picked a given entry point. -## 8) Outputs - -Return a populated `EntryTraceResult`: - -- `Terminals` contains the best candidate(s) and their runtime classification. -- `Evidence` aggregates feature messages, ShellFlow reasoning, wrapper reductions, and runtime detector hints. -- `Chain` shows the explainable path from initial Docker argv to the final binary. - -Static and dynamic reducers share this shape, enabling downstream modules to remain agnostic of the detection mode. +## 8) Outputs + +Return a populated `EntryTraceResult`: + +- `Terminals` contains the best candidate(s) and their runtime classification. +- `Evidence` aggregates feature messages, ShellFlow reasoning, wrapper reductions, and runtime detector hints. +- `Chain` shows the explainable path from initial Docker argv to the final binary. + +Static and dynamic reducers share this shape, enabling downstream modules to remain agnostic of the detection mode. 
+ +## 9) ProcGraph replay (runtime parity) + +Static resolution must be reconciled with live observations when a workload is running under the Stella Ops runtime agent: + +1. Read `/proc/1/{cmdline,exe}` and walk descendants via `/proc/*/stat` to construct the initial exec chain (ascending PID order). +2. Collapse known wrappers (`tini`, `dumb-init`, `gosu`, `su-exec`, `s6-supervise`, `runsv`, `supervisord`) and privilege switches, mirroring the static wrapper catalogue. +3. Materialise a `ProcGraph` object that records each transition and the resolved executable path (via `/proc//exe` symlinks). +4. Compare `ProcGraph.Terminal` with `EntryTraceResult.Terminals[0]`, emitting `confidence=high` when they match and downgrade when divergence occurs. +5. Persist the merged view so the CLI/UI can highlight static vs runtime discrepancies and feed drift detection in Zastava. + +This replay is optional offline, but required when runtime evidence is available so policy decisions can lean on High-confidence matches. + +## 10) Service & CLI surfaces + +- **Scanner.WebService** must expose `/scans/{scanId}/entrytrace` returning chain, terminal classification, evidence, and runtime agreement markers. +- **CLI** gains `stella scan entrypoint ` (and JSON streaming) for air-gapped review. +- **Policy / Export** payloads include `entrytrace_terminal`, `entrytrace_confidence`, and evidence arrays so downstream consumers retain provenance. +- All outputs reuse the same `EntryTraceResult` schema and NDJSON stream defined in §7, keeping the Offline Kit and DSSE attestations deterministic. diff --git a/docs/notifications/architecture.md b/docs/notifications/architecture.md index a91447cd..c17d4934 100644 --- a/docs/notifications/architecture.md +++ b/docs/notifications/architecture.md @@ -92,7 +92,7 @@ Documents are stored using the canonical JSON serializer (`NotifyCanonicalJsonSe ## 5. 
Deployment & configuration - **Configuration sources.** YAML files feed typed options (`NotifyMongoOptions`, `NotifyWorkerOptions`, etc.). Environment variables can override connection strings and rate limits for production. -- **Authority integration.** Two OAuth clients (`notify-web`, `notify-web-dev`) with scopes `notify.read` and `notify.admin` are required. Authority enforcement can be disabled for air-gapped dev use by providing `developmentSigningKey`. +- **Authority integration.** Two OAuth clients (`notify-web`, `notify-web-dev`) with scopes `notify.viewer`, `notify.operator`, and (for dev/admin flows) `notify.admin` are required. Authority enforcement can be disabled for air-gapped dev use by providing `developmentSigningKey`. - **Plug-in management.** `plugins.baseDirectory` and `orderedPlugins` guarantee deterministic loading. Offline Kits copy the plug-in tree verbatim; operations must keep the order aligned across environments. - **Observability.** Workers expose structured logs (`ruleId`, `actionId`, `eventId`, `throttleKey`). Metrics include: - `notify_rule_matches_total{tenant,eventKind}` diff --git a/docs/notifications/digests.md b/docs/notifications/digests.md index ce7725b4..bd734515 100644 --- a/docs/notifications/digests.md +++ b/docs/notifications/digests.md @@ -63,7 +63,7 @@ Digest state lives in Mongo (`digests` collection) and mirrors the schema descri | Endpoint | Description | Notes | |----------|-------------|-------| | `POST /digests` | Issues administrative commands (e.g., force flush, reopen) for a specific action/window. | Request body specifies the command target; requires `notify.admin`. | -| `GET /digests/{actionKey}` | Returns the currently open window (if any) for the referenced action. | Supports operators/CLI inspecting pending digests; requires `notify.read`. | +| `GET /digests/{actionKey}` | Returns the currently open window (if any) for the referenced action. 
| Supports operators/CLI inspecting pending digests; requires `notify.viewer`. | | `DELETE /digests/{actionKey}` | Drops the open window without notifying (emergency stop). | Emits an audit record; use sparingly. | All routes honour the tenant header and reuse the standard Notify rate limits. diff --git a/docs/notifications/overview.md b/docs/notifications/overview.md index 5866aefe..f0320871 100644 --- a/docs/notifications/overview.md +++ b/docs/notifications/overview.md @@ -1,76 +1,77 @@ -# Notifications Overview - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -Notifications Studio turns raw platform events into concise, tenant-scoped alerts that reach the right responders without overwhelming them. The service is sovereign/offline-first, follows the Aggregation-Only Contract (AOC), and produces deterministic outputs so the same configuration yields identical deliveries across environments. - ---- - -## 1. Mission & value - -- **Reduce noise.** Only materially new or high-impact changes reach chat, email, or webhooks thanks to rule filters, throttles, and digest windows. -- **Explainable results.** Every delivery is traceable back to a rule, action, and event payload stored in the delivery ledger; operators can audit what fired and why. -- **Safe by default.** Secrets remain in external stores, templates are sandboxed, quiet hours and throttles prevent storms, and idempotency guarantees protect downstream systems. -- **Offline-aligned.** All configuration, templates, and plug-ins ship with Offline Kits; no external SaaS is required to send notifications. - ---- - -## 2. Core capabilities - -| Capability | What it does | Key docs | -|------------|--------------|----------| -| Rules engine | Declarative matchers for event kinds, severities, namespaces, VEX context, KEV flags, and more. 
| [`notifications/rules.md`](rules.md) | -| Channel catalog | Slack, Teams, Email, Webhook connectors loaded via restart-time plug-ins; metadata stored without secrets. | [`notifications/architecture.md`](architecture.md) | -| Templates | Locale-aware, deterministic rendering via safe helpers; channel defaults plus tenant-specific overrides. | [`notifications/templates.md`](templates.md) | -| Digests | Coalesce bursts into periodic summaries with deterministic IDs and audit trails. | [`notifications/digests.md`](digests.md) | -| Delivery ledger | Tracks rendered payload hashes, attempts, throttles, and outcomes for every action. | [`modules/notify/architecture.md`](../modules/notify/architecture.md#7-data-model-mongo) | - ---- - -## 3. How it fits into Stella Ops - -1. **Producers emit events.** Scanner, Scheduler, VEX Lens, Attestor, and Zastava publish canonical envelopes (`NotifyEvent`) onto the internal bus. -2. **Notify.Worker evaluates rules.** For each tenant, the worker applies match filters, VEX gates, throttles, and digest policies before rendering the action. -3. **Connectors deliver.** Channel plug-ins send the rendered payload to Slack/Teams/Email/Webhook targets and report back attempts and outcomes. -4. **Consumers investigate.** Operators pivot from message links into Console dashboards, SBOM views, or policy overlays with correlation IDs preserved. - -The Notify WebService fronts worker state with REST APIs used by the UI and CLI. Tenants authenticate via StellaOps Authority scopes `notify.read` and `notify.admin`. All operations require the tenant header (`X-StellaOps-Tenant`) to preserve sovereignty boundaries. - ---- - -## 4. Operating model - -| Area | Guidance | -|------|----------| -| **Tenancy** | Each rule, channel, template, and delivery belongs to exactly one tenant. Cross-tenant sharing is intentionally unsupported. | -| **Determinism** | Configuration persistence normalises strings and sorts collections. 
Template rendering produces identical `bodyHash` values when inputs match. | -| **Scaling** | Workers scale horizontally; per-tenant rule snapshots are cached and refreshed from Mongo change streams. Redis (or equivalent) guards throttles and locks. | -| **Offline** | Offline Kits include plug-ins, default templates, and seed rules. Operators can edit YAML/JSON manifests before air-gapped deployment. | -| **Security** | Channel secrets use indirection (`secretRef`), Authority-protected OAuth clients secure API access, and delivery payloads are redacted before storage where required. | - ---- - -## 5. Getting started (first 30 minutes) - -| Step | Goal | Reference | -|------|------|-----------| -| 1 | Deploy Notify WebService + Worker with Mongo and Redis | [`modules/notify/architecture.md`](../modules/notify/architecture.md#1-runtime-shape--projects) | +# Notifications Overview + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +Notifications Studio turns raw platform events into concise, tenant-scoped alerts that reach the right responders without overwhelming them. The service is sovereign/offline-first, follows the Aggregation-Only Contract (AOC), and produces deterministic outputs so the same configuration yields identical deliveries across environments. + +--- + +## 1. Mission & value + +- **Reduce noise.** Only materially new or high-impact changes reach chat, email, or webhooks thanks to rule filters, throttles, and digest windows. +- **Explainable results.** Every delivery is traceable back to a rule, action, and event payload stored in the delivery ledger; operators can audit what fired and why. +- **Safe by default.** Secrets remain in external stores, templates are sandboxed, quiet hours and throttles prevent storms, and idempotency guarantees protect downstream systems. 
+- **Offline-aligned.** All configuration, templates, and plug-ins ship with Offline Kits; no external SaaS is required to send notifications. + +--- + +## 2. Core capabilities + +| Capability | What it does | Key docs | +|------------|--------------|----------| +| Rules engine | Declarative matchers for event kinds, severities, namespaces, VEX context, KEV flags, and more. | [`notifications/rules.md`](rules.md) | +| Channel catalog | Slack, Teams, Email, Webhook connectors loaded via restart-time plug-ins; metadata stored without secrets. | [`notifications/architecture.md`](architecture.md) | +| Templates | Locale-aware, deterministic rendering via safe helpers; channel defaults plus tenant-specific overrides. | [`notifications/templates.md`](templates.md) | +| Digests | Coalesce bursts into periodic summaries with deterministic IDs and audit trails. | [`notifications/digests.md`](digests.md) | +| Delivery ledger | Tracks rendered payload hashes, attempts, throttles, and outcomes for every action. | [`modules/notify/architecture.md`](../modules/notify/architecture.md#7-data-model-mongo) | +| Ack tokens | DSSE-signed acknowledgement tokens with webhook allowlists and escalation guardrails enforced by Authority. | [`modules/notify/architecture.md`](../modules/notify/architecture.md#81-ack-tokens--escalation-workflows) | + +--- + +## 3. How it fits into Stella Ops + +1. **Producers emit events.** Scanner, Scheduler, VEX Lens, Attestor, and Zastava publish canonical envelopes (`NotifyEvent`) onto the internal bus. +2. **Notify.Worker evaluates rules.** For each tenant, the worker applies match filters, VEX gates, throttles, and digest policies before rendering the action. +3. **Connectors deliver.** Channel plug-ins send the rendered payload to Slack/Teams/Email/Webhook targets and report back attempts and outcomes. +4. **Consumers investigate.** Operators pivot from message links into Console dashboards, SBOM views, or policy overlays with correlation IDs preserved. 
+ +The Notify WebService fronts worker state with REST APIs used by the UI and CLI. Tenants authenticate via StellaOps Authority scopes `notify.viewer`, `notify.operator`, and (for escalated actions) `notify.admin`. All operations require the tenant header (`X-StellaOps-Tenant`) to preserve sovereignty boundaries. + +--- + +## 4. Operating model + +| Area | Guidance | +|------|----------| +| **Tenancy** | Each rule, channel, template, and delivery belongs to exactly one tenant. Cross-tenant sharing is intentionally unsupported. | +| **Determinism** | Configuration persistence normalises strings and sorts collections. Template rendering produces identical `bodyHash` values when inputs match. | +| **Scaling** | Workers scale horizontally; per-tenant rule snapshots are cached and refreshed from Mongo change streams. Redis (or equivalent) guards throttles and locks. | +| **Offline** | Offline Kits include plug-ins, default templates, and seed rules. Operators can edit YAML/JSON manifests before air-gapped deployment. | +| **Security** | Channel secrets use indirection (`secretRef`), Authority-protected OAuth clients secure API access, and delivery payloads are redacted before storage where required. | + +--- + +## 5. Getting started (first 30 minutes) + +| Step | Goal | Reference | +|------|------|-----------| +| 1 | Deploy Notify WebService + Worker with Mongo and Redis | [`modules/notify/architecture.md`](../modules/notify/architecture.md#1-runtime-shape--projects) | | 2 | Register OAuth clients/scopes in Authority | [`etc/authority.yaml.sample`](../../etc/authority.yaml.sample) | -| 3 | Install channel plug-ins and capture secret references | [`plugins/notify`](../../plugins) | -| 4 | Create a tenant rule and test preview | [`POST /channels/{id}/test`](../modules/notify/architecture.md#8-external-apis-webservice) | -| 5 | Inspect deliveries and digests | `/api/v1/notify/deliveries`, `/api/v1/notify/digests` | - ---- - -## 6. 
Alignment with implementation work - -| Backlog item | Impact on docs | Status | -|--------------|----------------|--------| -| `NOTIFY-SVC-38-001..004` | Foundational correlation, throttling, simulation hooks. | **In progress** – align behaviour once services publish beta APIs. | -| `NOTIFY-SVC-39-001..004` | Adds correlation engine, digest generator, simulation API, quiet hours. | **Pending** – revisit rule/digest sections when these tasks merge. | - -Action: coordinate with the Notifications Service Guild when `NOTIFY-SVC-39-001..004` land to validate payload fields, quiet-hours semantics, and any new connector metadata that should be documented here and in the channel-specific guides. - ---- - -> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. \ No newline at end of file +| 3 | Install channel plug-ins and capture secret references | [`plugins/notify`](../../plugins) | +| 4 | Create a tenant rule and test preview | [`POST /channels/{id}/test`](../modules/notify/architecture.md#8-external-apis-webservice) | +| 5 | Inspect deliveries and digests | `/api/v1/notify/deliveries`, `/api/v1/notify/digests` | + +--- + +## 6. Alignment with implementation work + +| Backlog item | Impact on docs | Status | +|--------------|----------------|--------| +| `NOTIFY-SVC-38-001..004` | Foundational correlation, throttling, simulation hooks. | **In progress** – align behaviour once services publish beta APIs. | +| `NOTIFY-SVC-39-001..004` | Adds correlation engine, digest generator, simulation API, quiet hours. | **Pending** – revisit rule/digest sections when these tasks merge. | + +Action: coordinate with the Notifications Service Guild when `NOTIFY-SVC-39-001..004` land to validate payload fields, quiet-hours semantics, and any new connector metadata that should be documented here and in the channel-specific guides. 
+ +--- + +> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/notifications/pack-approvals-integration.md b/docs/notifications/pack-approvals-integration.md index 23736334..acc524c4 100644 --- a/docs/notifications/pack-approvals-integration.md +++ b/docs/notifications/pack-approvals-integration.md @@ -1,62 +1,62 @@ -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -# Pack Approval Notification Integration — Requirements - -## Overview - -Task Runner now produces pack plans with explicit approval and policy-gate metadata. The Notifications service must ingest those events, persist their state, and fan out actionable alerts (approvals requested, policy holds, resumptions). This document captures the requirements for the first Notifications sprint dedicated to the Task Runner bridge. - -Deliverables feed Sprint 37 tasks (`NOTIFY-SVC-37-00x`) and unblock Task Runner sprint 43 (`TASKRUN-43-001`). - -## Functional Requirements - -### 1. Approval Event Contract -- Define a canonical schema for **PackApprovalRequested** and **PackApprovalUpdated** events. -- Fields must include `runId`, `approvalId`, tenant context, plan hash, required grants, step identifiers, message template, and resume callback metadata. -- Provide an OpenAPI fragment and x-go/x-cs models for Task Runner and CLI compatibility. -- Document error/acknowledgement semantics (success, retryable failure, validation failure). - -### 2. Ingestion & Persistence -- Expose a secure Notifications API endpoint (`POST /notifications/pack-approvals`) receiving Task Runner events. -- Validate scope (`Packs.Approve`, `Notifier.Events:Write`) and tenant match. -- Persist approval state transitions in Mongo (`notifications.pack_approvals`) with indexes on run/approval/tenant. 
-- Store outbound notification audit records with correlation IDs to support Task Runner resume flow. - -### 3. Notification Routing -- Derive recipients from new rule predicates (`event.kind == "pack.approval"`). -- Render approval templates (email + webhook JSON) including plan metadata and approval links (resume token). -- Emit policy gate notifications as “hold” incidents with context (parameters, messages). -- Support localization fallback and redaction of secrets (never ship approval tokens unencrypted). - -### 4. Resume & Ack Handshake -- Provide an approval ack endpoint (`POST /notifications/pack-approvals/{runId}/{approvalId}/ack`) that records decision metadata and forwards to Task Runner resume hook (HTTP callback + message bus placeholder). -- Return structured responses with resume token / status for CLI integration. -- Ensure idempotent updates (dedupe by runId + approvalId + decisionHash). - -### 5. Observability & Security -- Emit metrics for approval notifications queued/sent, outstanding approvals, and acknowledgement latency. -- Log audit trail events (`pack.approval.requested`, `pack.approval.acknowledged`, `pack.policy.hold`). -- Enforce HMAC or mTLS for Task Runner -> Notifier ingestion; support configurable IP allowlist. -- Provide chaos-test plan for notification failure modes (channel outage, storage failure). - -## Non-Functional Requirements - -- Deterministic processing: identical approval events lead to identical outbound notifications (idempotent). -- Timeouts: ingestion endpoint must respond < 500 ms under nominal load. -- Retry strategy: Task Runner expects 5xx/429 for transient errors; document backoff guidance. -- Data retention: approval records retained 90 days, purge job tracked under ops runbook. - -## Sprint 37 Task Mapping - -| Task ID | Scope | -| --- | --- | -| **NOTIFY-SVC-37-001** | Author this contract doc, OpenAPI fragment, and schema references. Coordinate with Task Runner/Authority guilds. 
| -| **NOTIFY-SVC-37-002** | Implement secure ingestion endpoint, Mongo persistence, and audit hooks. Provide integration tests with sample events. | -| **NOTIFY-SVC-37-003** | Build approval/policy notification templates, routing rules, and channel dispatch (email + webhook). | -| **NOTIFY-SVC-37-004** | Ship acknowledgement endpoint + Task Runner callback client, resume token handling, and metrics/dashboards. | - -## Open Questions - -1. Who owns approval resume callback (Task Runner Worker vs Orchestrator)? Resolve before NOTIFY-SVC-37-004. -2. Should approvals generate incidents in existing incident schema or dedicated collection? Decision impacts Mongo design. -3. Authority scopes for approval ingestion/ack — reuse `Packs.Approve` or introduce `Packs.Approve:notify`? Coordinate with Authority team. +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +# Pack Approval Notification Integration — Requirements + +## Overview + +Task Runner now produces pack plans with explicit approval and policy-gate metadata. The Notifications service must ingest those events, persist their state, and fan out actionable alerts (approvals requested, policy holds, resumptions). This document captures the requirements for the first Notifications sprint dedicated to the Task Runner bridge. + +Deliverables feed Sprint 37 tasks (`NOTIFY-SVC-37-00x`) and unblock Task Runner sprint 43 (`TASKRUN-43-001`). + +## Functional Requirements + +### 1. Approval Event Contract +- Define a canonical schema for **PackApprovalRequested** and **PackApprovalUpdated** events. +- Fields must include `runId`, `approvalId`, tenant context, plan hash, required grants, step identifiers, message template, and resume callback metadata. +- Provide an OpenAPI fragment and x-go/x-cs models for Task Runner and CLI compatibility. +- Document error/acknowledgement semantics (success, retryable failure, validation failure). 
+ +### 2. Ingestion & Persistence +- Expose a secure Notifications API endpoint (`POST /notifications/pack-approvals`) receiving Task Runner events. +- Validate scope (`packs.approve`, `Notifier.Events:Write`) and tenant match. +- Persist approval state transitions in Mongo (`notifications.pack_approvals`) with indexes on run/approval/tenant. +- Store outbound notification audit records with correlation IDs to support Task Runner resume flow. + +### 3. Notification Routing +- Derive recipients from new rule predicates (`event.kind == "pack.approval"`). +- Render approval templates (email + webhook JSON) including plan metadata and approval links (resume token). +- Emit policy gate notifications as “hold” incidents with context (parameters, messages). +- Support localization fallback and redaction of secrets (never ship approval tokens unencrypted). + +### 4. Resume & Ack Handshake +- Provide an approval ack endpoint (`POST /notifications/pack-approvals/{runId}/{approvalId}/ack`) that records decision metadata and forwards to Task Runner resume hook (HTTP callback + message bus placeholder). +- Return structured responses with resume token / status for CLI integration. +- Ensure idempotent updates (dedupe by runId + approvalId + decisionHash). + +### 5. Observability & Security +- Emit metrics for approval notifications queued/sent, outstanding approvals, and acknowledgement latency. +- Log audit trail events (`pack.approval.requested`, `pack.approval.acknowledged`, `pack.policy.hold`). +- Enforce HMAC or mTLS for Task Runner -> Notifier ingestion; support configurable IP allowlist. +- Provide chaos-test plan for notification failure modes (channel outage, storage failure). + +## Non-Functional Requirements + +- Deterministic processing: identical approval events lead to identical outbound notifications (idempotent). +- Timeouts: ingestion endpoint must respond < 500 ms under nominal load. 
+- Retry strategy: Task Runner expects 5xx/429 for transient errors; document backoff guidance. +- Data retention: approval records retained 90 days, purge job tracked under ops runbook. + +## Sprint 37 Task Mapping + +| Task ID | Scope | +| --- | --- | +| **NOTIFY-SVC-37-001** | Author this contract doc, OpenAPI fragment, and schema references. Coordinate with Task Runner/Authority guilds. | +| **NOTIFY-SVC-37-002** | Implement secure ingestion endpoint, Mongo persistence, and audit hooks. Provide integration tests with sample events. | +| **NOTIFY-SVC-37-003** | Build approval/policy notification templates, routing rules, and channel dispatch (email + webhook). | +| **NOTIFY-SVC-37-004** | Ship acknowledgement endpoint + Task Runner callback client, resume token handling, and metrics/dashboards. | + +## Open Questions + +1. Who owns approval resume callback (Task Runner Worker vs Orchestrator)? Resolve before NOTIFY-SVC-37-004. +2. Should approvals generate incidents in existing incident schema or dedicated collection? Decision impacts Mongo design. +3. Authority scopes for approval ingestion/ack — reuse `packs.approve` or introduce `packs.approve:notify`? Coordinate with Authority team. diff --git a/docs/observability/observability.md b/docs/observability/observability.md index e84e498a..17ca09c4 100644 --- a/docs/observability/observability.md +++ b/docs/observability/observability.md @@ -1,142 +1,145 @@ -# AOC Observability Guide - -> **Audience:** Observability Guild, Concelier/Excititor SREs, platform operators. -> **Scope:** Metrics, traces, logs, dashboards, and runbooks introduced as part of the Aggregation-Only Contract (AOC) rollout (Sprint 19). - -This guide captures the canonical signals emitted by Concelier and Excititor once AOC guards are active. It explains how to consume the metrics in dashboards, correlate traces/logs for incident triage, and operate in offline environments. 
Pair this guide with the [AOC reference](../ingestion/aggregation-only-contract.md) and [architecture overview](../modules/platform/architecture-overview.md). - ---- - -## 1 · Metrics - -| Metric | Type | Labels | Description | -|--------|------|--------|-------------| -| `ingestion_write_total` | Counter | `source`, `tenant`, `result` (`ok`, `reject`, `noop`) | Counts write attempts to `advisory_raw`/`vex_raw`. Rejects correspond to guard failures. | -| `ingestion_latency_seconds` | Histogram | `source`, `tenant`, `phase` (`fetch`, `transform`, `write`) | Measures end-to-end runtime for ingestion stages. Use `quantile=0.95` for alerting. | -| `aoc_violation_total` | Counter | `source`, `tenant`, `code` (`ERR_AOC_00x`) | Total guard violations bucketed by error code. Drives dashboard pills and alert thresholds. | -| `ingestion_signature_verified_total` | Counter | `source`, `tenant`, `result` (`ok`, `fail`, `skipped`) | Tracks signature/checksum verification outcomes. | -| `advisory_revision_count` | Gauge | `source`, `tenant` | Supersedes depth for raw documents; spikes indicate noisy upstream feeds. | -| `verify_runs_total` | Counter | `tenant`, `initiator` (`ui`, `cli`, `api`, `scheduled`) | How many `stella aoc verify` or `/aoc/verify` runs executed. | -| `verify_duration_seconds` | Histogram | `tenant`, `initiator` | Runtime of verification jobs; use P95 to detect regressions. | - -### 1.1 Alerts - -- **Violation spike:** Alert when `increase(aoc_violation_total[15m]) > 0` for critical sources. Page SRE if `code="ERR_AOC_005"` (signature failure) or `ERR_AOC_001` persists > 30 min. -- **Stale ingestion:** Alert when `max_over_time(ingestion_latency_seconds_sum / ingestion_latency_seconds_count)[30m]` exceeds 30 s or if `ingestion_write_total` has no growth for > 60 min. -- **Signature drop:** Warn when `rate(ingestion_signature_verified_total{result="fail"}[1h]) > 0`. 
- ---- - -## 2 · Traces - -### 2.1 Span taxonomy - -| Span name | Parent | Key attributes | -|-----------|--------|----------------| -| `ingest.fetch` | job root span | `source`, `tenant`, `uri`, `contentHash` | -| `ingest.transform` | `ingest.fetch` | `documentType` (`csaf`, `osv`, `vex`), `payloadBytes` | -| `ingest.write` | `ingest.transform` | `collection` (`advisory_raw`, `vex_raw`), `result` (`ok`, `reject`) | -| `aoc.guard` | `ingest.write` | `code` (on violation), `violationCount`, `supersedes` | -| `verify.run` | verification job root | `tenant`, `window.from`, `window.to`, `sources`, `violations` | - -### 2.2 Trace usage - -- Correlate UI dashboard entries with traces via `traceId` surfaced in violation drawers (`docs/ui/console.md`). -- Use `aoc.guard` spans to inspect guard payload snapshots. Sensitive fields are redacted automatically; raw JSON lives in secure logs only. -- For scheduled verification, filter traces by `initiator="scheduled"` to compare runtimes pre/post change. - ---- - -## 3 · Logs - -Structured logs include the following keys (JSON): - -| Key | Description | -|-----|-------------| -| `traceId` | Matches OpenTelemetry trace/span IDs for cross-system correlation. | -| `tenant` | Tenant identifier enforced by Authority middleware. | -| `source.vendor` | Logical source (e.g., `redhat`, `ubuntu`, `osv`, `ghsa`). | -| `upstream.upstreamId` | Vendor-provided ID (CVE, GHSA, etc.). | -| `contentHash` | `sha256:` digest of the raw document. | -| `violation.code` | Present when guard rejects `ERR_AOC_00x`. | -| `verification.window` | Present on `/aoc/verify` job logs. | - -Logs are shipped to the central Loki/Elasticsearch cluster. Use the template query: - -```logql -{app="concelier-web"} | json | violation_code != "" -``` - -to spot active AOC violations. - ---- - -## 4 · Dashboards - -Primary Grafana dashboard: **“AOC Ingestion Health”** (`dashboards/aoc-ingestion.json`). Panels include: - -1. 
**Sources overview:** table fed by `ingestion_write_total` and `ingestion_latency_seconds` (mirrors Console tiles). -2. **Violation trend:** stacked bar chart of `aoc_violation_total` per code. -3. **Signature success rate:** timeseries derived from `ingestion_signature_verified_total`. -4. **Supersedes depth:** gauge showing `advisory_revision_count` P95. -5. **Verification runs:** histogram and latency boxplot using `verify_runs_total` / `verify_duration_seconds`. - -Secondary dashboards: - -- **AOC Alerts (Ops view):** summarises active alerts, last verify run, and links to incident runbook. -- **Offline Mode Dashboard:** fed from Offline Kit imports; highlights snapshot age and queued verification jobs. - -Update `docs/assets/dashboards/` with screenshots when Grafana capture pipeline produces the latest renders. - ---- - -## 5 · Operational workflows - -1. **During ingestion incident:** - - Check Console dashboard for offending sources. - - Pivot to logs using document `contentHash`. - - Re-run `stella sources ingest --dry-run` with problematic payloads to validate fixes. - - After remediation, run `stella aoc verify --since 24h` and confirm exit code `0`. -2. **Scheduled verification:** - - Configure cron job to run `stella aoc verify --format json --export ...`. - - Ship JSON to `aoc-verify` bucket and ingest into metrics using custom exporter. - - Alert on missing exports (no file uploaded within 26 h). -3. **Offline kit validation:** - - Use Offline Dashboard to ensure snapshots contain latest metrics. - - Run verification reports locally and attach to bundle before distribution. - ---- - -## 6 · Offline considerations - -- Metrics exporters bundled with Offline Kit write to local Prometheus snapshots; sync them with central Grafana once connectivity is restored. -- CLI verification reports should be hashed (`sha256sum`) and archived for audit trails. -- Dashboards include offline data sources (`prometheus-offline`) switchable via dropdown. 
- ---- - -## 7 · References - -- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) -- [Architecture overview](../modules/platform/architecture-overview.md) +# AOC Observability Guide + +> **Audience:** Observability Guild, Concelier/Excititor SREs, platform operators. +> **Scope:** Metrics, traces, logs, dashboards, and runbooks introduced as part of the Aggregation-Only Contract (AOC) rollout (Sprint 19). + +This guide captures the canonical signals emitted by Concelier and Excititor once AOC guards are active. It explains how to consume the metrics in dashboards, correlate traces/logs for incident triage, and operate in offline environments. Pair this guide with the [AOC reference](../ingestion/aggregation-only-contract.md) and [architecture overview](../modules/platform/architecture-overview.md). + +--- + +## 1 · Metrics + +| Metric | Type | Labels | Description | +|--------|------|--------|-------------| +| `ingestion_write_total` | Counter | `source`, `tenant`, `result` (`ok`, `reject`, `noop`) | Counts write attempts to `advisory_raw`/`vex_raw`. Rejects correspond to guard failures. | +| `ingestion_latency_seconds` | Histogram | `source`, `tenant`, `phase` (`fetch`, `transform`, `write`) | Measures end-to-end runtime for ingestion stages. Use `quantile=0.95` for alerting. | +| `aoc_violation_total` | Counter | `source`, `tenant`, `code` (`ERR_AOC_00x`) | Total guard violations bucketed by error code. Drives dashboard pills and alert thresholds. | +| `ingestion_signature_verified_total` | Counter | `source`, `tenant`, `result` (`ok`, `fail`, `skipped`) | Tracks signature/checksum verification outcomes. | +| `advisory_revision_count` | Gauge | `source`, `tenant` | Supersedes depth for raw documents; spikes indicate noisy upstream feeds. | +| `verify_runs_total` | Counter | `tenant`, `initiator` (`ui`, `cli`, `api`, `scheduled`) | How many `stella aoc verify` or `/aoc/verify` runs executed. 
| +| `verify_duration_seconds` | Histogram | `tenant`, `initiator` | Runtime of verification jobs; use P95 to detect regressions. | + +### 1.1 Alerts + +- **Violation spike:** Alert when `increase(aoc_violation_total[15m]) > 0` for critical sources. Page SRE if `code="ERR_AOC_005"` (signature failure) or `ERR_AOC_001` persists > 30 min. +- **Stale ingestion:** Alert when `max_over_time(ingestion_latency_seconds_sum / ingestion_latency_seconds_count)[30m]` exceeds 30 s or if `ingestion_write_total` has no growth for > 60 min. +- **Signature drop:** Warn when `rate(ingestion_signature_verified_total{result="fail"}[1h]) > 0`. + +--- + +## 2 · Traces + +### 2.1 Span taxonomy + +| Span name | Parent | Key attributes | +|-----------|--------|----------------| +| `ingest.fetch` | job root span | `source`, `tenant`, `uri`, `contentHash` | +| `ingest.transform` | `ingest.fetch` | `documentType` (`csaf`, `osv`, `vex`), `payloadBytes` | +| `ingest.write` | `ingest.transform` | `collection` (`advisory_raw`, `vex_raw`), `result` (`ok`, `reject`) | +| `aoc.guard` | `ingest.write` | `code` (on violation), `violationCount`, `supersedes` | +| `verify.run` | verification job root | `tenant`, `window.from`, `window.to`, `sources`, `violations` | + +### 2.2 Trace usage + +- Correlate UI dashboard entries with traces via `traceId` surfaced in violation drawers (`docs/ui/console.md`). +- Use `aoc.guard` spans to inspect guard payload snapshots. Sensitive fields are redacted automatically; raw JSON lives in secure logs only. +- For scheduled verification, filter traces by `initiator="scheduled"` to compare runtimes pre/post change. + +--- + +## 3 · Logs + +Structured logs include the following keys (JSON): + +| Key | Description | +|-----|-------------| +| `traceId` | Matches OpenTelemetry trace/span IDs for cross-system correlation. | +| `tenant` | Tenant identifier enforced by Authority middleware. | +| `source.vendor` | Logical source (e.g., `redhat`, `ubuntu`, `osv`, `ghsa`). 
| +| `upstream.upstreamId` | Vendor-provided ID (CVE, GHSA, etc.). | +| `contentHash` | `sha256:` digest of the raw document. | +| `violation.code` | Present when guard rejects `ERR_AOC_00x`. | +| `verification.window` | Present on `/aoc/verify` job logs. | + +Logs are shipped to the central Loki/Elasticsearch cluster. Use the template query: + +```logql +{app="concelier-web"} | json | violation_code != "" +``` + +to spot active AOC violations. + +--- + +## 4 · Dashboards + +Primary Grafana dashboard: **“AOC Ingestion Health”** (`dashboards/aoc-ingestion.json`). Panels include: + +1. **Sources overview:** table fed by `ingestion_write_total` and `ingestion_latency_seconds` (mirrors Console tiles). +2. **Violation trend:** stacked bar chart of `aoc_violation_total` per code. +3. **Signature success rate:** timeseries derived from `ingestion_signature_verified_total`. +4. **Supersedes depth:** gauge showing `advisory_revision_count` P95. +5. **Verification runs:** histogram and latency boxplot using `verify_runs_total` / `verify_duration_seconds`. + +Secondary dashboards: + +- **AOC Alerts (Ops view):** summarises active alerts, last verify run, and links to incident runbook. +- **Offline Mode Dashboard:** fed from Offline Kit imports; highlights snapshot age and queued verification jobs. + +Update `docs/assets/dashboards/` with screenshots when Grafana capture pipeline produces the latest renders. + +--- + +## 5 · Operational workflows + +1. **During ingestion incident:** + - Check Console dashboard for offending sources. + - Pivot to logs using document `contentHash`. + - Re-run `stella sources ingest --dry-run` with problematic payloads to validate fixes. + - After remediation, run `stella aoc verify --since 24h` and confirm exit code `0`. +2. **Scheduled verification:** + - Configure cron job to run `stella aoc verify --format json --export ...`. + - Ship JSON to `aoc-verify` bucket and ingest into metrics using custom exporter. 
+   - Alert on missing exports (no file uploaded within 26 h). +3. **Offline kit validation:** +   - Use Offline Dashboard to ensure snapshots contain latest metrics. +   - Run verification reports locally and attach to bundle before distribution. +4. **Incident toggle audit:** +   - Authority requires `incident_reason` when issuing `obs:incident` tokens; plan your runbooks to capture business justification. +   - Auditors can call `/authority/audit/incident?limit=100` with the tenant header to list recent incident activations, including reason and issuer. + +--- + +## 6 · Offline considerations + +- Metrics exporters bundled with Offline Kit write to local Prometheus snapshots; sync them with central Grafana once connectivity is restored. +- CLI verification reports should be hashed (`sha256sum`) and archived for audit trails. +- Dashboards include offline data sources (`prometheus-offline`) switchable via dropdown. + +--- + +## 7 · References + +- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) +- [Architecture overview](../modules/platform/architecture-overview.md) - [Console AOC dashboard](../ui/console.md) - [CLI AOC commands](../modules/cli/guides/cli-reference.md) - [Concelier architecture](../modules/concelier/architecture.md) - [Excititor architecture](../modules/excititor/architecture.md) - [Scheduler Worker observability guide](../modules/scheduler/operations/worker.md) - ---- - -## 8 · Compliance checklist - -- [ ] Metrics documented with label sets and alert guidance. -- [ ] Tracing span taxonomy aligned with Concelier/Excititor implementation. -- [ ] Log schema matches structured logging contracts (traceId, tenant, source, contentHash). -- [ ] Grafana dashboard references verified and screenshots scheduled. -- [ ] Offline/air-gap workflow captured. -- [ ] Cross-links to AOC reference, console, and CLI docs included. -- [ ] Observability Guild sign-off scheduled (OWNER: @obs-guild, due 2025-10-28).
- ---- - -*Last updated: 2025-10-26 (Sprint 19).* \ No newline at end of file + +--- + +## 8 · Compliance checklist + +- [ ] Metrics documented with label sets and alert guidance. +- [ ] Tracing span taxonomy aligned with Concelier/Excititor implementation. +- [ ] Log schema matches structured logging contracts (traceId, tenant, source, contentHash). +- [ ] Grafana dashboard references verified and screenshots scheduled. +- [ ] Offline/air-gap workflow captured. +- [ ] Cross-links to AOC reference, console, and CLI docs included. +- [ ] Observability Guild sign-off scheduled (OWNER: @obs-guild, due 2025-10-28). + +--- + +*Last updated: 2025-10-26 (Sprint 19).* diff --git a/docs/quickstart.md b/docs/quickstart.md index f0505b6d..d84cf211 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -65,7 +65,10 @@ docker compose --env-file .env -f docker-compose.stella-ops.yml up -d ## 4. Run your first scan (1 min) ```bash -stella auth login --device-code +stella auth login \ + --device-code \ + --audiences scanner,attestor \ + --scopes attestor.verify,attestor.read stella scan image \ --image registry.stella-ops.org/demo/juice-shop:latest \ --sbom-type cyclonedx-json @@ -75,6 +78,8 @@ stella scan image \ - CLI exits non-zero if lattice policy blocks the image; use `stella policy explain --last` for context. - Headers `X-Stella-Quota-Remaining` and the UI banner keep quota usage transparent. +> Need to inspect attestations only? Swap `attestor.verify` for `attestor.read`. Submission endpoints still need `attestor.write`. + ## 5. Verify & explore (1 min) - Check the Console (`https://localhost:8443`) to view findings, VEX evidence, and deterministic replay manifests. 
diff --git a/docs/security/authority-scopes.md b/docs/security/authority-scopes.md index 5798f968..9b9a0774 100644 --- a/docs/security/authority-scopes.md +++ b/docs/security/authority-scopes.md @@ -1,261 +1,320 @@ -# Authority Scopes & Tenancy — AOC Update - -> **Audience:** Authority Core, platform security engineers, DevOps owners. -> **Scope:** Scope taxonomy, tenancy enforcement, rollout guidance for the Aggregation-Only Contract (Sprint 19). - -Authority issues short-lived tokens bound to tenants and scopes. Sprint 19 introduces new scopes to support the AOC guardrails in Concelier and Excititor. This document lists the canonical scope catalogue, describes tenancy propagation, and outlines operational safeguards. - ---- - -## 1 · Scope catalogue (post AOC) - -| Scope | Surface | Purpose | Notes | -|-------|---------|---------|-------| -| `advisory:ingest` | Concelier ingestion APIs | Append-only writes to `advisory_raw` collections. | Requires tenant claim; blocked for global clients. | -| `advisory:read` | `/aoc/verify`, Concelier dashboards, CLI | Read-only access to stored advisories and guard results. | Must be requested with `aoc:verify`; Authority rejects tokens missing the pairing. | -| `vex:ingest` | Excititor ingestion APIs | Append-only writes to `vex_raw`. | Mirrors `advisory:ingest`; tenant required. | -| `vex:read` | `/aoc/verify`, Excititor dashboards, CLI | Read-only access to stored VEX material. | Must be requested with `aoc:verify`; Authority rejects tokens missing the pairing. | -| `aoc:verify` | CLI/CI pipelines, Console verification jobs | Execute Aggregation-Only Contract guard runs. | Always issued with tenant; required whenever requesting `advisory:read`, `vex:read`, or any `signals:*` scope. | -| `signals:read` | Signals API, reachability dashboards | Read-only access to stored reachability signals. | Tenant and `aoc:verify` required; missing pairing returns `invalid_scope`. 
| -| `signals:write` | Signals ingestion APIs | Append-only writes for reachability signals. | Requires tenant and `aoc:verify`; Authority logs `authority.aoc_scope_violation` on mismatch. | -| `signals:admin` | Signals administration tooling | Rotate credentials, manage reachability sensors, purge stale data. | Reserved for automation; `aoc:verify` + tenant mandatory; violations are audited. | -| `graph:write` | Cartographer pipeline | Enqueue graph build/overlay jobs. | Reserved for Cartographer service identity; tenant required. | -| `graph:read` | Graph API, Scheduler overlays, UI | Read graph projections/overlays. | Tenant required; granted to Cartographer, Graph API, Scheduler. | -| `graph:export` | Graph export endpoints | Stream GraphML/JSONL artefacts. | UI/gateway automation only; tenant required. | -| `graph:simulate` | Policy simulation overlays | Trigger what-if overlays on graphs. | Restricted to automation; tenant required. | -| `effective:write` | Policy Engine | Create/update `effective_finding_*` collections. | **Only** the Policy Engine service client may hold this scope; tenant required. | -| `findings:read` | Console, CLI, exports | Read derived findings materialised by Policy Engine. | Shared across tenants with RBAC; tenant claim still enforced. | -| `policy:author` | Policy Studio (Console, CLI) | Author drafts, run lint, execute quick simulations. | Tenant required; typically granted via `role/policy-author`. | -| `policy:review` | Policy Studio review panes | Review drafts, leave comments, request changes. | Tenant required; pair with `policy:simulate` for diff previews. | -| `policy:approve` | Policy Studio approvals | Approve or reject policy drafts. | Tenant required; fresh-auth enforced by Console UI. | -| `policy:operate` | Policy Studio promotion controls | Trigger batch simulations, promotions, and canary runs. | Tenant required; combine with `policy:run`/`policy:activate`. 
| -| `policy:audit` | Policy audit exports | Access immutable policy history, comments, and signatures. | Tenant required; read-only access. | -| `policy:simulate` | Policy Studio / CLI simulations | Run simulations against tenant inventories. | Tenant required; available to authors, reviewers, operators. | -| `vuln:read` | Vuln Explorer API/UI | Read normalized vulnerability data. | Tenant required. | -| `export.viewer` | Export Center APIs | List export profiles/runs, fetch manifests and bundles. | Tenant required; read-only access. | -| `export.operator` | Export Center APIs | Trigger export runs, manage schedules, request verifications. | Tenant required; pair with `export.admin` for retention/encryption changes. | -| `export.admin` | Export Center administrative APIs | Configure retention policies, encryption keys, and scheduling defaults. | Tenant required; token requests must include `export_reason` + `export_ticket`; Authority audits denials. | -| `orch:read` | Orchestrator dashboards/API | Read queued jobs, worker state, and rate-limit telemetry. | Tenant required; never grants mutation rights. | -| `orch:operate` | Orchestrator control actions | Execute pause/resume, retry, sync-now, and backfill operations. Requires tenant assignment **and** `operator_reason`/`operator_ticket` parameters when requesting tokens. | -| `exceptions:read` | Exception service APIs, Console | Enumerate exception definitions, routing templates, and approval state. | Tenant and approval routing metadata required for audit replay. | -| `exceptions:write` | Policy Engine → Authority bridge | Persist exception evaluations, lifecycle events, and status changes. | Tenant required; only service principals should hold this scope. | -| `exceptions:approve` | Console fresh-auth flows, delegated admins | Approve or reject exception requests routed through Authority. | Tenant required; Authority enforces MFA when any bound routing template has `requireMfa=true`. 
| -| `ui.read` | Console base APIs | Retrieve tenant catalog, profile metadata, and token introspection results. | Tenant header required; responses are DPoP-bound and audit logged. | -| `authority:tenants.read` | Console admin workspace | Enumerate configured tenants, default roles, and isolation metadata. | Tenant claim must match header; access audited via `authority.console.tenants.read`. | -| Existing scopes | (e.g., `policy:*`, `concelier.jobs.trigger`) | Unchanged. | `concelier.merge` is retired — clients must request `advisory:ingest`/`advisory:read`; requests continue to fail with `invalid_client`. Review `/docs/security/policy-governance.md` for policy-specific scopes. | - -### 1.1 Scope bundles (roles) - -- **`role/concelier-ingest`** → `advisory:ingest`, `advisory:read`. -- **`role/excititor-ingest`** → `vex:ingest`, `vex:read`. -- **`role/signals-uploader`** → `signals:write`, `signals:read`, `aoc:verify`. -- **`role/aoc-operator`** → `aoc:verify`, `advisory:read`, `vex:read`. -- **`role/policy-engine`** → `effective:write`, `findings:read`. -- **`role/cartographer-service`** → `graph:write`, `graph:read`. -- **`role/graph-gateway`** → `graph:read`, `graph:export`, `graph:simulate`. -- **`role/console`** → `ui.read`, `advisory:read`, `vex:read`, `exceptions:read`, `aoc:verify`, `findings:read`, `orch:read`, `vuln:read`. -- **`role/ui-console-admin`** → `ui.read`, `authority:tenants.read`, `authority:roles.read`, `authority:tokens.read`, `authority:clients.read` (paired with write scopes where required). -- **`role/orch-viewer`** *(Authority role: `Orch.Viewer`)* → `orch:read`. -- **`role/orch-operator`** *(Authority role: `Orch.Operator`)* → `orch:read`, `orch:operate`. -- **`role/policy-author`** → `policy:author`, `policy:read`, `policy:simulate`, `findings:read`. -- **`role/policy-reviewer`** → `policy:review`, `policy:read`, `policy:simulate`, `findings:read`. 
-- **`role/policy-approver`** → `policy:approve`, `policy:review`, `policy:read`, `policy:simulate`, `findings:read`. -- **`role/policy-operator`** → `policy:operate`, `policy:run`, `policy:activate`, `policy:read`, `policy:simulate`, `findings:read`. -- **`role/policy-auditor`** → `policy:audit`, `policy:read`, `policy:simulate`, `findings:read`. -- **`role/export-viewer`** *(Authority role: `Export.Viewer`)* → `export.viewer`. -- **`role/export-operator`** *(Authority role: `Export.Operator`)* → `export.viewer`, `export.operator`. -- **`role/export-admin`** *(Authority role: `Export.Admin`)* → `export.viewer`, `export.operator`, `export.admin`. -- **`role/exceptions-service`** → `exceptions:read`, `exceptions:write`. -- **`role/exceptions-approver`** → `exceptions:read`, `exceptions:approve`. - -Roles are declared per tenant in `authority.yaml`: - -```yaml -tenants: - - name: default - roles: - concelier-ingest: - scopes: [advisory:ingest, advisory:read] - signals-uploader: - scopes: [signals:write, signals:read, aoc:verify] - aoc-operator: - scopes: [aoc:verify, advisory:read, vex:read] - orch-viewer: - scopes: [orch:read] - orch-operator: - scopes: [orch:read, orch:operate] - policy-author: - scopes: [policy:author, policy:read, policy:simulate, findings:read] - policy-reviewer: - scopes: [policy:review, policy:read, policy:simulate, findings:read] - policy-approver: - scopes: [policy:approve, policy:review, policy:read, policy:simulate, findings:read] - policy-operator: - scopes: [policy:operate, policy:run, policy:activate, policy:read, policy:simulate, findings:read] - policy-auditor: - scopes: [policy:audit, policy:read, policy:simulate, findings:read] - policy-engine: - scopes: [effective:write, findings:read] - exceptions-service: - scopes: [exceptions:read, exceptions:write] - exceptions-approver: - scopes: [exceptions:read, exceptions:approve] -``` - -> **MFA requirement:** When any `exceptions.routingTemplates` entry sets `requireMfa: true`, Authority 
refuses to mint tokens containing `exceptions:approve` unless the authenticating identity provider advertises MFA support. Password/OIDC flows produce `authority.password.grant` audit events with `reason="Exception approval scope requires an MFA-capable identity provider."` when the requirement is violated. - ---- - -## 2 · Tenancy enforcement - -### 2.1 Token claims - -Tokens now include: - -- `tenant` claim (string) — required for all ingestion and verification scopes. -- `service_identity` (optional) — e.g., `policy-engine`, `cartographer`. Required when requesting `effective:write` or `graph:write`. -- `delegation_allowed` (boolean) — defaults `false`. Prevents console tokens from delegating ingest scopes. - -Authority rejects requests when: - -- `tenant` is missing while requesting `advisory:ingest`, `advisory:read`, `vex:ingest`, `vex:read`, or `aoc:verify` scopes. -- `aoc:verify` is absent while tokens request `advisory:read`, `vex:read`, or any `signals:*` scope (`invalid_scope` with deterministic message). -- `service_identity != policy-engine` but `effective:write` is present (`ERR_AOC_006` enforcement). -- `service_identity != cartographer` but `graph:write` is present (graph pipeline enforcement). -- Tokens attempt to combine `advisory:ingest` with `effective:write` (separation of duties). -- `exceptions:approve` is requested by a client without a tenant assignment or via an identity provider lacking MFA when `RequireMfaForApprovals=true`. - -### 2.2 Propagation - -- API Gateway forwards `tenant` claim as header (`X-Stella-Tenant`). Services refuse requests lacking the header. -- Concelier/Excititor stamp tenant into raw documents and structured logs. -- Policy Engine copies `tenant` from tokens into `effective_finding_*` collections. -- Exception lifecycle services persist tenant and the selected routing template identifier alongside approval decisions. 
Authority audit events (`authority.password.grant`, `authority.client_credentials.grant`) surface `audit.scopes` and, on denials, a `scope.invalid` metadata entry so operators can trace exception approval attempts without inspecting downstream services. - -### 2.3 Cross-tenant scenarios - -- Platform operators with `tenant:admin` can assume other tenants via `/authority/tenant/switch` if explicitly permitted. -- CLI commands accept `--tenant ` to override environment default; Authority logs tenant switch events (`authority.tenant.switch`). -- Console tenant picker uses delegated token exchange (`/token/exchange`) to obtain scoped tenant tokens without exposing raw credentials. - ---- - -## 3 · Configuration changes - -### 3.1 Authority configuration (`authority.yaml`) - -Add new scopes and optional claims transformations: - -```yaml -security: - scopes: - - name: advisory:ingest - description: Concelier raw ingestion (append-only) - - name: advisory:read - description: Read Concelier advisories and guard verdicts - - name: vex:ingest - description: Excititor raw ingestion - - name: vex:read - description: Read Excititor VEX records - - name: aoc:verify - description: Run AOC verification - - name: effective:write - description: Policy Engine materialisation - - name: findings:read - description: Read derived findings - - name: graph:write - description: Cartographer build submissions - - name: graph:read - description: Read graph overlays - - name: graph:export - description: Export graph artefacts - - name: graph:simulate - description: Run graph what-if simulations - - name: vuln:read - description: Read Vuln Explorer data - claimTransforms: - - match: { scope: "effective:write" } - require: - serviceIdentity: policy-engine - - match: { scope: "graph:write" } - require: - serviceIdentity: cartographer -``` - -### 3.2 Client registration - -Update service clients: - -- `Concelier.WebService` → request `advisory:ingest`, `advisory:read`. 
-- `Excititor.WebService` → request `vex:ingest`, `vex:read`. -- `Policy.Engine` → request `effective:write`, `findings:read`; set `properties.serviceIdentity=policy-engine`. -- `Cartographer.Service` → request `graph:write`, `graph:read`; set `properties.serviceIdentity=cartographer`. -- `Graph API Gateway` → request `graph:read`, `graph:export`, `graph:simulate`; tenant hint required. -- `Console` → request `advisory:read`, `vex:read`, `aoc:verify`, `findings:read`, `vuln:read` plus existing UI scopes. -- `CLI automation` → request `aoc:verify`, `advisory:read`, `vex:read` as needed. - -Client definition snippet: - -```yaml -clients: - - clientId: concelier-web - grantTypes: [client_credentials] - scopes: [advisory:ingest, advisory:read] - tenants: [default] - - clientId: policy-engine - grantTypes: [client_credentials] - scopes: [effective:write, findings:read] - properties: - serviceIdentity: policy-engine - - clientId: cartographer-service - grantTypes: [client_credentials] - scopes: [graph:write, graph:read] - properties: - serviceIdentity: cartographer -``` - ---- - -## 4 · Operational safeguards - -- **Audit events:** Authority emits `authority.scope.granted` and `authority.scope.revoked` events with `scope` and `tenant`. Monitor for unexpected grants. -- **Rate limiting:** Apply stricter limits on `/token` endpoints for clients requesting `advisory:ingest` or `vex:ingest` to mitigate brute-force ingestion attempts. -- **Incident response:** Link AOC alerts to Authority audit logs to confirm whether violations come from expected identities. -- **Rotation:** Rotate ingest client secrets alongside guard deployments; add rotation steps to `ops/authority-key-rotation.md`. -- **Testing:** Integration tests must fail if tokens lacking `tenant` attempt ingestion; add coverage in Concelier/Excititor smoke suites (see `CONCELIER-CORE-AOC-19-013`). - ---- - -## 5 · Offline & air-gap notes - -- Offline Kit bundles include tenant-scoped service credentials. 
Ensure ingest bundles ship without `advisory:ingest` scopes unless strictly required. -- CLI verification in offline environments uses pre-issued `aoc:verify` tokens; document expiration and renewal processes. -- Authority replicas in air-gapped environments should restrict scope issuance to known tenants and log all `/token` interactions for later replay. - ---- - -## 6 · References - -- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) -- [Architecture overview](../modules/platform/architecture-overview.md) -- [Concelier architecture](../modules/concelier/architecture.md) -- [Excititor architecture](../modules/excititor/architecture.md) -- [Policy governance](policy-governance.md) +# Authority Scopes & Tenancy — AOC Update + +> **Audience:** Authority Core, platform security engineers, DevOps owners. +> **Scope:** Scope taxonomy, tenancy enforcement, rollout guidance for the Aggregation-Only Contract (Sprint 19). + +Authority issues short-lived tokens bound to tenants and scopes. Sprint 19 introduces new scopes to support the AOC guardrails in Concelier and Excititor. This document lists the canonical scope catalogue, describes tenancy propagation, and outlines operational safeguards. + +--- + +## 1 · Scope catalogue (post AOC) + +| Scope | Surface | Purpose | Notes | +|-------|---------|---------|-------| +| `advisory:ingest` | Concelier ingestion APIs | Append-only writes to `advisory_raw` collections. | Requires tenant claim; blocked for global clients. | +| `advisory:read` | `/aoc/verify`, Concelier dashboards, CLI | Read-only access to stored advisories and guard results. | Must be requested with `aoc:verify`; Authority rejects tokens missing the pairing. | +| `advisory-ai:view` | Advisory AI dashboards, remediation exports | Read-only access to Advisory AI artefacts (summaries, remediation bundles). | Requires `aoc:verify`; defaults granted to `advisory-ai-viewer` role. 
| +| `advisory-ai:operate` | Advisory AI remote inference workflows | Submit Advisory AI inference/remediation jobs and retrieve anonymized outputs. | Requires `aoc:verify`; remote inference must be enabled and tenant-consented. | +| `advisory-ai:admin` | Advisory AI configuration APIs, Console admin | Manage Advisory AI profiles, remote inference toggles, and audit exports. | Restricted to platform admins; requires `aoc:verify`; all operations audited. | +| `vex:ingest` | Excititor ingestion APIs | Append-only writes to `vex_raw`. | Mirrors `advisory:ingest`; tenant required. | +| `vex:read` | `/aoc/verify`, Excititor dashboards, CLI | Read-only access to stored VEX material. | Must be requested with `aoc:verify`; Authority rejects tokens missing the pairing. | +| `aoc:verify` | CLI/CI pipelines, Console verification jobs | Execute Aggregation-Only Contract guard runs. | Always issued with tenant; required whenever requesting `advisory:read`, `vex:read`, or any `signals:*` scope. | +| `signals:read` | Signals API, reachability dashboards | Read-only access to stored reachability signals. | Tenant and `aoc:verify` required; missing pairing returns `invalid_scope`. | +| `signals:write` | Signals ingestion APIs | Append-only writes for reachability signals. | Requires tenant and `aoc:verify`; Authority logs `authority.aoc_scope_violation` on mismatch. | +| `signals:admin` | Signals administration tooling | Rotate credentials, manage reachability sensors, purge stale data. | Reserved for automation; `aoc:verify` + tenant mandatory; violations are audited. | +| `airgap:status:read` | AirGap Controller status API, Console status pane, CLI | Read sealed state, staleness metrics, and import history. | Tenant required; defaulted via `airgap-viewer` and Console bundles. | +| `airgap:import` | AirGap Importer APIs, CLI workflows | Import offline bundles into catalog and object storage. | Tenant required; issued to `airgap-operator`; operations are fully audited. 
| +| `airgap:seal` | AirGap Controller sealing endpoints | Seal or unseal installations and confirm sealing tickets. | Tenant required; restrict to `airgap-admin`; pair with operator ticket policy. | +| `obs:read` | Observability API, Console dashboards | Read observability dashboards, SLO digests, and incident overlays. | Tenant required; typically paired with `timeline:read` and `evidence:read`. | +| `timeline:read` | Timeline API, observability consoles, CLI | Read incident timeline entries and annotations. | Issue alongside `obs:read` for analyst workflows; tenant enforced. | +| `timeline:write` | Timeline ingest workers, Scheduler | Append deterministic incident timeline events and annotations. | Automation only; Authority audits source client + tenant. | +| `evidence:create` | Evidence Locker ingestion APIs | Create evidence items, upload artefacts, and link attestations. | Append-only; emits `authority.evidence.create` audit trail per item. | +| `evidence:read` | Evidence Locker APIs, Offline Kit exports | Read evidence items, artefacts, and linkage metadata. | Analysts/legal staff; tenant enforced. | +| `evidence:hold` | Evidence Locker legal hold endpoints | Apply or release legal holds on evidence items. | Restricted to compliance/legal operators; audited. | +| `attest:read` | Attestation evidence explorer, Observability UI | Read attestation records, DSSE bundles, and verification proofs. | Read-only counterpart to signer/attestor pipelines; tenant required. | +| `obs:incident` | Incident bridge automation, Console | Toggle incident mode, extend retention, enable emergency telemetry. | Requires fresh auth + `incident_reason`, no refresh; audit via `/authority/audit/incident`. | +> **Observability scope bundle (added 2 Nov 2025).** `obs:read`, `timeline:read`, `timeline:write`, `evidence:create`, `evidence:read`, `evidence:hold`, `attest:read`, and `obs:incident` always require a tenant claim. 
Gateways must forward `X-StellaOps-Tenant` (or the configured tenant header) or resource servers will reject the request with `tenant_header_missing`. The sample `authority.yaml` assigns these scopes via the `observability-*` roles; reuse those roles when onboarding new tenants to keep bundles consistent. +> **Fresh auth enforcement (Sprint 55).** Authority-issued `obs:incident` tokens must carry both `incident_reason` and `auth_time`. Resource servers enforce a five-minute freshness window and log `incident.reason`, `incident.auth_time`, and `incident.fresh_auth_satisfied` in `authority.resource.authorize` audit events, enabling `/authority/audit/incident` to verify activation reason and freshness. +| `graph:write` | Cartographer pipeline | Enqueue graph build/overlay jobs. | Reserved for Cartographer service identity; tenant required. | +| `graph:read` | Graph API, Scheduler overlays, UI | Read graph projections/overlays. | Tenant required; granted to Cartographer, Graph API, Scheduler. | +| `graph:export` | Graph export endpoints | Stream GraphML/JSONL artefacts. | UI/gateway automation only; tenant required. | +| `graph:simulate` | Policy simulation overlays | Trigger what-if overlays on graphs. | Restricted to automation; tenant required. | +| `effective:write` | Policy Engine | Create/update `effective_finding_*` collections. | **Only** the Policy Engine service client may hold this scope; tenant required. | +| `findings:read` | Console, CLI, exports | Read derived findings materialised by Policy Engine. | Shared across tenants with RBAC; tenant claim still enforced. | +| `policy:author` | Policy Studio (Console, CLI) | Author drafts, run lint, execute quick simulations. | Tenant required; typically granted via `role/policy-author`. | +| `policy:review` | Policy Studio review panes | Review drafts, leave comments, request changes. | Tenant required; pair with `policy:simulate` for diff previews. 
| +| `policy:approve` | Policy Studio approvals | Approve or reject policy drafts. | Tenant required; fresh-auth enforced by Console UI. | +| `policy:operate` | Policy Studio promotion controls | Trigger batch simulations, promotions, and canary runs. | Tenant required; combine with `policy:run`/`policy:activate`. | +| `policy:audit` | Policy audit exports | Access immutable policy history, comments, and signatures. | Tenant required; read-only access. | +| `policy:simulate` | Policy Studio / CLI simulations | Run simulations against tenant inventories. | Tenant required; available to authors, reviewers, operators. | +| `vuln:read` | Vuln Explorer API/UI | Read normalized vulnerability data. | Tenant required. | +| `export.viewer` | Export Center APIs | List export profiles/runs, fetch manifests and bundles. | Tenant required; read-only access. | +| `export.operator` | Export Center APIs | Trigger export runs, manage schedules, request verifications. | Tenant required; pair with `export.admin` for retention/encryption changes. | +| `export.admin` | Export Center administrative APIs | Configure retention policies, encryption keys, and scheduling defaults. | Tenant required; token requests must include `export_reason` + `export_ticket`; Authority audits denials. | +| `notify.viewer` | Notifier APIs, Console, CLI | Read notifier rules, channel configuration, and delivery history. | Tenant required; responses are redacted for cross-tenant data; no mutation rights. | +| `notify.operator` | Notifier APIs, automation hooks | Manage notifier rules, trigger tests, acknowledge/resolve incidents. | Tenant required; Authority enforces operator metadata (`notify_reason`, `notify_ticket`) when configured. | +| `notify.admin` | Notifier administrative APIs | Configure channels, secrets, quiet hours, and escalation policies. | Tenant required; Authority audits secret rotations and escalation changes. 
| +| `notify.escalate` | Notifier ack/escalation bridge | Issue escalation-bearing ack tokens and honour escalation acknowledgements. | Tenant required; Authority enforces pairing with `notify.admin` when escalation mutates platform state. | +| `orch:read` | Orchestrator dashboards/API | Read queued jobs, worker state, and rate-limit telemetry. | Tenant required; never grants mutation rights. | +| `orch:operate` | Orchestrator control actions | Execute pause/resume, retry, sync-now, and backfill operations. | Requires tenant assignment **and** `operator_reason`/`operator_ticket` parameters when requesting tokens. | +| `orch:quota` | Orchestrator quota administration | Adjust per-tenant quotas, burst ceilings, and backfill allowances. | Requires tenant assignment and `quota_reason` (≤256 chars); optional `quota_ticket` (≤128 chars) is recorded for audit. | +| `packs.read` | Packs Registry, Task Runner | Discover Task Packs, download manifests, and inspect metadata. | Tenant claim required; Authority rejects cross-tenant tokens and tags violations with `authority.pack_scope_violation`. | +| `packs.write` | Packs Registry APIs | Publish or update Task Packs (requires signed bundles). | Tenant claim required; typically restricted to registry automation; violations surface via `authority.pack_scope_violation`. | +| `packs.run` | Task Runner | Execute Task Packs via CLI or Task Runner APIs. | Tenant claim required; Task Runner enforces tenant isolation; Authority emits `authority.pack_scope_violation` when missing. | +| `packs.approve` | Task Runner approvals | Fulfil Task Pack approval gates and resume runs. | Tenant claim required; approval events audited with run ID and Pack scope violations tagged as `authority.pack_scope_violation`. | +| `exceptions:read` | Exception service APIs, Console | Enumerate exception definitions, routing templates, and approval state. | Tenant and approval routing metadata required for audit replay. 
| +| `exceptions:write` | Policy Engine → Authority bridge | Persist exception evaluations, lifecycle events, and status changes. | Tenant required; only service principals should hold this scope. | +| `exceptions:approve` | Console fresh-auth flows, delegated admins | Approve or reject exception requests routed through Authority. | Tenant required; Authority enforces MFA when any bound routing template has `requireMfa=true`. | +| `ui.read` | Console base APIs | Retrieve tenant catalog, profile metadata, and token introspection results. | Tenant header required; responses are DPoP-bound and audit logged. | +| `authority:tenants.read` | Console admin workspace | Enumerate configured tenants, default roles, and isolation metadata. | Tenant claim must match header; access audited via `authority.console.tenants.read`. | +| Existing scopes | (e.g., `policy:*`, `concelier.jobs.trigger`) | Unchanged. | `concelier.merge` is retired — clients must request `advisory:ingest`/`advisory:read`; requests continue to fail with `invalid_client`. Review `/docs/security/policy-governance.md` for policy-specific scopes. | + +### 1.1 Scope bundles (roles) + +- **`role/concelier-ingest`** → `advisory:ingest`, `advisory:read`. +- **`role/excititor-ingest`** → `vex:ingest`, `vex:read`. +- **`role/signals-uploader`** → `signals:write`, `signals:read`, `aoc:verify`. +- **`role/aoc-operator`** → `aoc:verify`, `advisory:read`, `vex:read`. +- **`role/policy-engine`** → `effective:write`, `findings:read`. +- **`role/cartographer-service`** → `graph:write`, `graph:read`. +- **`role/graph-gateway`** → `graph:read`, `graph:export`, `graph:simulate`. +- **`role/console`** → `ui.read`, `advisory:read`, `vex:read`, `exceptions:read`, `aoc:verify`, `findings:read`, `airgap:status:read`, `orch:read`, `vuln:read`. +- **`role/ui-console-admin`** → `ui.read`, `authority:tenants.read`, `authority:roles.read`, `authority:tokens.read`, `authority:clients.read` (paired with write scopes where required). 
+- **`role/orch-viewer`** *(Authority role: `Orch.Viewer`)* → `orch:read`. +- **`role/orch-operator`** *(Authority role: `Orch.Operator`)* → `orch:read`, `orch:operate`. +- **`role/orch-admin`** *(Authority role: `Orch.Admin`)* → `orch:read`, `orch:operate`, `orch:quota`. +- **`role/packs-runner`** → `packs.read`, `packs.run`. +- **`role/packs-publisher`** → `packs.read`, `packs.write`. +- **`role/packs-approver`** → `packs.read`, `packs.approve`. +- **`role/policy-author`** → `policy:author`, `policy:read`, `policy:simulate`, `findings:read`. +- **`role/policy-reviewer`** → `policy:review`, `policy:read`, `policy:simulate`, `findings:read`. +- **`role/policy-approver`** → `policy:approve`, `policy:review`, `policy:read`, `policy:simulate`, `findings:read`. +- **`role/policy-operator`** → `policy:operate`, `policy:run`, `policy:activate`, `policy:read`, `policy:simulate`, `findings:read`. +- **`role/policy-auditor`** → `policy:audit`, `policy:read`, `policy:simulate`, `findings:read`. +- **`role/export-viewer`** *(Authority role: `Export.Viewer`)* → `export.viewer`. +- **`role/export-operator`** *(Authority role: `Export.Operator`)* → `export.viewer`, `export.operator`. +- **`role/export-admin`** *(Authority role: `Export.Admin`)* → `export.viewer`, `export.operator`, `export.admin`. +- **`role/notify-viewer`** *(Authority role: `Notify.Viewer`)* → `notify.viewer`. +- **`role/notify-operator`** *(Authority role: `Notify.Operator`)* → `notify.viewer`, `notify.operator`. +- **`role/notify-admin`** *(Authority role: `Notify.Admin`)* → `notify.viewer`, `notify.operator`, `notify.admin`. +- **`role/observability-viewer`** *(Authority role: `Observability.Viewer`)* → `obs:read`, `timeline:read`, `evidence:read`, `attest:read`. +- **`role/observability-investigator`** *(Authority role: `Observability.Investigator`)* → `obs:read`, `timeline:read`, `timeline:write`, `evidence:read`, `evidence:create`, `attest:read`. 
+- **`role/observability-legal`** *(Authority role: `Observability.Legal`)* → `evidence:read`, `evidence:hold`. +- **`role/observability-incident-commander`** *(Authority role: `Observability.IncidentCommander`)* → `obs:read`, `obs:incident`, `timeline:read`, `timeline:write`, `evidence:create`, `evidence:read`, `attest:read`. +- **`role/airgap-viewer`** → `airgap:status:read`. +- **`role/airgap-operator`** → `airgap:status:read`, `airgap:import`. +- **`role/airgap-admin`** → `airgap:status:read`, `airgap:import`, `airgap:seal`. +- **`role/exceptions-service`** → `exceptions:read`, `exceptions:write`. +- **`role/exceptions-approver`** → `exceptions:read`, `exceptions:approve`. + +Roles are declared per tenant in `authority.yaml`: + +```yaml +tenants: + - name: default + roles: + concelier-ingest: + scopes: [advisory:ingest, advisory:read] + signals-uploader: + scopes: [signals:write, signals:read, aoc:verify] + aoc-operator: + scopes: [aoc:verify, advisory:read, vex:read] + orch-viewer: + scopes: [orch:read] + orch-operator: + scopes: [orch:read, orch:operate] + policy-author: + scopes: [policy:author, policy:read, policy:simulate, findings:read] + policy-reviewer: + scopes: [policy:review, policy:read, policy:simulate, findings:read] + policy-approver: + scopes: [policy:approve, policy:review, policy:read, policy:simulate, findings:read] + policy-operator: + scopes: [policy:operate, policy:run, policy:activate, policy:read, policy:simulate, findings:read] + policy-auditor: + scopes: [policy:audit, policy:read, policy:simulate, findings:read] + policy-engine: + scopes: [effective:write, findings:read] + exceptions-service: + scopes: [exceptions:read, exceptions:write] + exceptions-approver: + scopes: [exceptions:read, exceptions:approve] + notify-viewer: + scopes: [notify.viewer] + notify-operator: + scopes: [notify.viewer, notify.operator] + notify-admin: + scopes: [notify.viewer, notify.operator, notify.admin] + observability-viewer: + scopes: [obs:read, 
timeline:read, evidence:read, attest:read] + observability-investigator: + scopes: [obs:read, timeline:read, timeline:write, evidence:read, evidence:create, attest:read] + observability-legal: + scopes: [evidence:read, evidence:hold] + observability-incident-commander: + scopes: [obs:read, obs:incident, timeline:read, timeline:write, evidence:create, evidence:read, attest:read] +``` + +> **MFA requirement:** When any `exceptions.routingTemplates` entry sets `requireMfa: true`, Authority refuses to mint tokens containing `exceptions:approve` unless the authenticating identity provider advertises MFA support. Password/OIDC flows produce `authority.password.grant` audit events with `reason="Exception approval scope requires an MFA-capable identity provider."` when the requirement is violated. + +--- + +## 2 · Tenancy enforcement + +### 2.1 Token claims + +Tokens now include: + +- `tenant` claim (string) — required for all ingestion and verification scopes. +- `service_identity` (optional) — e.g., `policy-engine`, `cartographer`. Required when requesting `effective:write` or `graph:write`. +- `delegation_allowed` (boolean) — defaults `false`. Prevents console tokens from delegating ingest scopes. + +Authority rejects requests when: + +- `tenant` is missing while requesting `advisory:ingest`, `advisory:read`, `vex:ingest`, `vex:read`, or `aoc:verify` scopes. +- `aoc:verify` is absent while tokens request `advisory:read`, `vex:read`, or any `signals:*` scope (`invalid_scope` with deterministic message). +- `service_identity != policy-engine` but `effective:write` is present (`ERR_AOC_006` enforcement). +- `service_identity != cartographer` but `graph:write` is present (graph pipeline enforcement). +- Tokens attempt to combine `advisory:ingest` with `effective:write` (separation of duties). +- `exceptions:approve` is requested by a client without a tenant assignment or via an identity provider lacking MFA when `RequireMfaForApprovals=true`. 
+
+### 2.2 Propagation
+
+- API Gateway forwards `tenant` claim as header (`X-Stella-Tenant`). Services refuse requests lacking the header.
+- Concelier/Excititor stamp tenant into raw documents and structured logs.
+- Policy Engine copies `tenant` from tokens into `effective_finding_*` collections.
+- Exception lifecycle services persist tenant and the selected routing template identifier alongside approval decisions. Authority audit events (`authority.password.grant`, `authority.client_credentials.grant`) surface `audit.scopes` and, on denials, a `scope.invalid` metadata entry so operators can trace exception approval attempts without inspecting downstream services.
+
+### 2.3 Cross-tenant scenarios
+
+- Platform operators with `tenant:admin` can assume other tenants via `/authority/tenant/switch` if explicitly permitted.
+- CLI commands accept `--tenant <tenant-id>` to override environment default; Authority logs tenant switch events (`authority.tenant.switch`).
+- Console tenant picker uses delegated token exchange (`/token/exchange`) to obtain scoped tenant tokens without exposing raw credentials.
+ +--- + +## 3 · Configuration changes + +### 3.1 Authority configuration (`authority.yaml`) + +Add new scopes and optional claims transformations: + +```yaml +security: + scopes: + - name: advisory:ingest + description: Concelier raw ingestion (append-only) + - name: advisory:read + description: Read Concelier advisories and guard verdicts + - name: vex:ingest + description: Excititor raw ingestion + - name: vex:read + description: Read Excititor VEX records + - name: aoc:verify + description: Run AOC verification + - name: airgap:status:read + description: Read air-gap sealing status and staleness indicators + - name: airgap:import + description: Import offline bundles into the air-gapped catalog + - name: airgap:seal + description: Seal or unseal the installation during change control + - name: effective:write + description: Policy Engine materialisation + - name: findings:read + description: Read derived findings + - name: graph:write + description: Cartographer build submissions + - name: graph:read + description: Read graph overlays + - name: graph:export + description: Export graph artefacts + - name: graph:simulate + description: Run graph what-if simulations + - name: vuln:read + description: Read Vuln Explorer data + claimTransforms: + - match: { scope: "effective:write" } + require: + serviceIdentity: policy-engine + - match: { scope: "graph:write" } + require: + serviceIdentity: cartographer +``` + +### 3.2 Client registration + +Update service clients: + +- `Concelier.WebService` → request `advisory:ingest`, `advisory:read`. +- `Excititor.WebService` → request `vex:ingest`, `vex:read`. +- `Policy.Engine` → request `effective:write`, `findings:read`; set `properties.serviceIdentity=policy-engine`. +- `Cartographer.Service` → request `graph:write`, `graph:read`; set `properties.serviceIdentity=cartographer`. +- `Graph API Gateway` → request `graph:read`, `graph:export`, `graph:simulate`; tenant hint required. 
+- `Console` → request `advisory:read`, `vex:read`, `aoc:verify`, `findings:read`, `vuln:read` plus existing UI scopes. +- `CLI automation` → request `aoc:verify`, `advisory:read`, `vex:read` as needed. + +Client definition snippet: + +```yaml +clients: + - clientId: concelier-web + grantTypes: [client_credentials] + scopes: [advisory:ingest, advisory:read] + tenants: [default] + - clientId: policy-engine + grantTypes: [client_credentials] + scopes: [effective:write, findings:read] + properties: + serviceIdentity: policy-engine + - clientId: cartographer-service + grantTypes: [client_credentials] + scopes: [graph:write, graph:read] + properties: + serviceIdentity: cartographer +``` + +--- + +## 4 · Operational safeguards + +- **Audit events:** Authority emits `authority.scope.granted` and `authority.scope.revoked` events with `scope` and `tenant`. Monitor for unexpected grants. +- **Rate limiting:** Apply stricter limits on `/token` endpoints for clients requesting `advisory:ingest` or `vex:ingest` to mitigate brute-force ingestion attempts. +- **Incident response:** Link AOC alerts to Authority audit logs to confirm whether violations come from expected identities. +- **Rotation:** Rotate ingest client secrets alongside guard deployments; add rotation steps to `ops/authority-key-rotation.md`. +- **Testing:** Integration tests must fail if tokens lacking `tenant` attempt ingestion; add coverage in Concelier/Excititor smoke suites (see `CONCELIER-CORE-AOC-19-013`). + +--- + +## 5 · Offline & air-gap notes + +- Offline Kit bundles include tenant-scoped service credentials. Ensure ingest bundles ship without `advisory:ingest` scopes unless strictly required. +- CLI verification in offline environments uses pre-issued `aoc:verify` tokens; document expiration and renewal processes. +- Authority replicas in air-gapped environments should restrict scope issuance to known tenants and log all `/token` interactions for later replay. 
+ +--- + +## 6 · References + +- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) +- [Architecture overview](../modules/platform/architecture-overview.md) +- [Concelier architecture](../modules/concelier/architecture.md) +- [Excititor architecture](../modules/excititor/architecture.md) +- [Policy governance](policy-governance.md) - [Authority key rotation playbook](../modules/authority/operations/key-rotation.md) - ---- - -## 7 · Compliance checklist - -- [ ] Scope catalogue updated in Authority configuration templates. -- [ ] Role mappings documented for each tenant profile. -- [ ] Claim transforms enforce `serviceIdentity` for `effective:write`. -- [ ] Claim transforms enforce `serviceIdentity` for `graph:write`. -- [ ] Concelier/Excititor smoke tests cover missing tenant rejection. -- [ ] Offline kit credentials reviewed for least privilege. -- [ ] Audit/monitoring guidance validated with Observability Guild. -- [ ] Authority Core sign-off recorded (owner: @authority-core, due 2025-10-28). - ---- - -*Last updated: 2025-10-27 (Sprint 19).* \ No newline at end of file + +--- + +## 7 · Compliance checklist + +- [ ] Scope catalogue updated in Authority configuration templates. +- [ ] Role mappings documented for each tenant profile. +- [ ] Claim transforms enforce `serviceIdentity` for `effective:write`. +- [ ] Claim transforms enforce `serviceIdentity` for `graph:write`. +- [ ] Concelier/Excititor smoke tests cover missing tenant rejection. +- [ ] Offline kit credentials reviewed for least privilege. +- [ ] Audit/monitoring guidance validated with Observability Guild. +- [ ] Authority Core sign-off recorded (owner: @authority-core, due 2025-10-28). 
+ +--- + +*Last updated: 2025-10-27 (Sprint 19).* diff --git a/docs/security/console-security.md b/docs/security/console-security.md index 21f3b39d..8f690ded 100644 --- a/docs/security/console-security.md +++ b/docs/security/console-security.md @@ -52,14 +52,16 @@ The console client is registered in Authority as `console-ui` with scopes: | Policy approvals | `policy:read`, `policy:review`, `policy:approve`, `policy:operate`, `policy:simulate` | `policy:operate` (promote/activate/run) requires fresh-auth. | | Observability panes (status ticker, telemetry) | `ui.telemetry`, `scheduler:runs.read`, `advisory:read`, `vex:read` | `ui.telemetry` drives OTLP export toggles. | | Orchestrator dashboard (queues, workers, rate limits) | `orch:read` | Provision via `Orch.Viewer` role; read-only access to job state and telemetry. | -| Orchestrator control actions (pause/resume, retry, sync-now, backfill) | `orch:operate` (plus `orch:read`) | CLI/Console must request tokens with `operator_reason` and `operator_ticket`; Authority denies issuance when either value is missing. | +| Orchestrator control actions (pause/resume, retry, sync-now, backfill) | `orch:operate` (plus `orch:read`) | CLI/Console must request tokens with `operator_reason` and `operator_ticket`; Authority denies issuance when either value is missing. | +| Orchestrator quota & burst controls | `orch:quota` (plus `orch:read`, `orch:operate`) | Tokens must include `quota_reason` (≤256 chars); optional `quota_ticket` (≤128 chars) is captured for audit. | | Downloads parity (SBOM, attestation) | `downloads:read`, `attestation:verify`, `sbom:export` | Console surfaces digests only; download links require CLI parity for write operations. | Guidance: -- **Role mapping**: Provision Authority role `role/ui-console-admin` encapsulating the admin scopes above. 
-- **Orchestrator viewers**: Assign Authority role `role/orch-viewer` (Authority role string `Orch.Viewer`) to consoles that require read-only access to Orchestrator telemetry. -- **Orchestrator operators**: Assign Authority role `role/orch-operator` (Authority role string `Orch.Operator`) to identities allowed to pause/resume or backfill. Tokens must include `operator_reason` (≤256 chars) and `operator_ticket` (≤128 chars); Authority records the values in audit logs. +- **Role mapping**: Provision Authority role `role/ui-console-admin` encapsulating the admin scopes above. +- **Orchestrator viewers**: Assign Authority role `role/orch-viewer` (Authority role string `Orch.Viewer`) to consoles that require read-only access to Orchestrator telemetry. +- **Orchestrator operators**: Assign Authority role `role/orch-operator` (Authority role string `Orch.Operator`) to identities allowed to pause/resume or backfill. Tokens must include `operator_reason` (≤256 chars) and `operator_ticket` (≤128 chars); Authority records the values in audit logs. +- **Orchestrator admins**: Assign Authority role `role/orch-admin` (Authority role string `Orch.Admin`) to the handful of identities permitted to raise/lower quotas or trigger bulk backfills. Tokens must include `quota_reason` (≤256 chars); provide `quota_ticket` (≤128 chars) when available so Authority audit streams capture the change record. - **Tenant enforcement**: Gateway injects `X-Stella-Tenant` from token claims. Requests missing the header must be rejected by downstream services (Concelier, Excititor, Policy Engine) and logged. - **Separation of duties**: Never grant `ui.admin` and `policy:approve`/`policy:operate` to the same human role without SOC sign-off; automation accounts should use least-privilege dedicated clients. 
diff --git a/docs/security/pack-signing-and-rbac.md b/docs/security/pack-signing-and-rbac.md index 87dc35e1..3ced9f83 100644 --- a/docs/security/pack-signing-and-rbac.md +++ b/docs/security/pack-signing-and-rbac.md @@ -1,165 +1,165 @@ -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -# Pack Signing & RBAC Controls - -This document defines signing, verification, and authorization requirements for Task Packs across the CLI, Packs Registry, Task Runner, and Offline Kit. It aligns with Authority sprint tasks (`AUTH-PACKS-41-001`, `AUTH-PACKS-43-001`) and security guild expectations. - ---- - -## 1 · Threat Model Highlights - -| Threat | Mitigation | -|--------|------------| -| Unsigned or tampered pack uploaded to registry | Mandatory cosign/DSSE verification before acceptance. | -| Unauthorized user publishing or promoting packs | Authority scopes (`Packs.Write`) + registry policy checks. | -| Privilege escalation during approvals | Approval gates require `Packs.Approve` + audit logging; fresh-auth recommended. | -| Secret exfiltration via pack steps | Secrets injection sandbox with redaction, sealed-mode network guardrails, evidence review. | -| Replay of old approval tokens | Approval payloads carry plan hash + expiry; Task Runner rejects mismatches. | -| Malicious pack in Offline Kit | Mirror verification using signed manifest and DSSE provenance. | - ---- - -## 2 · Signing Requirements - -- **Cosign** signatures required for all bundles. Keys can be: - - Keyless (Fulcio OIDC). - - KMS-backed (HSM, cloud KMS). - - Offline keys stored in secure vault (air-gapped mode). -- **DSSE Attestations** recommended to embed: - - Manifest digest. - - Build metadata (repo, commit, CI run). - - CLI version (`stella/pack`). -- Signatures stored alongside bundle in registry object storage. -- `stella pack push` refuses to publish without signature (unless `--insecure-publish` used in dev). 
-- Registry enforces trust policy: - -| Policy | Description | -|--------|-------------| -| `anyOf` | Accepts any key in configured trust store. | -| `keyRef` | Accepts specific key ID (`kid`). | -| `oidcIssuer` | Accepts Fulcio certificates from allowed issuers (e.g., `https://fulcio.sigstore.dev`). | -| `threshold` | Requires N-of-M signatures (future release). | - ---- - -## 3 · RBAC & Scopes - -Authority exposes pack-related scopes: - -| Scope | Description | -|-------|-------------| -| `Packs.Read` | View packs, download manifests/bundles. | -| `Packs.Write` | Publish, promote, deprecate packs. | -| `Packs.Run` | Execute packs (Task Runner, CLI). | -| `Packs.Approve` | Approve pack gates, override tenant visibility. | - -### 3.1 Role Mapping - -| Role | Scopes | Use Cases | -|------|--------|-----------| -| `pack.viewer` | `Packs.Read` | Inspect packs, plan runs. | -| `pack.publisher` | `Packs.Read`, `Packs.Write` | Publish new versions, manage channels. | -| `pack.operator` | `Packs.Read`, `Packs.Run` | Execute packs, monitor runs. | -| `pack.approver` | `Packs.Read`, `Packs.Approve` | Fulfil approvals, authorize promotions. | -| `pack.admin` | All | Full lifecycle management (rare). | - -Roles are tenant-scoped; cross-tenant access requires explicit addition. - -### 3.2 CLI Enforcement - -- CLI requests scopes based on command: - - `stella pack plan` → `Packs.Read`. - - `stella pack run` → `Packs.Run`. - - `stella pack push` → `Packs.Write`. - - `stella pack approve` → `Packs.Approve`. -- Offline tokens must include same scopes; CLI warns if missing. - ---- - -## 4 · Approvals & Fresh Auth - -- Approval commands require recent fresh-auth (< 5 minutes). CLI prompts automatically; Console enforces via Authority. -- Approval payload includes: - - `runId` - - `gateId` - - `planHash` - - `approver` - - `timestamp` -- Task Runner logs approval event and verifies plan hash to prevent rerouting. 
- ---- - -## 5 · Secret Management - -- Secrets defined in pack manifest map to Authority secret providers (e.g., HSM, Vault). -- Task Runner obtains secrets using service account with scoped access; CLI may prompt or read from profile. -- Secret audit trail: - - `secretRequested` event with reason, pack, step. - - `secretDelivered` event omitted (only aggregate metrics) to avoid leakage. - - Evidence bundle includes hashed secret metadata (no values). - -Sealed mode requires secrets to originate from sealed vault; external endpoints blocked. - ---- - -## 6 · Audit & Evidence - -- Registry, Task Runner, and Authority emit audit events to central timeline. -- Required events: - - `pack.version.published` - - `pack.version.promoted` - - `pack.run.started/completed` - - `pack.approval.requested/granted` - - `pack.secret.requested` -- Evidence Locker stores DSSE attestations and run bundles for 90 days (configurable). -- Auditors can use `stella pack audit --run ` to retrieve audit trail. - ---- - -## 7 · Offline / Air-Gap Policies - -- Offline Kit includes: - - Pack bundles + signatures. - - Trusted key store (`trust-bundle.pem`). - - Approval workflow instructions for manual signing. -- Air-gapped approvals: - - CLI generates approval request file (`.approval-request.json`). - - Approver uses offline CLI to sign with offline key. - - Response imported to Task Runner. -- Mirror process verifies signatures prior to import; failure aborts import with `ERR_PACK_SIGNATURE_INVALID`. - ---- - -## 8 · Incident Response - -- Compromised pack signature: - - Revoke key via Authority trust store. - - Deprecate affected versions (`registry deprecate`). - - Notify consumers via Notifier (`pack.security.alert`). - - Forensically review run evidence for impacted tenants. -- Unauthorized approval: - - Review audit log for `Packs.Approve` events. - - Trigger `pack.run.freeze` (pauses run pending investigation). - - Rotate approver credentials and require fresh-auth. 
-- Secret leak suspicion: - - Quarantine evidence bundles. - - Rotate secrets referenced by pack. - - Run sealed-mode audit script to confirm guardrails. - ---- - -## 9 · Compliance Checklist - -- [ ] Signing requirements (cosign/DSSE, trust policies) documented. -- [ ] Authority scope mapping and CLI enforcement captured. -- [ ] Approval workflow + fresh-auth expectations defined. -- [ ] Secret lifecycle (request, injection, audit) described. -- [ ] Audit/evidence integration noted (timeline, Evidence Locker). -- [ ] Offline/air-gap controls outlined. -- [ ] Incident response playbook provided. -- [ ] Imposed rule reminder retained at top. - ---- - -*Last updated: 2025-10-27 (Sprint 43).* - +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +# Pack Signing & RBAC Controls + +This document defines signing, verification, and authorization requirements for Task Packs across the CLI, Packs Registry, Task Runner, and Offline Kit. It aligns with Authority sprint tasks (`AUTH-PACKS-41-001`, `AUTH-PACKS-43-001`) and security guild expectations. + +--- + +## 1 · Threat Model Highlights + +| Threat | Mitigation | +|--------|------------| +| Unsigned or tampered pack uploaded to registry | Mandatory cosign/DSSE verification before acceptance. | +| Unauthorized user publishing or promoting packs | Authority scopes (`packs.write`) + registry policy checks. | +| Privilege escalation during approvals | Approval gates require `packs.approve` + audit logging; fresh-auth recommended. | +| Secret exfiltration via pack steps | Secrets injection sandbox with redaction, sealed-mode network guardrails, evidence review. | +| Replay of old approval tokens | Approval payloads carry plan hash + expiry; Task Runner rejects mismatches. | +| Malicious pack in Offline Kit | Mirror verification using signed manifest and DSSE provenance. 
| + +--- + +## 2 · Signing Requirements + +- **Cosign** signatures required for all bundles. Keys can be: + - Keyless (Fulcio OIDC). + - KMS-backed (HSM, cloud KMS). + - Offline keys stored in secure vault (air-gapped mode). +- **DSSE Attestations** recommended to embed: + - Manifest digest. + - Build metadata (repo, commit, CI run). + - CLI version (`stella/pack`). +- Signatures stored alongside bundle in registry object storage. +- `stella pack push` refuses to publish without signature (unless `--insecure-publish` used in dev). +- Registry enforces trust policy: + +| Policy | Description | +|--------|-------------| +| `anyOf` | Accepts any key in configured trust store. | +| `keyRef` | Accepts specific key ID (`kid`). | +| `oidcIssuer` | Accepts Fulcio certificates from allowed issuers (e.g., `https://fulcio.sigstore.dev`). | +| `threshold` | Requires N-of-M signatures (future release). | + +--- + +## 3 · RBAC & Scopes + +Authority exposes pack-related scopes: + +| Scope | Description | +|-------|-------------| +| `packs.read` | View packs, download manifests/bundles. | +| `packs.write` | Publish, promote, deprecate packs. | +| `packs.run` | Execute packs (Task Runner, CLI). | +| `packs.approve` | Approve pack gates, override tenant visibility. | + +### 3.1 Role Mapping + +| Role | Scopes | Use Cases | +|------|--------|-----------| +| `pack.viewer` | `packs.read` | Inspect packs, plan runs. | +| `pack.publisher` | `packs.read`, `packs.write` | Publish new versions, manage channels. | +| `pack.operator` | `packs.read`, `packs.run` | Execute packs, monitor runs. | +| `pack.approver` | `packs.read`, `packs.approve` | Fulfil approvals, authorize promotions. | +| `pack.admin` | All | Full lifecycle management (rare). | + +Roles are tenant-scoped; cross-tenant access requires explicit addition. + +### 3.2 CLI Enforcement + +- CLI requests scopes based on command: + - `stella pack plan` → `packs.read`. + - `stella pack run` → `packs.run`. 
+ - `stella pack push` → `packs.write`. + - `stella pack approve` → `packs.approve`. +- Offline tokens must include same scopes; CLI warns if missing. + +--- + +## 4 · Approvals & Fresh Auth + +- Approval commands require recent fresh-auth (< 5 minutes). CLI prompts automatically; Console enforces via Authority. +- Approval payload includes: + - `runId` + - `gateId` + - `planHash` + - `approver` + - `timestamp` +- Task Runner logs approval event and verifies plan hash to prevent rerouting. + +--- + +## 5 · Secret Management + +- Secrets defined in pack manifest map to Authority secret providers (e.g., HSM, Vault). +- Task Runner obtains secrets using service account with scoped access; CLI may prompt or read from profile. +- Secret audit trail: + - `secretRequested` event with reason, pack, step. + - `secretDelivered` event omitted (only aggregate metrics) to avoid leakage. + - Evidence bundle includes hashed secret metadata (no values). + +Sealed mode requires secrets to originate from sealed vault; external endpoints blocked. + +--- + +## 6 · Audit & Evidence + +- Registry, Task Runner, and Authority emit audit events to central timeline. +- Required events: + - `pack.version.published` + - `pack.version.promoted` + - `pack.run.started/completed` + - `pack.approval.requested/granted` + - `pack.secret.requested` +- Dashboards should track the `authority.pack_scope_violation` tag alongside `authority.aoc_scope_violation` to highlight mis-scoped automation clients. Break down counts by `authority.client_id` to surface stale Task Runner or registry configurations quickly. +- Evidence Locker stores DSSE attestations and run bundles for 90 days (configurable). +- Auditors can use `stella pack audit --run ` to retrieve audit trail. + +--- + +## 7 · Offline / Air-Gap Policies + +- Offline Kit includes: + - Pack bundles + signatures. + - Trusted key store (`trust-bundle.pem`). + - Approval workflow instructions for manual signing. 
+- Air-gapped approvals: + - CLI generates approval request file (`.approval-request.json`). + - Approver uses offline CLI to sign with offline key. + - Response imported to Task Runner. +- Mirror process verifies signatures prior to import; failure aborts import with `ERR_PACK_SIGNATURE_INVALID`. + +--- + +## 8 · Incident Response + +- Compromised pack signature: + - Revoke key via Authority trust store. + - Deprecate affected versions (`registry deprecate`). + - Notify consumers via Notifier (`pack.security.alert`). + - Forensically review run evidence for impacted tenants. +- Unauthorized approval: + - Review audit log for `packs.approve` events. + - Trigger `pack.run.freeze` (pauses run pending investigation). + - Rotate approver credentials and require fresh-auth. +- Secret leak suspicion: + - Quarantine evidence bundles. + - Rotate secrets referenced by pack. + - Run sealed-mode audit script to confirm guardrails. + +--- + +## 9 · Compliance Checklist + +- [ ] Signing requirements (cosign/DSSE, trust policies) documented. +- [ ] Authority scope mapping and CLI enforcement captured. +- [ ] Approval workflow + fresh-auth expectations defined. +- [ ] Secret lifecycle (request, injection, audit) described. +- [ ] Audit/evidence integration noted (timeline, Evidence Locker). +- [ ] Offline/air-gap controls outlined. +- [ ] Incident response playbook provided. +- [ ] Imposed rule reminder retained at top. + +--- + +*Last updated: 2025-10-27 (Sprint 43).* diff --git a/docs/task-packs/authoring-guide.md b/docs/task-packs/authoring-guide.md index c43496c0..abf3833b 100644 --- a/docs/task-packs/authoring-guide.md +++ b/docs/task-packs/authoring-guide.md @@ -1,208 +1,208 @@ -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -# Task Pack Authoring Guide - -This guide teaches engineers how to design, validate, and publish Task Packs that align with the Sprint 43 specification. 
Follow these steps to ensure deterministic behaviour, secure approvals, and smooth hand-off to operators. - ---- - -## 1 · Prerequisites - -- StellaOps CLI `>= 2025.10.0` with pack commands enabled. -- Authority client configured with `Packs.Write` (publish) and `Packs.Run` (local testing) scopes. -- Access to Task Runner staging environment for validation runs. -- Familiarity with the [Task Pack Specification](spec.md) and [Packs Registry](registry.md). -- Optional: connection to DevOps staging registry or Offline Kit mirror for publishing. - ---- - -## 2 · Design Checklist - -1. **Define objective.** Document the operational need, inputs, expected outputs, and rollback strategy. -2. **Identify approvals.** Determine which scopes/roles must sign off (`Packs.Approve` assignments). -3. **Plan security posture.** Limit secrets usage, set tenant visibility, and note network constraints (sealed mode). -4. **Model observability.** Decide which metrics, logs, and evidence artifacts are critical for post-run audits. -5. **Reuse libraries.** Prefer built-in modules or shared pack fragments to reduce drift. - -Capture the above in `docs/summary.md` (optional but recommended) for future maintainers. - ---- - -## 3 · Authoring Workflow - -### 3.1 Scaffold project - -```bash -mkdir my-pack -cd my-pack -stella pack init --name sbom-remediation -``` - -`stella pack init` creates baseline files: - -- `pack.yaml` with metadata placeholders. -- `schemas/inputs.schema.json` (sample). -- `docs/usage.md` (template for human instructions). -- `.packignore` to exclude build artifacts. - -### 3.2 Define inputs & schemas - -- Use JSON Schema (`draft-2020-12`) for input validation. -- Avoid optional inputs unless there is a deterministic default. -- Store schemas under `schemas/` and reference via relative paths. - -### 3.3 Compose steps - -- Break workflow into small deterministic steps. -- Name each step with stable `id`. 
-- Wrap scripts/tools using built-in modules; copy scripts to `assets/` if necessary. -- Use `when` expressions for branch logic; ensure expressions rely solely on inputs or previous outputs. -- For loops, adopt `map` with capped iteration count; avoid data-dependent randomness. - -### 3.4 Configure approvals - -- Add `spec.approvals` entries for each required review. -- Provide informative `reasonTemplate` with placeholders. -- Set `expiresAfter` to match operational policy (e.g., 4 h for security reviews). -- Document fallback contacts in `docs/runbook.md`. - -### 3.5 Manage secrets - -- Declare secrets under `spec.secrets`. -- Reference secrets via expressions (e.g., `{{ secrets.jiraToken.value }}`) inside modules that support secure injection. -- Never bake secrets or tokens into pack assets. -- If secret optional, set `optional: true` and handle absence in step logic. - -### 3.6 Document outputs - -- List expected artifacts under `spec.outputs`. -- Include human-friendly docs (Markdown) describing each output and how to access it through CLI or Console. - ---- - -## 4 · Validation - -### 4.1 Static validation - -```bash -stella pack validate -``` - -Checks performed: - -- Schema compliance (YAML, JSON Schema). -- Determinism guard (forbidden functions, clock usage, network allowlist). -- Reference integrity (assets, schemas, documentation). -- Approval/secret scope availability. - -### 4.2 Simulation & plan hash - -```bash -stella pack plan --inputs samples/inputs.json --output .artifacts/plan.json -stella pack simulate --inputs samples/inputs.json --output .artifacts/sim.json -``` - -- Review plan graph to ensure step ordering and gating align with expectations. -- Store simulation output with pack metadata for future audits. - -### 4.3 Local rehearsal - -```bash -stella pack run \ - --inputs samples/inputs.json \ - --secrets jiraToken=@secrets/jira.txt \ - --dry-run -``` - -- Use `--dry-run` to verify approvals and outputs without side effects. 
-- Real runs require `Packs.Run` and all approval gates satisfied (e.g., via CLI prompts or Console). - -### 4.4 Unit tests (optional but encouraged) - -- Create a `tests/` folder with CLI-driven regression scripts (e.g., using `stella pack plan` + `jq` assertions). -- Integrate into CI pipelines; ensure tests run offline using cached assets. - ---- - -## 5 · Publishing - -### 5.1 Build bundle - -```bash -stella pack build \ - --output dist/sbom-remediation-1.3.0.stella-pack.tgz \ - --manifest pack.yaml -``` - -### 5.2 Sign bundle - -```bash -cosign sign-blob \ - --yes \ - --output-signature dist/sbom-remediation-1.3.0.sig \ - dist/sbom-remediation-1.3.0.stella-pack.tgz -``` - -Store signature alongside bundle; DSSE optional but recommended (see [security guidance](../security/pack-signing-and-rbac.md)). - -### 5.3 Publish to registry - -```bash -stella pack push \ - registry.stella-ops.org/packs/sbom-remediation:1.3.0 \ - --bundle dist/sbom-remediation-1.3.0.stella-pack.tgz \ - --signature dist/sbom-remediation-1.3.0.sig -``` - -Registry verifies signature, stores provenance, and updates index. - -### 5.4 Offline distribution - -- Export bundle + signature + provenance into Offline Kit using `stella pack bundle export`. -- Update mirror manifest (`manifest/offline-manifest.json`) with new pack entries. - ---- - -## 6 · Versioning & Compatibility - -- Follow SemVer (increment major when breaking schema/behaviour). -- Document compatibility in `docs/compatibility.md` (recommended). -- Registry retains immutable history; use `metadata.deprecated: true` to indicate retirement. - ---- - -## 7 · Best Practices - -- **Keep steps idempotent.** Support manual retries without side effects. -- **Surface evidence early.** Export intermediate artifacts (plans, logs) for operators. -- **Localize messages.** Provide `locales/en-US.json` for CLI/Console strings (Sprint 43 requirement). 
-- **Avoid long-running commands.** Split heavy tasks into smaller steps with progress telemetry. -- **Guard network usage.** Use `when: "{{ env.isSealed }}"` to block disallowed network operations or provide offline instructions. -- **Document fallbacks.** Include manual recovery instructions in `docs/runbook.md`. - ---- - -## 8 · Hand-off & Review - -- Submit PR including pack bundle metadata, docs, and validation evidence. -- Request review from Task Runner + Security + DevOps stakeholders. -- Attach `stella pack plan` output and signature digest to review notes. -- After approval, update change log (`docs/CHANGELOG.md`) and notify Task Runner operations. - ---- - -## 9 · Compliance Checklist - -- [ ] Metadata, inputs, steps, approvals, secrets, and outputs defined per spec. -- [ ] Schemas provided for all object inputs and outputs. -- [ ] Determinism validation (`stella pack validate`) executed with evidence stored. -- [ ] Plan + simulation artifacts committed in `.artifacts/` or CI evidence store. -- [ ] Bundle signed (cosign/DSSE) and signature recorded. -- [ ] Runbook and troubleshooting notes documented. -- [ ] Offline distribution steps prepared (bundle export + manifest update). -- [ ] Imposed rule reminder retained at top. - ---- - -*Last updated: 2025-10-27 (Sprint 43).* - +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +# Task Pack Authoring Guide + +This guide teaches engineers how to design, validate, and publish Task Packs that align with the Sprint 43 specification. Follow these steps to ensure deterministic behaviour, secure approvals, and smooth hand-off to operators. + +--- + +## 1 · Prerequisites + +- StellaOps CLI `>= 2025.10.0` with pack commands enabled. +- Authority client configured with `packs.write` (publish) and `packs.run` (local testing) scopes. +- Access to Task Runner staging environment for validation runs. 
+- Familiarity with the [Task Pack Specification](spec.md) and [Packs Registry](registry.md). +- Optional: connection to DevOps staging registry or Offline Kit mirror for publishing. + +--- + +## 2 · Design Checklist + +1. **Define objective.** Document the operational need, inputs, expected outputs, and rollback strategy. +2. **Identify approvals.** Determine which scopes/roles must sign off (`packs.approve` assignments). +3. **Plan security posture.** Limit secrets usage, set tenant visibility, and note network constraints (sealed mode). +4. **Model observability.** Decide which metrics, logs, and evidence artifacts are critical for post-run audits. +5. **Reuse libraries.** Prefer built-in modules or shared pack fragments to reduce drift. + +Capture the above in `docs/summary.md` (optional but recommended) for future maintainers. + +--- + +## 3 · Authoring Workflow + +### 3.1 Scaffold project + +```bash +mkdir my-pack +cd my-pack +stella pack init --name sbom-remediation +``` + +`stella pack init` creates baseline files: + +- `pack.yaml` with metadata placeholders. +- `schemas/inputs.schema.json` (sample). +- `docs/usage.md` (template for human instructions). +- `.packignore` to exclude build artifacts. + +### 3.2 Define inputs & schemas + +- Use JSON Schema (`draft-2020-12`) for input validation. +- Avoid optional inputs unless there is a deterministic default. +- Store schemas under `schemas/` and reference via relative paths. + +### 3.3 Compose steps + +- Break workflow into small deterministic steps. +- Name each step with stable `id`. +- Wrap scripts/tools using built-in modules; copy scripts to `assets/` if necessary. +- Use `when` expressions for branch logic; ensure expressions rely solely on inputs or previous outputs. +- For loops, adopt `map` with capped iteration count; avoid data-dependent randomness. + +### 3.4 Configure approvals + +- Add `spec.approvals` entries for each required review. +- Provide informative `reasonTemplate` with placeholders. 
+- Set `expiresAfter` to match operational policy (e.g., 4 h for security reviews). +- Document fallback contacts in `docs/runbook.md`. + +### 3.5 Manage secrets + +- Declare secrets under `spec.secrets`. +- Reference secrets via expressions (e.g., `{{ secrets.jiraToken.value }}`) inside modules that support secure injection. +- Never bake secrets or tokens into pack assets. +- If secret optional, set `optional: true` and handle absence in step logic. + +### 3.6 Document outputs + +- List expected artifacts under `spec.outputs`. +- Include human-friendly docs (Markdown) describing each output and how to access it through CLI or Console. + +--- + +## 4 · Validation + +### 4.1 Static validation + +```bash +stella pack validate +``` + +Checks performed: + +- Schema compliance (YAML, JSON Schema). +- Determinism guard (forbidden functions, clock usage, network allowlist). +- Reference integrity (assets, schemas, documentation). +- Approval/secret scope availability. + +### 4.2 Simulation & plan hash + +```bash +stella pack plan --inputs samples/inputs.json --output .artifacts/plan.json +stella pack simulate --inputs samples/inputs.json --output .artifacts/sim.json +``` + +- Review plan graph to ensure step ordering and gating align with expectations. +- Store simulation output with pack metadata for future audits. + +### 4.3 Local rehearsal + +```bash +stella pack run \ + --inputs samples/inputs.json \ + --secrets jiraToken=@secrets/jira.txt \ + --dry-run +``` + +- Use `--dry-run` to verify approvals and outputs without side effects. +- Real runs require `packs.run` and all approval gates satisfied (e.g., via CLI prompts or Console). + +### 4.4 Unit tests (optional but encouraged) + +- Create a `tests/` folder with CLI-driven regression scripts (e.g., using `stella pack plan` + `jq` assertions). +- Integrate into CI pipelines; ensure tests run offline using cached assets. 
+ +--- + +## 5 · Publishing + +### 5.1 Build bundle + +```bash +stella pack build \ + --output dist/sbom-remediation-1.3.0.stella-pack.tgz \ + --manifest pack.yaml +``` + +### 5.2 Sign bundle + +```bash +cosign sign-blob \ + --yes \ + --output-signature dist/sbom-remediation-1.3.0.sig \ + dist/sbom-remediation-1.3.0.stella-pack.tgz +``` + +Store signature alongside bundle; DSSE optional but recommended (see [security guidance](../security/pack-signing-and-rbac.md)). + +### 5.3 Publish to registry + +```bash +stella pack push \ + registry.stella-ops.org/packs/sbom-remediation:1.3.0 \ + --bundle dist/sbom-remediation-1.3.0.stella-pack.tgz \ + --signature dist/sbom-remediation-1.3.0.sig +``` + +Registry verifies signature, stores provenance, and updates index. + +### 5.4 Offline distribution + +- Export bundle + signature + provenance into Offline Kit using `stella pack bundle export`. +- Update mirror manifest (`manifest/offline-manifest.json`) with new pack entries. + +--- + +## 6 · Versioning & Compatibility + +- Follow SemVer (increment major when breaking schema/behaviour). +- Document compatibility in `docs/compatibility.md` (recommended). +- Registry retains immutable history; use `metadata.deprecated: true` to indicate retirement. + +--- + +## 7 · Best Practices + +- **Keep steps idempotent.** Support manual retries without side effects. +- **Surface evidence early.** Export intermediate artifacts (plans, logs) for operators. +- **Localize messages.** Provide `locales/en-US.json` for CLI/Console strings (Sprint 43 requirement). +- **Avoid long-running commands.** Split heavy tasks into smaller steps with progress telemetry. +- **Guard network usage.** Use `when: "{{ env.isSealed }}"` to block disallowed network operations or provide offline instructions. +- **Document fallbacks.** Include manual recovery instructions in `docs/runbook.md`. + +--- + +## 8 · Hand-off & Review + +- Submit PR including pack bundle metadata, docs, and validation evidence. 
+- Request review from Task Runner + Security + DevOps stakeholders. +- Attach `stella pack plan` output and signature digest to review notes. +- After approval, update change log (`docs/CHANGELOG.md`) and notify Task Runner operations. + +--- + +## 9 · Compliance Checklist + +- [ ] Metadata, inputs, steps, approvals, secrets, and outputs defined per spec. +- [ ] Schemas provided for all object inputs and outputs. +- [ ] Determinism validation (`stella pack validate`) executed with evidence stored. +- [ ] Plan + simulation artifacts committed in `.artifacts/` or CI evidence store. +- [ ] Bundle signed (cosign/DSSE) and signature recorded. +- [ ] Runbook and troubleshooting notes documented. +- [ ] Offline distribution steps prepared (bundle export + manifest update). +- [ ] Imposed rule reminder retained at top. + +--- + +*Last updated: 2025-10-27 (Sprint 43).* + diff --git a/docs/task-packs/registry.md b/docs/task-packs/registry.md index 3dd874b7..d35f0854 100644 --- a/docs/task-packs/registry.md +++ b/docs/task-packs/registry.md @@ -1,174 +1,174 @@ -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -# Packs Registry Architecture & Operations - -The Packs Registry stores, verifies, and serves Task Pack bundles across environments. It integrates with Authority for RBAC, Task Runner for execution, DevOps for release automation, and Offline Kit for air-gapped distribution. - ---- - -## 1 · Service Overview - -- **Service name:** `StellaOps.PacksRegistry` -- **Interfaces:** REST/GraphQL API, OCI-compatible registry endpoints, event streams for mirroring. -- **Data stores:** MongoDB (`packs`, `pack_versions`, `pack_provenance`), object storage (bundle blobs, signatures), timeline events. -- **Dependencies:** Authority scopes (`Packs.*`), Export Center (manifests), DevOps signing service, Notifications (optional). 
- ---- - -## 2 · Core Concepts - -| Concept | Description | -|---------|-------------| -| **Pack record** | Immutable entry representing a pack version; includes metadata, digest, signatures, tenant visibility. | -| **Channel** | Logical distribution channel (`stable`, `edge`, `beta`, custom). Controls mirroring/promotion flows. | -| **Provenance** | DSSE statements + SBOM linking pack bundle to source repo, CLI build, and Task Runner compatibility. | -| **Mirroring policy** | Rules specifying which packs replicate to downstream registries or Offline Kit bundles. | -| **Audit trail** | Append-only log capturing publish/update/delete actions, approvals, and policy evaluations. | - ---- - -## 3 · API Surface - -### 3.1 REST Endpoints - -| Method | Path | Description | Scopes | -|--------|------|-------------|--------| -| `GET` | `/api/packs` | List packs with filters (`name`, `channel`, `tenant`, `tag`). | `Packs.Read` | -| `GET` | `/api/packs/{packId}/versions` | List versions with metadata, provenance. | `Packs.Read` | -| `GET` | `/api/packs/{packId}/versions/{version}` | Retrieve manifest, signatures, compatibility matrix. | `Packs.Read` | -| `POST` | `/api/packs/{packId}/versions` | Publish new version (bundle upload or OCI reference). | `Packs.Write` | -| `POST` | `/api/packs/{packId}/promote` | Promote version between channels (edge→stable). | `Packs.Write` + approval policy | -| `DELETE` | `/api/packs/{packId}/versions/{version}` | Deprecate version (soft delete, immutability preserved). | `Packs.Write` | -| `GET` | `/api/packs/{packId}/events` | Stream audit events (SSE). | `Packs.Read` | - -### 3.2 OCI Endpoints - -The registry exposes OCI-compatible endpoints (`/v2///...`) supporting: - -- `PUT`/`PATCH`/`GET` for manifests and blobs. -- Content-addressed digests using SHA-256. -- Annotations for pack metadata (`org.opencontainers.image.title`, `io.stellaops.pack.metadata`). 
- -### 3.3 GraphQL (Optional) - -GraphQL endpoint (`/api/graphql`) enables advanced queries (filter by approvals, tags, compatibility). Under active development; reference API schema once published. - ---- - -## 4 · Publishing Workflow - -1. CLI/CI calls `POST /api/packs/{id}/versions` with signed bundle. -2. Registry verifies: - - Manifest schema compliance. - - Signature (cosign/DSSE) validity. - - Authority scopes (`Packs.Write`). - - Tenant visibility constraints. -3. On success, registry stores bundle, provenance, and emits event (`pack.version.published`). -4. Optional promotion requires additional approvals or integration with DevOps release boards. - -All actions recorded in audit log: - -```json -{ - "id": "evt_01HF...", - "type": "pack.version.published", - "packId": "sbom-remediation", - "version": "1.3.0", - "actor": "user:alice", - "tenant": "west-prod", - "source": "cli/2025.10.0", - "signatures": ["sha256:..."], - "metadataHash": "sha256:..." -} -``` - ---- - -## 5 · Mirroring & Offline Support - -- **Automatic mirroring:** Configure policies to push packs to secondary registries (edge clusters, regional mirrors) or object stores. -- **Offline Kit integration:** `ops/offline-kit` pipeline pulls packs matching specified channels and writes them to `offline/packs/manifest.json` with signatures. -- **Checksum manifest:** Registry maintains `digestmap.json` listing pack digests + signatures; offline installers verify before import. -- **Sealed mode:** Registry can operate in read-only mode for sealed environments; publishing disabled except via offline import command (`stella pack mirror import`). - ---- - -## 6 · Security & Compliance - -- Enforce Authority scopes; tokens without tenant or required scope are rejected (`ERR_PACK_SCOPE`). -- Signatures verified using trusted Fulcio/KMS roots; optional mirror trust bundles configured via `registry.trustBundle`. 
-- RBAC mapping: - -| Role | Scopes | Capabilities | -|------|--------|--------------| -| `PackViewer` | `Packs.Read` | Browse, fetch manifests/bundles. | -| `PackPublisher` | `Packs.Read`, `Packs.Write` | Publish/promote, manage channels (subject to policy). | -| `PackApprover` | `Packs.Read`, `Packs.Approve` | Approve promotions, override tenant visibility (with audit logging). | -| `PackOperator` | `Packs.Read`, `Packs.Run` | Execute packs (via CLI/Task Runner). | - -- Audit events forwarded to Authority + Evidence Locker. -- Built-in malware/secret scanning runs on bundle upload (configurable via DevOps pipeline). - -See [pack signing & RBAC guidance](../security/pack-signing-and-rbac.md) for deeper controls. - ---- - -## 7 · Observability - -- Metrics (`registry` namespace): - - `pack_publish_total{result}` – success/failure counts. - - `pack_signature_verify_seconds` – verification latency. - - `pack_channel_promotions_total` – promotions per channel. - - `pack_mirror_queue_depth` – pending mirror jobs. -- Logs (structured JSON with `packId`, `version`, `actor`, `tenant`, `digest`). -- Traces instrument bundle verification, storage writes, and mirror pushes. -- Alerting suggestions: - - Publish failure rate > 5 % (5 m window) triggers DevOps escalation. - - Mirror lag > 15 m surfaces to Ops dashboard. - ---- - -## 8 · Schema & Metadata Extensions - -- Default metadata stored under `metadata.*` from manifest. -- Registry supplements with: - - `compatibility.cli` (supported CLI versions). - - `compatibility.runner` (Task Runner build requirements). - - `provenance.attestations[]` (URIs). - - `channels[]` (current channel assignments). - - `tenantVisibility[]`. - - `deprecated` flag + replacement hints. - -Extensions must be deterministic and derived from signed bundle data. - ---- - -## 9 · Operations - -- **Backups:** Daily snapshots of Mongo collections + object storage, retained for 30 days. 
-- **Retention:** Old versions retained indefinitely; mark as `deprecated` instead of deleting. -- **Maintenance:** - - Run `registry vacuum` weekly to prune orphaned blobs. - - Rotate signing keys per security policy (document in `pack-signing-and-rbac`). - - Validate trust bundles quarterly. -- **Disaster recovery:** - - Restore database + object storage. - - Rebuild OCI indexes (`registry rebuild-index`). - - Replay audit events for downstream systems. - ---- - -## 10 · Compliance Checklist - -- [ ] REST + OCI endpoints documented with required scopes. -- [ ] Publishing flow covers signature verification, audit logging, and promotion policies. -- [ ] Mirroring/offline strategy recorded (policies, manifests, sealed mode notes). -- [ ] RBAC roles and scope mapping defined. -- [ ] Observability metrics, logs, and alerts described. -- [ ] Operations guidance covers backups, rotation, disaster recovery. -- [ ] Imposed rule reminder included at top of document. - ---- - -*Last updated: 2025-10-27 (Sprint 43).* - +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +# Packs Registry Architecture & Operations + +The Packs Registry stores, verifies, and serves Task Pack bundles across environments. It integrates with Authority for RBAC, Task Runner for execution, DevOps for release automation, and Offline Kit for air-gapped distribution. + +--- + +## 1 · Service Overview + +- **Service name:** `StellaOps.PacksRegistry` +- **Interfaces:** REST/GraphQL API, OCI-compatible registry endpoints, event streams for mirroring. +- **Data stores:** MongoDB (`packs`, `pack_versions`, `pack_provenance`), object storage (bundle blobs, signatures), timeline events. +- **Dependencies:** Authority scopes (`packs.*`), Export Center (manifests), DevOps signing service, Notifications (optional). 
+ 
+--- 
+ 
+## 2 · Core Concepts 
+ 
+| Concept | Description | 
+|---------|-------------| 
+| **Pack record** | Immutable entry representing a pack version; includes metadata, digest, signatures, tenant visibility. | 
+| **Channel** | Logical distribution channel (`stable`, `edge`, `beta`, custom). Controls mirroring/promotion flows. | 
+| **Provenance** | DSSE statements + SBOM linking pack bundle to source repo, CLI build, and Task Runner compatibility. | 
+| **Mirroring policy** | Rules specifying which packs replicate to downstream registries or Offline Kit bundles. | 
+| **Audit trail** | Append-only log capturing publish/update/delete actions, approvals, and policy evaluations. | 
+ 
+--- 
+ 
+## 3 · API Surface 
+ 
+### 3.1 REST Endpoints 
+ 
+| Method | Path | Description | Scopes | 
+|--------|------|-------------|--------| 
+| `GET` | `/api/packs` | List packs with filters (`name`, `channel`, `tenant`, `tag`). | `packs.read` | 
+| `GET` | `/api/packs/{packId}/versions` | List versions with metadata, provenance. | `packs.read` | 
+| `GET` | `/api/packs/{packId}/versions/{version}` | Retrieve manifest, signatures, compatibility matrix. | `packs.read` | 
+| `POST` | `/api/packs/{packId}/versions` | Publish new version (bundle upload or OCI reference). | `packs.write` | 
+| `POST` | `/api/packs/{packId}/promote` | Promote version between channels (edge→stable). | `packs.write` + approval policy | 
+| `DELETE` | `/api/packs/{packId}/versions/{version}` | Deprecate version (soft delete, immutability preserved). | `packs.write` | 
+| `GET` | `/api/packs/{packId}/events` | Stream audit events (SSE). | `packs.read` | 
+ 
+### 3.2 OCI Endpoints 
+ 
+The registry exposes OCI-compatible endpoints (`/v2/<name>/<reference>/...`) supporting: 
+ 
+- `PUT`/`PATCH`/`GET` for manifests and blobs. 
+- Content-addressed digests using SHA-256. 
+- Annotations for pack metadata (`org.opencontainers.image.title`, `io.stellaops.pack.metadata`). 
+ +### 3.3 GraphQL (Optional) + +GraphQL endpoint (`/api/graphql`) enables advanced queries (filter by approvals, tags, compatibility). Under active development; reference API schema once published. + +--- + +## 4 · Publishing Workflow + +1. CLI/CI calls `POST /api/packs/{id}/versions` with signed bundle. +2. Registry verifies: + - Manifest schema compliance. + - Signature (cosign/DSSE) validity. + - Authority scopes (`packs.write`). + - Tenant visibility constraints. +3. On success, registry stores bundle, provenance, and emits event (`pack.version.published`). +4. Optional promotion requires additional approvals or integration with DevOps release boards. + +All actions recorded in audit log: + +```json +{ + "id": "evt_01HF...", + "type": "pack.version.published", + "packId": "sbom-remediation", + "version": "1.3.0", + "actor": "user:alice", + "tenant": "west-prod", + "source": "cli/2025.10.0", + "signatures": ["sha256:..."], + "metadataHash": "sha256:..." +} +``` + +--- + +## 5 · Mirroring & Offline Support + +- **Automatic mirroring:** Configure policies to push packs to secondary registries (edge clusters, regional mirrors) or object stores. +- **Offline Kit integration:** `ops/offline-kit` pipeline pulls packs matching specified channels and writes them to `offline/packs/manifest.json` with signatures. +- **Checksum manifest:** Registry maintains `digestmap.json` listing pack digests + signatures; offline installers verify before import. +- **Sealed mode:** Registry can operate in read-only mode for sealed environments; publishing disabled except via offline import command (`stella pack mirror import`). + +--- + +## 6 · Security & Compliance + +- Enforce Authority scopes; tokens without tenant or required scope are rejected (`ERR_PACK_SCOPE`). +- Signatures verified using trusted Fulcio/KMS roots; optional mirror trust bundles configured via `registry.trustBundle`. 
+- RBAC mapping: + +| Role | Scopes | Capabilities | +|------|--------|--------------| +| `PackViewer` | `packs.read` | Browse, fetch manifests/bundles. | +| `PackPublisher` | `packs.read`, `packs.write` | Publish/promote, manage channels (subject to policy). | +| `PackApprover` | `packs.read`, `packs.approve` | Approve promotions, override tenant visibility (with audit logging). | +| `PackOperator` | `packs.read`, `packs.run` | Execute packs (via CLI/Task Runner). | + +- Audit events forwarded to Authority + Evidence Locker. +- Built-in malware/secret scanning runs on bundle upload (configurable via DevOps pipeline). + +See [pack signing & RBAC guidance](../security/pack-signing-and-rbac.md) for deeper controls. + +--- + +## 7 · Observability + +- Metrics (`registry` namespace): + - `pack_publish_total{result}` – success/failure counts. + - `pack_signature_verify_seconds` – verification latency. + - `pack_channel_promotions_total` – promotions per channel. + - `pack_mirror_queue_depth` – pending mirror jobs. +- Logs (structured JSON with `packId`, `version`, `actor`, `tenant`, `digest`). +- Traces instrument bundle verification, storage writes, and mirror pushes. +- Alerting suggestions: + - Publish failure rate > 5 % (5 m window) triggers DevOps escalation. + - Mirror lag > 15 m surfaces to Ops dashboard. + +--- + +## 8 · Schema & Metadata Extensions + +- Default metadata stored under `metadata.*` from manifest. +- Registry supplements with: + - `compatibility.cli` (supported CLI versions). + - `compatibility.runner` (Task Runner build requirements). + - `provenance.attestations[]` (URIs). + - `channels[]` (current channel assignments). + - `tenantVisibility[]`. + - `deprecated` flag + replacement hints. + +Extensions must be deterministic and derived from signed bundle data. + +--- + +## 9 · Operations + +- **Backups:** Daily snapshots of Mongo collections + object storage, retained for 30 days. 
+- **Retention:** Old versions retained indefinitely; mark as `deprecated` instead of deleting. +- **Maintenance:** + - Run `registry vacuum` weekly to prune orphaned blobs. + - Rotate signing keys per security policy (document in `pack-signing-and-rbac`). + - Validate trust bundles quarterly. +- **Disaster recovery:** + - Restore database + object storage. + - Rebuild OCI indexes (`registry rebuild-index`). + - Replay audit events for downstream systems. + +--- + +## 10 · Compliance Checklist + +- [ ] REST + OCI endpoints documented with required scopes. +- [ ] Publishing flow covers signature verification, audit logging, and promotion policies. +- [ ] Mirroring/offline strategy recorded (policies, manifests, sealed mode notes). +- [ ] RBAC roles and scope mapping defined. +- [ ] Observability metrics, logs, and alerts described. +- [ ] Operations guidance covers backups, rotation, disaster recovery. +- [ ] Imposed rule reminder included at top of document. + +--- + +*Last updated: 2025-10-27 (Sprint 43).* + diff --git a/docs/task-packs/runbook.md b/docs/task-packs/runbook.md index 7167b514..d5651820 100644 --- a/docs/task-packs/runbook.md +++ b/docs/task-packs/runbook.md @@ -1,162 +1,162 @@ -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -# Task Pack Operations Runbook - -This runbook guides SREs and on-call engineers through executing, monitoring, and troubleshooting Task Packs using the Task Runner service, Packs Registry, and StellaOps CLI. It aligns with Sprint 43 deliverables (approvals workflow, notifications, chaos resilience). - ---- - -## 1 · Quick Reference - -| Action | Command / UI | Notes | -|--------|--------------|-------| -| Validate pack | `stella pack validate --bundle ` | Run before publishing or importing. | -| Plan pack run | `stella pack plan --inputs inputs.json` | Outputs plan hash, required approvals, secret summary. 
| -| Execute pack | `stella pack run --pack :` | Streams logs; prompts for secrets/approvals if allowed. | -| Approve gate | Console notifications or `stella pack approve --run --gate ` | Requires `Packs.Approve`. | -| View run | Console `/console/packs/runs/:id` or `stella pack runs show ` | SSE stream available for live status. | -| Export evidence | `stella pack runs export --run ` | Produces bundle with plan, logs, artifacts, attestations. | - ---- - -## 2 · Run Lifecycle - -1. **Submission** - - CLI/Orchestrator submits run with inputs, pack version, tenant context. - - Task Runner validates pack hash, scopes, sealed-mode constraints. -2. **Plan & Simulation** - - Runner caches plan graph; optional simulation diff recorded. -3. **Approvals** - - Gates emit notifications (`NOTIFY-SVC-40-001`). - - Approvers can approve/resume via CLI, Console, or API. -4. **Execution** - - Steps executed per plan (sequential/parallel). - - Logs streamed via SSE (`/task-runner/runs/{id}/logs`). -5. **Evidence & Attestation** - - On completion, DSSE attestation + evidence bundle stored. - - Exports available via Export Center. -6. **Cleanup** - - Artifacts retained per retention policy (default 30 d). - - Mirror pack run manifest to Offline Kit if configured. - ---- - -## 3 · Monitoring & Telemetry - -- **Metrics dashboards:** `task-runner` Grafana board. - - `pack_run_active` – active runs per tenant. - - `pack_step_duration_seconds` – histograms per step type. - - `pack_gate_wait_seconds` – approval wait time (alert > 30 m). - - `pack_run_success_ratio` – success vs failure rate. -- **Logs:** Search by `runId`, `packId`, `tenant`, `stepId`. -- **Traces:** Query `taskrunner.run` span in Tempo/Jaeger. -- **Notifications:** Subscribe to `pack.run.*` topics via Notifier for Slack/email/PagerDuty hooks. - -Observability configuration referenced in Task Runner tasks (OBS-50-001..55-001). 
- ---- - -## 4 · Approvals Workflow - -- Approvals may be requested via Console banner, CLI prompt, or email/Slack. -- Approver roles: `Packs.Approve` + tenant membership. -- CLI command: - -```bash -stella pack approve \ - --run run:tenant:timestamp \ - --gate security-review \ - --comment "Validated remediation scope; proceeding." -``` - -- Auto-expiry triggers run cancellation (configurable per gate). -- Approval events logged and included in evidence bundle. - ---- - -## 5 · Secrets Handling - -- Secrets retrieved via Authority secure channel or CLI profile. -- Task Runner injects secrets into isolated environment variables or temp files (auto-shredded). -- Logs redact secrets; evidence bundles include only secret metadata (name, scope, last four characters). -- For sealed mode, secrets must originate from sealed vault (configured via `TASKRUNNER_SEALED_VAULT_URL`). - ---- - -## 6 · Failure Recovery - -| Scenario | Symptom | Resolution | -|----------|---------|------------| -| **Plan hash mismatch** | Run aborted with `ERR_PACK_HASH_MISMATCH`. | Re-run `stella pack plan`; ensure pack not modified post-plan. | -| **Approval timeout** | `ERR_PACK_APPROVAL_TIMEOUT`. | Requeue run with extended TTL or escalate to approver; verify notifications delivered. | -| **Secret missing** | Run fails at injection step. | Provide secret via CLI (`--secrets`) or configure profile; check Authority scope. | -| **Network blocked (sealed)** | `ERR_PACK_NETWORK_BLOCKED`. | Update pack to avoid external calls or whitelist domain via AirGap policy. | -| **Artifact upload failure** | Evidence missing, logs show storage errors. | Retry run with `--resume` (if supported); verify object storage health. | -| **Runner chaos trigger** | Run paused with chaos event note. | Review chaos test plan; resume if acceptable or cancel run. | - -`stella pack runs resume --run ` resumes paused runs post-remediation (approvals or transient failures). 
- ---- - -## 7 · Chaos & Resilience - -- Chaos hooks pause runs, drop network, or delay approvals to test resilience. -- Track chaos events via `pack.chaos.injected` timeline entries. -- Post-chaos, ensure metrics return to baseline; record findings in Ops log. - ---- - -## 8 · Offline & Air-Gapped Execution - -- Use `stella pack mirror pull` to import packs into sealed registry. -- CLI caches bundles under `~/.stella/packs/` for offline runs. -- Approvals require offline process: - - Generate approval request bundle (`stella pack approve --offline-request`). - - Approver signs bundle using offline CLI. - - Import approval via `stella pack approve --offline-response`. -- Evidence bundles exported to removable media; verify checksums before upload to online systems. - ---- - -## 9 · Runbooks for Common Packs - -Maintain per-pack playbooks in `docs/task-packs/runbook/.md`. Include: - -- Purpose and scope. -- Required inputs and secrets. -- Approval stakeholders. -- Pre-checks and post-checks. -- Rollback procedures. - -The Docs Guild can use this root runbook as a template. - ---- - -## 10 · Escalation Matrix - -| Issue | Primary | Secondary | Notes | -|-------|---------|-----------|-------| -| Pack validation errors | DevEx/CLI Guild | Task Runner Guild | Provide pack bundle + validation output. | -| Approval pipeline failure | Task Runner Guild | Authority Core | Confirm scope/role mapping. | -| Registry outage | Packs Registry Guild | DevOps Guild | Use mirror fallback if possible. | -| Evidence integrity issues | Evidence Locker Guild | Security Guild | Validate DSSE attestations, escalate if tampered. | - -Escalations must include run ID, tenant, pack version, plan hash, and timestamps. - ---- - -## 11 · Compliance Checklist - -- [ ] Run lifecycle documented (submission → evidence). -- [ ] Monitoring metrics, logs, traces, and notifications captured. -- [ ] Approvals workflow instructions provided (CLI + Console). 
-- [ ] Secret handling, sealed-mode constraints, and offline process described. 
-- [ ] Failure scenarios + recovery steps listed. 
-- [ ] Chaos/resilience guidance included. 
-- [ ] Escalation matrix defined. 
-- [ ] Imposed rule reminder included at top. 
- 
--- 
- 
-*Last updated: 2025-10-27 (Sprint 43).* 
- 
+> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. 
+ 
+# Task Pack Operations Runbook 
+ 
+This runbook guides SREs and on-call engineers through executing, monitoring, and troubleshooting Task Packs using the Task Runner service, Packs Registry, and StellaOps CLI. It aligns with Sprint 43 deliverables (approvals workflow, notifications, chaos resilience). 
+ 
+--- 
+ 
+## 1 · Quick Reference 
+ 
+| Action | Command / UI | Notes | 
+|--------|--------------|-------| 
+| Validate pack | `stella pack validate --bundle <path>` | Run before publishing or importing. | 
+| Plan pack run | `stella pack plan --inputs inputs.json` | Outputs plan hash, required approvals, secret summary. | 
+| Execute pack | `stella pack run --pack <packId>:<version>` | Streams logs; prompts for secrets/approvals if allowed. | 
+| Approve gate | Console notifications or `stella pack approve --run <runId> --gate <gateId>` | Requires `packs.approve`. | 
+| View run | Console `/console/packs/runs/:id` or `stella pack runs show <runId>` | SSE stream available for live status. | 
+| Export evidence | `stella pack runs export --run <runId>` | Produces bundle with plan, logs, artifacts, attestations. | 
+ 
+--- 
+ 
+## 2 · Run Lifecycle 
+ 
+1. **Submission** 
+   - CLI/Orchestrator submits run with inputs, pack version, tenant context. 
+   - Task Runner validates pack hash, scopes, sealed-mode constraints. 
+2. **Plan & Simulation** 
+   - Runner caches plan graph; optional simulation diff recorded. 
+3. **Approvals** 
+   - Gates emit notifications (`NOTIFY-SVC-40-001`). 
+   - Approvers can approve/resume via CLI, Console, or API. 
+4. **Execution** 
+   - Steps executed per plan (sequential/parallel). 
+ - Logs streamed via SSE (`/task-runner/runs/{id}/logs`). +5. **Evidence & Attestation** + - On completion, DSSE attestation + evidence bundle stored. + - Exports available via Export Center. +6. **Cleanup** + - Artifacts retained per retention policy (default 30 d). + - Mirror pack run manifest to Offline Kit if configured. + +--- + +## 3 · Monitoring & Telemetry + +- **Metrics dashboards:** `task-runner` Grafana board. + - `pack_run_active` – active runs per tenant. + - `pack_step_duration_seconds` – histograms per step type. + - `pack_gate_wait_seconds` – approval wait time (alert > 30 m). + - `pack_run_success_ratio` – success vs failure rate. +- **Logs:** Search by `runId`, `packId`, `tenant`, `stepId`. +- **Traces:** Query `taskrunner.run` span in Tempo/Jaeger. +- **Notifications:** Subscribe to `pack.run.*` topics via Notifier for Slack/email/PagerDuty hooks. + +Observability configuration referenced in Task Runner tasks (OBS-50-001..55-001). + +--- + +## 4 · Approvals Workflow + +- Approvals may be requested via Console banner, CLI prompt, or email/Slack. +- Approver roles: `packs.approve` + tenant membership. +- CLI command: + +```bash +stella pack approve \ + --run run:tenant:timestamp \ + --gate security-review \ + --comment "Validated remediation scope; proceeding." +``` + +- Auto-expiry triggers run cancellation (configurable per gate). +- Approval events logged and included in evidence bundle. + +--- + +## 5 · Secrets Handling + +- Secrets retrieved via Authority secure channel or CLI profile. +- Task Runner injects secrets into isolated environment variables or temp files (auto-shredded). +- Logs redact secrets; evidence bundles include only secret metadata (name, scope, last four characters). +- For sealed mode, secrets must originate from sealed vault (configured via `TASKRUNNER_SEALED_VAULT_URL`). 
+ +--- + +## 6 · Failure Recovery + +| Scenario | Symptom | Resolution | +|----------|---------|------------| +| **Plan hash mismatch** | Run aborted with `ERR_PACK_HASH_MISMATCH`. | Re-run `stella pack plan`; ensure pack not modified post-plan. | +| **Approval timeout** | `ERR_PACK_APPROVAL_TIMEOUT`. | Requeue run with extended TTL or escalate to approver; verify notifications delivered. | +| **Secret missing** | Run fails at injection step. | Provide secret via CLI (`--secrets`) or configure profile; check Authority scope. | +| **Network blocked (sealed)** | `ERR_PACK_NETWORK_BLOCKED`. | Update pack to avoid external calls or whitelist domain via AirGap policy. | +| **Artifact upload failure** | Evidence missing, logs show storage errors. | Retry run with `--resume` (if supported); verify object storage health. | +| **Runner chaos trigger** | Run paused with chaos event note. | Review chaos test plan; resume if acceptable or cancel run. | + +`stella pack runs resume --run <run-id>` resumes paused runs post-remediation (approvals or transient failures). + +--- + +## 7 · Chaos & Resilience + +- Chaos hooks pause runs, drop network, or delay approvals to test resilience. +- Track chaos events via `pack.chaos.injected` timeline entries. +- Post-chaos, ensure metrics return to baseline; record findings in Ops log. + +--- + +## 8 · Offline & Air-Gapped Execution + +- Use `stella pack mirror pull` to import packs into sealed registry. +- CLI caches bundles under `~/.stella/packs/` for offline runs. +- Approvals require offline process: + - Generate approval request bundle (`stella pack approve --offline-request`). + - Approver signs bundle using offline CLI. + - Import approval via `stella pack approve --offline-response`. +- Evidence bundles exported to removable media; verify checksums before upload to online systems. + +--- + +## 9 · Runbooks for Common Packs + +Maintain per-pack playbooks in `docs/task-packs/runbook/<pack>.md`. Include: + +- Purpose and scope.
+- Required inputs and secrets. +- Approval stakeholders. +- Pre-checks and post-checks. +- Rollback procedures. + +The Docs Guild can use this root runbook as a template. + +--- + +## 10 · Escalation Matrix + +| Issue | Primary | Secondary | Notes | +|-------|---------|-----------|-------| +| Pack validation errors | DevEx/CLI Guild | Task Runner Guild | Provide pack bundle + validation output. | +| Approval pipeline failure | Task Runner Guild | Authority Core | Confirm scope/role mapping. | +| Registry outage | Packs Registry Guild | DevOps Guild | Use mirror fallback if possible. | +| Evidence integrity issues | Evidence Locker Guild | Security Guild | Validate DSSE attestations, escalate if tampered. | + +Escalations must include run ID, tenant, pack version, plan hash, and timestamps. + +--- + +## 11 · Compliance Checklist + +- [ ] Run lifecycle documented (submission → evidence). +- [ ] Monitoring metrics, logs, traces, and notifications captured. +- [ ] Approvals workflow instructions provided (CLI + Console). +- [ ] Secret handling, sealed-mode constraints, and offline process described. +- [ ] Failure scenarios + recovery steps listed. +- [ ] Chaos/resilience guidance included. +- [ ] Escalation matrix defined. +- [ ] Imposed rule reminder included at top. + +--- + +*Last updated: 2025-10-27 (Sprint 43).* + diff --git a/docs/task-packs/spec.md b/docs/task-packs/spec.md index d6e7c98e..e6cc9b6d 100644 --- a/docs/task-packs/spec.md +++ b/docs/task-packs/spec.md @@ -1,249 +1,249 @@ -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -# Task Pack Specification (Sprint 43 Draft) - -The Task Pack specification defines a deterministic, auditable format that enables operators to encode multi-step maintenance, validation, and deployment workflows. 
Packs are executed by the Task Runner service, distributed through the Packs Registry, and invoked via the StellaOps CLI (`stella pack ...`) or Orchestrator integrations. - ---- - -## 1 · Goals & Scope - -- **Deterministic execution.** Identical inputs yield identical run graphs, output manifests, and evidence bundles across environments (online, sealed, or offline). -- **Secure-by-default.** Pack metadata must capture provenance, signatures, RBAC requirements, and secret usage; execution enforces tenant scopes and approvals. -- **Portable.** Packs are distributed as signed OCI artifacts or tarballs that work in connected and air-gapped deployments, including Offline Kit mirrors. -- **Composable.** Packs can reference reusable steps, expressions, and shared libraries without sacrificing determinism or auditability. - -Non-goals: full-blown workflow orchestration, unbounded scripting, or remote code injection. All logic is declarative and constrained to Task Runner capabilities. - ---- - -## 2 · Terminology - -| Term | Definition | -|------|------------| -| **Pack manifest** | Primary YAML document (`pack.yaml`) describing metadata, inputs, steps, policies, and evidence expectations. | -| **Step** | Atomic unit of work executed by Task Runner (e.g., command, API call, policy gate, approval). Steps can be sequential or parallel. | -| **Expression** | Deterministic evaluation (JMESPath-like) used for branching, templating, and conditionals. | -| **Policy gate** | Declarative rule that blocks execution until conditions are met (e.g., approval recorded, external signal received). | -| **Artifact** | File, JSON blob, or OCI object produced by a step, referenced in manifests and evidence bundles. | -| **Pack bundle** | Distribution archive (`.stella-pack.tgz` or OCI ref) containing manifest, assets, schemas, and provenance metadata. 
| - ---- - -## 3 · Pack Layout - -``` -my-pack/ - ├─ pack.yaml # Required manifest - ├─ assets/ # Optional static assets (scripts, templates) - ├─ schemas/ # JSON schemas for inputs/outputs - ├─ docs/ # Markdown docs rendered in Console/CLI help - ├─ provenance/ # DSSE statements, SBOM, attestations - └─ README.md # Author-facing summary (optional) -``` - -Publishing via Packs Registry or OCI ensures the directory is canonical and hashed. - ---- - -## 4 · Manifest Schema (v1.0) - -```yaml -apiVersion: stellaops.io/pack.v1 -kind: TaskPack -metadata: - name: sbom-remediation - version: 1.3.0 - description: > - Audit SBOM drift, quiet high-risk findings, and export mitigation evidence. - tags: [sbom, remediation, policy] - tenantVisibility: ["west-prod", "east-stage"] # optional allowlist - maintainers: - - name: Jane Doe - email: jane@example.com - license: AGPL-3.0-or-later - annotations: - imposedRuleReminder: true - -spec: - inputs: - - name: sbomBundle - type: object - schema: schemas/sbom-bundle.schema.json - required: true - - name: dryRun - type: boolean - default: false - secrets: - - name: jiraToken - scope: Packs.Run # Authority scope required - description: Optional token for ticket automation - approvals: - - id: security-review - grants: ["Packs.Approve"] - expiresAfter: PT4H - reasonTemplate: "Approve remediation for SBOM {{ inputs.sbomBundle.metadata.image }}" - steps: - - id: validate-input - run: - uses: builtin:validate-schema - with: - target: "{{ inputs.sbomBundle }}" - schema: schemas/sbom-bundle.schema.json - - id: plan-remediation - when: "{{ not inputs.dryRun }}" - run: - uses: builtin:policy-simulate - with: - sbom: "{{ inputs.sbomBundle }}" - policy: "policies/remediation.yaml" - - id: approval-gate - gate: - approval: security-review - message: "Security must approve remediation before changes apply." 
- - id: apply-remediation - run: - uses: builtin:cli-command - with: - command: ["stella", "policy", "promote", "--from-pack"] - - id: export-evidence - run: - uses: builtin:evidence-export - with: - includeArtifacts: ["{{ steps.plan-remediation.outputs.planPath }}"] - outputs: - - name: evidenceBundle - type: file - path: "{{ steps.export-evidence.outputs.bundlePath }}" - success: - message: "Remediation applied; evidence bundle ready." - failure: - retries: - maxAttempts: 1 - backoffSeconds: 0 - message: "Remediation failed; see evidence bundle for context." -``` - -### 4.1 Field Summary - -| Field | Description | Requirements | -|-------|-------------|--------------| -| `metadata` | Human-facing metadata; used for registry listings and RBAC hints. | `name` (DNS-1123), `version` (SemVer), `description` ≤ 2048 chars. | -| `spec.inputs` | Declarative inputs validated at plan time. | Must include type; custom schema optional but recommended. | -| `spec.secrets` | Secrets requested at runtime; never stored in pack bundle. | Each secret references Authority scope; CLI prompts or injects from profiles. | -| `spec.approvals` | Named approval gates with required grants and TTL. | ID unique per pack; `grants` map to Authority roles. | -| `spec.steps` | Execution graph; each step is `run`, `gate`, `parallel`, or `map`. | Steps must declare deterministic `uses` module and `id`. | -| `spec.outputs` | Declared artifacts for downstream automation. | `type` can be `file`, `object`, or `url`; path/expression required. | -| `success` / `failure` | Messages + retry policy. | `failure.retries.maxAttempts` + `backoffSeconds` default to 0. | - ---- - -## 5 · Step Types - -| Type | Schema | Notes | -|------|--------|-------| -| `run` | Executes a built-in module (`builtin:*`) or registry-provided module. | Modules must be deterministic, side-effect constrained, and versioned. | -| `parallel` | Executes sub-steps concurrently; `maxParallel` optional. 
| Results aggregated; failures trigger abort unless `continueOnError`. | -| `map` | Iterates over deterministic list; each iteration spawns sub-step. | Sequence derived from expression result; ordering stable. | -| `gate.approval` | Blocks until approval recorded with required grants. | Supports `autoExpire` to cancel run on timeout. | -| `gate.policy` | Calls Policy Engine to ensure criteria met (e.g., no critical findings). | Fails run if gate not satisfied. | - -`when` expressions must be pure (no side effects) and rely only on declared inputs or prior outputs. - ---- - -## 6 · Determinism & Validation - -1. **Plan phase** (`stella pack plan`, `TaskRunner.Plan` API) parses manifest, resolves expressions, validates schemas, and emits canonical graph with hash. -2. **Simulation** compares plan vs dry-run results, capturing differences in `planDiff`. Required for approvals in sealed environments. -3. **Execution** uses plan hash to ensure runtime graph matches simulation. Divergence aborts run. -4. **Evidence**: Task Runner emits DSSE attestation referencing plan hash, input digests, and output artifacts. - -Validation pipeline: - -```text -pack.yaml ──▶ schema validation ──▶ expression audit ──▶ determinism guard ──▶ signing -``` - -Packs must pass CLI validation before publishing. - ---- - -## 7 · Signatures & Provenance - -- Pack bundles are signed with **cosign** (keyless Fulcio/KMS supported) and optionally DSSE envelopes. -- `provenance/` directory stores signed statements (SLSA Build L1+) linking source repo, CI run, and manifest hash. -- Registry verifies signatures on push/pull; Task Runner refuses unsigned packs unless in development mode. 
-- Attestations include: - - Pack manifest digest (`sha256`) - - Pack bundle digest - - Build metadata (`git.ref`, `ci.workflow`, `cli.version`) - ---- - -## 8 · RBAC & Scopes - -Authority scopes introduced by `AUTH-PACKS-41-001`: - -| Scope | Purpose | -|-------|---------| -| `Packs.Read` | Discover packs, download manifests. | -| `Packs.Write` | Publish/update packs in registry (requires signature). | -| `Packs.Run` | Execute packs via CLI/Task Runner. | -| `Packs.Approve` | Fulfil approval gates defined in packs. | - -Task Runner enforces scopes per tenant; pack metadata may further restrict tenant visibility (`metadata.tenantVisibility`). - ---- - -## 9 · Observability & Evidence - -- Metrics: `pack_run_duration_seconds`, `pack_step_retry_total`, `pack_gate_wait_seconds`. -- Logs: Structured JSON per step with scrubbed inputs (`secretMask` applied). -- Timeline events: `pack.started`, `pack.approval.requested`, `pack.approval.granted`, `pack.completed`. -- Evidence bundle includes: - - Plan manifest (canonical JSON) - - Step transcripts (redacted) - - Artifacts manifest (sha256, size) - - Attestations references - ---- - -## 10 · Compatibility Matrix - -| CLI Version | Pack API | Task Runner | Notes | -|-------------|----------|-------------|-------| -| 2025.10.x | `pack.v1` | Runner build `>=2025.10.0` | Approvals optional, loops disabled. | -| 2025.12.x | `pack.v1` | Runner build `>=2025.12.0` | Approvals resume, secrets injection, localization strings. | -| Future | `pack.v2` | TBD | Will introduce typed outputs & partial replay (track in Epic 13). | - -CLI enforces compatibility: running pack with unsupported features yields `ERR_PACK_UNSUPPORTED`. - ---- - -## 11 · Publishing Workflow - -1. Author pack (`pack.yaml`, assets, docs). -2. Run `stella pack validate` (schema + determinism). -3. Generate bundle: `stella pack build --output my-pack.stella-pack.tgz`. -4. Sign: `cosign sign-blob my-pack.stella-pack.tgz`. -5. 
Publish: `stella pack push registry.example.com/org/my-pack:1.3.0`. -6. Registry verifies signature, records provenance, and exposes pack via API. - ---- - -## 12 · Compliance Checklist - -- [ ] Manifest schema documented for all fields, including approvals, secrets, and outputs. -- [ ] Determinism requirements outlined with plan/simulate semantics and CLI validation steps. -- [ ] Signing + provenance expectations spelled out with cosign/DSSE references. -- [ ] RBAC scopes (`Packs.*`) and tenant visibility rules captured. -- [ ] Observability (metrics, logs, evidence) described for Task Runner integrations. -- [ ] Compatibility matrix enumerates CLI/Runner requirements. -- [ ] Publishing workflow documented with CLI commands. -- [ ] Imposed rule reminder included at top of document. - ---- - -*Last updated: 2025-10-27 (Sprint 43).* - +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +# Task Pack Specification (Sprint 43 Draft) + +The Task Pack specification defines a deterministic, auditable format that enables operators to encode multi-step maintenance, validation, and deployment workflows. Packs are executed by the Task Runner service, distributed through the Packs Registry, and invoked via the StellaOps CLI (`stella pack ...`) or Orchestrator integrations. + +--- + +## 1 · Goals & Scope + +- **Deterministic execution.** Identical inputs yield identical run graphs, output manifests, and evidence bundles across environments (online, sealed, or offline). +- **Secure-by-default.** Pack metadata must capture provenance, signatures, RBAC requirements, and secret usage; execution enforces tenant scopes and approvals. +- **Portable.** Packs are distributed as signed OCI artifacts or tarballs that work in connected and air-gapped deployments, including Offline Kit mirrors. 
+- **Composable.** Packs can reference reusable steps, expressions, and shared libraries without sacrificing determinism or auditability. + +Non-goals: full-blown workflow orchestration, unbounded scripting, or remote code injection. All logic is declarative and constrained to Task Runner capabilities. + +--- + +## 2 · Terminology + +| Term | Definition | +|------|------------| +| **Pack manifest** | Primary YAML document (`pack.yaml`) describing metadata, inputs, steps, policies, and evidence expectations. | +| **Step** | Atomic unit of work executed by Task Runner (e.g., command, API call, policy gate, approval). Steps can be sequential or parallel. | +| **Expression** | Deterministic evaluation (JMESPath-like) used for branching, templating, and conditionals. | +| **Policy gate** | Declarative rule that blocks execution until conditions are met (e.g., approval recorded, external signal received). | +| **Artifact** | File, JSON blob, or OCI object produced by a step, referenced in manifests and evidence bundles. | +| **Pack bundle** | Distribution archive (`.stella-pack.tgz` or OCI ref) containing manifest, assets, schemas, and provenance metadata. | + +--- + +## 3 · Pack Layout + +``` +my-pack/ + ├─ pack.yaml # Required manifest + ├─ assets/ # Optional static assets (scripts, templates) + ├─ schemas/ # JSON schemas for inputs/outputs + ├─ docs/ # Markdown docs rendered in Console/CLI help + ├─ provenance/ # DSSE statements, SBOM, attestations + └─ README.md # Author-facing summary (optional) +``` + +Publishing via Packs Registry or OCI ensures the directory is canonical and hashed. + +--- + +## 4 · Manifest Schema (v1.0) + +```yaml +apiVersion: stellaops.io/pack.v1 +kind: TaskPack +metadata: + name: sbom-remediation + version: 1.3.0 + description: > + Audit SBOM drift, quiet high-risk findings, and export mitigation evidence. 
+ tags: [sbom, remediation, policy] + tenantVisibility: ["west-prod", "east-stage"] # optional allowlist + maintainers: + - name: Jane Doe + email: jane@example.com + license: AGPL-3.0-or-later + annotations: + imposedRuleReminder: true + +spec: + inputs: + - name: sbomBundle + type: object + schema: schemas/sbom-bundle.schema.json + required: true + - name: dryRun + type: boolean + default: false + secrets: + - name: jiraToken + scope: packs.run # Authority scope required + description: Optional token for ticket automation + approvals: + - id: security-review + grants: ["packs.approve"] + expiresAfter: PT4H + reasonTemplate: "Approve remediation for SBOM {{ inputs.sbomBundle.metadata.image }}" + steps: + - id: validate-input + run: + uses: builtin:validate-schema + with: + target: "{{ inputs.sbomBundle }}" + schema: schemas/sbom-bundle.schema.json + - id: plan-remediation + when: "{{ not inputs.dryRun }}" + run: + uses: builtin:policy-simulate + with: + sbom: "{{ inputs.sbomBundle }}" + policy: "policies/remediation.yaml" + - id: approval-gate + gate: + approval: security-review + message: "Security must approve remediation before changes apply." + - id: apply-remediation + run: + uses: builtin:cli-command + with: + command: ["stella", "policy", "promote", "--from-pack"] + - id: export-evidence + run: + uses: builtin:evidence-export + with: + includeArtifacts: ["{{ steps.plan-remediation.outputs.planPath }}"] + outputs: + - name: evidenceBundle + type: file + path: "{{ steps.export-evidence.outputs.bundlePath }}" + success: + message: "Remediation applied; evidence bundle ready." + failure: + retries: + maxAttempts: 1 + backoffSeconds: 0 + message: "Remediation failed; see evidence bundle for context." +``` + +### 4.1 Field Summary + +| Field | Description | Requirements | +|-------|-------------|--------------| +| `metadata` | Human-facing metadata; used for registry listings and RBAC hints. | `name` (DNS-1123), `version` (SemVer), `description` ≤ 2048 chars. 
| +| `spec.inputs` | Declarative inputs validated at plan time. | Must include type; custom schema optional but recommended. | +| `spec.secrets` | Secrets requested at runtime; never stored in pack bundle. | Each secret references Authority scope; CLI prompts or injects from profiles. | +| `spec.approvals` | Named approval gates with required grants and TTL. | ID unique per pack; `grants` map to Authority roles. | +| `spec.steps` | Execution graph; each step is `run`, `gate`, `parallel`, or `map`. | Steps must declare deterministic `uses` module and `id`. | +| `spec.outputs` | Declared artifacts for downstream automation. | `type` can be `file`, `object`, or `url`; path/expression required. | +| `success` / `failure` | Messages + retry policy. | `failure.retries.maxAttempts` + `backoffSeconds` default to 0. | + +--- + +## 5 · Step Types + +| Type | Schema | Notes | +|------|--------|-------| +| `run` | Executes a built-in module (`builtin:*`) or registry-provided module. | Modules must be deterministic, side-effect constrained, and versioned. | +| `parallel` | Executes sub-steps concurrently; `maxParallel` optional. | Results aggregated; failures trigger abort unless `continueOnError`. | +| `map` | Iterates over deterministic list; each iteration spawns sub-step. | Sequence derived from expression result; ordering stable. | +| `gate.approval` | Blocks until approval recorded with required grants. | Supports `autoExpire` to cancel run on timeout. | +| `gate.policy` | Calls Policy Engine to ensure criteria met (e.g., no critical findings). | Fails run if gate not satisfied. | + +`when` expressions must be pure (no side effects) and rely only on declared inputs or prior outputs. + +--- + +## 6 · Determinism & Validation + +1. **Plan phase** (`stella pack plan`, `TaskRunner.Plan` API) parses manifest, resolves expressions, validates schemas, and emits canonical graph with hash. +2. **Simulation** compares plan vs dry-run results, capturing differences in `planDiff`. 
Required for approvals in sealed environments. +3. **Execution** uses plan hash to ensure runtime graph matches simulation. Divergence aborts run. +4. **Evidence**: Task Runner emits DSSE attestation referencing plan hash, input digests, and output artifacts. + +Validation pipeline: + +```text +pack.yaml ──▶ schema validation ──▶ expression audit ──▶ determinism guard ──▶ signing +``` + +Packs must pass CLI validation before publishing. + +--- + +## 7 · Signatures & Provenance + +- Pack bundles are signed with **cosign** (keyless Fulcio/KMS supported) and optionally DSSE envelopes. +- `provenance/` directory stores signed statements (SLSA Build L1+) linking source repo, CI run, and manifest hash. +- Registry verifies signatures on push/pull; Task Runner refuses unsigned packs unless in development mode. +- Attestations include: + - Pack manifest digest (`sha256`) + - Pack bundle digest + - Build metadata (`git.ref`, `ci.workflow`, `cli.version`) + +--- + +## 8 · RBAC & Scopes + +Authority scopes introduced by `AUTH-PACKS-41-001`: + +| Scope | Purpose | +|-------|---------| +| `packs.read` | Discover packs, download manifests. | +| `packs.write` | Publish/update packs in registry (requires signature). | +| `packs.run` | Execute packs via CLI/Task Runner. | +| `packs.approve` | Fulfil approval gates defined in packs. | + +Task Runner enforces scopes per tenant; pack metadata may further restrict tenant visibility (`metadata.tenantVisibility`). + +--- + +## 9 · Observability & Evidence + +- Metrics: `pack_run_duration_seconds`, `pack_step_retry_total`, `pack_gate_wait_seconds`. +- Logs: Structured JSON per step with scrubbed inputs (`secretMask` applied). +- Timeline events: `pack.started`, `pack.approval.requested`, `pack.approval.granted`, `pack.completed`. 
+- Evidence bundle includes: + - Plan manifest (canonical JSON) + - Step transcripts (redacted) + - Artifacts manifest (sha256, size) + - Attestations references + +--- + +## 10 · Compatibility Matrix + +| CLI Version | Pack API | Task Runner | Notes | +|-------------|----------|-------------|-------| +| 2025.10.x | `pack.v1` | Runner build `>=2025.10.0` | Approvals optional, loops disabled. | +| 2025.12.x | `pack.v1` | Runner build `>=2025.12.0` | Approvals resume, secrets injection, localization strings. | +| Future | `pack.v2` | TBD | Will introduce typed outputs & partial replay (track in Epic 13). | + +CLI enforces compatibility: running pack with unsupported features yields `ERR_PACK_UNSUPPORTED`. + +--- + +## 11 · Publishing Workflow + +1. Author pack (`pack.yaml`, assets, docs). +2. Run `stella pack validate` (schema + determinism). +3. Generate bundle: `stella pack build --output my-pack.stella-pack.tgz`. +4. Sign: `cosign sign-blob my-pack.stella-pack.tgz`. +5. Publish: `stella pack push registry.example.com/org/my-pack:1.3.0`. +6. Registry verifies signature, records provenance, and exposes pack via API. + +--- + +## 12 · Compliance Checklist + +- [ ] Manifest schema documented for all fields, including approvals, secrets, and outputs. +- [ ] Determinism requirements outlined with plan/simulate semantics and CLI validation steps. +- [ ] Signing + provenance expectations spelled out with cosign/DSSE references. +- [ ] RBAC scopes (`packs.*`) and tenant visibility rules captured. +- [ ] Observability (metrics, logs, evidence) described for Task Runner integrations. +- [ ] Compatibility matrix enumerates CLI/Runner requirements. +- [ ] Publishing workflow documented with CLI commands. +- [ ] Imposed rule reminder included at top of document. 
+ +--- + +*Last updated: 2025-10-27 (Sprint 43).* + diff --git a/docs/updates/2025-11-01-orch-admin-scope.md b/docs/updates/2025-11-01-orch-admin-scope.md new file mode 100644 index 00000000..970efd6c --- /dev/null +++ b/docs/updates/2025-11-01-orch-admin-scope.md @@ -0,0 +1,18 @@ +# 2025-11-01 · Authority adds Orch.Admin quota controls + +**What changed** + +- Introduced new `orch:quota` scope and `Orch.Admin` role for Orchestrator quota and burst adjustments. +- Client credential requests for `orch:quota` now require `quota_reason` (≤256 chars) and accept optional `quota_ticket` (≤128 chars). Authority records both values under `quota.reason` / `quota.ticket` audit properties. +- Tokens embedding `orch:quota` expose the reason/ticket claims so downstream services and audit tooling can trace quota increases or emergency backfills. +- Console, CLI, and configuration samples include the new role plus environment variables (`STELLAOPS_ORCH_QUOTA_REASON`, `STELLAOPS_ORCH_QUOTA_TICKET`) for automation. + +**Why** + +Quotas and replay backfills materially affect tenant isolation and platform capacity. Capturing explicit operator intent keeps change windows reviewable and aligns with platform audit requirements. + +**Actions** + +1. Update Authority configuration/offline bundles to seed `Orch.Admin` role for the handful of ops identities that manage quotas. +2. Adjust automation to pass `quota_reason`/`quota_ticket` when exchanging tokens for `orch:quota`. +3. Monitor `authority.client_credentials.grant` records for the new `quota.*` audit properties when reviewing change windows. 
diff --git a/etc/authority.yaml b/etc/authority.yaml index 05c2cabe..ad74bbc5 100644 --- a/etc/authority.yaml +++ b/etc/authority.yaml @@ -152,13 +152,15 @@ clients: tenants: - name: "tenant-default" - roles: - orch-viewer: - scopes: [ "orch:read" ] - orch-operator: - scopes: [ "orch:read", "orch:operate" ] - export-viewer: - scopes: [ "export.viewer" ] + roles: + orch-viewer: + scopes: [ "orch:read" ] + orch-operator: + scopes: [ "orch:read", "orch:operate" ] + orch-admin: + scopes: [ "orch:read", "orch:operate", "orch:quota" ] + export-viewer: + scopes: [ "export.viewer" ] export-operator: scopes: [ "export.viewer", "export.operator" ] export-admin: @@ -169,10 +171,24 @@ tenants: scopes: [ "policy:review", "policy:read", "policy:simulate", "findings:read" ] policy-approver: scopes: [ "policy:approve", "policy:review", "policy:read", "policy:simulate", "findings:read" ] - policy-operator: - scopes: [ "policy:operate", "policy:run", "policy:activate", "policy:read", "policy:simulate", "findings:read" ] - policy-auditor: - scopes: [ "policy:audit", "policy:read", "policy:simulate", "findings:read" ] + policy-operator: + scopes: [ "policy:operate", "policy:run", "policy:activate", "policy:read", "policy:simulate", "findings:read" ] + policy-auditor: + scopes: [ "policy:audit", "policy:read", "policy:simulate", "findings:read" ] + advisory-ai-viewer: + scopes: [ "advisory-ai:view" ] + advisory-ai-operator: + scopes: [ "advisory-ai:view", "advisory-ai:operate" ] + advisory-ai-admin: + scopes: [ "advisory-ai:view", "advisory-ai:operate", "advisory-ai:admin" ] + observability-viewer: + scopes: [ "obs:read", "timeline:read", "evidence:read", "attest:read" ] + observability-investigator: + scopes: [ "obs:read", "timeline:read", "timeline:write", "evidence:read", "evidence:create", "attest:read" ] + observability-legal: + scopes: [ "evidence:read", "evidence:hold" ] + observability-incident-commander: + scopes: [ "obs:read", "obs:incident", "timeline:read", 
"timeline:write", "evidence:create", "evidence:read", "attest:read" ] security: rateLimiting: @@ -191,17 +207,23 @@ security: memorySizeInKib: 19456 iterations: 2 parallelism: 1 - senderConstraints: - dpop: - enabled: true - proofLifetime: "00:05:00" - allowedClockSkew: "00:00:10" - replayWindow: "00:10:00" - nonce: - enabled: false - mtls: - enabled: false - -bypassNetworks: - - "127.0.0.1/32" - - "::1/128" + senderConstraints: + dpop: + enabled: true + proofLifetime: "00:05:00" + allowedClockSkew: "00:00:10" + replayWindow: "00:10:00" + nonce: + enabled: false + mtls: + enabled: false + +advisoryAi: + remoteInference: + enabled: false + requireTenantConsent: true + allowedProfiles: [] + +bypassNetworks: + - "127.0.0.1/32" + - "::1/128" diff --git a/etc/authority.yaml.sample b/etc/authority.yaml.sample index 93a1caa3..c54b0f15 100644 --- a/etc/authority.yaml.sample +++ b/etc/authority.yaml.sample @@ -1,29 +1,29 @@ -# StellaOps Authority configuration template. -# Copy to ../etc/authority.yaml (relative to the Authority content root) -# and adjust values to fit your environment. Environment variables -# prefixed with STELLAOPS_AUTHORITY_ override these values at runtime. -# Example: STELLAOPS_AUTHORITY__ISSUER=https://authority.example.com - -schemaVersion: 1 - -# Absolute issuer URI advertised to clients. Use HTTPS for anything -# beyond loopback development. -issuer: "https://authority.stella-ops.local" - -# Token lifetimes expressed as HH:MM:SS or DD.HH:MM:SS. -accessTokenLifetime: "00:02:00" -refreshTokenLifetime: "30.00:00:00" -identityTokenLifetime: "00:05:00" -authorizationCodeLifetime: "00:05:00" -deviceCodeLifetime: "00:15:00" - -# MongoDB storage connection details. -storage: - connectionString: "mongodb://localhost:27017/stellaops-authority" - # databaseName: "stellaops_authority" - commandTimeout: "00:00:30" - -# Signing configuration for revocation bundles and JWKS. +# StellaOps Authority configuration template. 
+# Copy to ../etc/authority.yaml (relative to the Authority content root) +# and adjust values to fit your environment. Environment variables +# prefixed with STELLAOPS_AUTHORITY_ override these values at runtime. +# Example: STELLAOPS_AUTHORITY__ISSUER=https://authority.example.com + +schemaVersion: 1 + +# Absolute issuer URI advertised to clients. Use HTTPS for anything +# beyond loopback development. +issuer: "https://authority.stella-ops.local" + +# Token lifetimes expressed as HH:MM:SS or DD.HH:MM:SS. +accessTokenLifetime: "00:02:00" +refreshTokenLifetime: "30.00:00:00" +identityTokenLifetime: "00:05:00" +authorizationCodeLifetime: "00:05:00" +deviceCodeLifetime: "00:15:00" + +# MongoDB storage connection details. +storage: + connectionString: "mongodb://localhost:27017/stellaops-authority" + # databaseName: "stellaops_authority" + commandTimeout: "00:00:30" + +# Signing configuration for revocation bundles and JWKS. signing: enabled: true activeKeyId: "authority-signing-2025-dev" @@ -36,303 +36,481 @@ signing: - keyId: "authority-signing-dev" path: "../certificates/authority-signing-dev.pem" source: "file" - # Rotation flow: - # 1. Generate a new PEM under ./certificates (e.g. authority-signing-2026-dev.pem). - # 2. Trigger the .gitea/workflows/authority-key-rotation.yml workflow (or run - # ops/authority/key-rotation.sh) with the new keyId/keyPath. - # 3. Update activeKeyId/keyPath above and move the previous key into additionalKeys - # so restarts retain retired material for JWKS consumers. - -# Bootstrap administrative endpoints (initial provisioning). -bootstrap: - enabled: false - apiKey: "change-me" - defaultIdentityProvider: "standard" - -# Directories scanned for Authority plug-ins. Relative paths resolve -# against the application content root, enabling air-gapped deployments -# that package plug-ins alongside binaries. 
-pluginDirectories: - - "../StellaOps.Authority.PluginBinaries" - # "/var/lib/stellaops/authority/plugins" - -# Plug-in manifests live in descriptors below; per-plugin settings are stored -# in the configurationDirectory (YAML files). Authority will load any enabled -# plugins and surface their metadata/capabilities to the host. -plugins: - configurationDirectory: "../etc/authority.plugins" - descriptors: - standard: - type: "standard" - assemblyName: "StellaOps.Authority.Plugin.Standard" - enabled: true - configFile: "standard.yaml" - capabilities: - - password - - bootstrap - - clientProvisioning - metadata: - defaultRole: "operators" - # Example for an external identity provider plugin. Leave disabled unless - # the plug-in package exists under StellaOps.Authority.PluginBinaries. - ldap: - type: "ldap" - assemblyName: "StellaOps.Authority.Plugin.Ldap" - enabled: false - configFile: "ldap.yaml" - capabilities: - - password - - mfa - -# OAuth client registrations issued by Authority. These examples cover Notify WebService -# in dev (notify.dev audience) and production (notify audience). Replace the secret files -# with paths to your sealed credentials before enabling bootstrap mode. 
-clients: - - clientId: "notify-web-dev" - displayName: "Notify WebService (dev)" - grantTypes: [ "client_credentials" ] - audiences: [ "notify.dev" ] - scopes: [ "notify.read", "notify.admin" ] - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/notify-web-dev.secret" - - clientId: "notify-web" - displayName: "Notify WebService" - grantTypes: [ "client_credentials" ] - audiences: [ "notify" ] - scopes: [ "notify.read", "notify.admin" ] - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/notify-web.secret" - - clientId: "concelier-ingest" - displayName: "Concelier Ingestion" - grantTypes: [ "client_credentials" ] - audiences: [ "api://concelier" ] - scopes: [ "advisory:ingest", "advisory:read" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/concelier-ingest.secret" - - clientId: "excitor-ingest" - displayName: "Excititor VEX Ingestion" - grantTypes: [ "client_credentials" ] - audiences: [ "api://excitor" ] - scopes: [ "vex:ingest", "vex:read" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/excitor-ingest.secret" - - clientId: "aoc-verifier" - displayName: "AOC Verification Agent" - grantTypes: [ "client_credentials" ] - audiences: [ "api://concelier", "api://excitor" ] - scopes: [ "aoc:verify", "advisory:read", "vex:read" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/aoc-verifier.secret" - - clientId: "policy-engine" - displayName: "Policy Engine Service" - grantTypes: [ "client_credentials" ] - audiences: [ "api://policy-engine" ] - scopes: [ "policy:run", "findings:read", "effective:write" ] - tenant: "tenant-default" - properties: - serviceIdentity: "policy-engine" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/policy-engine.secret" - - clientId: "policy-cli" - 
displayName: "Policy Automation CLI" - grantTypes: [ "client_credentials" ] - audiences: [ "api://policy-engine" ] - scopes: [ "policy:read", "policy:author", "policy:review", "policy:simulate", "findings:read" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/policy-cli.secret" - - clientId: "exceptions-service" - displayName: "Policy Engine Exceptions Worker" - grantTypes: [ "client_credentials" ] - audiences: [ "api://policy-engine" ] - scopes: [ "exceptions:read", "exceptions:write" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/exceptions-service.secret" - - clientId: "console-web" - displayName: "StellaOps Console" - grantTypes: [ "authorization_code", "refresh_token" ] - audiences: [ "console" ] - scopes: [ "openid", "profile", "email", "ui.read", "authority:tenants.read", "advisory:read", "vex:read", "exceptions:read", "exceptions:approve", "aoc:verify", "findings:read", "orch:read", "vuln:read" ] - # exceptions:approve is elevated via fresh-auth and requires an MFA-capable identity provider. - tenant: "tenant-default" - senderConstraint: "dpop" - redirectUris: - - "https://console.stella-ops.local/oidc/callback" - postLogoutRedirectUris: - - "https://console.stella-ops.local/" - # Gateway must forward X-Stella-Tenant for /console endpoints; fresh-auth window (300s) - # returned by /console/profile governs admin actions in the Console UI. 
- auth: - type: "client_secret" - secretFile: "../secrets/console-web.secret" - - clientId: "cartographer-service" - displayName: "Cartographer Service" - grantTypes: [ "client_credentials" ] - audiences: [ "api://cartographer" ] - scopes: [ "graph:write", "graph:read" ] - tenant: "tenant-default" - properties: - serviceIdentity: "cartographer" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/cartographer-service.secret" - - clientId: "graph-api" - displayName: "Graph API Gateway" - grantTypes: [ "client_credentials" ] - audiences: [ "api://graph-api" ] - scopes: [ "graph:read", "graph:export", "graph:simulate" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/graph-api.secret" - - clientId: "export-center-operator" - displayName: "Export Center Operator" - grantTypes: [ "client_credentials" ] - audiences: [ "api://export-center" ] - scopes: [ "export.viewer", "export.operator" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/export-center-operator.secret" - - clientId: "export-center-admin" - displayName: "Export Center Admin" - grantTypes: [ "client_credentials" ] - audiences: [ "api://export-center" ] - scopes: [ "export.viewer", "export.operator", "export.admin" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/export-center-admin.secret" - - clientId: "vuln-explorer-ui" - displayName: "Vuln Explorer UI" - grantTypes: [ "client_credentials" ] - audiences: [ "api://vuln-explorer" ] - scopes: [ "vuln:read" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/vuln-explorer-ui.secret" - # Signals sensors must request aoc:verify alongside write scope. 
- - clientId: "signals-uploader" - displayName: "Signals Sensor" - grantTypes: [ "client_credentials" ] - audiences: [ "api://signals" ] - scopes: [ "signals:write", "signals:read", "aoc:verify" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/signals-uploader.secret" - -tenants: - - name: "tenant-default" - roles: - orch-viewer: - scopes: [ "orch:read" ] - orch-operator: - scopes: [ "orch:read", "orch:operate" ] - policy-author: - scopes: [ "policy:author", "policy:read", "policy:simulate", "findings:read" ] - policy-reviewer: - scopes: [ "policy:review", "policy:read", "policy:simulate", "findings:read" ] - policy-approver: - scopes: [ "policy:approve", "policy:review", "policy:read", "policy:simulate", "findings:read" ] - policy-operator: - scopes: [ "policy:operate", "policy:run", "policy:activate", "policy:read", "policy:simulate", "findings:read" ] - policy-auditor: - scopes: [ "policy:audit", "policy:read", "policy:simulate", "findings:read" ] - export-viewer: - scopes: [ "export.viewer" ] - export-operator: - scopes: [ "export.viewer", "export.operator" ] - export-admin: - scopes: [ "export.viewer", "export.operator", "export.admin" ] - -# Exception approval routing templates used by Policy Engine and Console. -exceptions: - routingTemplates: - - id: "secops" - authorityRouteId: "approvals/secops" - requireMfa: true - description: "Security Operations approval chain" - - id: "governance" - authorityRouteId: "approvals/governance" - requireMfa: false - description: "Governance review (non-production)" - -# CIDR ranges that bypass network-sensitive policies (e.g. on-host cron jobs). -# Keep the list tight: localhost is sufficient for most air-gapped installs. -bypassNetworks: - - "127.0.0.1/32" - - "::1/128" - -# Security posture (rate limiting + sender constraints). 
-security: - rateLimiting: - token: - enabled: true - permitLimit: 30 - window: "00:01:00" - queueLimit: 0 - authorize: - enabled: true - permitLimit: 60 - window: "00:01:00" - queueLimit: 10 - internal: - enabled: false - permitLimit: 5 - window: "00:01:00" - queueLimit: 0 - senderConstraints: - dpop: - enabled: true - allowedAlgorithms: [ "ES256", "ES384" ] - proofLifetime: "00:02:00" - allowedClockSkew: "00:00:30" - replayWindow: "00:05:00" - nonce: - enabled: true - ttl: "00:10:00" - maxIssuancePerMinute: 120 - store: "memory" # Set to "redis" for multi-node Authority deployments. - requiredAudiences: - - "signer" - - "attestor" - # redisConnectionString: "redis://authority-redis:6379?ssl=false" - mtls: - enabled: false - requireChainValidation: true - rotationGrace: "00:15:00" - enforceForAudiences: - - "signer" # Requests for these audiences force mTLS sender constraints - allowedSanTypes: - - "dns" - - "uri" - allowedCertificateAuthorities: [ ] - allowedSubjectPatterns: [ ] + # Rotation flow: + # 1. Generate a new PEM under ./certificates (e.g. authority-signing-2026-dev.pem). + # 2. Trigger the .gitea/workflows/authority-key-rotation.yml workflow (or run + # ops/authority/key-rotation.sh) with the new keyId/keyPath. + # 3. Update activeKeyId/keyPath above and move the previous key into additionalKeys + # so restarts retain retired material for JWKS consumers. 
+ +notifications: + ackTokens: + enabled: true + payloadType: "application/vnd.stellaops.notify-ack-token+json" + defaultLifetime: "00:15:00" + maxLifetime: "00:30:00" + algorithm: "ES256" + keySource: "file" + activeKeyId: "notify-ack-2025-dev" + keyPath: "../certificates/notify-ack-2025-dev.pem" + keyUse: "notify-ack" + jwksCacheLifetime: "00:05:00" + additionalKeys: [] + webhooks: + enabled: true + allowedHosts: + - "hooks.slack.com" + - "*.pagerduty.com" + escalation: + scope: "notify.escalate" + requireAdminScope: true + +apiLifecycle: + legacyAuth: + enabled: true + deprecationDate: "2025-11-01T00:00:00Z" + sunsetDate: "2026-05-01T00:00:00Z" + documentationUrl: "https://docs.stella-ops.org/migrations/authority/legacy-auth-endpoints" + notificationTopic: "authority.api.deprecation" + +advisoryAi: + remoteInference: + enabled: false + requireTenantConsent: true + allowedProfiles: + - "cloud-openai" + - "sovereign-local" + +# Bootstrap administrative endpoints (initial provisioning). +bootstrap: + enabled: false + apiKey: "change-me" + defaultIdentityProvider: "standard" + +# Directories scanned for Authority plug-ins. Relative paths resolve +# against the application content root, enabling air-gapped deployments +# that package plug-ins alongside binaries. +pluginDirectories: + - "../StellaOps.Authority.PluginBinaries" + # "/var/lib/stellaops/authority/plugins" + +# Plug-in manifests live in descriptors below; per-plugin settings are stored +# in the configurationDirectory (YAML files). Authority will load any enabled +# plugins and surface their metadata/capabilities to the host. +plugins: + configurationDirectory: "../etc/authority.plugins" + descriptors: + standard: + type: "standard" + assemblyName: "StellaOps.Authority.Plugin.Standard" + enabled: true + configFile: "standard.yaml" + capabilities: + - password + - bootstrap + - clientProvisioning + metadata: + defaultRole: "operators" + # Example for an external identity provider plugin. 
Leave disabled unless + # the plug-in package exists under StellaOps.Authority.PluginBinaries. + ldap: + type: "ldap" + assemblyName: "StellaOps.Authority.Plugin.Ldap" + enabled: false + configFile: "ldap.yaml" + capabilities: + - password + - mfa + +# OAuth client registrations issued by Authority. These examples cover Notify WebService +# in dev (notify.dev audience) and production (notify audience). Replace the secret files +# with paths to your sealed credentials before enabling bootstrap mode. +clients: + - clientId: "notify-web-dev" + displayName: "Notify WebService (dev)" + grantTypes: [ "client_credentials" ] + audiences: [ "notify.dev" ] + scopes: [ "notify.read", "notify.admin" ] + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/notify-web-dev.secret" + - clientId: "notify-web" + displayName: "Notify WebService" + grantTypes: [ "client_credentials" ] + audiences: [ "notify" ] + scopes: [ "notify.read", "notify.admin" ] + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/notify-web.secret" + - clientId: "concelier-ingest" + displayName: "Concelier Ingestion" + grantTypes: [ "client_credentials" ] + audiences: [ "api://concelier" ] + scopes: [ "advisory:ingest", "advisory:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/concelier-ingest.secret" + - clientId: "excitor-ingest" + displayName: "Excititor VEX Ingestion" + grantTypes: [ "client_credentials" ] + audiences: [ "api://excitor" ] + scopes: [ "vex:ingest", "vex:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/excitor-ingest.secret" + - clientId: "aoc-verifier" + displayName: "AOC Verification Agent" + grantTypes: [ "client_credentials" ] + audiences: [ "api://concelier", "api://excitor" ] + scopes: [ "aoc:verify", "advisory:read", "vex:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" 
+ auth: + type: "client_secret" + secretFile: "../secrets/aoc-verifier.secret" + - clientId: "airgap-operator" + displayName: "AirGap Operations CLI" + grantTypes: [ "client_credentials" ] + audiences: [ "api://airgap-controller", "api://airgap-importer" ] + scopes: [ "airgap:status:read", "airgap:import", "airgap:seal" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/airgap-operator.secret" + - clientId: "policy-engine" + displayName: "Policy Engine Service" + grantTypes: [ "client_credentials" ] + audiences: [ "api://policy-engine" ] + scopes: [ "policy:run", "findings:read", "effective:write" ] + tenant: "tenant-default" + properties: + serviceIdentity: "policy-engine" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/policy-engine.secret" + - clientId: "policy-cli" + displayName: "Policy Automation CLI" + grantTypes: [ "client_credentials" ] + audiences: [ "api://policy-engine" ] + scopes: [ "policy:read", "policy:author", "policy:review", "policy:simulate", "findings:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/policy-cli.secret" + - clientId: "exceptions-service" + displayName: "Policy Engine Exceptions Worker" + grantTypes: [ "client_credentials" ] + audiences: [ "api://policy-engine" ] + scopes: [ "exceptions:read", "exceptions:write" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/exceptions-service.secret" + - clientId: "console-web" + displayName: "StellaOps Console" + grantTypes: [ "authorization_code", "refresh_token" ] + audiences: [ "console" ] + scopes: [ "openid", "profile", "email", "ui.read", "authority:tenants.read", "advisory:read", "vex:read", "exceptions:read", "exceptions:approve", "aoc:verify", "findings:read", "airgap:status:read", "obs:read", "obs:incident", "timeline:read", "evidence:read", 
"attest:read", "orch:read", "vuln:read" ] + # exceptions:approve is elevated via fresh-auth and requires an MFA-capable identity provider. + tenant: "tenant-default" + senderConstraint: "dpop" + redirectUris: + - "https://console.stella-ops.local/oidc/callback" + postLogoutRedirectUris: + - "https://console.stella-ops.local/" + # Gateway must forward X-Stella-Tenant for /console endpoints; fresh-auth window (300s) + # returned by /console/profile governs admin actions in the Console UI. + auth: + type: "client_secret" + secretFile: "../secrets/console-web.secret" + - clientId: "cartographer-service" + displayName: "Cartographer Service" + grantTypes: [ "client_credentials" ] + audiences: [ "api://cartographer" ] + scopes: [ "graph:write", "graph:read" ] + tenant: "tenant-default" + properties: + serviceIdentity: "cartographer" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/cartographer-service.secret" + - clientId: "packs-registry" + displayName: "Packs Registry Service" + grantTypes: [ "client_credentials" ] + audiences: [ "api://packs-registry" ] + scopes: [ "packs.read", "packs.write" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/packs-registry.secret" + - clientId: "task-runner" + displayName: "Task Runner Service" + grantTypes: [ "client_credentials" ] + audiences: [ "api://task-runner" ] + scopes: [ "packs.run", "packs.read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/task-runner.secret" + - clientId: "pack-approver" + displayName: "Pack Approver Automation" + grantTypes: [ "client_credentials" ] + audiences: [ "api://task-runner" ] + scopes: [ "packs.approve", "packs.read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/pack-approver.secret" + - clientId: "graph-api" + displayName: "Graph API Gateway" + grantTypes: [ 
"client_credentials" ] + audiences: [ "api://graph-api" ] + scopes: [ "graph:read", "graph:export", "graph:simulate" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/graph-api.secret" + - clientId: "export-center-operator" + displayName: "Export Center Operator" + grantTypes: [ "client_credentials" ] + audiences: [ "api://export-center" ] + scopes: [ "export.viewer", "export.operator" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/export-center-operator.secret" + - clientId: "export-center-admin" + displayName: "Export Center Admin" + grantTypes: [ "client_credentials" ] + audiences: [ "api://export-center" ] + scopes: [ "export.viewer", "export.operator", "export.admin" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/export-center-admin.secret" + - clientId: "notify-service" + displayName: "Notify WebService" + grantTypes: [ "client_credentials" ] + audiences: [ "api://notify" ] + scopes: [ "notify.viewer", "notify.operator" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/notify-service.secret" + - clientId: "notify-admin" + displayName: "Notify Admin Automation" + grantTypes: [ "client_credentials" ] + audiences: [ "api://notify" ] + scopes: [ "notify.viewer", "notify.operator", "notify.admin" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/notify-admin.secret" + - clientId: "observability-web" + displayName: "Observability Console Backend" + grantTypes: [ "client_credentials" ] + audiences: [ "api://observability" ] + scopes: [ "obs:read", "timeline:read", "evidence:read", "attest:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/observability-web.secret" + - clientId: 
"timeline-indexer" + displayName: "Timeline Indexer Worker" + grantTypes: [ "client_credentials" ] + audiences: [ "api://timeline" ] + scopes: [ "timeline:write", "timeline:read", "obs:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/timeline-indexer.secret" + - clientId: "evidence-locker" + displayName: "Evidence Locker Service" + grantTypes: [ "client_credentials" ] + audiences: [ "api://evidence" ] + scopes: [ "evidence:create", "evidence:read", "evidence:hold" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/evidence-locker.secret" + - clientId: "incident-bridge" + displayName: "Incident Bridge Automation" + grantTypes: [ "client_credentials" ] + audiences: [ "api://observability" ] + scopes: [ "obs:incident", "obs:read", "timeline:read", "timeline:write", "evidence:create" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/incident-bridge.secret" + - clientId: "vuln-explorer-ui" + displayName: "Vuln Explorer UI" + grantTypes: [ "client_credentials" ] + audiences: [ "api://vuln-explorer" ] + scopes: [ "vuln:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/vuln-explorer-ui.secret" + # Signals sensors must request aoc:verify alongside write scope. 
+ - clientId: "signals-uploader" + displayName: "Signals Sensor" + grantTypes: [ "client_credentials" ] + audiences: [ "api://signals" ] + scopes: [ "signals:write", "signals:read", "aoc:verify" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/signals-uploader.secret" + +tenants: + - name: "tenant-default" + roles: + orch-viewer: + scopes: [ "orch:read" ] + orch-operator: + scopes: [ "orch:read", "orch:operate" ] + orch-admin: + scopes: [ "orch:read", "orch:operate", "orch:quota" ] + policy-author: + scopes: [ "policy:author", "policy:read", "policy:simulate", "findings:read" ] + policy-reviewer: + scopes: [ "policy:review", "policy:read", "policy:simulate", "findings:read" ] + policy-approver: + scopes: [ "policy:approve", "policy:review", "policy:read", "policy:simulate", "findings:read" ] + policy-operator: + scopes: [ "policy:operate", "policy:run", "policy:activate", "policy:read", "policy:simulate", "findings:read" ] + policy-auditor: + scopes: [ "policy:audit", "policy:read", "policy:simulate", "findings:read" ] + export-viewer: + scopes: [ "export.viewer" ] + export-operator: + scopes: [ "export.viewer", "export.operator" ] + export-admin: + scopes: [ "export.viewer", "export.operator", "export.admin" ] + notify-viewer: + scopes: [ "notify.viewer" ] + notify-operator: + scopes: [ "notify.viewer", "notify.operator" ] + notify-admin: + scopes: [ "notify.viewer", "notify.operator", "notify.admin" ] + observability-viewer: + scopes: [ "obs:read", "timeline:read", "evidence:read", "attest:read" ] + observability-investigator: + scopes: [ "obs:read", "timeline:read", "timeline:write", "evidence:read", "evidence:create", "attest:read" ] + observability-legal: + scopes: [ "evidence:read", "evidence:hold" ] + observability-incident-commander: + scopes: [ "obs:read", "obs:incident", "timeline:read", "timeline:write", "evidence:create", "evidence:read", "attest:read" ] + airgap-viewer: + scopes: [ 
"airgap:status:read" ] + airgap-operator: + scopes: [ "airgap:status:read", "airgap:import" ] + airgap-admin: + scopes: [ "airgap:status:read", "airgap:import", "airgap:seal" ] + advisory-ai-viewer: + scopes: [ "advisory-ai:view" ] + advisory-ai-operator: + scopes: [ "advisory-ai:view", "advisory-ai:operate" ] + advisory-ai-admin: + scopes: [ "advisory-ai:view", "advisory-ai:operate", "advisory-ai:admin" ] + advisoryAi: + remoteInference: + consentGranted: false + consentVersion: "" + consentedAt: "" + consentedBy: "" + +# Exception approval routing templates used by Policy Engine and Console. +exceptions: + routingTemplates: + - id: "secops" + authorityRouteId: "approvals/secops" + requireMfa: true + description: "Security Operations approval chain" + - id: "governance" + authorityRouteId: "approvals/governance" + requireMfa: false + description: "Governance review (non-production)" + +# CIDR ranges that bypass network-sensitive policies (e.g. on-host cron jobs). +# Keep the list tight: localhost is sufficient for most air-gapped installs. +bypassNetworks: + - "127.0.0.1/32" + - "::1/128" + +# Security posture (rate limiting + sender constraints). +security: + rateLimiting: + token: + enabled: true + permitLimit: 30 + window: "00:01:00" + queueLimit: 0 + authorize: + enabled: true + permitLimit: 60 + window: "00:01:00" + queueLimit: 10 + internal: + enabled: false + permitLimit: 5 + window: "00:01:00" + queueLimit: 0 + senderConstraints: + dpop: + enabled: true + allowedAlgorithms: [ "ES256", "ES384" ] + proofLifetime: "00:02:00" + allowedClockSkew: "00:00:30" + replayWindow: "00:05:00" + nonce: + enabled: true + ttl: "00:10:00" + maxIssuancePerMinute: 120 + store: "memory" # Set to "redis" for multi-node Authority deployments. 
+ requiredAudiences: + - "signer" + - "attestor" + # redisConnectionString: "redis://authority-redis:6379?ssl=false" + mtls: + enabled: false + requireChainValidation: true + rotationGrace: "00:15:00" + enforceForAudiences: + - "signer" # Requests for these audiences force mTLS sender constraints + allowedSanTypes: + - "dns" + - "uri" + allowedCertificateAuthorities: [ ] + allowedSubjectPatterns: [ ] + +advisoryAi: + remoteInference: + enabled: false + requireTenantConsent: true + allowedProfiles: [] diff --git a/etc/issuer-directory.yaml.sample b/etc/issuer-directory.yaml.sample new file mode 100644 index 00000000..958be473 --- /dev/null +++ b/etc/issuer-directory.yaml.sample @@ -0,0 +1,22 @@ +IssuerDirectory: + telemetry: + minimumLogLevel: Information + authority: + enabled: true + issuer: https://authority.example.com/realms/stellaops + requireHttpsMetadata: true + audiences: + - stellaops-platform + readScope: issuer-directory:read + writeScope: issuer-directory:write + adminScope: issuer-directory:admin + tenantHeader: X-StellaOps-Tenant + seedCsafPublishers: true + csafSeedPath: data/csaf-publishers.json + Mongo: + connectionString: mongodb://localhost:27017 + database: issuer-directory + issuersCollection: issuers + issuerKeysCollection: issuer_keys + issuerTrustCollection: issuer_trust_overrides + auditCollection: issuer_audit diff --git a/etc/notify.dev.yaml b/etc/notify.dev.yaml index 5261d81b..f2f626d2 100644 --- a/etc/notify.dev.yaml +++ b/etc/notify.dev.yaml @@ -14,10 +14,11 @@ authority: allowAnonymousFallback: false backchannelTimeoutSeconds: 30 tokenClockSkewSeconds: 60 - audiences: - - notify.dev - readScope: notify.read - adminScope: notify.admin + audiences: + - notify.dev + viewerScope: notify.viewer + operatorScope: notify.operator + adminScope: notify.admin api: basePath: "/api/v1/notify" diff --git a/etc/notify.prod.yaml b/etc/notify.prod.yaml index e0c993b7..1610229a 100644 --- a/etc/notify.prod.yaml +++ b/etc/notify.prod.yaml @@ -14,10 
+14,11 @@ authority: allowAnonymousFallback: false backchannelTimeoutSeconds: 30 tokenClockSkewSeconds: 60 - audiences: - - notify - readScope: notify.read - adminScope: notify.admin + audiences: + - notify + viewerScope: notify.viewer + operatorScope: notify.operator + adminScope: notify.admin api: basePath: "/api/v1/notify" diff --git a/etc/notify.stage.yaml b/etc/notify.stage.yaml index 3e336dbd..caf57784 100644 --- a/etc/notify.stage.yaml +++ b/etc/notify.stage.yaml @@ -14,10 +14,11 @@ authority: allowAnonymousFallback: false backchannelTimeoutSeconds: 30 tokenClockSkewSeconds: 60 - audiences: - - notify - readScope: notify.read - adminScope: notify.admin + audiences: + - notify + viewerScope: notify.viewer + operatorScope: notify.operator + adminScope: notify.admin api: basePath: "/api/v1/notify" diff --git a/etc/notify.yaml.sample b/etc/notify.yaml.sample index 4015cfac..ee85be77 100644 --- a/etc/notify.yaml.sample +++ b/etc/notify.yaml.sample @@ -15,10 +15,11 @@ authority: allowAnonymousFallback: false backchannelTimeoutSeconds: 30 tokenClockSkewSeconds: 60 - audiences: - - notify - readScope: notify.read - adminScope: notify.admin + audiences: + - notify + viewerScope: notify.viewer + operatorScope: notify.operator + adminScope: notify.admin api: basePath: "/api/v1/notify" diff --git a/etc/packs-registry.yaml.sample b/etc/packs-registry.yaml.sample new file mode 100644 index 00000000..9c5b10e0 --- /dev/null +++ b/etc/packs-registry.yaml.sample @@ -0,0 +1,62 @@ +# StellaOps Packs Registry configuration template. +# Copy to ../etc/packs-registry.yaml (relative to the Packs Registry content root) +# and adjust values as needed. Environment variables prefixed with +# STELLAOPS_PACKSREGISTRY_ override these settings at runtime. 
+ +schemaVersion: 1 + +telemetry: + enabled: true + serviceName: "stellaops-packs-registry" + exportConsole: true + minimumLogLevel: "Information" + otlpEndpoint: "" + resourceAttributes: + deployment.environment: "local" + +authority: + issuer: "https://authority.stella-ops.local" + metadataAddress: "" + requireHttpsMetadata: true + audiences: + - "api://packs-registry" + tenant: "tenant-default" + # Client credentials for publishing packs into the registry. + publishClient: + clientId: "packs-registry" + clientSecret: "" + clientSecretFile: "../secrets/packs-registry.secret" + scopes: + - "packs.write" + - "packs.read" + # Optional read-only client for mirrors or offline tooling. + readerClient: + clientId: "packs-reader" + clientSecret: "" + clientSecretFile: "../secrets/packs-reader.secret" + scopes: + - "packs.read" + +storage: + # Mongo database storing pack metadata and provenance. + mongoConnectionString: "mongodb://packs-registry:registry@mongo:27017/packs-registry?authSource=admin" + # Object storage bucket/container for pack bundles and signatures. + bundleStore: "s3://stellaops-packs" + +signing: + # Trusted keys (PEM paths or Fulcio issuer URLs) used to validate inbound packs. + trustedKeys: + - "../certificates/packs-signing.pem" + # Enforce DSSE attestations for published packs. + requireDsse: true + +mirroring: + enabled: false + # Remote registry or file share used to seed mirrors in offline deployments. + source: "" + schedule: "00:30:00" + +offlineKit: + enabled: true + exportDirectory: "out/offline/packs" + provenanceManifest: "out/offline/packs/provenance.json" diff --git a/etc/task-runner.yaml.sample b/etc/task-runner.yaml.sample new file mode 100644 index 00000000..10a91898 --- /dev/null +++ b/etc/task-runner.yaml.sample @@ -0,0 +1,69 @@ +# StellaOps Task Runner configuration template. +# Copy to ../etc/task-runner.yaml (relative to the Task Runner content root) +# and adjust values for your environment. 
Environment variables prefixed with +# STELLAOPS_TASKRUNNER_ override these values at runtime. + +schemaVersion: 1 + +telemetry: + enabled: true + serviceName: "stellaops-taskrunner" + exportConsole: true + minimumLogLevel: "Information" + otlpEndpoint: "" + resourceAttributes: + deployment.environment: "local" + +authority: + issuer: "https://authority.stella-ops.local" + metadataAddress: "" + requireHttpsMetadata: true + audiences: + - "api://task-runner" + # Client credentials used for executing packs. Provide either clientSecret or + # clientSecretFile (preferred for production). + runnerClient: + clientId: "task-runner" + clientSecret: "" + clientSecretFile: "../secrets/task-runner.secret" + scopes: + - "packs.run" + - "packs.read" + # Client used to approve gates when automation workflows sign off on runs. + approvalsClient: + clientId: "pack-approver" + clientSecret: "" + clientSecretFile: "../secrets/pack-approver.secret" + scopes: + - "packs.approve" + - "packs.read" + # Optional secondary client used for registry interactions (promote/deprecate). + registryClient: + clientId: "packs-registry" + clientSecret: "" + clientSecretFile: "../secrets/packs-registry.secret" + scopes: + - "packs.write" + - "packs.read" + # Tenant context required for all Task Runner operations. + tenant: "tenant-default" + +storage: + # Object storage bucket where run artifacts and evidence bundles are kept. + artifactsBucket: "s3://stellaops-taskrunner-artifacts" + # MongoDB stores run metadata and approval state; update connection string + # before deploying. + mongoConnectionString: "mongodb://taskrunner:taskrunner@mongo:27017/taskrunner?authSource=admin" + +approvals: + # Default timeout before pending approvals auto-expire. + defaultExpiresAfter: "04:00:00" + # Notifications topic emitted when approvals are requested/resolved. + notifyTopic: "pack.run.approvals" + +runner: + # Maximum concurrent steps Task Runner executes per worker. 
+ maxParallelSteps: 8 + # Allowlist of modules that can initiate network calls when sealed=false. + networkAllowlist: + - "*.internal.stella-ops.local" diff --git a/ops/offline-kit/build_offline_kit.py b/ops/offline-kit/build_offline_kit.py index d1500934..c60bb004 100644 --- a/ops/offline-kit/build_offline_kit.py +++ b/ops/offline-kit/build_offline_kit.py @@ -90,11 +90,16 @@ def clean_directory(path: Path) -> None: path.mkdir(parents=True, exist_ok=True) -def run_python_analyzer_smoke() -> None: - script = REPO_ROOT / "ops" / "offline-kit" / "run-python-analyzer-smoke.sh" - run(["bash", str(script)], cwd=REPO_ROOT) - - +def run_python_analyzer_smoke() -> None: + script = REPO_ROOT / "ops" / "offline-kit" / "run-python-analyzer-smoke.sh" + run(["bash", str(script)], cwd=REPO_ROOT) + + +def run_rust_analyzer_smoke() -> None: + script = REPO_ROOT / "ops" / "offline-kit" / "run-rust-analyzer-smoke.sh" + run(["bash", str(script)], cwd=REPO_ROOT) + + def copy_if_exists(source: Path, target: Path) -> None: if source.is_dir(): shutil.copytree(source, target, dirs_exist_ok=True) @@ -304,10 +309,11 @@ def build_offline_kit(args: argparse.Namespace) -> MutableMapping[str, Any]: staging_dir = args.staging_dir.resolve() output_dir = args.output_dir.resolve() - verify_release(release_dir) - if not args.skip_smoke: - run_python_analyzer_smoke() - clean_directory(staging_dir) + verify_release(release_dir) + if not args.skip_smoke: + run_rust_analyzer_smoke() + run_python_analyzer_smoke() + clean_directory(staging_dir) copy_debug_store(release_dir, staging_dir) manifest_data = load_manifest(release_dir) diff --git a/ops/offline-kit/run-python-analyzer-smoke.sh b/ops/offline-kit/run-python-analyzer-smoke.sh index ad79ff2c..cb4712f9 100644 --- a/ops/offline-kit/run-python-analyzer-smoke.sh +++ b/ops/offline-kit/run-python-analyzer-smoke.sh @@ -1,36 +1,36 @@ -#!/usr/bin/env bash -set -euo pipefail - -repo_root="$(git -C "${BASH_SOURCE%/*}/.." 
rev-parse --show-toplevel 2>/dev/null || pwd)" -project_path="${repo_root}/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj" -output_dir="${repo_root}/out/analyzers/python" -plugin_dir="${repo_root}/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python" - -to_win_path() { - if command -v wslpath >/dev/null 2>&1; then - wslpath -w "$1" - else - printf '%s\n' "$1" - fi -} - -rm -rf "${output_dir}" -project_path_win="$(to_win_path "$project_path")" -output_dir_win="$(to_win_path "$output_dir")" - -dotnet publish "$project_path_win" \ - --configuration Release \ - --output "$output_dir_win" \ - --self-contained false - -mkdir -p "${plugin_dir}" -cp "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Python.dll" "${plugin_dir}/" -if [[ -f "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Python.pdb" ]]; then - cp "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Python.pdb" "${plugin_dir}/" -fi - -repo_root_win="$(to_win_path "$repo_root")" -exec dotnet run \ - --project "${repo_root_win}/src/Tools/LanguageAnalyzerSmoke/LanguageAnalyzerSmoke.csproj" \ - --configuration Release \ - -- --repo-root "${repo_root_win}" +#!/usr/bin/env bash +set -euo pipefail + +repo_root="$(git -C "${BASH_SOURCE%/*}/.." 
rev-parse --show-toplevel 2>/dev/null || pwd)" +project_path="${repo_root}/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj" +output_dir="${repo_root}/out/analyzers/python" +plugin_dir="${repo_root}/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python" + +to_win_path() { + if command -v wslpath >/dev/null 2>&1; then + wslpath -w "$1" + else + printf '%s\n' "$1" + fi +} + +rm -rf "${output_dir}" +project_path_win="$(to_win_path "$project_path")" +output_dir_win="$(to_win_path "$output_dir")" + +dotnet publish "$project_path_win" \ + --configuration Release \ + --output "$output_dir_win" \ + --self-contained false + +mkdir -p "${plugin_dir}" +cp "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Python.dll" "${plugin_dir}/" +if [[ -f "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Python.pdb" ]]; then + cp "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Python.pdb" "${plugin_dir}/" +fi + +repo_root_win="$(to_win_path "$repo_root")" +exec dotnet run \ + --project "${repo_root_win}/src/Tools/LanguageAnalyzerSmoke/LanguageAnalyzerSmoke.csproj" \ + --configuration Release \ + -- --repo-root "${repo_root_win}" diff --git a/ops/offline-kit/run-rust-analyzer-smoke.sh b/ops/offline-kit/run-rust-analyzer-smoke.sh new file mode 100644 index 00000000..04df06fd --- /dev/null +++ b/ops/offline-kit/run-rust-analyzer-smoke.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +set -euo pipefail + +repo_root="$(git -C "${BASH_SOURCE%/*}/.." 
rev-parse --show-toplevel 2>/dev/null || pwd)" +project_path="${repo_root}/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj" +output_dir="${repo_root}/out/analyzers/rust" +plugin_dir="${repo_root}/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Rust" + +to_win_path() { + if command -v wslpath >/dev/null 2>&1; then + wslpath -w "$1" + else + printf '%s\n' "$1" + fi +} + +rm -rf "${output_dir}" +project_path_win="$(to_win_path "$project_path")" +output_dir_win="$(to_win_path "$output_dir")" + +dotnet publish "$project_path_win" \ + --configuration Release \ + --output "$output_dir_win" \ + --self-contained false + +mkdir -p "${plugin_dir}" +cp "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Rust.dll" "${plugin_dir}/" +if [[ -f "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Rust.pdb" ]]; then + cp "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Rust.pdb" "${plugin_dir}/" +fi + +repo_root_win="$(to_win_path "$repo_root")" +exec dotnet run \ + --project "${repo_root_win}/src/Tools/LanguageAnalyzerSmoke/LanguageAnalyzerSmoke.csproj" \ + --configuration Release \ + -- --repo-root "${repo_root_win}" \ + --analyzer rust diff --git a/out/analyzers/python/StellaOps.Auth.Abstractions.xml b/out/analyzers/python/StellaOps.Auth.Abstractions.xml new file mode 100644 index 00000000..50110445 --- /dev/null +++ b/out/analyzers/python/StellaOps.Auth.Abstractions.xml @@ -0,0 +1,767 @@ + + + + StellaOps.Auth.Abstractions + + + + + Canonical telemetry metadata for the StellaOps Authority stack. + + + + + service.name resource attribute recorded by Authority components. + + + + + service.namespace resource attribute aligning Authority with other StellaOps services. + + + + + Activity source identifier used by Authority instrumentation. + + + + + Meter name used by Authority instrumentation. + + + + + Builds the default set of resource attributes (service name/namespace/version). 
+ + Optional assembly used to resolve the service version. + + + + Resolves the service version string from the provided assembly (defaults to the Authority telemetry assembly). + + + + + Represents an IP network expressed in CIDR notation. + + + + + Initialises a new . + + Canonical network address with host bits zeroed. + Prefix length (0-32 for IPv4, 0-128 for IPv6). + + + + Canonical network address with host bits zeroed. + + + + + Prefix length. + + + + + Attempts to parse the supplied value as CIDR notation or a single IP address. + + Thrown when the input is not recognised. + + + + Attempts to parse the supplied value as CIDR notation or a single IP address. + + + + + Determines whether the provided address belongs to this network. + + + + + + + + Evaluates remote addresses against configured network masks. + + + + + Creates a matcher from raw CIDR strings. + + Sequence of CIDR entries or IP addresses. + Thrown when a value cannot be parsed. + + + + Creates a matcher from already parsed masks. + + Sequence of network masks. + + + + Gets a matcher that allows every address. + + + + + Gets a matcher that denies every address (no masks configured). + + + + + Indicates whether this matcher has no masks configured and does not allow all. + + + + + Returns the configured masks. + + + + + Checks whether the provided address matches any of the configured masks. + + Remote address to test. + true when the address is allowed. + + + + Default authentication constants used by StellaOps resource servers and clients. + + + + + Default authentication scheme for StellaOps bearer tokens. + + + + + Logical authentication type attached to . + + + + + Policy prefix applied to named authorization policies. + + + + + Canonical claim type identifiers used across StellaOps services. + + + + + Subject identifier claim (maps to sub in JWTs). + + + + + StellaOps tenant identifier claim (multi-tenant deployments). 
+ + + + + StellaOps project identifier claim (optional project scoping within a tenant). + + + + + OAuth2/OIDC client identifier claim (maps to client_id). + + + + + Unique token identifier claim (maps to jti). + + + + + Authentication method reference claim (amr). + + + + + Space separated scope list (scope). + + + + + Individual scope items (scp). + + + + + OAuth2 resource audiences (aud). + + + + + Identity provider hint for downstream services. + + + + + Operator reason supplied when issuing orchestrator control tokens. + + + + + Operator ticket supplied when issuing orchestrator control tokens. + + + + + Quota change reason supplied when issuing Orchestrator quota tokens. + + + + + Quota change ticket/incident reference supplied when issuing Orchestrator quota tokens. + + + + + Incident activation reason recorded when issuing observability incident tokens. + + + + + Session identifier claim (sid). + + + + + Fluent helper used to construct instances that follow StellaOps conventions. + + + + + Adds or replaces the canonical subject identifier. + + + + + Adds or replaces the canonical client identifier. + + + + + Adds or replaces the tenant identifier claim. + + + + + Adds or replaces the user display name claim. + + + + + Adds or replaces the identity provider claim. + + + + + Adds or replaces the session identifier claim. + + + + + Adds or replaces the token identifier claim. + + + + + Adds or replaces the authentication method reference claim. + + + + + Sets the name claim type appended when building the . + + + + + Sets the role claim type appended when building the . + + + + + Sets the authentication type stamped on the . + + + + + Registers the supplied scopes (normalised to lower-case, deduplicated, sorted). + + + + + Registers the supplied audiences (trimmed, deduplicated, sorted). + + + + + Adds a single audience. + + + + + Adds an arbitrary claim (no deduplication is performed). 
+ + + + + Adds multiple claims (incoming claims are cloned to enforce value trimming). + + + + + Adds an iat (issued at) claim using Unix time seconds. + + + + + Adds an nbf (not before) claim using Unix time seconds. + + + + + Adds an exp (expires) claim using Unix time seconds. + + + + + Returns the normalised scope list (deduplicated + sorted). + + + + + Returns the normalised audience list (deduplicated + sorted). + + + + + Builds the immutable instance based on the registered data. + + + + + Factory helpers for returning RFC 7807 problem responses using StellaOps conventions. + + + + + Produces a 401 problem response indicating authentication is required. + + + + + Produces a 401 problem response for invalid, expired, or revoked tokens. + + + + + Produces a 403 problem response when access is denied. + + + + + Produces a 403 problem response for insufficient scopes. + + + + + Canonical scope names supported by StellaOps services. + + + + + Scope required to trigger Concelier jobs. + + + + + Scope required to manage Concelier merge operations. + + + + + Scope granting administrative access to Authority user management. + + + + + Scope granting administrative access to Authority client registrations. + + + + + Scope granting read-only access to Authority audit logs. + + + + + Synthetic scope representing trusted network bypass. + + + + + Scope granting read-only access to console UX features. + + + + + Scope granting permission to approve exceptions. + + + + + Scope granting read-only access to raw advisory ingestion data. + + + + + Scope granting write access for raw advisory ingestion. + + + + + Scope granting read-only access to Advisory AI artefacts (summaries, remediation exports). + + + + + Scope permitting Advisory AI inference requests and workflow execution. + + + + + Scope granting administrative control over Advisory AI configuration and profiles. + + + + + Scope granting read-only access to raw VEX ingestion data. 
+ + + + + Scope granting write access for raw VEX ingestion. + + + + + Scope granting permission to execute aggregation-only contract verification. + + + + + Scope granting read-only access to reachability signals. + + + + + Scope granting permission to write reachability signals. + + + + + Scope granting administrative access to reachability signal ingestion. + + + + + Scope granting permission to seal or unseal an installation in air-gapped mode. + + + + + Scope granting permission to import offline bundles while in air-gapped mode. + + + + + Scope granting read-only access to air-gap status and sealing state endpoints. + + + + + Scope granting permission to create or edit policy drafts. + + + + + Scope granting permission to author Policy Studio workspaces. + + + + + Scope granting permission to edit policy configurations. + + + + + Scope granting read-only access to policy metadata. + + + + + Scope granting permission to review Policy Studio drafts. + + + + + Scope granting permission to submit drafts for review. + + + + + Scope granting permission to approve or reject policies. + + + + + Scope granting permission to operate Policy Studio promotions and runs. + + + + + Scope granting permission to audit Policy Studio activity. + + + + + Scope granting permission to trigger policy runs and activation workflows. + + + + + Scope granting permission to activate policies. + + + + + Scope granting read-only access to effective findings materialised by Policy Engine. + + + + + Scope granting permission to run Policy Studio simulations. + + + + + Scope granted to Policy Engine service identity for writing effective findings. + + + + + Scope granting read-only access to graph queries and overlays. + + + + + Scope granting read-only access to Vuln Explorer resources and permalinks. + + + + + Scope granting read-only access to observability dashboards and overlays. + + + + + Scope granting read-only access to incident timelines and chronology data. 
+ + + + + Scope granting permission to append events to incident timelines. + + + + + Scope granting permission to create evidence packets in the evidence locker. + + + + + Scope granting read-only access to stored evidence packets. + + + + + Scope granting permission to place or release legal holds on evidence packets. + + + + + Scope granting read-only access to attestation records and observer feeds. + + + + + Scope granting permission to activate or resolve observability incident mode controls. + + + + + Scope granting read-only access to export center runs and bundles. + + + + + Scope granting permission to operate export center scheduling and run execution. + + + + + Scope granting administrative control over export center retention, encryption keys, and scheduling policies. + + + + + Scope granting read-only access to notifier channels, rules, and delivery history. + + + + + Scope permitting notifier rule management, delivery actions, and channel operations. + + + + + Scope granting administrative control over notifier secrets, escalations, and platform-wide settings. + + + + + Scope granting read-only access to issuer directory catalogues. + + + + + Scope permitting creation and modification of issuer directory entries. + + + + + Scope granting administrative control over issuer directory resources (delete, audit bypass). + + + + + Scope required to issue or honour escalation actions for notifications. + + + + + Scope granting read-only access to Task Packs catalogues and manifests. + + + + + Scope permitting publication or updates to Task Packs in the registry. + + + + + Scope granting permission to execute Task Packs via CLI or Task Runner. + + + + + Scope granting permission to fulfil Task Pack approval gates. + + + + + Scope granting permission to enqueue or mutate graph build jobs. + + + + + Scope granting permission to export graph artefacts (GraphML/JSONL/etc.). + + + + + Scope granting permission to trigger what-if simulations on graphs. 
+ + + + + Scope granting read-only access to Orchestrator job state and telemetry. + + + + + Scope granting permission to execute Orchestrator control actions. + + + + + Scope granting permission to manage Orchestrator quotas and elevated backfill tooling. + + + + + Scope granting read-only access to Authority tenant catalog APIs. + + + + + Normalises a scope string (trim/convert to lower case). + + Scope raw value. + Normalised scope or null when the input is blank. + + + + Checks whether the provided scope is registered as a built-in StellaOps scope. + + + + + Returns the full set of built-in scopes. + + + + + Canonical identifiers for StellaOps service principals. + + + + + Service identity used by Policy Engine when materialising effective findings. + + + + + Service identity used by Cartographer when constructing and maintaining graph projections. + + + + + Service identity used by Vuln Explorer when issuing scoped permalink requests. + + + + + Service identity used by Signals components when managing reachability facts. + + + + + Shared tenancy default values used across StellaOps services. + + + + + Sentinel value indicating the token is not scoped to a specific project. + + + + diff --git a/out/analyzers/python/StellaOps.Auth.Client.xml b/out/analyzers/python/StellaOps.Auth.Client.xml new file mode 100644 index 00000000..cd693458 --- /dev/null +++ b/out/analyzers/python/StellaOps.Auth.Client.xml @@ -0,0 +1,233 @@ + + + + StellaOps.Auth.Client + + + + + File-based token cache suitable for CLI/offline usage. + + + + + In-memory token cache suitable for service scenarios. + + + + + Abstraction for caching StellaOps tokens. + + + + + Retrieves a cached token entry, if present. + + + + + Stores or updates a token entry for the specified key. + + + + + Removes the cached entry for the specified key. + + + + + Abstraction for requesting tokens from StellaOps Authority. + + + + + Requests an access token using the resource owner password credentials flow. 
+ + + + + Requests an access token using the client credentials flow. + + + + + Retrieves the cached JWKS document. + + + + + Retrieves a cached token entry. + + + + + Persists a token entry in the cache. + + + + + Removes a cached entry. + + + + + DI helpers for the StellaOps auth client. + + + + + Registers the StellaOps auth client with the provided configuration. + + + + + Registers a file-backed token cache implementation. + + + + + Options controlling the StellaOps authentication client. + + + + + Authority (issuer) base URL. + + + + + OAuth client identifier (optional for password flow). + + + + + OAuth client secret (optional for public clients). + + + + + Default scopes requested for flows that do not explicitly override them. + + + + + Retry delays applied by HTTP retry policy (empty uses defaults). + + + + + Gets or sets a value indicating whether HTTP retry policies are enabled. + + + + + Timeout applied to discovery and token HTTP requests. + + + + + Lifetime of cached discovery metadata. + + + + + Lifetime of cached JWKS metadata. + + + + + Buffer applied when determining cache expiration (default: 30 seconds). + + + + + Gets or sets a value indicating whether cached discovery/JWKS responses may be served when the Authority is unreachable. + + + + + Additional tolerance window during which stale cache entries remain valid if offline fallback is allowed. + + + + + Parsed Authority URI (populated after validation). + + + + + Normalised scope list (populated after validation). + + + + + Normalised retry delays (populated after validation). + + + + + Validates required values and normalises scope entries. + + + + + Caches Authority discovery metadata. + + + + + Minimal OpenID Connect configuration representation. + + + + + Minimal OpenID Connect configuration representation. + + + + + Caches JWKS documents for Authority. + + + + + Represents a cached token entry. + + + + + Represents a cached token entry. 
+ + + + + Determines whether the token is expired given the provided . + + + + + Creates a copy with scopes normalised. + + + + + Default implementation of . + + + + + Represents an issued token with metadata. + + + + + Represents an issued token with metadata. + + + + + Converts the result to a cache entry. + + + + diff --git a/out/analyzers/python/StellaOps.Scanner.Analyzers.Lang.Python.deps.json b/out/analyzers/python/StellaOps.Scanner.Analyzers.Lang.Python.deps.json new file mode 100644 index 00000000..529d77d8 --- /dev/null +++ b/out/analyzers/python/StellaOps.Scanner.Analyzers.Lang.Python.deps.json @@ -0,0 +1,858 @@ +{ + "runtimeTarget": { + "name": ".NETCoreApp,Version=v10.0", + "signature": "" + }, + "compilationOptions": {}, + "targets": { + ".NETCoreApp,Version=v10.0": { + "StellaOps.Scanner.Analyzers.Lang.Python/1.0.0": { + "dependencies": { + "SharpCompress": "0.41.0", + "StellaOps.Scanner.Analyzers.Lang": "1.0.0" + }, + "runtime": { + "StellaOps.Scanner.Analyzers.Lang.Python.dll": {} + } + }, + "Konscious.Security.Cryptography.Argon2/1.3.1": { + "dependencies": { + "Konscious.Security.Cryptography.Blake2": "1.1.1" + }, + "runtime": { + "lib/net8.0/Konscious.Security.Cryptography.Argon2.dll": { + "assemblyVersion": "1.3.1.0", + "fileVersion": "1.3.1.0" + } + } + }, + "Konscious.Security.Cryptography.Blake2/1.1.1": { + "runtime": { + "lib/net8.0/Konscious.Security.Cryptography.Blake2.dll": { + "assemblyVersion": "1.1.1.0", + "fileVersion": "1.1.1.0" + } + } + }, + "Microsoft.Extensions.Configuration/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.Abstractions/10.0.0-rc.2.25502.107": { + "dependencies": { + 
"Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.Binder/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.Binder.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.EnvironmentVariables/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.EnvironmentVariables.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.FileExtensions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.FileProviders.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.FileProviders.Physical": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.FileExtensions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.Json/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.FileExtensions": "10.0.0-rc.2.25502.107", + 
"Microsoft.Extensions.FileProviders.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.Json.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.DependencyInjection/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.DependencyInjection.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.DependencyInjection.Abstractions/10.0.0-rc.2.25502.107": { + "runtime": { + "lib/net10.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Diagnostics/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Diagnostics.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options.ConfigurationExtensions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Diagnostics.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Diagnostics.Abstractions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Diagnostics.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.FileProviders.Abstractions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.FileProviders.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": 
"10.0.25.50307" + } + } + }, + "Microsoft.Extensions.FileProviders.Physical/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.FileProviders.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.FileSystemGlobbing": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.FileProviders.Physical.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.FileSystemGlobbing/10.0.0-rc.2.25502.107": { + "runtime": { + "lib/net10.0/Microsoft.Extensions.FileSystemGlobbing.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Http/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Diagnostics": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Http.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Http.Polly/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Http": "10.0.0-rc.2.25502.107", + "Polly": "7.2.4", + "Polly.Extensions.Http": "3.0.0" + }, + "runtime": { + "lib/netstandard2.0/Microsoft.Extensions.Http.Polly.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Logging/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107" + }, + 
"runtime": { + "lib/net10.0/Microsoft.Extensions.Logging.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Logging.Abstractions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Logging.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Options/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Options.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Options.ConfigurationExtensions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Binder": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Options.ConfigurationExtensions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Primitives/10.0.0-rc.2.25502.107": { + "runtime": { + "lib/net10.0/Microsoft.Extensions.Primitives.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.IdentityModel.Abstractions/8.14.0": { + "runtime": { + "lib/net9.0/Microsoft.IdentityModel.Abstractions.dll": { + "assemblyVersion": "8.14.0.0", + "fileVersion": "8.14.0.60815" + } + } + }, + "Microsoft.IdentityModel.JsonWebTokens/7.2.0": { + "dependencies": { + 
"Microsoft.IdentityModel.Tokens": "8.14.0" + }, + "runtime": { + "lib/net8.0/Microsoft.IdentityModel.JsonWebTokens.dll": { + "assemblyVersion": "7.2.0.0", + "fileVersion": "7.2.0.50110" + } + } + }, + "Microsoft.IdentityModel.Logging/8.14.0": { + "dependencies": { + "Microsoft.IdentityModel.Abstractions": "8.14.0" + }, + "runtime": { + "lib/net9.0/Microsoft.IdentityModel.Logging.dll": { + "assemblyVersion": "8.14.0.0", + "fileVersion": "8.14.0.60815" + } + } + }, + "Microsoft.IdentityModel.Tokens/8.14.0": { + "dependencies": { + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.IdentityModel.Logging": "8.14.0" + }, + "runtime": { + "lib/net9.0/Microsoft.IdentityModel.Tokens.dll": { + "assemblyVersion": "8.14.0.0", + "fileVersion": "8.14.0.60815" + } + } + }, + "NetEscapades.Configuration.Yaml/2.1.0": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.FileExtensions": "10.0.0-rc.2.25502.107", + "YamlDotNet": "9.1.0" + }, + "runtime": { + "lib/netstandard2.0/NetEscapades.Configuration.Yaml.dll": { + "assemblyVersion": "2.1.0.0", + "fileVersion": "2.1.0.0" + } + } + }, + "Pipelines.Sockets.Unofficial/2.2.8": { + "runtime": { + "lib/net5.0/Pipelines.Sockets.Unofficial.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "2.2.8.1080" + } + } + }, + "Polly/7.2.4": { + "runtime": { + "lib/netstandard2.0/Polly.dll": { + "assemblyVersion": "7.0.0.0", + "fileVersion": "7.2.4.982" + } + } + }, + "Polly.Extensions.Http/3.0.0": { + "dependencies": { + "Polly": "7.2.4" + }, + "runtime": { + "lib/netstandard2.0/Polly.Extensions.Http.dll": { + "assemblyVersion": "3.0.0.0", + "fileVersion": "3.0.0.0" + } + } + }, + "SharpCompress/0.41.0": { + "dependencies": { + "ZstdSharp.Port": "0.8.6" + }, + "runtime": { + "lib/net8.0/SharpCompress.dll": { + "assemblyVersion": "0.41.0.0", + "fileVersion": "0.41.0.0" + } + } + }, + "StackExchange.Redis/2.8.24": { + "dependencies": { + 
"Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Pipelines.Sockets.Unofficial": "2.2.8" + }, + "runtime": { + "lib/net8.0/StackExchange.Redis.dll": { + "assemblyVersion": "2.0.0.0", + "fileVersion": "2.8.24.3255" + } + } + }, + "System.IdentityModel.Tokens.Jwt/7.2.0": { + "dependencies": { + "Microsoft.IdentityModel.JsonWebTokens": "7.2.0", + "Microsoft.IdentityModel.Tokens": "8.14.0" + }, + "runtime": { + "lib/net8.0/System.IdentityModel.Tokens.Jwt.dll": { + "assemblyVersion": "7.2.0.0", + "fileVersion": "7.2.0.50110" + } + } + }, + "YamlDotNet/9.1.0": { + "runtime": { + "lib/netstandard2.1/YamlDotNet.dll": { + "assemblyVersion": "9.0.0.0", + "fileVersion": "9.1.0.0" + } + } + }, + "ZstdSharp.Port/0.8.6": { + "runtime": { + "lib/net9.0/ZstdSharp.dll": { + "assemblyVersion": "0.8.6.0", + "fileVersion": "0.8.6.0" + } + } + }, + "StellaOps.Auth.Abstractions/1.0.0-preview.1": { + "dependencies": { + "SharpCompress": "0.41.0" + }, + "runtime": { + "StellaOps.Auth.Abstractions.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Auth.Client/1.0.0-preview.1": { + "dependencies": { + "Microsoft.Extensions.Http.Polly": "10.0.0-rc.2.25502.107", + "Microsoft.IdentityModel.Tokens": "8.14.0", + "SharpCompress": "0.41.0", + "StellaOps.Auth.Abstractions": "1.0.0-preview.1", + "StellaOps.Configuration": "1.0.0" + }, + "runtime": { + "StellaOps.Auth.Client.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Auth.Security/1.0.0-preview.1": { + "dependencies": { + "Microsoft.IdentityModel.Tokens": "8.14.0", + "SharpCompress": "0.41.0", + "StackExchange.Redis": "2.8.24", + "System.IdentityModel.Tokens.Jwt": "7.2.0" + }, + "runtime": { + "StellaOps.Auth.Security.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Authority.Plugins.Abstractions/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": 
"10.0.0-rc.2.25502.107", + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "SharpCompress": "0.41.0", + "StellaOps.Auth.Abstractions": "1.0.0-preview.1", + "StellaOps.Cryptography": "1.0.0" + }, + "runtime": { + "StellaOps.Authority.Plugins.Abstractions.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Configuration/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Binder": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.EnvironmentVariables": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.FileExtensions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Json": "10.0.0-rc.2.25502.107", + "NetEscapades.Configuration.Yaml": "2.1.0", + "SharpCompress": "0.41.0", + "StellaOps.Authority.Plugins.Abstractions": "1.0.0", + "StellaOps.Cryptography": "1.0.0" + }, + "runtime": { + "StellaOps.Configuration.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Cryptography/1.0.0": { + "dependencies": { + "Konscious.Security.Cryptography.Argon2": "1.3.1", + "Microsoft.IdentityModel.Tokens": "8.14.0", + "SharpCompress": "0.41.0" + }, + "runtime": { + "StellaOps.Cryptography.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.DependencyInjection/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "SharpCompress": "0.41.0" + }, + "runtime": { + "StellaOps.DependencyInjection.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Plugin/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + 
"Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "SharpCompress": "0.41.0", + "StellaOps.DependencyInjection": "1.0.0" + }, + "runtime": { + "StellaOps.Plugin.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Scanner.Analyzers.Lang/1.0.0": { + "dependencies": { + "SharpCompress": "0.41.0", + "StellaOps.Plugin": "1.0.0", + "StellaOps.Scanner.Core": "1.0.0" + }, + "runtime": { + "StellaOps.Scanner.Analyzers.Lang.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Scanner.Core/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107", + "SharpCompress": "0.41.0", + "StellaOps.Auth.Client": "1.0.0-preview.1", + "StellaOps.Auth.Security": "1.0.0-preview.1" + }, + "runtime": { + "StellaOps.Scanner.Core.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + } + } + }, + "libraries": { + "StellaOps.Scanner.Analyzers.Lang.Python/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "Konscious.Security.Cryptography.Argon2/1.3.1": { + "type": "package", + "serviceable": true, + "sha512": "sha512-T+OAGwzYYXftahpOxO7J4xA5K6urxwGnWQf3M+Jpi+76Azv/0T3M5SuN+h7/QvXuiqNw3ZEZ5QqVLI5ygDAylw==", + "path": "konscious.security.cryptography.argon2/1.3.1", + "hashPath": "konscious.security.cryptography.argon2.1.3.1.nupkg.sha512" + }, + "Konscious.Security.Cryptography.Blake2/1.1.1": { + "type": "package", + "serviceable": true, + "sha512": "sha512-odwOyzj/J/lHJZNwFWJGU/LRecBShupAJ2S8TQqZfhUe9niHzu/voBYK5wuVKsvSpzbfupKQYZguVyIk1sgOkQ==", + "path": "konscious.security.cryptography.blake2/1.1.1", + "hashPath": "konscious.security.cryptography.blake2.1.1.1.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration/10.0.0-rc.2.25502.107": { + "type": "package", + 
"serviceable": true, + "sha512": "sha512-273Ggibh3DdVrj47ENbUGIirOiqmLTAizpkvOD584Ps6NL/CMXPzesijnJgsjp7Fv/UCp69FKYBaSxZZ3q5R9g==", + "path": "microsoft.extensions.configuration/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-H+i/Qy30Rg/K9BcW2Z6DCHPCzwMH3bCwNOjEz31shWTUDK8GeeeMnrKVusprTcRA2Y6yPST+hg2zc3whPEs14Q==", + "path": "microsoft.extensions.configuration.abstractions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration.Binder/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-aA6/V6lw1Gueyb1PqhHAl/i/qUUuv+Fusfk4oaMOzzOjspBkYtPpNHCmml/0t1x0/DnZoed+u2WwpP+mSwd8Dg==", + "path": "microsoft.extensions.configuration.binder/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.binder.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration.EnvironmentVariables/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-2SV60IUAWfluZv2YHNZ+nUOljYHGIsy96FpJs+N9/bgKDYs9qr6DdzPeIhiHrz+XvRzbybvcwtTBf5dKrYN4oA==", + "path": "microsoft.extensions.configuration.environmentvariables/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.environmentvariables.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration.FileExtensions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-5KrgXSTFR8cFLmDXXoT7GLVvDyHNw0Z9xG4doD78Q/HdlAR4jiMzmLLS9GFXrPGopmC6qqEZr2VBJHEu16INcA==", + "path": "microsoft.extensions.configuration.fileextensions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.fileextensions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + 
"Microsoft.Extensions.Configuration.Json/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-USwHuFz4BFKoaqSydHWH/d7Mr+fVsAh9S0S9pdsdHro1IixMbqQ9Gpo2sEZf25e3tZSq/ts6XsVmrQWmxmDhYA==", + "path": "microsoft.extensions.configuration.json/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.json.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.DependencyInjection/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-mDw80K98jBWCyLFCra51PRv+Ttnjse1lZIzXEFybKby0/ajBFTEeHj/4r/QJexmb8Uun0yaFH1HlFtmHP1YEVA==", + "path": "microsoft.extensions.dependencyinjection/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.dependencyinjection.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.DependencyInjection.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-8jujunpkNNfTkE9PFHp9/aD6GPKVfNCuz8tUbzOcyU5tQOCoIZId4hwQNVx3Tb8XEWw9BYdh0k5vPpqdCM+UtA==", + "path": "microsoft.extensions.dependencyinjection.abstractions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.dependencyinjection.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Diagnostics/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-tQfQFXI+ZQcL2RzDarDLx3Amh0WCp1KPGp1ie3y/CMV5hDhEq98WTmcMoXrFY0GkYLEaCQlVi2A6qVLcooG2Ow==", + "path": "microsoft.extensions.diagnostics/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.diagnostics.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Diagnostics.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-x6XVv3RiwOlN2unjyX/Zat0gI0HiRoDDdjkwBCwsMftYWpbJu4SiyRwDbrv2zAF8v8nbEEvcWi3/pUxZfaqLQw==", + "path": "microsoft.extensions.diagnostics.abstractions/10.0.0-rc.2.25502.107", + "hashPath": 
"microsoft.extensions.diagnostics.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.FileProviders.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-dOpmW14MkOZIwV6269iXhoMp6alCHBoxqCR4pJ37GLjFaBIyzsIy+Ra8tsGmjHtFvEHKq0JRDIsb1PUkrK+yxw==", + "path": "microsoft.extensions.fileproviders.abstractions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.fileproviders.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.FileProviders.Physical/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-3+RiR6TEakDL0dCUqR7PjFffyrVMLdx/vAVBiN1mGmwScKYCTePIkYVkWsX85CTKh7R9J4M9C1MHzVdjbKcg3g==", + "path": "microsoft.extensions.fileproviders.physical/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.fileproviders.physical.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.FileSystemGlobbing/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-XtcPOKB7sMFzj8SxaOglZV3eaqZ1GxUMVZTwaz4pRpBt0S45ghb836uUej4YaI8EzsnUJoqzOIKrTW4CDJMfVw==", + "path": "microsoft.extensions.filesystemglobbing/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.filesystemglobbing.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Http/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-d60bvi/NpzkpVlSpxZqOfdjX1hrQgL/byWVc3PryjbmB7zvfLtqQbYifjEWToqtS0Fb1rGnkuVI5JEdOnK1tNQ==", + "path": "microsoft.extensions.http/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.http.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Http.Polly/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-aY5vLcrhdXCHsCjYI2lNwfat2vdSuiPs0FFZiy7IM6zcyqdxaefG8J8ezTKkZyiuAtznjVJJT70B660l/WlsxA==", + "path": "microsoft.extensions.http.polly/10.0.0-rc.2.25502.107", + "hashPath": 
"microsoft.extensions.http.polly.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Logging/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-q2C5gq86qkTmcYSJJSnw8sgTUyuqENYSOjk/NOYjHnYlKSrK3oI9Rjv1bWFpx2I3Btq9ZBEJb9aMM+IUQ0PvZA==", + "path": "microsoft.extensions.logging/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.logging.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Logging.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-SKKKZjyCpBaDQ7yuFjdk6ELnRBRWeZsbnzUfo59Wc4PGhgf92chE3we/QlT6nk6NqlWcUgH/jogM+B/uq/Qdnw==", + "path": "microsoft.extensions.logging.abstractions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.logging.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Options/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-Ib6BCCjisp7ZUdhtNpSulFO0ODhz/IE4ZZd8OCqQWoRs363BQ0QOZi9KwpqpiEWo51S0kIXWqNicDPGXwpt9pQ==", + "path": "microsoft.extensions.options/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.options.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Options.ConfigurationExtensions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-MFbT8+JKX49YCXEFvlZDzQzI/R3QKzRZlb4dSud+569cMgA9hWbndjWWvOgGASoRcXynGRrBSq1Bw3PeCsB5/Q==", + "path": "microsoft.extensions.options.configurationextensions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.options.configurationextensions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Primitives/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-9pm2zqqn5u/OsKs2zgkhJEQQeMx9KkVOWPdHrs7Kt5sfpk+eIh/gmpi/mMH/ljS2T/PFsFdCEtm+GS/6l7zoZA==", + "path": "microsoft.extensions.primitives/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.primitives.10.0.0-rc.2.25502.107.nupkg.sha512" 
+ }, + "Microsoft.IdentityModel.Abstractions/8.14.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-iwbCpSjD3ehfTwBhtSNEtKPK0ICun6ov7Ibx6ISNA9bfwIyzI2Siwyi9eJFCJBwxowK9xcA1mj+jBWiigeqgcQ==", + "path": "microsoft.identitymodel.abstractions/8.14.0", + "hashPath": "microsoft.identitymodel.abstractions.8.14.0.nupkg.sha512" + }, + "Microsoft.IdentityModel.JsonWebTokens/7.2.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-zLFA9IBxDWw6Y1nz2PPZyQvF+ZZ4aW1pwgtwusQB39lgxOc2xVqZ8gitsuT1rwyuIbchGOWbax4fsJ8OgGRxSQ==", + "path": "microsoft.identitymodel.jsonwebtokens/7.2.0", + "hashPath": "microsoft.identitymodel.jsonwebtokens.7.2.0.nupkg.sha512" + }, + "Microsoft.IdentityModel.Logging/8.14.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-eqqnemdW38CKZEHS6diA50BV94QICozDZEvSrsvN3SJXUFwVB9gy+/oz76gldP7nZliA16IglXjXTCTdmU/Ejg==", + "path": "microsoft.identitymodel.logging/8.14.0", + "hashPath": "microsoft.identitymodel.logging.8.14.0.nupkg.sha512" + }, + "Microsoft.IdentityModel.Tokens/8.14.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-ySPkj429HrYHvwLVNoPZdQ/bKZZKSkuWKod68qxo+5/pLdXFimgflckKgAZclX9tuO9qWk/KFiIN65diMWgh+g==", + "path": "microsoft.identitymodel.tokens/8.14.0", + "hashPath": "microsoft.identitymodel.tokens.8.14.0.nupkg.sha512" + }, + "NetEscapades.Configuration.Yaml/2.1.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-kNTX7kvRvbzBpLd3Vg9iu6t60tTyhVxsruAPgH6kl1GkAZIHLZw9cQysvjUenDU7JEnUgyxQnzfL8627ARDn+g==", + "path": "netescapades.configuration.yaml/2.1.0", + "hashPath": "netescapades.configuration.yaml.2.1.0.nupkg.sha512" + }, + "Pipelines.Sockets.Unofficial/2.2.8": { + "type": "package", + "serviceable": true, + "sha512": "sha512-zG2FApP5zxSx6OcdJQLbZDk2AVlN2BNQD6MorwIfV6gVj0RRxWPEp2LXAxqDGZqeNV1Zp0BNPcNaey/GXmTdvQ==", + "path": "pipelines.sockets.unofficial/2.2.8", + "hashPath": "pipelines.sockets.unofficial.2.2.8.nupkg.sha512" + }, + "Polly/7.2.4": { + 
"type": "package", + "serviceable": true, + "sha512": "sha512-bw00Ck5sh6ekduDE3mnCo1ohzuad946uslCDEENu3091+6UKnBuKLo4e+yaNcCzXxOZCXWY2gV4a35+K1d4LDA==", + "path": "polly/7.2.4", + "hashPath": "polly.7.2.4.nupkg.sha512" + }, + "Polly.Extensions.Http/3.0.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-drrG+hB3pYFY7w1c3BD+lSGYvH2oIclH8GRSehgfyP5kjnFnHKQuuBhuHLv+PWyFuaTDyk/vfRpnxOzd11+J8g==", + "path": "polly.extensions.http/3.0.0", + "hashPath": "polly.extensions.http.3.0.0.nupkg.sha512" + }, + "SharpCompress/0.41.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-z04dBVdTIAFTRKi38f0LkajaKA++bR+M8kYCbasXePILD2H+qs7CkLpyiippB24CSbTrWIgpBKm6BenZqkUwvw==", + "path": "sharpcompress/0.41.0", + "hashPath": "sharpcompress.0.41.0.nupkg.sha512" + }, + "StackExchange.Redis/2.8.24": { + "type": "package", + "serviceable": true, + "sha512": "sha512-GWllmsFAtLyhm4C47cOCipGxyEi1NQWTFUHXnJ8hiHOsK/bH3T5eLkWPVW+LRL6jDiB3g3izW3YEHgLuPoJSyA==", + "path": "stackexchange.redis/2.8.24", + "hashPath": "stackexchange.redis.2.8.24.nupkg.sha512" + }, + "System.IdentityModel.Tokens.Jwt/7.2.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-Z3Fmkrxkp+o51ANMO/PqASRRlEz8dH4mTWwZXMFMXZt2bUGztBiNcIDnwBCElYLYpzpmz4sIqHb6aW8QVLe6YQ==", + "path": "system.identitymodel.tokens.jwt/7.2.0", + "hashPath": "system.identitymodel.tokens.jwt.7.2.0.nupkg.sha512" + }, + "YamlDotNet/9.1.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-fuvGXU4Ec5HrsmEc+BiFTNPCRf1cGBI2kh/3RzMWgddM2M4ALhbSPoI3X3mhXZUD1qqQd9oSkFAtWjpz8z9eRg==", + "path": "yamldotnet/9.1.0", + "hashPath": "yamldotnet.9.1.0.nupkg.sha512" + }, + "ZstdSharp.Port/0.8.6": { + "type": "package", + "serviceable": true, + "sha512": "sha512-iP4jVLQoQmUjMU88g1WObiNr6YKZGvh4aOXn3yOJsHqZsflwRsxZPcIBvNXgjXO3vQKSLctXGLTpcBPLnWPS8A==", + "path": "zstdsharp.port/0.8.6", + "hashPath": "zstdsharp.port.0.8.6.nupkg.sha512" + }, + "StellaOps.Auth.Abstractions/1.0.0-preview.1": { + "type": 
"project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Auth.Client/1.0.0-preview.1": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Auth.Security/1.0.0-preview.1": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Authority.Plugins.Abstractions/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Configuration/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Cryptography/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.DependencyInjection/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Plugin/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Scanner.Analyzers.Lang/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Scanner.Core/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + } + } +} \ No newline at end of file diff --git a/out/analyzers/rust/StellaOps.Auth.Abstractions.xml b/out/analyzers/rust/StellaOps.Auth.Abstractions.xml new file mode 100644 index 00000000..50110445 --- /dev/null +++ b/out/analyzers/rust/StellaOps.Auth.Abstractions.xml @@ -0,0 +1,767 @@ + + + + StellaOps.Auth.Abstractions + + + + + Canonical telemetry metadata for the StellaOps Authority stack. + + + + + service.name resource attribute recorded by Authority components. + + + + + service.namespace resource attribute aligning Authority with other StellaOps services. + + + + + Activity source identifier used by Authority instrumentation. + + + + + Meter name used by Authority instrumentation. + + + + + Builds the default set of resource attributes (service name/namespace/version). + + Optional assembly used to resolve the service version. + + + + Resolves the service version string from the provided assembly (defaults to the Authority telemetry assembly). 
+ + + + + Represents an IP network expressed in CIDR notation. + + + + + Initialises a new . + + Canonical network address with host bits zeroed. + Prefix length (0-32 for IPv4, 0-128 for IPv6). + + + + Canonical network address with host bits zeroed. + + + + + Prefix length. + + + + + Attempts to parse the supplied value as CIDR notation or a single IP address. + + Thrown when the input is not recognised. + + + + Attempts to parse the supplied value as CIDR notation or a single IP address. + + + + + Determines whether the provided address belongs to this network. + + + + + + + + Evaluates remote addresses against configured network masks. + + + + + Creates a matcher from raw CIDR strings. + + Sequence of CIDR entries or IP addresses. + Thrown when a value cannot be parsed. + + + + Creates a matcher from already parsed masks. + + Sequence of network masks. + + + + Gets a matcher that allows every address. + + + + + Gets a matcher that denies every address (no masks configured). + + + + + Indicates whether this matcher has no masks configured and does not allow all. + + + + + Returns the configured masks. + + + + + Checks whether the provided address matches any of the configured masks. + + Remote address to test. + true when the address is allowed. + + + + Default authentication constants used by StellaOps resource servers and clients. + + + + + Default authentication scheme for StellaOps bearer tokens. + + + + + Logical authentication type attached to . + + + + + Policy prefix applied to named authorization policies. + + + + + Canonical claim type identifiers used across StellaOps services. + + + + + Subject identifier claim (maps to sub in JWTs). + + + + + StellaOps tenant identifier claim (multi-tenant deployments). + + + + + StellaOps project identifier claim (optional project scoping within a tenant). + + + + + OAuth2/OIDC client identifier claim (maps to client_id). + + + + + Unique token identifier claim (maps to jti). 
+ + + + + Authentication method reference claim (amr). + + + + + Space separated scope list (scope). + + + + + Individual scope items (scp). + + + + + OAuth2 resource audiences (aud). + + + + + Identity provider hint for downstream services. + + + + + Operator reason supplied when issuing orchestrator control tokens. + + + + + Operator ticket supplied when issuing orchestrator control tokens. + + + + + Quota change reason supplied when issuing Orchestrator quota tokens. + + + + + Quota change ticket/incident reference supplied when issuing Orchestrator quota tokens. + + + + + Incident activation reason recorded when issuing observability incident tokens. + + + + + Session identifier claim (sid). + + + + + Fluent helper used to construct instances that follow StellaOps conventions. + + + + + Adds or replaces the canonical subject identifier. + + + + + Adds or replaces the canonical client identifier. + + + + + Adds or replaces the tenant identifier claim. + + + + + Adds or replaces the user display name claim. + + + + + Adds or replaces the identity provider claim. + + + + + Adds or replaces the session identifier claim. + + + + + Adds or replaces the token identifier claim. + + + + + Adds or replaces the authentication method reference claim. + + + + + Sets the name claim type appended when building the . + + + + + Sets the role claim type appended when building the . + + + + + Sets the authentication type stamped on the . + + + + + Registers the supplied scopes (normalised to lower-case, deduplicated, sorted). + + + + + Registers the supplied audiences (trimmed, deduplicated, sorted). + + + + + Adds a single audience. + + + + + Adds an arbitrary claim (no deduplication is performed). + + + + + Adds multiple claims (incoming claims are cloned to enforce value trimming). + + + + + Adds an iat (issued at) claim using Unix time seconds. + + + + + Adds an nbf (not before) claim using Unix time seconds. + + + + + Adds an exp (expires) claim using Unix time seconds. 
+ + + + + Returns the normalised scope list (deduplicated + sorted). + + + + + Returns the normalised audience list (deduplicated + sorted). + + + + + Builds the immutable instance based on the registered data. + + + + + Factory helpers for returning RFC 7807 problem responses using StellaOps conventions. + + + + + Produces a 401 problem response indicating authentication is required. + + + + + Produces a 401 problem response for invalid, expired, or revoked tokens. + + + + + Produces a 403 problem response when access is denied. + + + + + Produces a 403 problem response for insufficient scopes. + + + + + Canonical scope names supported by StellaOps services. + + + + + Scope required to trigger Concelier jobs. + + + + + Scope required to manage Concelier merge operations. + + + + + Scope granting administrative access to Authority user management. + + + + + Scope granting administrative access to Authority client registrations. + + + + + Scope granting read-only access to Authority audit logs. + + + + + Synthetic scope representing trusted network bypass. + + + + + Scope granting read-only access to console UX features. + + + + + Scope granting permission to approve exceptions. + + + + + Scope granting read-only access to raw advisory ingestion data. + + + + + Scope granting write access for raw advisory ingestion. + + + + + Scope granting read-only access to Advisory AI artefacts (summaries, remediation exports). + + + + + Scope permitting Advisory AI inference requests and workflow execution. + + + + + Scope granting administrative control over Advisory AI configuration and profiles. + + + + + Scope granting read-only access to raw VEX ingestion data. + + + + + Scope granting write access for raw VEX ingestion. + + + + + Scope granting permission to execute aggregation-only contract verification. + + + + + Scope granting read-only access to reachability signals. + + + + + Scope granting permission to write reachability signals. 
+ + + + + Scope granting administrative access to reachability signal ingestion. + + + + + Scope granting permission to seal or unseal an installation in air-gapped mode. + + + + + Scope granting permission to import offline bundles while in air-gapped mode. + + + + + Scope granting read-only access to air-gap status and sealing state endpoints. + + + + + Scope granting permission to create or edit policy drafts. + + + + + Scope granting permission to author Policy Studio workspaces. + + + + + Scope granting permission to edit policy configurations. + + + + + Scope granting read-only access to policy metadata. + + + + + Scope granting permission to review Policy Studio drafts. + + + + + Scope granting permission to submit drafts for review. + + + + + Scope granting permission to approve or reject policies. + + + + + Scope granting permission to operate Policy Studio promotions and runs. + + + + + Scope granting permission to audit Policy Studio activity. + + + + + Scope granting permission to trigger policy runs and activation workflows. + + + + + Scope granting permission to activate policies. + + + + + Scope granting read-only access to effective findings materialised by Policy Engine. + + + + + Scope granting permission to run Policy Studio simulations. + + + + + Scope granted to Policy Engine service identity for writing effective findings. + + + + + Scope granting read-only access to graph queries and overlays. + + + + + Scope granting read-only access to Vuln Explorer resources and permalinks. + + + + + Scope granting read-only access to observability dashboards and overlays. + + + + + Scope granting read-only access to incident timelines and chronology data. + + + + + Scope granting permission to append events to incident timelines. + + + + + Scope granting permission to create evidence packets in the evidence locker. + + + + + Scope granting read-only access to stored evidence packets. 
+ + + + + Scope granting permission to place or release legal holds on evidence packets. + + + + + Scope granting read-only access to attestation records and observer feeds. + + + + + Scope granting permission to activate or resolve observability incident mode controls. + + + + + Scope granting read-only access to export center runs and bundles. + + + + + Scope granting permission to operate export center scheduling and run execution. + + + + + Scope granting administrative control over export center retention, encryption keys, and scheduling policies. + + + + + Scope granting read-only access to notifier channels, rules, and delivery history. + + + + + Scope permitting notifier rule management, delivery actions, and channel operations. + + + + + Scope granting administrative control over notifier secrets, escalations, and platform-wide settings. + + + + + Scope granting read-only access to issuer directory catalogues. + + + + + Scope permitting creation and modification of issuer directory entries. + + + + + Scope granting administrative control over issuer directory resources (delete, audit bypass). + + + + + Scope required to issue or honour escalation actions for notifications. + + + + + Scope granting read-only access to Task Packs catalogues and manifests. + + + + + Scope permitting publication or updates to Task Packs in the registry. + + + + + Scope granting permission to execute Task Packs via CLI or Task Runner. + + + + + Scope granting permission to fulfil Task Pack approval gates. + + + + + Scope granting permission to enqueue or mutate graph build jobs. + + + + + Scope granting permission to export graph artefacts (GraphML/JSONL/etc.). + + + + + Scope granting permission to trigger what-if simulations on graphs. + + + + + Scope granting read-only access to Orchestrator job state and telemetry. + + + + + Scope granting permission to execute Orchestrator control actions. 
+ + + + + Scope granting permission to manage Orchestrator quotas and elevated backfill tooling. + + + + + Scope granting read-only access to Authority tenant catalog APIs. + + + + + Normalises a scope string (trim/convert to lower case). + + Scope raw value. + Normalised scope or null when the input is blank. + + + + Checks whether the provided scope is registered as a built-in StellaOps scope. + + + + + Returns the full set of built-in scopes. + + + + + Canonical identifiers for StellaOps service principals. + + + + + Service identity used by Policy Engine when materialising effective findings. + + + + + Service identity used by Cartographer when constructing and maintaining graph projections. + + + + + Service identity used by Vuln Explorer when issuing scoped permalink requests. + + + + + Service identity used by Signals components when managing reachability facts. + + + + + Shared tenancy default values used across StellaOps services. + + + + + Sentinel value indicating the token is not scoped to a specific project. + + + + diff --git a/out/analyzers/rust/StellaOps.Auth.Client.xml b/out/analyzers/rust/StellaOps.Auth.Client.xml new file mode 100644 index 00000000..cd693458 --- /dev/null +++ b/out/analyzers/rust/StellaOps.Auth.Client.xml @@ -0,0 +1,233 @@ + + + + StellaOps.Auth.Client + + + + + File-based token cache suitable for CLI/offline usage. + + + + + In-memory token cache suitable for service scenarios. + + + + + Abstraction for caching StellaOps tokens. + + + + + Retrieves a cached token entry, if present. + + + + + Stores or updates a token entry for the specified key. + + + + + Removes the cached entry for the specified key. + + + + + Abstraction for requesting tokens from StellaOps Authority. + + + + + Requests an access token using the resource owner password credentials flow. + + + + + Requests an access token using the client credentials flow. + + + + + Retrieves the cached JWKS document. + + + + + Retrieves a cached token entry. 
+ + + + + Persists a token entry in the cache. + + + + + Removes a cached entry. + + + + + DI helpers for the StellaOps auth client. + + + + + Registers the StellaOps auth client with the provided configuration. + + + + + Registers a file-backed token cache implementation. + + + + + Options controlling the StellaOps authentication client. + + + + + Authority (issuer) base URL. + + + + + OAuth client identifier (optional for password flow). + + + + + OAuth client secret (optional for public clients). + + + + + Default scopes requested for flows that do not explicitly override them. + + + + + Retry delays applied by HTTP retry policy (empty uses defaults). + + + + + Gets or sets a value indicating whether HTTP retry policies are enabled. + + + + + Timeout applied to discovery and token HTTP requests. + + + + + Lifetime of cached discovery metadata. + + + + + Lifetime of cached JWKS metadata. + + + + + Buffer applied when determining cache expiration (default: 30 seconds). + + + + + Gets or sets a value indicating whether cached discovery/JWKS responses may be served when the Authority is unreachable. + + + + + Additional tolerance window during which stale cache entries remain valid if offline fallback is allowed. + + + + + Parsed Authority URI (populated after validation). + + + + + Normalised scope list (populated after validation). + + + + + Normalised retry delays (populated after validation). + + + + + Validates required values and normalises scope entries. + + + + + Caches Authority discovery metadata. + + + + + Minimal OpenID Connect configuration representation. + + + + + Minimal OpenID Connect configuration representation. + + + + + Caches JWKS documents for Authority. + + + + + Represents a cached token entry. + + + + + Represents a cached token entry. + + + + + Determines whether the token is expired given the provided . + + + + + Creates a copy with scopes normalised. + + + + + Default implementation of . 
+ + + + + Represents an issued token with metadata. + + + + + Represents an issued token with metadata. + + + + + Converts the result to a cache entry. + + + + diff --git a/out/analyzers/rust/StellaOps.Scanner.Analyzers.Lang.Rust.deps.json b/out/analyzers/rust/StellaOps.Scanner.Analyzers.Lang.Rust.deps.json new file mode 100644 index 00000000..6e1d9635 --- /dev/null +++ b/out/analyzers/rust/StellaOps.Scanner.Analyzers.Lang.Rust.deps.json @@ -0,0 +1,858 @@ +{ + "runtimeTarget": { + "name": ".NETCoreApp,Version=v10.0", + "signature": "" + }, + "compilationOptions": {}, + "targets": { + ".NETCoreApp,Version=v10.0": { + "StellaOps.Scanner.Analyzers.Lang.Rust/1.0.0": { + "dependencies": { + "SharpCompress": "0.41.0", + "StellaOps.Scanner.Analyzers.Lang": "1.0.0" + }, + "runtime": { + "StellaOps.Scanner.Analyzers.Lang.Rust.dll": {} + } + }, + "Konscious.Security.Cryptography.Argon2/1.3.1": { + "dependencies": { + "Konscious.Security.Cryptography.Blake2": "1.1.1" + }, + "runtime": { + "lib/net8.0/Konscious.Security.Cryptography.Argon2.dll": { + "assemblyVersion": "1.3.1.0", + "fileVersion": "1.3.1.0" + } + } + }, + "Konscious.Security.Cryptography.Blake2/1.1.1": { + "runtime": { + "lib/net8.0/Konscious.Security.Cryptography.Blake2.dll": { + "assemblyVersion": "1.1.1.0", + "fileVersion": "1.1.1.0" + } + } + }, + "Microsoft.Extensions.Configuration/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.Abstractions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + 
"fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.Binder/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.Binder.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.EnvironmentVariables/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.EnvironmentVariables.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.FileExtensions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.FileProviders.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.FileProviders.Physical": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.FileExtensions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.Json/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.FileExtensions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.FileProviders.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.Json.dll": { + "assemblyVersion": "10.0.0.0", + 
"fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.DependencyInjection/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.DependencyInjection.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.DependencyInjection.Abstractions/10.0.0-rc.2.25502.107": { + "runtime": { + "lib/net10.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Diagnostics/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Diagnostics.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options.ConfigurationExtensions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Diagnostics.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Diagnostics.Abstractions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Diagnostics.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.FileProviders.Abstractions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.FileProviders.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.FileProviders.Physical/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.FileProviders.Abstractions": "10.0.0-rc.2.25502.107", + 
"Microsoft.Extensions.FileSystemGlobbing": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.FileProviders.Physical.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.FileSystemGlobbing/10.0.0-rc.2.25502.107": { + "runtime": { + "lib/net10.0/Microsoft.Extensions.FileSystemGlobbing.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Http/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Diagnostics": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Http.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Http.Polly/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Http": "10.0.0-rc.2.25502.107", + "Polly": "7.2.4", + "Polly.Extensions.Http": "3.0.0" + }, + "runtime": { + "lib/netstandard2.0/Microsoft.Extensions.Http.Polly.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Logging/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Logging.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + 
"Microsoft.Extensions.Logging.Abstractions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Logging.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Options/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Options.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Options.ConfigurationExtensions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Binder": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Options.ConfigurationExtensions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Primitives/10.0.0-rc.2.25502.107": { + "runtime": { + "lib/net10.0/Microsoft.Extensions.Primitives.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.IdentityModel.Abstractions/8.14.0": { + "runtime": { + "lib/net9.0/Microsoft.IdentityModel.Abstractions.dll": { + "assemblyVersion": "8.14.0.0", + "fileVersion": "8.14.0.60815" + } + } + }, + "Microsoft.IdentityModel.JsonWebTokens/7.2.0": { + "dependencies": { + "Microsoft.IdentityModel.Tokens": "8.14.0" + }, + "runtime": { + "lib/net8.0/Microsoft.IdentityModel.JsonWebTokens.dll": { + "assemblyVersion": "7.2.0.0", + "fileVersion": 
"7.2.0.50110" + } + } + }, + "Microsoft.IdentityModel.Logging/8.14.0": { + "dependencies": { + "Microsoft.IdentityModel.Abstractions": "8.14.0" + }, + "runtime": { + "lib/net9.0/Microsoft.IdentityModel.Logging.dll": { + "assemblyVersion": "8.14.0.0", + "fileVersion": "8.14.0.60815" + } + } + }, + "Microsoft.IdentityModel.Tokens/8.14.0": { + "dependencies": { + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.IdentityModel.Logging": "8.14.0" + }, + "runtime": { + "lib/net9.0/Microsoft.IdentityModel.Tokens.dll": { + "assemblyVersion": "8.14.0.0", + "fileVersion": "8.14.0.60815" + } + } + }, + "NetEscapades.Configuration.Yaml/2.1.0": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.FileExtensions": "10.0.0-rc.2.25502.107", + "YamlDotNet": "9.1.0" + }, + "runtime": { + "lib/netstandard2.0/NetEscapades.Configuration.Yaml.dll": { + "assemblyVersion": "2.1.0.0", + "fileVersion": "2.1.0.0" + } + } + }, + "Pipelines.Sockets.Unofficial/2.2.8": { + "runtime": { + "lib/net5.0/Pipelines.Sockets.Unofficial.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "2.2.8.1080" + } + } + }, + "Polly/7.2.4": { + "runtime": { + "lib/netstandard2.0/Polly.dll": { + "assemblyVersion": "7.0.0.0", + "fileVersion": "7.2.4.982" + } + } + }, + "Polly.Extensions.Http/3.0.0": { + "dependencies": { + "Polly": "7.2.4" + }, + "runtime": { + "lib/netstandard2.0/Polly.Extensions.Http.dll": { + "assemblyVersion": "3.0.0.0", + "fileVersion": "3.0.0.0" + } + } + }, + "SharpCompress/0.41.0": { + "dependencies": { + "ZstdSharp.Port": "0.8.6" + }, + "runtime": { + "lib/net8.0/SharpCompress.dll": { + "assemblyVersion": "0.41.0.0", + "fileVersion": "0.41.0.0" + } + } + }, + "StackExchange.Redis/2.8.24": { + "dependencies": { + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Pipelines.Sockets.Unofficial": "2.2.8" + }, + "runtime": { + "lib/net8.0/StackExchange.Redis.dll": { 
+ "assemblyVersion": "2.0.0.0", + "fileVersion": "2.8.24.3255" + } + } + }, + "System.IdentityModel.Tokens.Jwt/7.2.0": { + "dependencies": { + "Microsoft.IdentityModel.JsonWebTokens": "7.2.0", + "Microsoft.IdentityModel.Tokens": "8.14.0" + }, + "runtime": { + "lib/net8.0/System.IdentityModel.Tokens.Jwt.dll": { + "assemblyVersion": "7.2.0.0", + "fileVersion": "7.2.0.50110" + } + } + }, + "YamlDotNet/9.1.0": { + "runtime": { + "lib/netstandard2.1/YamlDotNet.dll": { + "assemblyVersion": "9.0.0.0", + "fileVersion": "9.1.0.0" + } + } + }, + "ZstdSharp.Port/0.8.6": { + "runtime": { + "lib/net9.0/ZstdSharp.dll": { + "assemblyVersion": "0.8.6.0", + "fileVersion": "0.8.6.0" + } + } + }, + "StellaOps.Auth.Abstractions/1.0.0-preview.1": { + "dependencies": { + "SharpCompress": "0.41.0" + }, + "runtime": { + "StellaOps.Auth.Abstractions.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Auth.Client/1.0.0-preview.1": { + "dependencies": { + "Microsoft.Extensions.Http.Polly": "10.0.0-rc.2.25502.107", + "Microsoft.IdentityModel.Tokens": "8.14.0", + "SharpCompress": "0.41.0", + "StellaOps.Auth.Abstractions": "1.0.0-preview.1", + "StellaOps.Configuration": "1.0.0" + }, + "runtime": { + "StellaOps.Auth.Client.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Auth.Security/1.0.0-preview.1": { + "dependencies": { + "Microsoft.IdentityModel.Tokens": "8.14.0", + "SharpCompress": "0.41.0", + "StackExchange.Redis": "2.8.24", + "System.IdentityModel.Tokens.Jwt": "7.2.0" + }, + "runtime": { + "StellaOps.Auth.Security.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Authority.Plugins.Abstractions/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + 
"SharpCompress": "0.41.0", + "StellaOps.Auth.Abstractions": "1.0.0-preview.1", + "StellaOps.Cryptography": "1.0.0" + }, + "runtime": { + "StellaOps.Authority.Plugins.Abstractions.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Configuration/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Binder": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.EnvironmentVariables": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.FileExtensions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Json": "10.0.0-rc.2.25502.107", + "NetEscapades.Configuration.Yaml": "2.1.0", + "SharpCompress": "0.41.0", + "StellaOps.Authority.Plugins.Abstractions": "1.0.0", + "StellaOps.Cryptography": "1.0.0" + }, + "runtime": { + "StellaOps.Configuration.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Cryptography/1.0.0": { + "dependencies": { + "Konscious.Security.Cryptography.Argon2": "1.3.1", + "Microsoft.IdentityModel.Tokens": "8.14.0", + "SharpCompress": "0.41.0" + }, + "runtime": { + "StellaOps.Cryptography.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.DependencyInjection/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "SharpCompress": "0.41.0" + }, + "runtime": { + "StellaOps.DependencyInjection.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Plugin/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "SharpCompress": "0.41.0", + 
"StellaOps.DependencyInjection": "1.0.0" + }, + "runtime": { + "StellaOps.Plugin.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Scanner.Analyzers.Lang/1.0.0": { + "dependencies": { + "SharpCompress": "0.41.0", + "StellaOps.Plugin": "1.0.0", + "StellaOps.Scanner.Core": "1.0.0" + }, + "runtime": { + "StellaOps.Scanner.Analyzers.Lang.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Scanner.Core/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107", + "SharpCompress": "0.41.0", + "StellaOps.Auth.Client": "1.0.0-preview.1", + "StellaOps.Auth.Security": "1.0.0-preview.1" + }, + "runtime": { + "StellaOps.Scanner.Core.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + } + } + }, + "libraries": { + "StellaOps.Scanner.Analyzers.Lang.Rust/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "Konscious.Security.Cryptography.Argon2/1.3.1": { + "type": "package", + "serviceable": true, + "sha512": "sha512-T+OAGwzYYXftahpOxO7J4xA5K6urxwGnWQf3M+Jpi+76Azv/0T3M5SuN+h7/QvXuiqNw3ZEZ5QqVLI5ygDAylw==", + "path": "konscious.security.cryptography.argon2/1.3.1", + "hashPath": "konscious.security.cryptography.argon2.1.3.1.nupkg.sha512" + }, + "Konscious.Security.Cryptography.Blake2/1.1.1": { + "type": "package", + "serviceable": true, + "sha512": "sha512-odwOyzj/J/lHJZNwFWJGU/LRecBShupAJ2S8TQqZfhUe9niHzu/voBYK5wuVKsvSpzbfupKQYZguVyIk1sgOkQ==", + "path": "konscious.security.cryptography.blake2/1.1.1", + "hashPath": "konscious.security.cryptography.blake2.1.1.1.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-273Ggibh3DdVrj47ENbUGIirOiqmLTAizpkvOD584Ps6NL/CMXPzesijnJgsjp7Fv/UCp69FKYBaSxZZ3q5R9g==", + "path": 
"microsoft.extensions.configuration/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-H+i/Qy30Rg/K9BcW2Z6DCHPCzwMH3bCwNOjEz31shWTUDK8GeeeMnrKVusprTcRA2Y6yPST+hg2zc3whPEs14Q==", + "path": "microsoft.extensions.configuration.abstractions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration.Binder/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-aA6/V6lw1Gueyb1PqhHAl/i/qUUuv+Fusfk4oaMOzzOjspBkYtPpNHCmml/0t1x0/DnZoed+u2WwpP+mSwd8Dg==", + "path": "microsoft.extensions.configuration.binder/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.binder.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration.EnvironmentVariables/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-2SV60IUAWfluZv2YHNZ+nUOljYHGIsy96FpJs+N9/bgKDYs9qr6DdzPeIhiHrz+XvRzbybvcwtTBf5dKrYN4oA==", + "path": "microsoft.extensions.configuration.environmentvariables/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.environmentvariables.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration.FileExtensions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-5KrgXSTFR8cFLmDXXoT7GLVvDyHNw0Z9xG4doD78Q/HdlAR4jiMzmLLS9GFXrPGopmC6qqEZr2VBJHEu16INcA==", + "path": "microsoft.extensions.configuration.fileextensions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.fileextensions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration.Json/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": 
"sha512-USwHuFz4BFKoaqSydHWH/d7Mr+fVsAh9S0S9pdsdHro1IixMbqQ9Gpo2sEZf25e3tZSq/ts6XsVmrQWmxmDhYA==", + "path": "microsoft.extensions.configuration.json/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.json.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.DependencyInjection/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-mDw80K98jBWCyLFCra51PRv+Ttnjse1lZIzXEFybKby0/ajBFTEeHj/4r/QJexmb8Uun0yaFH1HlFtmHP1YEVA==", + "path": "microsoft.extensions.dependencyinjection/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.dependencyinjection.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.DependencyInjection.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-8jujunpkNNfTkE9PFHp9/aD6GPKVfNCuz8tUbzOcyU5tQOCoIZId4hwQNVx3Tb8XEWw9BYdh0k5vPpqdCM+UtA==", + "path": "microsoft.extensions.dependencyinjection.abstractions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.dependencyinjection.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Diagnostics/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-tQfQFXI+ZQcL2RzDarDLx3Amh0WCp1KPGp1ie3y/CMV5hDhEq98WTmcMoXrFY0GkYLEaCQlVi2A6qVLcooG2Ow==", + "path": "microsoft.extensions.diagnostics/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.diagnostics.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Diagnostics.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-x6XVv3RiwOlN2unjyX/Zat0gI0HiRoDDdjkwBCwsMftYWpbJu4SiyRwDbrv2zAF8v8nbEEvcWi3/pUxZfaqLQw==", + "path": "microsoft.extensions.diagnostics.abstractions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.diagnostics.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.FileProviders.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + 
"serviceable": true, + "sha512": "sha512-dOpmW14MkOZIwV6269iXhoMp6alCHBoxqCR4pJ37GLjFaBIyzsIy+Ra8tsGmjHtFvEHKq0JRDIsb1PUkrK+yxw==", + "path": "microsoft.extensions.fileproviders.abstractions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.fileproviders.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.FileProviders.Physical/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-3+RiR6TEakDL0dCUqR7PjFffyrVMLdx/vAVBiN1mGmwScKYCTePIkYVkWsX85CTKh7R9J4M9C1MHzVdjbKcg3g==", + "path": "microsoft.extensions.fileproviders.physical/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.fileproviders.physical.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.FileSystemGlobbing/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-XtcPOKB7sMFzj8SxaOglZV3eaqZ1GxUMVZTwaz4pRpBt0S45ghb836uUej4YaI8EzsnUJoqzOIKrTW4CDJMfVw==", + "path": "microsoft.extensions.filesystemglobbing/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.filesystemglobbing.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Http/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-d60bvi/NpzkpVlSpxZqOfdjX1hrQgL/byWVc3PryjbmB7zvfLtqQbYifjEWToqtS0Fb1rGnkuVI5JEdOnK1tNQ==", + "path": "microsoft.extensions.http/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.http.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Http.Polly/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-aY5vLcrhdXCHsCjYI2lNwfat2vdSuiPs0FFZiy7IM6zcyqdxaefG8J8ezTKkZyiuAtznjVJJT70B660l/WlsxA==", + "path": "microsoft.extensions.http.polly/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.http.polly.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Logging/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": 
"sha512-q2C5gq86qkTmcYSJJSnw8sgTUyuqENYSOjk/NOYjHnYlKSrK3oI9Rjv1bWFpx2I3Btq9ZBEJb9aMM+IUQ0PvZA==", + "path": "microsoft.extensions.logging/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.logging.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Logging.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-SKKKZjyCpBaDQ7yuFjdk6ELnRBRWeZsbnzUfo59Wc4PGhgf92chE3we/QlT6nk6NqlWcUgH/jogM+B/uq/Qdnw==", + "path": "microsoft.extensions.logging.abstractions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.logging.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Options/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-Ib6BCCjisp7ZUdhtNpSulFO0ODhz/IE4ZZd8OCqQWoRs363BQ0QOZi9KwpqpiEWo51S0kIXWqNicDPGXwpt9pQ==", + "path": "microsoft.extensions.options/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.options.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Options.ConfigurationExtensions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-MFbT8+JKX49YCXEFvlZDzQzI/R3QKzRZlb4dSud+569cMgA9hWbndjWWvOgGASoRcXynGRrBSq1Bw3PeCsB5/Q==", + "path": "microsoft.extensions.options.configurationextensions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.options.configurationextensions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Primitives/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-9pm2zqqn5u/OsKs2zgkhJEQQeMx9KkVOWPdHrs7Kt5sfpk+eIh/gmpi/mMH/ljS2T/PFsFdCEtm+GS/6l7zoZA==", + "path": "microsoft.extensions.primitives/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.primitives.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.IdentityModel.Abstractions/8.14.0": { + "type": "package", + "serviceable": true, + "sha512": 
"sha512-iwbCpSjD3ehfTwBhtSNEtKPK0ICun6ov7Ibx6ISNA9bfwIyzI2Siwyi9eJFCJBwxowK9xcA1mj+jBWiigeqgcQ==", + "path": "microsoft.identitymodel.abstractions/8.14.0", + "hashPath": "microsoft.identitymodel.abstractions.8.14.0.nupkg.sha512" + }, + "Microsoft.IdentityModel.JsonWebTokens/7.2.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-zLFA9IBxDWw6Y1nz2PPZyQvF+ZZ4aW1pwgtwusQB39lgxOc2xVqZ8gitsuT1rwyuIbchGOWbax4fsJ8OgGRxSQ==", + "path": "microsoft.identitymodel.jsonwebtokens/7.2.0", + "hashPath": "microsoft.identitymodel.jsonwebtokens.7.2.0.nupkg.sha512" + }, + "Microsoft.IdentityModel.Logging/8.14.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-eqqnemdW38CKZEHS6diA50BV94QICozDZEvSrsvN3SJXUFwVB9gy+/oz76gldP7nZliA16IglXjXTCTdmU/Ejg==", + "path": "microsoft.identitymodel.logging/8.14.0", + "hashPath": "microsoft.identitymodel.logging.8.14.0.nupkg.sha512" + }, + "Microsoft.IdentityModel.Tokens/8.14.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-ySPkj429HrYHvwLVNoPZdQ/bKZZKSkuWKod68qxo+5/pLdXFimgflckKgAZclX9tuO9qWk/KFiIN65diMWgh+g==", + "path": "microsoft.identitymodel.tokens/8.14.0", + "hashPath": "microsoft.identitymodel.tokens.8.14.0.nupkg.sha512" + }, + "NetEscapades.Configuration.Yaml/2.1.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-kNTX7kvRvbzBpLd3Vg9iu6t60tTyhVxsruAPgH6kl1GkAZIHLZw9cQysvjUenDU7JEnUgyxQnzfL8627ARDn+g==", + "path": "netescapades.configuration.yaml/2.1.0", + "hashPath": "netescapades.configuration.yaml.2.1.0.nupkg.sha512" + }, + "Pipelines.Sockets.Unofficial/2.2.8": { + "type": "package", + "serviceable": true, + "sha512": "sha512-zG2FApP5zxSx6OcdJQLbZDk2AVlN2BNQD6MorwIfV6gVj0RRxWPEp2LXAxqDGZqeNV1Zp0BNPcNaey/GXmTdvQ==", + "path": "pipelines.sockets.unofficial/2.2.8", + "hashPath": "pipelines.sockets.unofficial.2.2.8.nupkg.sha512" + }, + "Polly/7.2.4": { + "type": "package", + "serviceable": true, + "sha512": 
"sha512-bw00Ck5sh6ekduDE3mnCo1ohzuad946uslCDEENu3091+6UKnBuKLo4e+yaNcCzXxOZCXWY2gV4a35+K1d4LDA==", + "path": "polly/7.2.4", + "hashPath": "polly.7.2.4.nupkg.sha512" + }, + "Polly.Extensions.Http/3.0.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-drrG+hB3pYFY7w1c3BD+lSGYvH2oIclH8GRSehgfyP5kjnFnHKQuuBhuHLv+PWyFuaTDyk/vfRpnxOzd11+J8g==", + "path": "polly.extensions.http/3.0.0", + "hashPath": "polly.extensions.http.3.0.0.nupkg.sha512" + }, + "SharpCompress/0.41.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-z04dBVdTIAFTRKi38f0LkajaKA++bR+M8kYCbasXePILD2H+qs7CkLpyiippB24CSbTrWIgpBKm6BenZqkUwvw==", + "path": "sharpcompress/0.41.0", + "hashPath": "sharpcompress.0.41.0.nupkg.sha512" + }, + "StackExchange.Redis/2.8.24": { + "type": "package", + "serviceable": true, + "sha512": "sha512-GWllmsFAtLyhm4C47cOCipGxyEi1NQWTFUHXnJ8hiHOsK/bH3T5eLkWPVW+LRL6jDiB3g3izW3YEHgLuPoJSyA==", + "path": "stackexchange.redis/2.8.24", + "hashPath": "stackexchange.redis.2.8.24.nupkg.sha512" + }, + "System.IdentityModel.Tokens.Jwt/7.2.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-Z3Fmkrxkp+o51ANMO/PqASRRlEz8dH4mTWwZXMFMXZt2bUGztBiNcIDnwBCElYLYpzpmz4sIqHb6aW8QVLe6YQ==", + "path": "system.identitymodel.tokens.jwt/7.2.0", + "hashPath": "system.identitymodel.tokens.jwt.7.2.0.nupkg.sha512" + }, + "YamlDotNet/9.1.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-fuvGXU4Ec5HrsmEc+BiFTNPCRf1cGBI2kh/3RzMWgddM2M4ALhbSPoI3X3mhXZUD1qqQd9oSkFAtWjpz8z9eRg==", + "path": "yamldotnet/9.1.0", + "hashPath": "yamldotnet.9.1.0.nupkg.sha512" + }, + "ZstdSharp.Port/0.8.6": { + "type": "package", + "serviceable": true, + "sha512": "sha512-iP4jVLQoQmUjMU88g1WObiNr6YKZGvh4aOXn3yOJsHqZsflwRsxZPcIBvNXgjXO3vQKSLctXGLTpcBPLnWPS8A==", + "path": "zstdsharp.port/0.8.6", + "hashPath": "zstdsharp.port.0.8.6.nupkg.sha512" + }, + "StellaOps.Auth.Abstractions/1.0.0-preview.1": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + 
"StellaOps.Auth.Client/1.0.0-preview.1": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Auth.Security/1.0.0-preview.1": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Authority.Plugins.Abstractions/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Configuration/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Cryptography/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.DependencyInjection/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Plugin/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Scanner.Analyzers.Lang/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Scanner.Core/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + } + } +} \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 85d67b30..b9918b6c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,86 @@ -{ - "name": "git.stella-ops.org", - "lockfileVersion": 3, - "requires": true, - "packages": {} -} +{ + "name": "stellaops-docs", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "stellaops-docs", + "version": "0.1.0", + "dependencies": { + "ajv": "^8.17.1", + "ajv-formats": "^2.1.1" + } + }, + "node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + 
"node_modules/ajv-formats": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", + "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", + "license": "MIT", + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", + "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "license": "MIT" + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + } + } +} diff --git a/package.json b/package.json index 69a88e3b..a733a42f 100644 --- a/package.json +++ b/package.json @@ -1 +1,14 @@ -{} +{ + "name": "stellaops-docs", + "version": "0.1.0", + "private": 
true, + "type": "module", + "scripts": { + "docs:attestor:validate": "node scripts/validate-attestation-schemas.mjs", + "docs:attestor:generate": "dotnet run --project src/Attestor/StellaOps.Attestor.Types/Tools/StellaOps.Attestor.Types.Generator --configuration Release" + }, + "dependencies": { + "ajv": "^8.17.1", + "ajv-formats": "^2.1.1" + } +} diff --git a/scripts/render_docs.py b/scripts/render_docs.py index 0b51d3c4..c95dcd71 100644 --- a/scripts/render_docs.py +++ b/scripts/render_docs.py @@ -15,11 +15,12 @@ Dependencies: from __future__ import annotations -import argparse -import json -import logging -import os -import shutil +import argparse +import json +import logging +import os +import shutil +import subprocess from dataclasses import dataclass from datetime import datetime, timezone from pathlib import Path @@ -211,25 +212,44 @@ def write_index(entries: List[DocEntry], output_root: Path) -> None: logging.info("Wrote HTML index with %d entries", len(entries)) -def parse_args() -> argparse.Namespace: - parser = argparse.ArgumentParser(description="Render documentation bundle") +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Render documentation bundle") parser.add_argument("--source", default="docs", type=Path, help="Directory containing Markdown sources") parser.add_argument("--output", default=Path("build/docs-site"), type=Path, help="Directory for rendered output") parser.add_argument("--clean", action="store_true", help="Remove the output directory before rendering") - return parser.parse_args() - - -def main() -> int: - logging.basicConfig(level=logging.INFO, format="%(levelname)s %(message)s") - args = parse_args() - - source_root: Path = args.source.resolve() - output_root: Path = args.output.resolve() - - if not source_root.exists(): - logging.error("Source directory %s does not exist", source_root) - return os.EX_NOINPUT - + return parser.parse_args() + + +def run_attestor_validation(repo_root: Path) -> 
None: + """Execute the attestor schema + SDK validation prior to rendering docs.""" + logging.info("Running attestor payload validation (npm run docs:attestor:validate)") + result = subprocess.run( + ["npm", "run", "docs:attestor:validate"], + cwd=repo_root, + check=False, + ) + if result.returncode != 0: + raise RuntimeError("Attestor payload validation failed; aborting docs render.") + + +def main() -> int: + logging.basicConfig(level=logging.INFO, format="%(levelname)s %(message)s") + args = parse_args() + + source_root: Path = args.source.resolve() + output_root: Path = args.output.resolve() + repo_root = Path(__file__).resolve().parents[1] + + if not source_root.exists(): + logging.error("Source directory %s does not exist", source_root) + return os.EX_NOINPUT + + try: + run_attestor_validation(repo_root) + except RuntimeError as exc: + logging.error("%s", exc) + return os.EX_DATAERR + if args.clean and output_root.exists(): logging.info("Cleaning existing output directory %s", output_root) shutil.rmtree(output_root) diff --git a/scripts/run-attestor-ttl-validation.sh b/scripts/run-attestor-ttl-validation.sh new file mode 100644 index 00000000..f3cdbd1b --- /dev/null +++ b/scripts/run-attestor-ttl-validation.sh @@ -0,0 +1,81 @@ +#!/usr/bin/env bash +# Runs live TTL validation for Attestor dedupe stores against local MongoDB/Redis. + +set -euo pipefail + +if ! command -v docker >/dev/null 2>&1; then + echo "docker CLI is required. Install Docker Desktop or ensure docker is on PATH." >&2 + exit 1 +fi + +if ! docker compose version >/dev/null 2>&1; then + if command -v docker-compose >/dev/null 2>&1; then + compose_cmd="docker-compose" + else + echo "docker compose plugin (or docker-compose) is required." >&2 + exit 1 + fi +else + compose_cmd="docker compose" +fi + +repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." 
&& pwd)" +compose_file="$(mktemp -t attestor-ttl-compose-XXXXXX.yaml)" + +cleanup() { + $compose_cmd -f "$compose_file" down -v >/dev/null 2>&1 || true + rm -f "$compose_file" +} +trap cleanup EXIT + +cat >"$compose_file" <<'YAML' +services: + mongo: + image: mongo:7.0 + ports: + - "27017:27017" + healthcheck: + test: ["CMD", "mongosh", "--quiet", "localhost/test", "--eval", "db.runCommand({ ping: 1 })"] + interval: 5s + timeout: 3s + retries: 20 + redis: + image: redis:7.2 + command: ["redis-server", "--save", "", "--appendonly", "no"] + ports: + - "6379:6379" + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 5s + timeout: 3s + retries: 20 +YAML + +echo "Starting MongoDB and Redis containers..." +$compose_cmd -f "$compose_file" up -d + +wait_for_port() { + local host=$1 + local port=$2 + local name=$3 + for attempt in {1..60}; do + if (echo > /dev/tcp/"$host"/"$port") >/dev/null 2>&1; then + echo "$name is accepting connections." + return 0 + fi + sleep 1 + done + echo "Timeout waiting for $name on $host:$port" >&2 + return 1 +} + +wait_for_port 127.0.0.1 27017 "MongoDB" +wait_for_port 127.0.0.1 6379 "Redis" + +export ATTESTOR_LIVE_MONGO_URI="${ATTESTOR_LIVE_MONGO_URI:-mongodb://127.0.0.1:27017}" +export ATTESTOR_LIVE_REDIS_URI="${ATTESTOR_LIVE_REDIS_URI:-127.0.0.1:6379}" + +echo "Running live TTL validation tests..." +dotnet test "$repo_root/src/Attestor/StellaOps.Attestor.sln" --no-build --filter "Category=LiveTTL" "$@" + +echo "Live TTL validation complete. Shutting down containers." 
diff --git a/scripts/validate-attestation-schemas.mjs b/scripts/validate-attestation-schemas.mjs new file mode 100644 index 00000000..ec0a97f6 --- /dev/null +++ b/scripts/validate-attestation-schemas.mjs @@ -0,0 +1,145 @@ +import { readFileSync } from 'node:fs'; +import { fileURLToPath } from 'node:url'; +import { dirname, join } from 'node:path'; +import { spawnSync } from 'node:child_process'; +import Ajv2020 from 'ajv/dist/2020.js'; +import addFormats from 'ajv-formats'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); +const repoRoot = join(__dirname, '..'); +const moduleRoot = join(repoRoot, 'src', 'Attestor', 'StellaOps.Attestor.Types'); +const schemasDir = join(moduleRoot, 'schemas'); +const fixturesDir = join(moduleRoot, 'fixtures', 'v1'); +const tsDir = join(moduleRoot, 'generated', 'ts'); +const goDir = join(moduleRoot, 'generated', 'go'); + +const schemaFiles = [ + { schema: 'stellaops-build-provenance.v1.schema.json', sample: 'build-provenance.sample.json' }, + { schema: 'stellaops-sbom-attestation.v1.schema.json', sample: 'sbom-attestation.sample.json' }, + { schema: 'stellaops-scan-results.v1.schema.json', sample: 'scan-results.sample.json' }, + { schema: 'stellaops-vex-attestation.v1.schema.json', sample: 'vex-attestation.sample.json' }, + { schema: 'stellaops-policy-evaluation.v1.schema.json', sample: 'policy-evaluation.sample.json' }, + { schema: 'stellaops-risk-profile.v1.schema.json', sample: 'risk-profile-evidence.sample.json' }, + { schema: 'stellaops-custom-evidence.v1.schema.json', sample: 'custom-evidence.sample.json' } +]; + +const commonSchemaPath = join(schemasDir, 'attestation-common.v1.schema.json'); +const ajv = new Ajv2020({ strict: false, allErrors: true }); +addFormats(ajv); + +const commonSchema = JSON.parse(readFileSync(commonSchemaPath, 'utf8')); +const commonId = commonSchema.$id || 'https://schemas.stella-ops.org/attestations/common/v1'; +ajv.addSchema(commonSchema, commonId); + +let 
failed = false; + +function stableStringify(value) { + if (Array.isArray(value)) { + return '[' + value.map(stableStringify).join(',') + ']'; + } + + if (value && typeof value === 'object') { + const entries = Object.keys(value) + .sort() + .map((key) => `${JSON.stringify(key)}:${stableStringify(value[key])}`); + return '{' + entries.join(',') + '}'; + } + + return JSON.stringify(value); +} + +function runCommand(command, args, options) { + const result = spawnSync(command, args, { stdio: 'inherit', ...options }); + if (result.error) { + if (result.error.code === 'ENOENT') { + throw new Error(`Command not found: ${command}`); + } + throw result.error; + } + if (result.status !== 0) { + throw new Error(`Command failed: ${command} ${args.join(' ')}`); + } +} + +function commandExists(command) { + const result = spawnSync(command, ['--version'], { + stdio: 'ignore', + env: { + ...process.env, + PATH: `/usr/local/go/bin:${process.env.PATH ?? ''}`, + }, + }); + if (result.error && result.error.code === 'ENOENT') { + return false; + } + return (result.status ?? 
0) === 0; +} + +for (const mapping of schemaFiles) { + const schemaFile = mapping.schema; + const sample = mapping.sample; + const schemaPath = join(schemasDir, schemaFile); + const samplePath = join(fixturesDir, sample); + + const schemaJson = JSON.parse(readFileSync(schemaPath, 'utf8')); + const sampleJson = JSON.parse(readFileSync(samplePath, 'utf8')); + + const schemaId = schemaJson.$id || ('https://stella-ops.org/schemas/attestor/' + schemaFile); + ajv.removeSchema(schemaId); + ajv.addSchema(schemaJson, schemaId); + + const alias = new URL('attestation-common.v1.schema.json', new URL(schemaId)); + if (!ajv.getSchema(alias.href)) { + ajv.addSchema(commonSchema, alias.href); + } + + const validate = ajv.getSchema(schemaId) || ajv.compile(schemaJson); + const valid = validate(sampleJson); + + if (!valid) { + failed = true; + console.error('✖ ' + schemaFile + ' failed for fixture ' + sample); + console.error(validate.errors || []); + } else { + const canonical = stableStringify(sampleJson); + const digest = Buffer.from(canonical, 'utf8').toString('base64'); + console.log('✔ ' + schemaFile + ' ✓ ' + sample + ' (canonical b64: ' + digest.slice(0, 16) + '… )'); + } +} + +if (failed) { + console.error('One or more schema validations failed.'); + process.exit(1); +} + +try { + console.log('\n▶ Installing TypeScript dependencies...'); + runCommand('npm', ['install', '--no-fund', '--no-audit'], { cwd: tsDir }); + + console.log('▶ Running TypeScript build/tests...'); + runCommand('npm', ['run', 'test'], { cwd: tsDir }); + + const goCandidates = [ + 'go', + '/usr/local/go/bin/go', + process.env.GO || '', + ].filter(Boolean); + const goCommand = goCandidates.find((candidate) => commandExists(candidate)); + + if (goCommand) { + console.log('▶ Running Go tests...'); + const goEnv = { + ...process.env, + PATH: `/usr/local/go/bin:${process.env.PATH ?? 
''}`, + }; + runCommand(goCommand, ['test', './...'], { cwd: goDir, env: goEnv }); + } else { + console.warn('⚠️ Go toolchain not found; skipping Go SDK tests.'); + } +} catch (err) { + console.error(err.message); + process.exit(1); +} + +console.log('All attestation schemas and SDKs validated successfully.'); diff --git a/src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml b/src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml index cd1aa678..18b05240 100644 --- a/src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml +++ b/src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml @@ -29,15 +29,32 @@ components: password: tokenUrl: /token refreshUrl: /token - scopes: - advisory:ingest: Submit advisory ingestion payloads. - advisory:read: Read advisory ingestion data. - aoc:verify: Execute Aggregation-Only Contract verification workflows. - authority.audit.read: Read Authority audit logs. - authority.clients.manage: Manage Authority client registrations. - authority.users.manage: Manage Authority users. - authority:tenants.read: Read the Authority tenant catalog. - concelier.jobs.trigger: Trigger Concelier aggregation jobs. + scopes: + attestor.write: Submit attestation bundles and Rekor entries. + attestor.verify: Invoke attestation verification APIs. + attestor.read: Fetch attestation entries and proofs. + advisory:ingest: Submit advisory ingestion payloads. + advisory:read: Read advisory ingestion data. + advisory-ai:view: View Advisory AI artefacts and cached outputs. + advisory-ai:operate: Submit Advisory AI inference and remediation requests. + advisory-ai:admin: Administer Advisory AI configuration, profiles, and remote execution. + aoc:verify: Execute Aggregation-Only Contract verification workflows. + airgap:seal: Seal or unseal an air-gapped installation. + airgap:import: Import offline bundles and mirror artifacts while air-gapped. + airgap:status:read: Read air-gap sealing status and staleness indicators. 
+ obs:read: Read observability dashboards, SLO digests, and incident overlays. + timeline:read: Read incident timeline entries and annotations. + timeline:write: Append deterministic incident timeline events and annotations. + evidence:create: Create evidence items, upload artefacts, and link attestations. + evidence:read: Read evidence items, artefacts, and linkage metadata. + evidence:hold: Apply or release legal holds on evidence items. + attest:read: Read attestation records, DSSE bundles, and verification proofs. + obs:incident: Toggle incident mode, extend retention, enable emergency telemetry. + authority.audit.read: Read Authority audit logs. + authority.clients.manage: Manage Authority client registrations. + authority.users.manage: Manage Authority users. + authority:tenants.read: Read the Authority tenant catalog. + concelier.jobs.trigger: Trigger Concelier aggregation jobs. concelier.merge: Manage Concelier merge operations. effective:write: Write effective findings (Policy Engine service identity only). email: Access email claim data. @@ -72,17 +89,34 @@ components: vex:ingest: Submit VEX ingestion payloads. vex:read: Read VEX ingestion data. vuln:read: Read vulnerability permalinks and overlays. - authorizationCode: - authorizationUrl: /authorize - tokenUrl: /token - refreshUrl: /token - scopes: - advisory:ingest: Submit advisory ingestion payloads. - advisory:read: Read advisory ingestion data. - aoc:verify: Execute Aggregation-Only Contract verification workflows. - authority.audit.read: Read Authority audit logs. - authority.clients.manage: Manage Authority client registrations. - authority.users.manage: Manage Authority users. + authorizationCode: + authorizationUrl: /authorize + tokenUrl: /token + refreshUrl: /token + scopes: + attestor.write: Submit attestation bundles and Rekor entries. + attestor.verify: Invoke attestation verification APIs. + attestor.read: Fetch attestation entries and proofs. 
+ advisory:ingest: Submit advisory ingestion payloads. + advisory:read: Read advisory ingestion data. + advisory-ai:view: View Advisory AI artefacts and cached outputs. + advisory-ai:operate: Submit Advisory AI inference and remediation requests. + advisory-ai:admin: Administer Advisory AI configuration, profiles, and remote execution. + aoc:verify: Execute Aggregation-Only Contract verification workflows. + airgap:seal: Seal or unseal an air-gapped installation. + airgap:import: Import offline bundles and mirror artifacts while air-gapped. + airgap:status:read: Read air-gap sealing status and staleness indicators. + obs:read: Read observability dashboards, SLO digests, and incident overlays. + timeline:read: Read incident timeline entries and annotations. + timeline:write: Append deterministic incident timeline events and annotations. + evidence:create: Create evidence items, upload artefacts, and link attestations. + evidence:read: Read evidence items, artefacts, and linkage metadata. + evidence:hold: Apply or release legal holds on evidence items. + attest:read: Read attestation records, DSSE bundles, and verification proofs. + obs:incident: Toggle incident mode, extend retention, enable emergency telemetry. + authority.audit.read: Read Authority audit logs. + authority.clients.manage: Manage Authority client registrations. + authority.users.manage: Manage Authority users. authority:tenants.read: Read the Authority tenant catalog. concelier.jobs.trigger: Trigger Concelier aggregation jobs. concelier.merge: Manage Concelier merge operations. @@ -125,11 +159,25 @@ components: flows: clientCredentials: tokenUrl: /token - scopes: - advisory:ingest: Submit advisory ingestion payloads. - advisory:read: Read advisory ingestion data. - aoc:verify: Execute Aggregation-Only Contract verification workflows. - authority.audit.read: Read Authority audit logs. + scopes: + attestor.write: Submit attestation bundles and Rekor entries. 
+ attestor.verify: Invoke attestation verification APIs. + attestor.read: Fetch attestation entries and proofs. + advisory:ingest: Submit advisory ingestion payloads. + advisory:read: Read advisory ingestion data. + advisory-ai:view: View Advisory AI artefacts and cached outputs. + advisory-ai:operate: Submit Advisory AI inference and remediation requests. + advisory-ai:admin: Administer Advisory AI configuration, profiles, and remote execution. + aoc:verify: Execute Aggregation-Only Contract verification workflows. + obs:read: Read observability dashboards, SLO digests, and incident overlays. + timeline:read: Read incident timeline entries and annotations. + timeline:write: Append deterministic incident timeline events and annotations. + evidence:create: Create evidence items, upload artefacts, and link attestations. + evidence:read: Read evidence items, artefacts, and linkage metadata. + evidence:hold: Apply or release legal holds on evidence items. + attest:read: Read attestation records, DSSE bundles, and verification proofs. + obs:incident: Toggle incident mode, extend retention, enable emergency telemetry. + authority.audit.read: Read Authority audit logs. authority.clients.manage: Manage Authority client registrations. authority.users.manage: Manage Authority users. authority:tenants.read: Read the Authority tenant catalog. 
diff --git a/src/Attestor/StellaOps.Attestor.Envelope/DsseCompressionAlgorithm.cs b/src/Attestor/StellaOps.Attestor.Envelope/DsseCompressionAlgorithm.cs new file mode 100644 index 00000000..99ea0acb --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/DsseCompressionAlgorithm.cs @@ -0,0 +1,8 @@ +namespace StellaOps.Attestor.Envelope; + +public enum DsseCompressionAlgorithm +{ + None = 0, + Gzip = 1, + Brotli = 2 +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/DsseDetachedPayloadReference.cs b/src/Attestor/StellaOps.Attestor.Envelope/DsseDetachedPayloadReference.cs new file mode 100644 index 00000000..196f4822 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/DsseDetachedPayloadReference.cs @@ -0,0 +1,32 @@ +using System; + +namespace StellaOps.Attestor.Envelope; + +public sealed record DsseDetachedPayloadReference +{ + public DsseDetachedPayloadReference(string uri, string sha256, long? length = null, string? mediaType = null) + { + if (string.IsNullOrWhiteSpace(uri)) + { + throw new ArgumentException("Detached payload URI must be provided.", nameof(uri)); + } + + if (string.IsNullOrWhiteSpace(sha256)) + { + throw new ArgumentException("Detached payload digest must be provided.", nameof(sha256)); + } + + Uri = uri; + Sha256 = sha256.ToLowerInvariant(); + Length = length; + MediaType = mediaType; + } + + public string Uri { get; } + + public string Sha256 { get; } + + public long? Length { get; } + + public string? 
MediaType { get; } +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelope.cs b/src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelope.cs new file mode 100644 index 00000000..1853feba --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelope.cs @@ -0,0 +1,48 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace StellaOps.Attestor.Envelope; + +public sealed class DsseEnvelope +{ + public DsseEnvelope( + string payloadType, + ReadOnlyMemory payload, + IEnumerable signatures, + string? payloadContentType = null, + DsseDetachedPayloadReference? detachedPayload = null) + { + if (string.IsNullOrWhiteSpace(payloadType)) + { + throw new ArgumentException("payloadType must be provided.", nameof(payloadType)); + } + + PayloadType = payloadType; + Payload = payload; + PayloadContentType = payloadContentType; + DetachedPayload = detachedPayload; + + var normalised = signatures?.ToArray() ?? Array.Empty(); + if (normalised.Length == 0) + { + throw new ArgumentException("At least one signature must be supplied.", nameof(signatures)); + } + + // Deterministic ordering (keyid asc, signature asc) for canonical output. + Signatures = normalised + .OrderBy(static x => x.KeyId ?? string.Empty, StringComparer.Ordinal) + .ThenBy(static x => x.Signature, StringComparer.Ordinal) + .ToArray(); + } + + public string PayloadType { get; } + + public ReadOnlyMemory Payload { get; } + + public string? PayloadContentType { get; } + + public IReadOnlyList Signatures { get; } + + public DsseDetachedPayloadReference? 
DetachedPayload { get; } +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelopeSerializationOptions.cs b/src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelopeSerializationOptions.cs new file mode 100644 index 00000000..070f94df --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelopeSerializationOptions.cs @@ -0,0 +1,14 @@ +namespace StellaOps.Attestor.Envelope; + +public sealed class DsseEnvelopeSerializationOptions +{ + public bool EmitCompactJson { get; init; } = true; + + public bool EmitExpandedJson { get; init; } = true; + + public bool IndentExpandedJson { get; init; } = true; + + public bool IncludePayloadPreview { get; init; } = true; + + public DsseCompressionAlgorithm CompressionAlgorithm { get; init; } = DsseCompressionAlgorithm.None; +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelopeSerializationResult.cs b/src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelopeSerializationResult.cs new file mode 100644 index 00000000..803360a8 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelopeSerializationResult.cs @@ -0,0 +1,38 @@ +using System; + +namespace StellaOps.Attestor.Envelope; + +public sealed class DsseEnvelopeSerializationResult +{ + public DsseEnvelopeSerializationResult( + byte[]? compactJson, + byte[]? expandedJson, + string payloadSha256, + int originalPayloadLength, + int embeddedPayloadLength, + DsseCompressionAlgorithm compression, + DsseDetachedPayloadReference? detachedPayload) + { + CompactJson = compactJson; + ExpandedJson = expandedJson; + PayloadSha256 = payloadSha256 ?? throw new ArgumentNullException(nameof(payloadSha256)); + OriginalPayloadLength = originalPayloadLength; + EmbeddedPayloadLength = embeddedPayloadLength; + Compression = compression; + DetachedPayload = detachedPayload; + } + + public byte[]? CompactJson { get; } + + public byte[]? 
ExpandedJson { get; } + + public string PayloadSha256 { get; } + + public int OriginalPayloadLength { get; } + + public int EmbeddedPayloadLength { get; } + + public DsseCompressionAlgorithm Compression { get; } + + public DsseDetachedPayloadReference? DetachedPayload { get; } +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelopeSerializer.cs b/src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelopeSerializer.cs new file mode 100644 index 00000000..551507e5 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/DsseEnvelopeSerializer.cs @@ -0,0 +1,331 @@ +using System; +using System.Buffers; +using System.Collections.Generic; +using System.IO; +using System.IO.Compression; +using System.Security.Cryptography; +using System.Text; +using System.Text.Encodings.Web; +using System.Text.Json; + +namespace StellaOps.Attestor.Envelope; + +public static class DsseEnvelopeSerializer +{ + public static DsseEnvelopeSerializationResult Serialize(DsseEnvelope envelope, DsseEnvelopeSerializationOptions? options = null) + { + ArgumentNullException.ThrowIfNull(envelope); + + options ??= new DsseEnvelopeSerializationOptions(); + + var originalPayload = envelope.Payload.ToArray(); + var processedPayload = ApplyCompression(originalPayload, options.CompressionAlgorithm); + var payloadSha256 = Convert.ToHexString(SHA256.HashData(originalPayload)).ToLowerInvariant(); + var payloadBase64 = Convert.ToBase64String(processedPayload); + + byte[]? compactJson = null; + if (options.EmitCompactJson) + { + compactJson = BuildCompactJson(envelope.PayloadType, payloadBase64, envelope.Signatures); + } + + byte[]? 
expandedJson = null; + if (options.EmitExpandedJson) + { + expandedJson = BuildExpandedJson( + envelope, + payloadBase64, + payloadSha256, + originalPayload.Length, + processedPayload.Length, + options, + originalPayload); + } + + return new DsseEnvelopeSerializationResult( + compactJson, + expandedJson, + payloadSha256, + originalPayload.Length, + processedPayload.Length, + options.CompressionAlgorithm, + envelope.DetachedPayload); + } + + private static byte[] BuildCompactJson(string payloadType, string payloadBase64, IReadOnlyList signatures) + { + var buffer = new ArrayBufferWriter(); + using var writer = new Utf8JsonWriter(buffer, new JsonWriterOptions + { + Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping, + Indented = false + }); + + writer.WriteStartObject(); + writer.WriteString("payloadType", payloadType); + writer.WriteString("payload", payloadBase64); + writer.WritePropertyName("signatures"); + writer.WriteStartArray(); + + foreach (var signature in EnumerateCanonicalSignatures(signatures)) + { + writer.WriteStartObject(); + if (!string.IsNullOrWhiteSpace(signature.KeyId)) + { + writer.WriteString("keyid", signature.KeyId); + } + + writer.WriteString("sig", signature.Signature); + writer.WriteEndObject(); + } + + writer.WriteEndArray(); + writer.WriteEndObject(); + writer.Flush(); + return buffer.WrittenSpan.ToArray(); + } + + private static byte[]? 
BuildExpandedJson( + DsseEnvelope envelope, + string payloadBase64, + string payloadSha256, + int originalPayloadLength, + int embeddedPayloadLength, + DsseEnvelopeSerializationOptions options, + byte[] originalPayload) + { + var buffer = new ArrayBufferWriter(); + using var writer = new Utf8JsonWriter(buffer, new JsonWriterOptions + { + Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping, + Indented = options.IndentExpandedJson + }); + + writer.WriteStartObject(); + writer.WriteString("payloadType", envelope.PayloadType); + writer.WriteString("payload", payloadBase64); + writer.WritePropertyName("signatures"); + writer.WriteStartArray(); + + foreach (var signature in EnumerateCanonicalSignatures(envelope.Signatures)) + { + writer.WriteStartObject(); + if (!string.IsNullOrWhiteSpace(signature.KeyId)) + { + writer.WriteString("keyid", signature.KeyId); + } + + writer.WriteString("sig", signature.Signature); + writer.WriteEndObject(); + } + + writer.WriteEndArray(); + + writer.WritePropertyName("payloadInfo"); + writer.WriteStartObject(); + writer.WriteString("sha256", payloadSha256); + writer.WriteNumber("length", originalPayloadLength); + + if (options.CompressionAlgorithm != DsseCompressionAlgorithm.None) + { + writer.WritePropertyName("compression"); + writer.WriteStartObject(); + writer.WriteString("algorithm", GetCompressionName(options.CompressionAlgorithm)); + writer.WriteNumber("compressedLength", embeddedPayloadLength); + writer.WriteEndObject(); + } + + writer.WriteEndObject(); // payloadInfo + + if (options.IncludePayloadPreview && TryWritePayloadPreview(envelope.PayloadContentType, originalPayload, writer)) + { + // preview already written inside helper + } + + if (envelope.DetachedPayload is not null) + { + writer.WritePropertyName("detachedPayload"); + writer.WriteStartObject(); + writer.WriteString("uri", envelope.DetachedPayload.Uri); + writer.WriteString("sha256", envelope.DetachedPayload.Sha256); + if (envelope.DetachedPayload.Length.HasValue) + 
{ + writer.WriteNumber("length", envelope.DetachedPayload.Length.Value); + } + + if (!string.IsNullOrWhiteSpace(envelope.DetachedPayload.MediaType)) + { + writer.WriteString("mediaType", envelope.DetachedPayload.MediaType); + } + + writer.WriteEndObject(); + } + + writer.WriteEndObject(); + writer.Flush(); + return buffer.WrittenSpan.ToArray(); + } + + private static bool TryWritePayloadPreview(string? contentType, byte[] originalPayload, Utf8JsonWriter writer) + { + if (string.IsNullOrWhiteSpace(contentType)) + { + return false; + } + + var lower = contentType.ToLowerInvariant(); + if (!lower.Contains("json") && !lower.StartsWith("text/", StringComparison.Ordinal)) + { + return false; + } + + writer.WritePropertyName("payloadPreview"); + writer.WriteStartObject(); + writer.WriteString("mediaType", contentType); + + if (lower.Contains("json") && TryParseJson(originalPayload, out var jsonDocument)) + { + writer.WritePropertyName("json"); + jsonDocument.WriteTo(writer); + jsonDocument.Dispose(); + } + else if (TryDecodeUtf8(originalPayload, out var text)) + { + writer.WriteString("text", text); + } + + writer.WriteEndObject(); + return true; + } + + private static bool TryParseJson(byte[] payload, out JsonDocument document) + { + try + { + document = JsonDocument.Parse(payload); + return true; + } + catch (JsonException) + { + document = null!; + return false; + } + } + + private static bool TryDecodeUtf8(byte[] payload, out string text) + { + var utf8 = new UTF8Encoding(false, true); + try + { + text = utf8.GetString(payload); + return true; + } + catch (DecoderFallbackException) + { + text = string.Empty; + return false; + } + } + + private static byte[] ApplyCompression(byte[] payload, DsseCompressionAlgorithm algorithm) + { + return algorithm switch + { + DsseCompressionAlgorithm.None => payload, + DsseCompressionAlgorithm.Gzip => CompressWithStream(payload, static (stream) => new GZipStream(stream, CompressionLevel.SmallestSize, leaveOpen: true)), + 
DsseCompressionAlgorithm.Brotli => CompressWithStream(payload, static (stream) => new BrotliStream(stream, CompressionLevel.SmallestSize, leaveOpen: true)), + _ => throw new NotSupportedException($"Compression algorithm '{algorithm}' is not supported.") + }; + } + + private static byte[] CompressWithStream(byte[] payload, Func streamFactory) + { + if (payload.Length == 0) + { + return Array.Empty(); + } + + using var output = new MemoryStream(); + using (var compressionStream = streamFactory(output)) + { + compressionStream.Write(payload); + } + + return output.ToArray(); + } + + private static string GetCompressionName(DsseCompressionAlgorithm algorithm) + { + return algorithm switch + { + DsseCompressionAlgorithm.Gzip => "gzip", + DsseCompressionAlgorithm.Brotli => "brotli", + DsseCompressionAlgorithm.None => "none", + _ => algorithm.ToString().ToLowerInvariant() + }; + } + + private static IEnumerable EnumerateCanonicalSignatures(IReadOnlyList signatures) + { + if (signatures.Count <= 1) + { + return signatures; + } + + var comparer = CanonicalSignatureComparer.Instance; + var previous = signatures[0]; + for (var i = 1; i < signatures.Count; i++) + { + var current = signatures[i]; + if (comparer.Compare(previous, current) > 0) + { + var buffer = new List(signatures.Count); + for (var j = 0; j < signatures.Count; j++) + { + buffer.Add(signatures[j]); + } + + buffer.Sort(comparer); + return buffer; + } + + previous = current; + } + + return signatures; + } + + private sealed class CanonicalSignatureComparer : IComparer + { + public static CanonicalSignatureComparer Instance { get; } = new(); + + public int Compare(DsseSignature? x, DsseSignature? 
y) + { + if (ReferenceEquals(x, y)) + { + return 0; + } + + ArgumentNullException.ThrowIfNull(x); + ArgumentNullException.ThrowIfNull(y); + + var keyComparison = string.Compare(x.KeyId, y.KeyId, StringComparison.Ordinal); + if (keyComparison != 0) + { + if (x.KeyId is null) + { + return -1; + } + + if (y.KeyId is null) + { + return 1; + } + + return keyComparison; + } + + return string.Compare(x.Signature, y.Signature, StringComparison.Ordinal); + } + } +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/DsseSignature.cs b/src/Attestor/StellaOps.Attestor.Envelope/DsseSignature.cs new file mode 100644 index 00000000..0a57bd87 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/DsseSignature.cs @@ -0,0 +1,31 @@ +using System; + +namespace StellaOps.Attestor.Envelope; + +public sealed record DsseSignature +{ + public DsseSignature(string signature, string? keyId = null) + { + if (string.IsNullOrWhiteSpace(signature)) + { + throw new ArgumentException("Signature must be provided.", nameof(signature)); + } + + Signature = signature; + KeyId = keyId; + } + + public string Signature { get; } + + public string? KeyId { get; } + + public static DsseSignature FromBytes(ReadOnlySpan signature, string? keyId = null) + { + if (signature.IsEmpty) + { + throw new ArgumentException("Signature bytes must be provided.", nameof(signature)); + } + + return new DsseSignature(Convert.ToBase64String(signature), keyId); + } +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeKey.cs b/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeKey.cs new file mode 100644 index 00000000..38aebff4 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeKey.cs @@ -0,0 +1,301 @@ +using System; +using System.Security.Cryptography; +using StellaOps.Cryptography; + +namespace StellaOps.Attestor.Envelope; + +/// +/// Describes the underlying key algorithm for DSSE envelope signing. 
+/// +public enum EnvelopeKeyKind +{ + Ed25519, + Ecdsa +} + +/// +/// Represents signing or verification key material for DSSE envelope operations. +/// +public sealed class EnvelopeKey +{ + private const int Ed25519PublicKeyLength = 32; + private const int Ed25519PrivateKeySeedLength = 32; + private const int Ed25519PrivateKeyExpandedLength = 64; + + private readonly byte[]? ed25519PublicKey; + private readonly byte[]? ed25519PrivateKey; + private readonly ECParameters? ecdsaPublicParameters; + private readonly ECParameters? ecdsaPrivateParameters; + + private EnvelopeKey( + EnvelopeKeyKind kind, + string algorithmId, + string keyId, + byte[]? ed25519PublicKey, + byte[]? ed25519PrivateKey, + ECParameters? ecdsaPublicParameters, + ECParameters? ecdsaPrivateParameters) + { + Kind = kind; + AlgorithmId = algorithmId; + KeyId = keyId; + this.ed25519PublicKey = ed25519PublicKey; + this.ed25519PrivateKey = ed25519PrivateKey; + this.ecdsaPublicParameters = ecdsaPublicParameters; + this.ecdsaPrivateParameters = ecdsaPrivateParameters; + } + + /// + /// Gets the key classification. + /// + public EnvelopeKeyKind Kind { get; } + + /// + /// Gets the signing algorithm identifier (e.g., ED25519, ES256). + /// + public string AlgorithmId { get; } + + /// + /// Gets the deterministic key identifier (RFC7638 JWK thumbprint based). + /// + public string KeyId { get; } + + /// + /// Indicates whether the key has private material available. + /// + public bool HasPrivateMaterial => Kind switch + { + EnvelopeKeyKind.Ed25519 => ed25519PrivateKey is not null, + EnvelopeKeyKind.Ecdsa => ecdsaPrivateParameters.HasValue, + _ => false + }; + + /// + /// Indicates whether the key has public material available. 
+ /// + public bool HasPublicMaterial => Kind switch + { + EnvelopeKeyKind.Ed25519 => ed25519PublicKey is not null, + EnvelopeKeyKind.Ecdsa => ecdsaPublicParameters.HasValue, + _ => false + }; + + internal ReadOnlySpan GetEd25519PublicKey() + { + if (Kind != EnvelopeKeyKind.Ed25519 || ed25519PublicKey is null) + { + throw new InvalidOperationException("Key does not provide Ed25519 public material."); + } + + return ed25519PublicKey; + } + + internal ReadOnlySpan GetEd25519PrivateKey() + { + if (Kind != EnvelopeKeyKind.Ed25519 || ed25519PrivateKey is null) + { + throw new InvalidOperationException("Key does not provide Ed25519 private material."); + } + + return ed25519PrivateKey; + } + + internal ECParameters GetEcdsaPublicParameters() + { + if (Kind != EnvelopeKeyKind.Ecdsa || !ecdsaPublicParameters.HasValue) + { + throw new InvalidOperationException("Key does not provide ECDSA public parameters."); + } + + return CloneParameters(ecdsaPublicParameters.Value, includePrivate: false); + } + + internal ECParameters GetEcdsaPrivateParameters() + { + if (Kind != EnvelopeKeyKind.Ecdsa || !ecdsaPrivateParameters.HasValue) + { + throw new InvalidOperationException("Key does not provide ECDSA private parameters."); + } + + return CloneParameters(ecdsaPrivateParameters.Value, includePrivate: true); + } + + /// + /// Creates an Ed25519 signing key (requires private + public material). + /// + /// 64-byte Ed25519 private key (seed || public key). + /// 32-byte Ed25519 public key. + /// Optional external key identifier override. + /// Envelope key instance. + public static EnvelopeKey CreateEd25519Signer(ReadOnlySpan privateKey, ReadOnlySpan publicKey, string? keyId = null) + { + var normalizedPrivate = NormalizeEd25519PrivateKey(privateKey); + ValidateEd25519PublicLength(publicKey); + var publicCopy = publicKey.ToArray(); + var resolvedKeyId = string.IsNullOrWhiteSpace(keyId) + ? 
EnvelopeKeyIdCalculator.FromEd25519(publicCopy) + : keyId; + + return new EnvelopeKey( + EnvelopeKeyKind.Ed25519, + SignatureAlgorithms.Ed25519, + resolvedKeyId, + publicCopy, + normalizedPrivate, + ecdsaPublicParameters: null, + ecdsaPrivateParameters: null); + } + + /// + /// Creates an Ed25519 verification key (public material only). + /// + /// 32-byte Ed25519 public key. + /// Optional external key identifier override. + /// Envelope key instance. + public static EnvelopeKey CreateEd25519Verifier(ReadOnlySpan publicKey, string? keyId = null) + { + ValidateEd25519PublicLength(publicKey); + + var publicCopy = publicKey.ToArray(); + var resolvedKeyId = string.IsNullOrWhiteSpace(keyId) + ? EnvelopeKeyIdCalculator.FromEd25519(publicCopy) + : keyId; + + return new EnvelopeKey( + EnvelopeKeyKind.Ed25519, + SignatureAlgorithms.Ed25519, + resolvedKeyId, + publicCopy, + ed25519PrivateKey: null, + ecdsaPublicParameters: null, + ecdsaPrivateParameters: null); + } + + /// + /// Creates an ECDSA signing key (private + public EC parameters). + /// + /// ECDSA algorithm identifier (ES256, ES384, ES512). + /// EC parameters including private scalar. + /// Optional external key identifier override. + /// Envelope key instance. + public static EnvelopeKey CreateEcdsaSigner(string algorithmId, in ECParameters privateParameters, string? 
keyId = null) + { + ValidateEcdsaAlgorithm(algorithmId); + + if (privateParameters.D is null || privateParameters.D.Length == 0) + { + throw new ArgumentException("ECDSA private parameters must include the scalar component (D).", nameof(privateParameters)); + } + + if (privateParameters.Q.X is null || privateParameters.Q.Y is null) + { + throw new ArgumentException("ECDSA private parameters must include public coordinates.", nameof(privateParameters)); + } + + var publicClone = CloneParameters(privateParameters, includePrivate: false); + var privateClone = CloneParameters(privateParameters, includePrivate: true); + var resolvedKeyId = string.IsNullOrWhiteSpace(keyId) + ? EnvelopeKeyIdCalculator.FromEcdsa(algorithmId, publicClone) + : keyId; + + return new EnvelopeKey( + EnvelopeKeyKind.Ecdsa, + algorithmId, + resolvedKeyId, + ed25519PublicKey: null, + ed25519PrivateKey: null, + ecdsaPublicParameters: publicClone, + ecdsaPrivateParameters: privateClone); + } + + /// + /// Creates an ECDSA verification key (public EC parameters). + /// + /// ECDSA algorithm identifier (ES256, ES384, ES512). + /// EC parameters containing only public coordinates. + /// Optional external key identifier override. + /// Envelope key instance. + public static EnvelopeKey CreateEcdsaVerifier(string algorithmId, in ECParameters publicParameters, string? keyId = null) + { + ValidateEcdsaAlgorithm(algorithmId); + + if (publicParameters.Q.X is null || publicParameters.Q.Y is null) + { + throw new ArgumentException("ECDSA public parameters must include X and Y coordinates.", nameof(publicParameters)); + } + + if (publicParameters.D is not null) + { + throw new ArgumentException("ECDSA verification parameters must not include private scalar data.", nameof(publicParameters)); + } + + var publicClone = CloneParameters(publicParameters, includePrivate: false); + var resolvedKeyId = string.IsNullOrWhiteSpace(keyId) + ? 
EnvelopeKeyIdCalculator.FromEcdsa(algorithmId, publicClone) + : keyId; + + return new EnvelopeKey( + EnvelopeKeyKind.Ecdsa, + algorithmId, + resolvedKeyId, + ed25519PublicKey: null, + ed25519PrivateKey: null, + ecdsaPublicParameters: publicClone, + ecdsaPrivateParameters: null); + } + + private static byte[] NormalizeEd25519PrivateKey(ReadOnlySpan privateKey) + { + return privateKey.Length switch + { + Ed25519PrivateKeySeedLength => privateKey.ToArray(), + Ed25519PrivateKeyExpandedLength => privateKey[..Ed25519PrivateKeySeedLength].ToArray(), + _ => throw new ArgumentException($"Ed25519 private key must be {Ed25519PrivateKeySeedLength} or {Ed25519PrivateKeyExpandedLength} bytes.", nameof(privateKey)) + }; + } + + private static void ValidateEd25519PublicLength(ReadOnlySpan publicKey) + { + if (publicKey.Length != Ed25519PublicKeyLength) + { + throw new ArgumentException($"Ed25519 public key must be {Ed25519PublicKeyLength} bytes.", nameof(publicKey)); + } + } + + private static void ValidateEcdsaAlgorithm(string algorithmId) + { + if (string.IsNullOrWhiteSpace(algorithmId)) + { + throw new ArgumentException("Algorithm identifier is required.", nameof(algorithmId)); + } + + var supported = string.Equals(algorithmId, SignatureAlgorithms.Es256, StringComparison.OrdinalIgnoreCase) + || string.Equals(algorithmId, SignatureAlgorithms.Es384, StringComparison.OrdinalIgnoreCase) + || string.Equals(algorithmId, SignatureAlgorithms.Es512, StringComparison.OrdinalIgnoreCase); + + if (!supported) + { + throw new ArgumentException($"Unsupported ECDSA algorithm '{algorithmId}'.", nameof(algorithmId)); + } + } + + private static ECParameters CloneParameters(ECParameters source, bool includePrivate) + { + var clone = new ECParameters + { + Curve = source.Curve, + Q = new ECPoint + { + X = source.Q.X is null ? null : (byte[])source.Q.X.Clone(), + Y = source.Q.Y is null ? 
null : (byte[])source.Q.Y.Clone() + } + }; + + if (includePrivate && source.D is not null) + { + clone.D = (byte[])source.D.Clone(); + } + + return clone; + } +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeKeyIdCalculator.cs b/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeKeyIdCalculator.cs new file mode 100644 index 00000000..a68e53e1 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeKeyIdCalculator.cs @@ -0,0 +1,54 @@ +using System; +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.Attestor.Envelope; + +internal static class EnvelopeKeyIdCalculator +{ + public static string FromEd25519(ReadOnlySpan publicKey) + { + if (publicKey.Length != 32) + { + throw new ArgumentException("Ed25519 public key must be 32 bytes.", nameof(publicKey)); + } + + var jwk = $"{{\"crv\":\"Ed25519\",\"kty\":\"OKP\",\"x\":\"{ToBase64Url(publicKey)}\"}}"; + return $"sha256:{ComputeSha256Base64Url(jwk)}"; + } + + public static string FromEcdsa(string algorithmId, in ECParameters parameters) + { + var curve = ResolveCurveName(algorithmId); + var x = parameters.Q.X ?? throw new ArgumentException("ECDSA public parameters missing X coordinate.", nameof(parameters)); + var y = parameters.Q.Y ?? 
throw new ArgumentException("ECDSA public parameters missing Y coordinate.", nameof(parameters)); + + var jwk = $"{{\"crv\":\"{curve}\",\"kty\":\"EC\",\"x\":\"{ToBase64Url(x)}\",\"y\":\"{ToBase64Url(y)}\"}}"; + return $"sha256:{ComputeSha256Base64Url(jwk)}"; + } + + private static string ResolveCurveName(string algorithmId) => algorithmId?.ToUpperInvariant() switch + { + "ES256" => "P-256", + "ES384" => "P-384", + "ES512" => "P-521", + _ => throw new ArgumentException($"Unsupported ECDSA algorithm '{algorithmId}'.", nameof(algorithmId)) + }; + + private static string ComputeSha256Base64Url(string value) + { + using var sha = SHA256.Create(); + var bytes = Encoding.UTF8.GetBytes(value); + var digest = sha.ComputeHash(bytes); + return ToBase64Url(digest); + } + + private static string ToBase64Url(ReadOnlySpan value) + { + var base64 = Convert.ToBase64String(value); + return base64 + .TrimEnd('=') + .Replace('+', '-') + .Replace('/', '_'); + } +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeSignature.cs b/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeSignature.cs new file mode 100644 index 00000000..dc5d66c8 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeSignature.cs @@ -0,0 +1,48 @@ +using System; + +namespace StellaOps.Attestor.Envelope; + +/// +/// Represents a DSSE envelope signature (detached from payload). 
+/// +public sealed class EnvelopeSignature +{ + private readonly byte[] signature; + + public EnvelopeSignature(string keyId, string algorithmId, ReadOnlySpan value) + { + if (string.IsNullOrWhiteSpace(keyId)) + { + throw new ArgumentException("Key identifier is required.", nameof(keyId)); + } + + if (string.IsNullOrWhiteSpace(algorithmId)) + { + throw new ArgumentException("Algorithm identifier is required.", nameof(algorithmId)); + } + + if (value.Length == 0) + { + throw new ArgumentException("Signature bytes must not be empty.", nameof(value)); + } + + KeyId = keyId; + AlgorithmId = algorithmId; + signature = value.ToArray(); + } + + /// + /// Gets the key identifier associated with the signature. + /// + public string KeyId { get; } + + /// + /// Gets the signing algorithm identifier. + /// + public string AlgorithmId { get; } + + /// + /// Gets the raw signature bytes. + /// + public ReadOnlyMemory Value => signature; +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeSignatureResult.cs b/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeSignatureResult.cs new file mode 100644 index 00000000..deaec583 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeSignatureResult.cs @@ -0,0 +1,56 @@ +using System; + +namespace StellaOps.Attestor.Envelope; + +/// +/// Error codes returned by envelope signing and verification helpers. +/// +public enum EnvelopeSignatureErrorCode +{ + UnsupportedAlgorithm, + InvalidKeyMaterial, + MissingPrivateKey, + MissingPublicKey, + AlgorithmMismatch, + KeyIdMismatch, + InvalidSignatureFormat, + SignatureInvalid, + SigningFailed, + VerificationFailed +} + +/// +/// Represents a deterministic error emitted by signature helpers. +/// +public sealed record EnvelopeSignatureError(EnvelopeSignatureErrorCode Code, string Message, Exception? Exception = null); + +/// +/// Generic result wrapper providing success state and structured errors. 
+/// +public sealed class EnvelopeResult +{ + private EnvelopeResult(bool isSuccess, T? value, EnvelopeSignatureError? error) + { + IsSuccess = isSuccess; + this.value = value; + this.error = error; + } + + public bool IsSuccess { get; } + + public T Value => IsSuccess + ? value ?? throw new InvalidOperationException("Successful result is missing value.") + : throw new InvalidOperationException("Cannot access Value when result indicates failure."); + + public EnvelopeSignatureError Error => !IsSuccess + ? error ?? throw new InvalidOperationException("Failed result is missing error information.") + : throw new InvalidOperationException("Cannot access Error when result indicates success."); + + private readonly T? value; + + private readonly EnvelopeSignatureError? error; + + public static EnvelopeResult Success(T value) => new(true, value, null); + + public static EnvelopeResult Failure(EnvelopeSignatureError error) => new(false, default, error); +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeSignatureService.cs b/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeSignatureService.cs new file mode 100644 index 00000000..5cc92a5f --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/EnvelopeSignatureService.cs @@ -0,0 +1,164 @@ +using System; +using System.Security.Cryptography; +using System.Threading; +using Org.BouncyCastle.Crypto.Parameters; +using Org.BouncyCastle.Crypto.Signers; + +namespace StellaOps.Attestor.Envelope; + +/// +/// Provides Ed25519 and ECDSA helpers for creating and verifying DSSE envelope signatures. 
+/// +public sealed class EnvelopeSignatureService +{ + private const int Ed25519SignatureLength = 64; + + public EnvelopeResult Sign(ReadOnlySpan payload, EnvelopeKey key, CancellationToken cancellationToken = default) + { + if (key is null) + { + throw new ArgumentNullException(nameof(key)); + } + + cancellationToken.ThrowIfCancellationRequested(); + + return key.Kind switch + { + EnvelopeKeyKind.Ed25519 => SignEd25519(payload, key), + EnvelopeKeyKind.Ecdsa => SignEcdsa(payload, key), + _ => EnvelopeResult.Failure(Error(EnvelopeSignatureErrorCode.UnsupportedAlgorithm, $"Unsupported key kind '{key.Kind}'.")) + }; + } + + public EnvelopeResult Verify(ReadOnlySpan payload, EnvelopeSignature signature, EnvelopeKey key, CancellationToken cancellationToken = default) + { + if (signature is null) + { + throw new ArgumentNullException(nameof(signature)); + } + + if (key is null) + { + throw new ArgumentNullException(nameof(key)); + } + + cancellationToken.ThrowIfCancellationRequested(); + + if (!key.HasPublicMaterial) + { + return EnvelopeResult.Failure(Error(EnvelopeSignatureErrorCode.MissingPublicKey, "Verification requires public key material.")); + } + + if (!string.Equals(signature.KeyId, key.KeyId, StringComparison.Ordinal)) + { + return EnvelopeResult.Failure(Error(EnvelopeSignatureErrorCode.KeyIdMismatch, "Signature key identifier does not match the supplied key.")); + } + + if (!string.Equals(signature.AlgorithmId, key.AlgorithmId, StringComparison.OrdinalIgnoreCase)) + { + return EnvelopeResult.Failure(Error(EnvelopeSignatureErrorCode.AlgorithmMismatch, "Signature algorithm does not match the supplied key.")); + } + + return key.Kind switch + { + EnvelopeKeyKind.Ed25519 => VerifyEd25519(payload, signature, key), + EnvelopeKeyKind.Ecdsa => VerifyEcdsa(payload, signature, key), + _ => EnvelopeResult.Failure(Error(EnvelopeSignatureErrorCode.UnsupportedAlgorithm, $"Unsupported key kind '{key.Kind}'.")) + }; + } + + private static EnvelopeResult 
SignEd25519(ReadOnlySpan payload, EnvelopeKey key) + { + if (!key.HasPrivateMaterial) + { + return EnvelopeResult.Failure(Error(EnvelopeSignatureErrorCode.MissingPrivateKey, "Signing requires Ed25519 private material.")); + } + + try + { + var payloadBytes = payload.ToArray(); + var privateKey = new Ed25519PrivateKeyParameters(key.GetEd25519PrivateKey().ToArray(), 0); + var signer = new Ed25519Signer(); + signer.Init(true, privateKey); + signer.BlockUpdate(payloadBytes, 0, payloadBytes.Length); + var signatureBytes = signer.GenerateSignature(); + return EnvelopeResult.Success(new EnvelopeSignature(key.KeyId, key.AlgorithmId, signatureBytes)); + } + catch (Exception ex) when (ex is ArgumentException or CryptographicException or InvalidOperationException) + { + return EnvelopeResult.Failure(Error(EnvelopeSignatureErrorCode.SigningFailed, "Failed to produce Ed25519 signature.", ex)); + } + } + + private static EnvelopeResult SignEcdsa(ReadOnlySpan payload, EnvelopeKey key) + { + if (!key.HasPrivateMaterial) + { + return EnvelopeResult.Failure(Error(EnvelopeSignatureErrorCode.MissingPrivateKey, "Signing requires ECDSA private material.")); + } + + try + { + using var ecdsa = ECDsa.Create(key.GetEcdsaPrivateParameters()); + var signatureBytes = ecdsa.SignData(payload, ResolveHashAlgorithm(key.AlgorithmId)); + return EnvelopeResult.Success(new EnvelopeSignature(key.KeyId, key.AlgorithmId, signatureBytes)); + } + catch (Exception ex) when (ex is ArgumentException or CryptographicException or InvalidOperationException) + { + return EnvelopeResult.Failure(Error(EnvelopeSignatureErrorCode.SigningFailed, "Failed to produce ECDSA signature.", ex)); + } + } + + private static EnvelopeResult VerifyEd25519(ReadOnlySpan payload, EnvelopeSignature signature, EnvelopeKey key) + { + var signatureBytes = signature.Value.Span; + if (signatureBytes.Length != Ed25519SignatureLength) + { + return EnvelopeResult.Failure(Error(EnvelopeSignatureErrorCode.InvalidSignatureFormat, $"Ed25519 
signatures must be {Ed25519SignatureLength} bytes.")); + } + + try + { + var payloadBytes = payload.ToArray(); + var publicKey = new Ed25519PublicKeyParameters(key.GetEd25519PublicKey().ToArray(), 0); + var verifier = new Ed25519Signer(); + verifier.Init(false, publicKey); + verifier.BlockUpdate(payloadBytes, 0, payloadBytes.Length); + var valid = verifier.VerifySignature(signatureBytes.ToArray()); + return valid + ? EnvelopeResult.Success(true) + : EnvelopeResult.Failure(Error(EnvelopeSignatureErrorCode.SignatureInvalid, "Ed25519 signature verification failed.")); + } + catch (Exception ex) when (ex is ArgumentException or CryptographicException) + { + return EnvelopeResult.Failure(Error(EnvelopeSignatureErrorCode.VerificationFailed, "Failed to verify Ed25519 signature.", ex)); + } + } + + private static EnvelopeResult VerifyEcdsa(ReadOnlySpan payload, EnvelopeSignature signature, EnvelopeKey key) + { + try + { + using var ecdsa = ECDsa.Create(key.GetEcdsaPublicParameters()); + var valid = ecdsa.VerifyData(payload, signature.Value.Span, ResolveHashAlgorithm(key.AlgorithmId)); + return valid + ? EnvelopeResult.Success(true) + : EnvelopeResult.Failure(Error(EnvelopeSignatureErrorCode.SignatureInvalid, "ECDSA signature verification failed.")); + } + catch (Exception ex) when (ex is ArgumentException or CryptographicException) + { + return EnvelopeResult.Failure(Error(EnvelopeSignatureErrorCode.VerificationFailed, "Failed to verify ECDSA signature.", ex)); + } + } + + private static HashAlgorithmName ResolveHashAlgorithm(string algorithmId) => algorithmId?.ToUpperInvariant() switch + { + "ES256" => HashAlgorithmName.SHA256, + "ES384" => HashAlgorithmName.SHA384, + "ES512" => HashAlgorithmName.SHA512, + _ => throw new ArgumentException($"Unsupported ECDSA algorithm '{algorithmId}'.", nameof(algorithmId)) + }; + + private static EnvelopeSignatureError Error(EnvelopeSignatureErrorCode code, string message, Exception? 
exception = null) + => new(code, message, exception); +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.Tests/DsseEnvelopeSerializerTests.cs b/src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.Tests/DsseEnvelopeSerializerTests.cs new file mode 100644 index 00000000..7dbe988c --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.Tests/DsseEnvelopeSerializerTests.cs @@ -0,0 +1,57 @@ +using System; +using System.Linq; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using FluentAssertions; +using Xunit; +using EnvelopeModel = StellaOps.Attestor.Envelope; +namespace StellaOps.Attestor.Envelope.Tests; + +public sealed class DsseEnvelopeSerializerTests +{ + private static readonly byte[] SamplePayload = Encoding.UTF8.GetBytes("deterministic-dsse-payload"); + + [Fact] + public void Serialize_ProducesDeterministicCompactJson_ForSignaturePermutations() + { + var signatures = new[] + { + EnvelopeModel.DsseSignature.FromBytes(Convert.FromHexString("0A1B2C3D4E5F60718293A4B5C6D7E8F9"), "tenant-z"), + EnvelopeModel.DsseSignature.FromBytes(Convert.FromHexString("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"), null), + EnvelopeModel.DsseSignature.FromBytes(Convert.FromHexString("00112233445566778899AABBCCDDEEFF"), "tenant-a"), + EnvelopeModel.DsseSignature.FromBytes(Convert.FromHexString("1234567890ABCDEF1234567890ABCDEF"), "tenant-b") + }; + + var baselineEnvelope = new EnvelopeModel.DsseEnvelope("application/vnd.stellaops.test+json", SamplePayload, signatures); + var baseline = EnvelopeModel.DsseEnvelopeSerializer.Serialize(baselineEnvelope); + baseline.CompactJson.Should().NotBeNull(); + var baselineJson = Encoding.UTF8.GetString(baseline.CompactJson!); + + var rng = new Random(12345); + for (var iteration = 0; iteration < 32; iteration++) + { + var shuffled = signatures.OrderBy(_ => rng.Next()).ToArray(); + var envelope = new 
EnvelopeModel.DsseEnvelope("application/vnd.stellaops.test+json", SamplePayload, shuffled); + var result = EnvelopeModel.DsseEnvelopeSerializer.Serialize(envelope); + + result.CompactJson.Should().NotBeNull(); + var json = Encoding.UTF8.GetString(result.CompactJson!); + json.Should().Be(baselineJson, "canonical JSON must be deterministic regardless of signature insertion order"); + + result.PayloadSha256.Should().Be( + Convert.ToHexString(SHA256.HashData(SamplePayload)).ToLowerInvariant(), + "payload hash must reflect the raw payload bytes"); + + using var document = JsonDocument.Parse(result.CompactJson!); + var keyIds = document.RootElement + .GetProperty("signatures") + .EnumerateArray() + .Select(element => element.TryGetProperty("keyid", out var key) ? key.GetString() : null) + .ToArray(); + + keyIds.Should().Equal(new string?[] { null, "tenant-a", "tenant-b", "tenant-z" }, + "signatures must be ordered by key identifier (null first) for canonical output"); + } + } +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.Tests/EnvelopeSignatureServiceTests.cs b/src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.Tests/EnvelopeSignatureServiceTests.cs new file mode 100644 index 00000000..3c81357a --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.Tests/EnvelopeSignatureServiceTests.cs @@ -0,0 +1,149 @@ +using System; +using System.Linq; +using System.Security.Cryptography; +using System.Text; +using FluentAssertions; +using StellaOps.Attestor.Envelope; +using StellaOps.Cryptography; +using Xunit; + +namespace StellaOps.Attestor.Envelope.Tests; + +public sealed class EnvelopeSignatureServiceTests +{ + private static readonly byte[] SamplePayload = Encoding.UTF8.GetBytes("stella-ops-deterministic"); + + private static readonly byte[] Ed25519Seed = + Convert.FromHexString("9D61B19DEFFD5A60BA844AF492EC2CC4" + + "4449C5697B326919703BAC031CAE7F60D75A980182B10AB7D54BFED3C964073A" + + 
"0EE172F3DAA62325AF021A68F707511A"); + + private static readonly byte[] Ed25519Public = + Convert.FromHexString("D75A980182B10AB7D54BFED3C964073A0EE172F3DAA62325AF021A68F707511A"); + + private readonly EnvelopeSignatureService service = new(); + + [Fact] + public void SignAndVerify_Ed25519_Succeeds() + { + var signingKey = EnvelopeKey.CreateEd25519Signer(Ed25519Seed, Ed25519Public); + var verifyKey = EnvelopeKey.CreateEd25519Verifier(Ed25519Public); + + var signResult = service.Sign(SamplePayload, signingKey); + + signResult.IsSuccess.Should().BeTrue(); + signResult.Value.AlgorithmId.Should().Be(SignatureAlgorithms.Ed25519); + signResult.Value.KeyId.Should().Be(signingKey.KeyId); + + var verifyResult = service.Verify(SamplePayload, signResult.Value, verifyKey); + + verifyResult.IsSuccess.Should().BeTrue(); + verifyResult.Value.Should().BeTrue(); + + var expectedKeyId = ComputeExpectedEd25519KeyId(Ed25519Public); + signingKey.KeyId.Should().Be(expectedKeyId); + } + + [Fact] + public void Verify_Ed25519_InvalidSignature_ReturnsError() + { + var signingKey = EnvelopeKey.CreateEd25519Signer(Ed25519Seed, Ed25519Public); + var signResult = service.Sign(SamplePayload, signingKey); + signResult.IsSuccess.Should().BeTrue(); + + var tamperedBytes = signResult.Value.Value.ToArray(); + tamperedBytes[0] ^= 0xFF; + var tamperedSignature = new EnvelopeSignature(signResult.Value.KeyId, signResult.Value.AlgorithmId, tamperedBytes); + var verifyKey = EnvelopeKey.CreateEd25519Verifier(Ed25519Public); + + var verifyResult = service.Verify(SamplePayload, tamperedSignature, verifyKey); + + verifyResult.IsSuccess.Should().BeFalse(); + verifyResult.Error.Code.Should().Be(EnvelopeSignatureErrorCode.SignatureInvalid); + } + + [Fact] + public void SignAndVerify_EcdsaEs256_Succeeds() + { + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var privateParameters = ecdsa.ExportParameters(includePrivateParameters: true); + var publicParameters = 
ecdsa.ExportParameters(includePrivateParameters: false); + + var signingKey = EnvelopeKey.CreateEcdsaSigner(SignatureAlgorithms.Es256, in privateParameters); + var verifyKey = EnvelopeKey.CreateEcdsaVerifier(SignatureAlgorithms.Es256, in publicParameters); + + var signResult = service.Sign(SamplePayload, signingKey); + signResult.IsSuccess.Should().BeTrue(); + + var verifyResult = service.Verify(SamplePayload, signResult.Value, verifyKey); + verifyResult.IsSuccess.Should().BeTrue(); + verifyResult.Value.Should().BeTrue(); + } + + [Fact] + public void Sign_WithVerificationOnlyKey_ReturnsMissingPrivateKey() + { + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var publicParameters = ecdsa.ExportParameters(includePrivateParameters: false); + var verifyOnlyKey = EnvelopeKey.CreateEcdsaVerifier(SignatureAlgorithms.Es256, in publicParameters); + + var signResult = service.Sign(SamplePayload, verifyOnlyKey); + + signResult.IsSuccess.Should().BeFalse(); + signResult.Error.Code.Should().Be(EnvelopeSignatureErrorCode.MissingPrivateKey); + } + + [Fact] + public void Verify_WithMismatchedKeyId_ReturnsError() + { + var signingKey = EnvelopeKey.CreateEd25519Signer(Ed25519Seed, Ed25519Public); + var signResult = service.Sign(SamplePayload, signingKey); + signResult.IsSuccess.Should().BeTrue(); + + var alternateKey = EnvelopeKey.CreateEd25519Verifier(Ed25519Public, "sha256:alternate"); + var verifyResult = service.Verify(SamplePayload, signResult.Value, alternateKey); + + verifyResult.IsSuccess.Should().BeFalse(); + verifyResult.Error.Code.Should().Be(EnvelopeSignatureErrorCode.KeyIdMismatch); + } + + [Fact] + public void Verify_WithInvalidSignatureLength_ReturnsFormatError() + { + var verifyKey = EnvelopeKey.CreateEd25519Verifier(Ed25519Public); + var invalidSignature = new EnvelopeSignature(verifyKey.KeyId, verifyKey.AlgorithmId, new byte[16]); + + var verifyResult = service.Verify(SamplePayload, invalidSignature, verifyKey); + + 
verifyResult.IsSuccess.Should().BeFalse(); + verifyResult.Error.Code.Should().Be(EnvelopeSignatureErrorCode.InvalidSignatureFormat); + } + + [Fact] + public void Verify_WithAlgorithmMismatch_ReturnsError() + { + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var privateParameters = ecdsa.ExportParameters(includePrivateParameters: true); + var publicParameters = ecdsa.ExportParameters(includePrivateParameters: false); + var signingKey = EnvelopeKey.CreateEcdsaSigner(SignatureAlgorithms.Es256, in privateParameters); + var signResult = service.Sign(SamplePayload, signingKey); + signResult.IsSuccess.Should().BeTrue(); + + var mismatchKey = EnvelopeKey.CreateEcdsaVerifier(SignatureAlgorithms.Es384, in publicParameters, signResult.Value.KeyId); + var verifyResult = service.Verify(SamplePayload, signResult.Value, mismatchKey); + + verifyResult.IsSuccess.Should().BeFalse(); + verifyResult.Error.Code.Should().Be(EnvelopeSignatureErrorCode.AlgorithmMismatch); + } + + private static string ComputeExpectedEd25519KeyId(byte[] publicKey) + { + var jwk = $"{{\"crv\":\"Ed25519\",\"kty\":\"OKP\",\"x\":\"{ToBase64Url(publicKey)}\"}}"; + using var sha = SHA256.Create(); + var digest = sha.ComputeHash(Encoding.UTF8.GetBytes(jwk)); + return $"sha256:{ToBase64Url(digest)}"; + } + + private static string ToBase64Url(byte[] bytes) + => Convert.ToBase64String(bytes).TrimEnd('=').Replace('+', '-').Replace('/', '_'); +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.Tests/StellaOps.Attestor.Envelope.Tests.csproj b/src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.Tests/StellaOps.Attestor.Envelope.Tests.csproj new file mode 100644 index 00000000..35dd858e --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.Tests/StellaOps.Attestor.Envelope.Tests.csproj @@ -0,0 +1,22 @@ + + + net10.0 + preview + false + enable + enable + true + NU1504 + false + + + + + + + + + + + + diff --git 
a/src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.csproj b/src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.csproj new file mode 100644 index 00000000..f6f3c052 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.csproj @@ -0,0 +1,24 @@ + + + + net10.0 + preview + enable + enable + true + + + + + + + + + + + + + + + + diff --git a/src/Attestor/StellaOps.Attestor.Envelope/TASKS.md b/src/Attestor/StellaOps.Attestor.Envelope/TASKS.md index 2ac75e5c..6c28843f 100644 --- a/src/Attestor/StellaOps.Attestor.Envelope/TASKS.md +++ b/src/Attestor/StellaOps.Attestor.Envelope/TASKS.md @@ -1,13 +1,13 @@ -# Attestation Envelope Task Board — Epic 19: Attestor Console - -## Sprint 72 – Foundations -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ATTEST-ENVELOPE-72-001 | TODO | Envelope Guild | — | Implement DSSE canonicalization, JSON normalization, multi-signature structures, and hashing helpers. | Canonicalization deterministic (property tests); hash matches DSSE spec; unit tests green. | -| ATTEST-ENVELOPE-72-002 | TODO | Envelope Guild | ATTEST-ENVELOPE-72-001 | Support compact and expanded JSON output, payload compression, and detached payload references. | API returns both variants; payload compression toggles tested; docs updated. | - -## Sprint 73 – Crypto Integration -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ATTEST-ENVELOPE-73-001 | TODO | Envelope Guild, KMS Guild | ATTEST-ENVELOPE-72-001 | Implement Ed25519 & ECDSA signature create/verify helpers, key identification (`keyid`) scheme, and error mapping. | Sign/verify tests pass with fixtures; invalid signatures produce deterministic errors. 
| -| ATTEST-ENVELOPE-73-002 | TODO | Envelope Guild | ATTEST-ENVELOPE-73-001 | Add fuzz tests for envelope parsing, signature verification, and canonical JSON round-trips. | Fuzz suite integrated; coverage metrics recorded; no regressions. | +# Attestation Envelope Task Board — Epic 19: Attestor Console + +## Sprint 72 – Foundations +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ATTEST-ENVELOPE-72-001 | DONE (2025-11-01) | Envelope Guild | — | Implement DSSE canonicalization, JSON normalization, multi-signature structures, and hashing helpers. | Canonicalization deterministic (property tests); hash matches DSSE spec; unit tests green. | +| ATTEST-ENVELOPE-72-002 | DONE | Envelope Guild | ATTEST-ENVELOPE-72-001 | Support compact and expanded JSON output, payload compression, and detached payload references. | API returns both variants; payload compression toggles tested; docs updated. | + +## Sprint 73 – Crypto Integration +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ATTEST-ENVELOPE-73-001 | DONE | Envelope Guild, KMS Guild | ATTEST-ENVELOPE-72-001 | Implement Ed25519 & ECDSA signature create/verify helpers, key identification (`keyid`) scheme, and error mapping. | Sign/verify tests pass with fixtures; invalid signatures produce deterministic errors. | +| ATTEST-ENVELOPE-73-002 | DONE | Envelope Guild | ATTEST-ENVELOPE-73-001 | Add fuzz tests for envelope parsing, signature verification, and canonical JSON round-trips. | Fuzz suite integrated; coverage metrics recorded; no regressions. 
| diff --git a/src/Attestor/StellaOps.Attestor.Envelope/__Tests/StellaOps.Attestor.Envelope.Tests/DsseEnvelopeSerializerTests.cs b/src/Attestor/StellaOps.Attestor.Envelope/__Tests/StellaOps.Attestor.Envelope.Tests/DsseEnvelopeSerializerTests.cs new file mode 100644 index 00000000..9cc99994 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/__Tests/StellaOps.Attestor.Envelope.Tests/DsseEnvelopeSerializerTests.cs @@ -0,0 +1,139 @@ +using System; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Text; +using System.Text.Json; +using StellaOps.Attestor.Envelope; +using Xunit; + +namespace StellaOps.Attestor.Envelope.Tests; + +public sealed class DsseEnvelopeSerializerTests +{ + [Fact] + public void Serialize_WithDefaultOptions_ProducesCompactAndExpandedJson() + { + var payload = Encoding.UTF8.GetBytes("{\"foo\":\"bar\"}"); + var envelope = new DsseEnvelope( + "application/vnd.in-toto+json", + payload, + new[] { new DsseSignature("AQID") }, + "application/json"); + + var result = DsseEnvelopeSerializer.Serialize(envelope); + + Assert.NotNull(result.CompactJson); + Assert.NotNull(result.ExpandedJson); + + var compact = Encoding.UTF8.GetString(result.CompactJson!); + Assert.Equal("{\"payloadType\":\"application/vnd.in-toto+json\",\"payload\":\"eyJmb28iOiJiYXIifQ==\",\"signatures\":[{\"sig\":\"AQID\"}]}", compact); + + using var expanded = JsonDocument.Parse(result.ExpandedJson!); + var root = expanded.RootElement; + + Assert.Equal("application/vnd.in-toto+json", root.GetProperty("payloadType").GetString()); + Assert.Equal("eyJmb28iOiJiYXIifQ==", root.GetProperty("payload").GetString()); + Assert.Equal("AQID", root.GetProperty("signatures")[0].GetProperty("sig").GetString()); + + var info = root.GetProperty("payloadInfo"); + Assert.Equal(payload.Length, info.GetProperty("length").GetInt32()); + Assert.Equal(result.PayloadSha256, info.GetProperty("sha256").GetString()); + Assert.False(info.TryGetProperty("compression", out _)); + + 
var preview = root.GetProperty("payloadPreview"); + Assert.Equal("application/json", preview.GetProperty("mediaType").GetString()); + Assert.Equal("bar", preview.GetProperty("json").GetProperty("foo").GetString()); + } + + [Fact] + public void Serialize_WithCompressionEnabled_EmbedsCompressedPayloadMetadata() + { + var payload = Encoding.UTF8.GetBytes("{\"foo\":\"bar\",\"count\":1}"); + var envelope = new DsseEnvelope( + "application/vnd.in-toto+json", + payload, + new[] { new DsseSignature("AQID") }, + "application/json"); + + var options = new DsseEnvelopeSerializationOptions + { + CompressionAlgorithm = DsseCompressionAlgorithm.Gzip + }; + + var result = DsseEnvelopeSerializer.Serialize(envelope, options); + + Assert.NotNull(result.CompactJson); + var compactDoc = JsonDocument.Parse(result.CompactJson!); + var payloadBase64 = compactDoc.RootElement.GetProperty("payload").GetString(); + Assert.False(string.IsNullOrEmpty(payloadBase64)); + + var compressedBytes = Convert.FromBase64String(payloadBase64!); + using var compressedStream = new MemoryStream(compressedBytes); + using var gzip = new GZipStream(compressedStream, CompressionMode.Decompress); + using var decompressed = new MemoryStream(); + gzip.CopyTo(decompressed); + Assert.True(payload.SequenceEqual(decompressed.ToArray())); + + using var expanded = JsonDocument.Parse(result.ExpandedJson!); + var info = expanded.RootElement.GetProperty("payloadInfo"); + Assert.Equal(payload.Length, info.GetProperty("length").GetInt32()); + var compression = info.GetProperty("compression"); + Assert.Equal("gzip", compression.GetProperty("algorithm").GetString()); + Assert.Equal(compressedBytes.Length, compression.GetProperty("compressedLength").GetInt32()); + + Assert.Equal(DsseCompressionAlgorithm.Gzip, result.Compression); + Assert.Equal(payload.Length, result.OriginalPayloadLength); + Assert.Equal(compressedBytes.Length, result.EmbeddedPayloadLength); + } + + [Fact] + public void 
Serialize_WithDetachedReference_WritesMetadata() + { + var payload = Encoding.UTF8.GetBytes("detached payload preview"); + var reference = new DsseDetachedPayloadReference( + "https://evidence.example.com/sbom.json", + "abc123", + payload.Length, + "application/json"); + + var envelope = new DsseEnvelope( + "application/vnd.in-toto+json", + payload, + new[] { new DsseSignature("AQID") }, + "text/plain", + reference); + + var result = DsseEnvelopeSerializer.Serialize(envelope); + + Assert.NotNull(result.ExpandedJson); + + using var expanded = JsonDocument.Parse(result.ExpandedJson!); + var detached = expanded.RootElement.GetProperty("detachedPayload"); + + Assert.Equal(reference.Uri, detached.GetProperty("uri").GetString()); + Assert.Equal(reference.Sha256, detached.GetProperty("sha256").GetString()); + Assert.Equal(reference.Length, detached.GetProperty("length").GetInt64()); + Assert.Equal(reference.MediaType, detached.GetProperty("mediaType").GetString()); + } + + [Fact] + public void Serialize_CompactOnly_SkipsExpandedPayload() + { + var payload = Encoding.UTF8.GetBytes("payload"); + var envelope = new DsseEnvelope( + "application/vnd.in-toto+json", + payload, + new[] { new DsseSignature("AQID") }); + + var options = new DsseEnvelopeSerializationOptions + { + EmitExpandedJson = false + }; + + var result = DsseEnvelopeSerializer.Serialize(envelope, options); + + Assert.NotNull(result.CompactJson); + Assert.Null(result.ExpandedJson); + } +} diff --git a/src/Attestor/StellaOps.Attestor.Envelope/__Tests/StellaOps.Attestor.Envelope.Tests/StellaOps.Attestor.Envelope.Tests.csproj b/src/Attestor/StellaOps.Attestor.Envelope/__Tests/StellaOps.Attestor.Envelope.Tests/StellaOps.Attestor.Envelope.Tests.csproj new file mode 100644 index 00000000..81ef0dab --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Envelope/__Tests/StellaOps.Attestor.Envelope.Tests/StellaOps.Attestor.Envelope.Tests.csproj @@ -0,0 +1,30 @@ + + + + net10.0 + preview + enable + enable + true + NU1504 + 
false + + + + + + + + + + + + + + + + + + + + diff --git a/src/Attestor/StellaOps.Attestor.Types/AGENTS.md b/src/Attestor/StellaOps.Attestor.Types/AGENTS.md index fcd6cff5..dcba43a0 100644 --- a/src/Attestor/StellaOps.Attestor.Types/AGENTS.md +++ b/src/Attestor/StellaOps.Attestor.Types/AGENTS.md @@ -23,3 +23,4 @@ Define strongly typed, versioned schemas for all attestation payloads and provid - 3. Keep changes deterministic (stable ordering, timestamps, hashes) and align with offline/air-gap expectations. - 4. Coordinate doc updates, tests, and cross-guild communication whenever contracts or workflows change. - 5. Revert to `TODO` if you pause the task without shipping changes; leave notes in commit/PR descriptions for context. +- 6. When schemas or fixtures change, run `npm run docs:attestor:generate` followed by `npm run docs:attestor:validate` to refresh SDKs and guard parity. diff --git a/src/Attestor/StellaOps.Attestor.Types/TASKS.md b/src/Attestor/StellaOps.Attestor.Types/TASKS.md index 76e69812..9f1fa108 100644 --- a/src/Attestor/StellaOps.Attestor.Types/TASKS.md +++ b/src/Attestor/StellaOps.Attestor.Types/TASKS.md @@ -3,11 +3,11 @@ ## Sprint 72 – Schema Definition | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| ATTEST-TYPES-72-001 | TODO | Attestation Payloads Guild | — | Draft JSON Schemas for BuildProvenance v1, SBOMAttestation v1, VEXAttestation v1, ScanResults v1, PolicyEvaluation v1, RiskProfileEvidence v1, CustomEvidence v1. | Schemas validated with test fixtures; docs stubbed; versioned under `schemas/`. | -| ATTEST-TYPES-72-002 | TODO | Attestation Payloads Guild | ATTEST-TYPES-72-001 | Generate Go/TS models from schemas with validation helpers and canonical JSON serialization. | Code generation integrated; lints pass; unit tests cover round-trips. 
| +| ATTEST-TYPES-72-001 | DONE | Attestation Payloads Guild | — | Draft JSON Schemas for BuildProvenance v1, SBOMAttestation v1, VEXAttestation v1, ScanResults v1, PolicyEvaluation v1, RiskProfileEvidence v1, CustomEvidence v1. | Schemas validated with test fixtures; docs stubbed; versioned under `schemas/`. | +| ATTEST-TYPES-72-002 | DONE | Attestation Payloads Guild | ATTEST-TYPES-72-001 | Generate Go/TS models from schemas with validation helpers and canonical JSON serialization. | Code generation integrated; lints pass; unit tests cover round-trips. | ## Sprint 73 – Fixtures & Docs | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| ATTEST-TYPES-73-001 | TODO | Attestation Payloads Guild | ATTEST-TYPES-72-002 | Create golden payload samples for each type; integrate into tests and documentation. | Golden fixtures stored; tests compare outputs; docs embed examples. | -| ATTEST-TYPES-73-002 | TODO | Attestation Payloads Guild, Docs Guild | ATTEST-TYPES-73-001 | Publish schema reference docs (`/docs/modules/attestor/payloads.md`) with annotated JSON examples. | Doc merged with banner; examples validated by tests. | +| ATTEST-TYPES-73-001 | DONE | Attestation Payloads Guild | ATTEST-TYPES-72-002 | Create golden payload samples for each type; integrate into tests and documentation. | Golden fixtures stored; tests compare outputs; docs embed examples. | +| ATTEST-TYPES-73-002 | DONE | Attestation Payloads Guild, Docs Guild | ATTEST-TYPES-73-001 | Publish schema reference docs (`/docs/modules/attestor/payloads.md`) with annotated JSON examples. | Doc merged with banner; examples validated by tests. 
| diff --git a/src/Attestor/StellaOps.Attestor.Types/Tools/StellaOps.Attestor.Types.Generator/Program.cs b/src/Attestor/StellaOps.Attestor.Types/Tools/StellaOps.Attestor.Types.Generator/Program.cs new file mode 100644 index 00000000..4069cdd4 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/Tools/StellaOps.Attestor.Types.Generator/Program.cs @@ -0,0 +1,1189 @@ +using System.Linq; +using System.Text; +using System.Text.Json; +using System.Text.Json.Nodes; + +var generator = new Generator(); +generator.Run(); + +internal sealed class Generator +{ + private readonly TypeRegistry _registry; + private readonly string _repoRoot; + private readonly string _moduleRoot; + private readonly string _schemaDir; + private readonly string _tsDir; + private readonly string _goDir; + + public Generator() + { + _registry = TypeRegistry.Build(); + _repoRoot = ResolveRepoRoot(); + _moduleRoot = Path.Combine(_repoRoot, "src", "Attestor", "StellaOps.Attestor.Types"); + _schemaDir = Path.Combine(_moduleRoot, "schemas"); + _tsDir = Path.Combine(_moduleRoot, "generated", "ts"); + _goDir = Path.Combine(_moduleRoot, "generated", "go"); + } + + public void Run() + { + Directory.CreateDirectory(_schemaDir); + Directory.CreateDirectory(_tsDir); + Directory.CreateDirectory(_goDir); + + foreach (var root in _registry.RootObjects) + { + var schema = SchemaBuilder.Build(root); + var schemaPath = Path.Combine(_schemaDir, $"{root.SchemaFileStem}.schema.json"); + WriteUtf8File(schemaPath, schema); + } + + var tsCode = TypeScriptEmitter.Emit(_registry); + WriteUtf8File(Path.Combine(_tsDir, "index.ts"), tsCode); + + var goCode = GoEmitter.Emit(_registry); + WriteUtf8File(Path.Combine(_goDir, "types.go"), goCode); + } + + private static string ResolveRepoRoot() + { + var current = new DirectoryInfo(AppContext.BaseDirectory); + for (var i = 0; i < 8; i++) + { + current = current?.Parent ?? 
throw new InvalidOperationException("Unable to locate repository root."); + } + + return current!.FullName; + } + + private static void WriteUtf8File(string path, string content) + { + var normalized = content.Replace("\r\n", "\n", StringComparison.Ordinal); + File.WriteAllText(path, normalized, new UTF8Encoding(encoderShouldEmitUTF8Identifier: false)); + } +} + +internal sealed class TypeRegistry +{ + public IReadOnlyList<ObjectSpec> RootObjects { get; } + public IReadOnlyDictionary<string, ObjectSpec> Objects { get; } + public IReadOnlyDictionary<string, EnumSpec> Enums { get; } + + private TypeRegistry( + List<ObjectSpec> rootObjects, + Dictionary<string, ObjectSpec> objects, + Dictionary<string, EnumSpec> enums) + { + RootObjects = rootObjects; + Objects = objects; + Enums = enums; + } + + public static TypeRegistry Build() + { + var objects = new Dictionary<string, ObjectSpec>(StringComparer.Ordinal); + var enums = new Dictionary<string, EnumSpec>(StringComparer.Ordinal); + var roots = new List<ObjectSpec>(); + + EnumSpec RegisterEnum(string name, string summary, params string[] values) + { + var spec = new EnumSpec(name, summary, values.ToList()); + enums.Add(name, spec); + return spec; + } + + ObjectSpec RegisterObject( + string name, + string summary, + bool isRoot = false, + string? schemaFileStem = null, + string?
qualifiedVersion = null) + { + var spec = new ObjectSpec(name, summary) + { + IsRoot = isRoot, + SchemaFileStem = schemaFileStem, + QualifiedVersion = qualifiedVersion + }; + + objects.Add(name, spec); + + if (isRoot) + { + if (schemaFileStem is null) + { + throw new ArgumentException("Root objects must define a schema file stem.", nameof(schemaFileStem)); + } + + if (qualifiedVersion is null) + { + throw new ArgumentException("Root objects must define a qualified version.", nameof(qualifiedVersion)); + } + + roots.Add(spec); + } + + return spec; + } + + // Enumerations + var sbomFormat = RegisterEnum("SbomFormat", "Supported SBOM formats.", "CycloneDX-1.6", "SBOM-3.0.0"); + var vexStatus = RegisterEnum("VexStatus", "VEX statement status values.", "not_affected", "affected", "under_investigation", "fixed"); + var severity = RegisterEnum("Severity", "Finding severity scale.", "critical", "high", "medium", "low", "info"); + var findingStatus = RegisterEnum("FindingStatus", "Finding lifecycle status.", "detected", "confirmed", "fixed", "not_affected"); + var policyOutcome = RegisterEnum("PolicyOutcome", "Policy evaluation outcome values.", "pass", "fail", "waived"); + var policyEffect = RegisterEnum("PolicyEffect", "Policy rule effect values.", "allow", "deny", "warn"); + var riskLevel = RegisterEnum("RiskLevel", "Risk level indicators.", "critical", "high", "medium", "low", "informational"); + + // Shared objects + var digest = RegisterObject("DigestReference", "Normalized digest entry containing algorithm and value."); + digest.Properties.Add(new PropertySpec("algorithm", PrimitiveShape.String(), true, "Digest algorithm identifier (e.g., sha256).")); + digest.Properties.Add(new PropertySpec("value", PrimitiveShape.String(pattern: "^[A-Fa-f0-9]{64}$"), true, "Hex-encoded digest value.")); + + var builder = RegisterObject("BuilderIdentity", "Identifies the builder that produced the artifact."); + builder.Properties.Add(new PropertySpec("id", PrimitiveShape.String(), 
true, "Unique builder identity (URI or name).")); + builder.Properties.Add(new PropertySpec("version", PrimitiveShape.String(), false, "Builder version identifier.")); + builder.Properties.Add(new PropertySpec("platform", PrimitiveShape.String(), false, "Execution platform for the build.")); + + var material = RegisterObject("MaterialReference", "Inputs used during build or analysis."); + material.Properties.Add(new PropertySpec("uri", PrimitiveShape.String(), true, "Material location or identifier.")); + material.Properties.Add(new PropertySpec("digests", new ArrayShape(new ObjectShape(digest), 1), true, "Digests associated with the material.")); + material.Properties.Add(new PropertySpec("note", PrimitiveShape.String(), false, "Optional annotation about the material.")); + + var buildMetadata = RegisterObject("BuildMetadata", "Metadata describing build timing and reproducibility."); + buildMetadata.Properties.Add(new PropertySpec("buildStartedOn", PrimitiveShape.String(format: "date-time"), true, "UTC timestamp for build start.")); + buildMetadata.Properties.Add(new PropertySpec("buildFinishedOn", PrimitiveShape.String(format: "date-time"), true, "UTC timestamp for build completion.")); + buildMetadata.Properties.Add(new PropertySpec("reproducible", PrimitiveShape.Boolean(), false, "Indicates whether the build is reproducible.")); + buildMetadata.Properties.Add(new PropertySpec("buildInvocationId", PrimitiveShape.String(), false, "Unique identifier for the build invocation.")); + + var environmentMetadata = RegisterObject("EnvironmentMetadata", "Optional environment metadata for build context."); + environmentMetadata.Properties.Add(new PropertySpec("platform", PrimitiveShape.String(), false, "Execution platform or runtime.")); + environmentMetadata.Properties.Add(new PropertySpec("imageDigest", new ObjectShape(digest), false, "Digest for the environment image.")); + + var sbomPackage = RegisterObject("SbomPackage", "SBOM package entry."); + 
sbomPackage.Properties.Add(new PropertySpec("purl", PrimitiveShape.String(), true, "Package URL reference.")); + sbomPackage.Properties.Add(new PropertySpec("version", PrimitiveShape.String(), false, "Resolved package version.")); + sbomPackage.Properties.Add(new PropertySpec("licenses", new ArrayShape(PrimitiveShape.String(), 1), false, "Associated license identifiers.")); + + var vexStatement = RegisterObject("VexStatement", "Single VEX statement covering a vulnerability and status."); + vexStatement.Properties.Add(new PropertySpec("vulnerabilityId", PrimitiveShape.String(), true, "Vulnerability identifier (e.g., CVE).")); + vexStatement.Properties.Add(new PropertySpec("status", new EnumShape(vexStatus), true, "VEX status value.")); + vexStatement.Properties.Add(new PropertySpec("timestamp", PrimitiveShape.String(format: "date-time"), true, "UTC timestamp for statement issuance.")); + vexStatement.Properties.Add(new PropertySpec("justification", PrimitiveShape.String(), false, "Justification for the chosen status.")); + vexStatement.Properties.Add(new PropertySpec("impactStatement", PrimitiveShape.String(), false, "Impact description for affected systems.")); + vexStatement.Properties.Add(new PropertySpec("actionStatement", PrimitiveShape.String(), false, "Recommended remediation or action.")); + vexStatement.Properties.Add(new PropertySpec("references", new ArrayShape(PrimitiveShape.String(), 1), false, "Supporting reference URLs.")); + + var scanFinding = RegisterObject("ScanFinding", "Individual finding from a scan."); + scanFinding.Properties.Add(new PropertySpec("id", PrimitiveShape.String(), true, "Scanner-issued identifier.")); + scanFinding.Properties.Add(new PropertySpec("severity", new EnumShape(severity), true, "Severity classification.")); + scanFinding.Properties.Add(new PropertySpec("status", new EnumShape(findingStatus), true, "Lifecycle state of the finding.")); + scanFinding.Properties.Add(new PropertySpec("packageName", PrimitiveShape.String(), 
true, "Affected package name.")); + scanFinding.Properties.Add(new PropertySpec("packageVersion", PrimitiveShape.String(), false, "Affected package version.")); + scanFinding.Properties.Add(new PropertySpec("cvssScore", PrimitiveShape.Number(minimum: 0, maximum: 10), false, "CVSS base score if available.")); + scanFinding.Properties.Add(new PropertySpec("description", PrimitiveShape.String(), false, "Human-readable description of the finding.")); + scanFinding.Properties.Add(new PropertySpec("references", new ArrayShape(PrimitiveShape.String(), 1), false, "Reference links or advisory identifiers.")); + + var policyDecision = RegisterObject("PolicyDecision", "Outcome of an individual policy rule evaluation."); + policyDecision.Properties.Add(new PropertySpec("policyId", PrimitiveShape.String(), true, "Policy identifier.")); + policyDecision.Properties.Add(new PropertySpec("ruleId", PrimitiveShape.String(), true, "Specific rule identifier.")); + policyDecision.Properties.Add(new PropertySpec("effect", new EnumShape(policyEffect), true, "Resulting effect of the rule.")); + policyDecision.Properties.Add(new PropertySpec("reason", PrimitiveShape.String(), false, "Explanation for the effect.")); + policyDecision.Properties.Add(new PropertySpec("remediation", PrimitiveShape.String(), false, "Suggested remediation action.")); + + var riskFactor = RegisterObject("RiskFactor", "Factor contributing to risk calculation."); + riskFactor.Properties.Add(new PropertySpec("name", PrimitiveShape.String(), true, "Risk factor name.")); + riskFactor.Properties.Add(new PropertySpec("weight", PrimitiveShape.Number(minimum: 0, maximum: 1), true, "Relative weight between 0 and 1.")); + riskFactor.Properties.Add(new PropertySpec("description", PrimitiveShape.String(), false, "Additional context for the factor.")); + + var customProperty = RegisterObject("CustomProperty", "Key/value entry for custom evidence."); + customProperty.Properties.Add(new PropertySpec("key", PrimitiveShape.String(), 
true, "Property key.")); + customProperty.Properties.Add(new PropertySpec("value", PrimitiveShape.String(), true, "Property value serialized as string.")); + + // Root objects + var buildProvenance = RegisterObject("BuildProvenance", "Build provenance evidence capturing builder inputs and outputs.", isRoot: true, schemaFileStem: "stellaops-build-provenance.v1", qualifiedVersion: "StellaOps.BuildProvenance@1"); + buildProvenance.Properties.Add(new PropertySpec("schemaVersion", PrimitiveShape.String(constValue: "StellaOps.BuildProvenance@1"), true, "Schema version identifier.")); + buildProvenance.Properties.Add(new PropertySpec("buildType", PrimitiveShape.String(), true, "Build type or workflow identifier.")); + buildProvenance.Properties.Add(new PropertySpec("builder", new ObjectShape(builder), true, "Builder identity metadata.")); + buildProvenance.Properties.Add(new PropertySpec("materials", new ArrayShape(new ObjectShape(material), 1), true, "Materials consumed during the build.")); + buildProvenance.Properties.Add(new PropertySpec("metadata", new ObjectShape(buildMetadata), true, "Build metadata information.")); + buildProvenance.Properties.Add(new PropertySpec("environment", new ObjectShape(environmentMetadata), false, "Optional environment details for the build context.")); + + var sbomAttestation = RegisterObject("SbomAttestation", "SBOM attestation linking an SBOM document to an artifact.", isRoot: true, schemaFileStem: "stellaops-sbom-attestation.v1", qualifiedVersion: "StellaOps.SBOMAttestation@1"); + sbomAttestation.Properties.Add(new PropertySpec("schemaVersion", PrimitiveShape.String(constValue: "StellaOps.SBOMAttestation@1"), true, "Schema version identifier.")); + sbomAttestation.Properties.Add(new PropertySpec("subjectDigest", PrimitiveShape.String(pattern: "^sha256:[A-Fa-f0-9]{64}$"), true, "Artifact digest referenced by the SBOM.")); + sbomAttestation.Properties.Add(new PropertySpec("sbomFormat", new EnumShape(sbomFormat), true, "SBOM format 
identifier.")); + sbomAttestation.Properties.Add(new PropertySpec("sbomDigest", new ObjectShape(digest), true, "Digest of the SBOM document.")); + sbomAttestation.Properties.Add(new PropertySpec("sbomUri", PrimitiveShape.String(), false, "Location where the SBOM can be retrieved.")); + sbomAttestation.Properties.Add(new PropertySpec("componentCount", PrimitiveShape.Integer(minimum: 0), true, "Number of components described by the SBOM.")); + sbomAttestation.Properties.Add(new PropertySpec("packages", new ArrayShape(new ObjectShape(sbomPackage), 0), false, "Optional package listing for quick lookups.")); + + var vexAttestation = RegisterObject("VexAttestation", "VEX attestation describing vulnerability status for an artifact.", isRoot: true, schemaFileStem: "stellaops-vex-attestation.v1", qualifiedVersion: "StellaOps.VEXAttestation@1"); + vexAttestation.Properties.Add(new PropertySpec("schemaVersion", PrimitiveShape.String(constValue: "StellaOps.VEXAttestation@1"), true, "Schema version identifier.")); + vexAttestation.Properties.Add(new PropertySpec("subjectDigest", PrimitiveShape.String(pattern: "^sha256:[A-Fa-f0-9]{64}$"), true, "Artifact digest covered by the VEX statements.")); + vexAttestation.Properties.Add(new PropertySpec("generatedAt", PrimitiveShape.String(format: "date-time"), true, "Timestamp when the VEX attestation was generated.")); + vexAttestation.Properties.Add(new PropertySpec("statements", new ArrayShape(new ObjectShape(vexStatement), 1), true, "Collection of VEX statements.")); + + var scanResults = RegisterObject("ScanResults", "Scanner findings for an artifact at a point in time.", isRoot: true, schemaFileStem: "stellaops-scan-results.v1", qualifiedVersion: "StellaOps.ScanResults@1"); + scanResults.Properties.Add(new PropertySpec("schemaVersion", PrimitiveShape.String(constValue: "StellaOps.ScanResults@1"), true, "Schema version identifier.")); + scanResults.Properties.Add(new PropertySpec("subjectDigest", PrimitiveShape.String(pattern: 
"^sha256:[A-Fa-f0-9]{64}$"), true, "Artifact digest that was scanned.")); + scanResults.Properties.Add(new PropertySpec("scannerName", PrimitiveShape.String(), true, "Name of the scanner that produced the findings.")); + scanResults.Properties.Add(new PropertySpec("scannerVersion", PrimitiveShape.String(), true, "Scanner version string.")); + scanResults.Properties.Add(new PropertySpec("generatedAt", PrimitiveShape.String(format: "date-time"), true, "Timestamp when the scan results were generated.")); + scanResults.Properties.Add(new PropertySpec("findings", new ArrayShape(new ObjectShape(scanFinding), 0), true, "List of findings captured during the scan.")); + + var policyEvaluation = RegisterObject("PolicyEvaluation", "Policy evaluation outcome for an artifact.", isRoot: true, schemaFileStem: "stellaops-policy-evaluation.v1", qualifiedVersion: "StellaOps.PolicyEvaluation@1"); + policyEvaluation.Properties.Add(new PropertySpec("schemaVersion", PrimitiveShape.String(constValue: "StellaOps.PolicyEvaluation@1"), true, "Schema version identifier.")); + policyEvaluation.Properties.Add(new PropertySpec("subjectDigest", PrimitiveShape.String(pattern: "^sha256:[A-Fa-f0-9]{64}$"), true, "Artifact digest that was evaluated.")); + policyEvaluation.Properties.Add(new PropertySpec("policyVersion", PrimitiveShape.String(), true, "Policy bundle version applied.")); + policyEvaluation.Properties.Add(new PropertySpec("evaluatedAt", PrimitiveShape.String(format: "date-time"), true, "Timestamp when policy evaluation was executed.")); + policyEvaluation.Properties.Add(new PropertySpec("outcome", new EnumShape(policyOutcome), true, "Overall evaluation outcome.")); + policyEvaluation.Properties.Add(new PropertySpec("decisions", new ArrayShape(new ObjectShape(policyDecision), 0), true, "Detailed rule-level decisions.")); + + var riskProfile = RegisterObject("RiskProfileEvidence", "Risk scoring evidence summarising exposure for an artifact.", isRoot: true, schemaFileStem: 
"stellaops-risk-profile.v1", qualifiedVersion: "StellaOps.RiskProfileEvidence@1"); + riskProfile.Properties.Add(new PropertySpec("schemaVersion", PrimitiveShape.String(constValue: "StellaOps.RiskProfileEvidence@1"), true, "Schema version identifier.")); + riskProfile.Properties.Add(new PropertySpec("subjectDigest", PrimitiveShape.String(pattern: "^sha256:[A-Fa-f0-9]{64}$"), true, "Artifact digest that the risk profile describes.")); + riskProfile.Properties.Add(new PropertySpec("generatedAt", PrimitiveShape.String(format: "date-time"), true, "Timestamp when scoring was performed.")); + riskProfile.Properties.Add(new PropertySpec("riskScore", PrimitiveShape.Number(minimum: 0, maximum: 100), true, "Normalized risk score between 0 and 100.")); + riskProfile.Properties.Add(new PropertySpec("riskLevel", new EnumShape(riskLevel), true, "Risk level classification.")); + riskProfile.Properties.Add(new PropertySpec("factors", new ArrayShape(new ObjectShape(riskFactor), 0), true, "Factors contributing to the total risk.")); + + var customEvidence = RegisterObject("CustomEvidence", "Generic evidence payload for bespoke attestations.", isRoot: true, schemaFileStem: "stellaops-custom-evidence.v1", qualifiedVersion: "StellaOps.CustomEvidence@1"); + customEvidence.Properties.Add(new PropertySpec("schemaVersion", PrimitiveShape.String(constValue: "StellaOps.CustomEvidence@1"), true, "Schema version identifier.")); + customEvidence.Properties.Add(new PropertySpec("subjectDigest", PrimitiveShape.String(pattern: "^sha256:[A-Fa-f0-9]{64}$"), true, "Artifact digest this evidence references.")); + customEvidence.Properties.Add(new PropertySpec("kind", PrimitiveShape.String(), true, "Custom evidence kind identifier.")); + customEvidence.Properties.Add(new PropertySpec("generatedAt", PrimitiveShape.String(format: "date-time"), true, "Timestamp when the evidence was generated.")); + customEvidence.Properties.Add(new PropertySpec("properties", new ArrayShape(new ObjectShape(customProperty), 
0), false, "Optional key/value properties for additional context.")); + + return new TypeRegistry(roots, objects, enums); + } +} + +internal sealed class EnumSpec +{ + public string Name { get; } + public string Summary { get; } + public IReadOnlyList<string> Values { get; } + + public EnumSpec(string name, string summary, IReadOnlyList<string> values) + { + Name = name; + Summary = summary; + Values = values; + } +} + +internal sealed class ObjectSpec +{ + public string Name { get; } + public string Summary { get; } + public bool IsRoot { get; set; } + public string? SchemaFileStem { get; set; } + public string? QualifiedVersion { get; set; } + public List<PropertySpec> Properties { get; } = new(); + + public ObjectSpec(string name, string summary) + { + Name = name; + Summary = summary; + } +} + +internal sealed class PropertySpec +{ + public string Name { get; } + public TypeShape Type { get; } + public bool Required { get; } + public string? Description { get; } + + public PropertySpec(string name, TypeShape type, bool required, string? description) + { + Name = name; + Type = type; + Required = required; + Description = description; + } +} + +internal abstract record TypeShape; + +internal sealed record PrimitiveShape(PrimitiveKind Kind, string? Format = null, string? ConstValue = null, string? Pattern = null, double? Minimum = null, double? Maximum = null) : TypeShape +{ + public static PrimitiveShape String(string? format = null, string? constValue = null, string? pattern = null) + => new(PrimitiveKind.String, format, constValue, pattern); + + public static PrimitiveShape Number(double? minimum = null, double? maximum = null) + => new(PrimitiveKind.Number, Minimum: minimum, Maximum: maximum); + + public static PrimitiveShape Integer(double? minimum = null, double?
maximum = null) + => new(PrimitiveKind.Integer, Minimum: minimum, Maximum: maximum); + + public static PrimitiveShape Boolean() + => new(PrimitiveKind.Boolean); +} + +internal enum PrimitiveKind +{ + String, + Number, + Integer, + Boolean +} + +internal sealed record EnumShape(EnumSpec Enum) : TypeShape; + +internal sealed record ObjectShape(ObjectSpec Object) : TypeShape; + +internal sealed record ArrayShape(TypeShape Item, int? MinItems = null) : TypeShape; + +internal static class SchemaBuilder +{ + public static string Build(ObjectSpec root) + { + var schema = BuildSchema(root); + return JsonSerializer.Serialize(schema, new JsonSerializerOptions { WriteIndented = true }); + } + + private static JsonObject BuildSchema(ObjectSpec root) + { + var schema = new JsonObject + { + ["$schema"] = "https://json-schema.org/draft/2020-12/schema", + ["$id"] = $"https://stella-ops.org/schemas/attestor/{root.SchemaFileStem}.json", + ["title"] = root.Summary, + ["type"] = "object", + ["additionalProperties"] = false + }; + + var properties = new JsonObject(); + var required = new JsonArray(); + var defs = new JsonObject(); + + var visitedObjects = new HashSet(StringComparer.Ordinal); + var visitedEnums = new HashSet(StringComparer.Ordinal); + + foreach (var property in root.Properties) + { + properties[property.Name] = BuildPropertySchema(property.Type, defs, visitedObjects, visitedEnums); + if (property.Required) + { + required.Add(property.Name); + } + + if (!string.IsNullOrWhiteSpace(property.Description)) + { + properties[property.Name]!["description"] = property.Description; + } + } + + if (required.Count > 0) + { + schema["required"] = required; + } + + schema["properties"] = properties; + + if (defs.Count > 0) + { + schema["$defs"] = defs; + } + + return schema; + } + + private static JsonObject BuildObjectDefinition(ObjectSpec spec, JsonObject defs, HashSet visitedObjects, HashSet visitedEnums) + { + if (!visitedObjects.Add(spec.Name)) + { + return new JsonObject { 
["$ref"] = $"#/$defs/{spec.Name}" }; + } + + var def = new JsonObject + { + ["type"] = "object", + ["additionalProperties"] = false, + ["description"] = spec.Summary + }; + + var properties = new JsonObject(); + var required = new JsonArray(); + + foreach (var property in spec.Properties) + { + properties[property.Name] = BuildPropertySchema(property.Type, defs, visitedObjects, visitedEnums); + if (property.Required) + { + required.Add(property.Name); + } + + if (!string.IsNullOrWhiteSpace(property.Description)) + { + properties[property.Name]!["description"] = property.Description; + } + } + + if (required.Count > 0) + { + def["required"] = required; + } + + def["properties"] = properties; + defs[spec.Name] = def; + return new JsonObject { ["$ref"] = $"#/$defs/{spec.Name}" }; + } + + private static JsonObject BuildEnumDefinition(EnumSpec spec, JsonObject defs, HashSet<string> visitedEnums) + { + if (!visitedEnums.Add(spec.Name)) + { + return new JsonObject { ["$ref"] = $"#/$defs/{spec.Name}" }; + } + + var def = new JsonObject + { + ["type"] = "string", + ["description"] = spec.Summary, + ["enum"] = new JsonArray(spec.Values.Select(v => (JsonNode)v).ToArray()) + }; + + defs[spec.Name] = def; + return new JsonObject { ["$ref"] = $"#/$defs/{spec.Name}" }; + } + + private static JsonObject BuildPropertySchema(TypeShape type, JsonObject defs, HashSet<string> visitedObjects, HashSet<string> visitedEnums) + { + switch (type) + { + case PrimitiveShape primitive: + var primitiveSchema = new JsonObject(); + primitiveSchema["type"] = primitive.Kind switch + { + PrimitiveKind.String => "string", + PrimitiveKind.Number => "number", + PrimitiveKind.Integer => "integer", + PrimitiveKind.Boolean => "boolean", + _ => throw new InvalidOperationException("Unsupported primitive kind.") + }; + + if (!string.IsNullOrWhiteSpace(primitive.Format)) + { + primitiveSchema["format"] = primitive.Format; + } + + if (!string.IsNullOrWhiteSpace(primitive.ConstValue)) + { + primitiveSchema["const"] =
primitive.ConstValue; + } + + if (!string.IsNullOrWhiteSpace(primitive.Pattern)) + { + primitiveSchema["pattern"] = primitive.Pattern; + } + + if (primitive.Minimum.HasValue) + { + primitiveSchema["minimum"] = primitive.Minimum.Value; + } + + if (primitive.Maximum.HasValue) + { + primitiveSchema["maximum"] = primitive.Maximum.Value; + } + + return primitiveSchema; + + case EnumShape enumShape: + return BuildEnumDefinition(enumShape.Enum, defs, visitedEnums); + + case ObjectShape objectShape: + return BuildObjectDefinition(objectShape.Object, defs, visitedObjects, visitedEnums); + + case ArrayShape arrayShape: + var arraySchema = new JsonObject + { + ["type"] = "array", + ["items"] = BuildPropertySchema(arrayShape.Item, defs, visitedObjects, visitedEnums) + }; + + if (arrayShape.MinItems.HasValue) + { + arraySchema["minItems"] = arrayShape.MinItems.Value; + } + + return arraySchema; + + default: + throw new InvalidOperationException("Unsupported property type."); + } + } +} + +internal static class TypeScriptEmitter +{ + private const int IndentSize = 2; + + public static string Emit(TypeRegistry registry) + { + var builder = new StringBuilder(); + AppendLine(builder, 0, "// "); + AppendLine(builder, 0, "// Generated by StellaOps.Attestor.Types.Generator"); + AppendLine(builder, 0, "/* eslint-disable */"); + AppendLine(builder, 0, "/* prettier-ignore */"); + builder.AppendLine(); + + var orderedEnums = registry.Enums.Values.OrderBy(e => e.Name, StringComparer.Ordinal).ToList(); + foreach (var enumSpec in orderedEnums) + { + AppendLine(builder, 0, $"export const {enumSpec.Name}Values = Object.freeze([{string.Join(", ", enumSpec.Values.Select(v => $"'{v}'"))}] as const);"); + AppendLine(builder, 0, $"export type {enumSpec.Name} = typeof {enumSpec.Name}Values[number];"); + builder.AppendLine(); + } + + var orderedObjects = registry.Objects.Values.OrderBy(o => o.Name, StringComparer.Ordinal).ToList(); + foreach (var obj in orderedObjects) + { + AppendLine(builder, 0, 
$"export interface {obj.Name} {{"); + foreach (var property in obj.Properties) + { + var optionalSuffix = property.Required ? string.Empty : "?"; + AppendLine(builder, 1, $"{property.Name}{optionalSuffix}: {ToTsType(property.Type)};"); + } + AppendLine(builder, 0, "}"); + builder.AppendLine(); + } + + AppendLine(builder, 0, "function isRecord(value: unknown): value is Record {"); + AppendLine(builder, 1, "return typeof value === 'object' && value !== null && !Array.isArray(value);"); + AppendLine(builder, 0, "}"); + builder.AppendLine(); + + AppendLine(builder, 0, "function pathString(path: string[]): string {"); + AppendLine(builder, 1, "return path.length === 0 ? 'value' : `value.${path.join('.')}`;"); + AppendLine(builder, 0, "}"); + builder.AppendLine(); + + foreach (var obj in orderedObjects) + { + AppendLine(builder, 0, $"function assert{obj.Name}(value: unknown, path: string[]): asserts value is {obj.Name} {{"); + AppendLine(builder, 1, "if (!isRecord(value)) {"); + AppendLine(builder, 2, "throw new Error(`${pathString(path)} must be an object.`);"); + AppendLine(builder, 1, "}"); + + foreach (var property in obj.Properties) + { + var accessor = $"value.{property.Name}"; + var pathExpression = $"[...path, '{property.Name}']"; + if (property.Required) + { + AppendLine(builder, 1, $"if ({accessor} === undefined) {{"); + AppendLine(builder, 2, "throw new Error(`${pathString(" + pathExpression + ")} is required.`);"); + AppendLine(builder, 1, "}"); + AppendLines(builder, EmitTsAssertion(property.Type, accessor, pathExpression), 1); + } + else + { + AppendLine(builder, 1, $"if ({accessor} !== undefined) {{"); + AppendLines(builder, EmitTsAssertion(property.Type, accessor, pathExpression), 2); + AppendLine(builder, 1, "}"); + } + } + + AppendLine(builder, 0, "}"); + builder.AppendLine(); + } + + foreach (var root in registry.RootObjects.OrderBy(r => r.Name, StringComparer.Ordinal)) + { + AppendLine(builder, 0, $"export function validate{root.Name}(value: unknown): 
{root.Name} {{"); + AppendLine(builder, 1, $"assert{root.Name}(value, []);"); + AppendLine(builder, 1, $"return value as {root.Name};"); + AppendLine(builder, 0, "}"); + builder.AppendLine(); + + AppendLine(builder, 0, $"export function canonicalize{root.Name}(value: {root.Name}): string {{"); + AppendLine(builder, 1, $"assert{root.Name}(value, []);"); + AppendLine(builder, 1, "return canonicalStringify(value);"); + AppendLine(builder, 0, "}"); + builder.AppendLine(); + } + + AppendLine(builder, 0, "function canonicalStringify(input: unknown): string {"); + AppendLine(builder, 1, "return JSON.stringify(sortValue(input));"); + AppendLine(builder, 0, "}"); + builder.AppendLine(); + + AppendLine(builder, 0, "function sortValue(value: unknown): unknown {"); + AppendLine(builder, 1, "if (Array.isArray(value)) {"); + AppendLine(builder, 2, "return value.map(sortValue);"); + AppendLine(builder, 1, "}"); + AppendLine(builder, 1, "if (isRecord(value)) {"); + AppendLine(builder, 2, "const ordered: Record = {};"); + AppendLine(builder, 2, "const keys = Object.keys(value).sort();"); + AppendLine(builder, 2, "for (const key of keys) {"); + AppendLine(builder, 3, "ordered[key] = sortValue(value[key]);"); + AppendLine(builder, 2, "}"); + AppendLine(builder, 2, "return ordered;"); + AppendLine(builder, 1, "}"); + AppendLine(builder, 1, "return value;"); + AppendLine(builder, 0, "}"); + builder.AppendLine(); + + return builder.ToString(); + } + + private static void AppendLine(StringBuilder builder, int indentLevel, string text) + { + if (text.Length == 0) + { + builder.AppendLine(); + return; + } + + builder.Append(' ', indentLevel * IndentSize); + builder.AppendLine(text); + } + + private static void AppendLines(StringBuilder builder, IEnumerable lines, int indentLevel) + { + foreach (var line in lines) + { + AppendLine(builder, indentLevel, line); + } + } + + private static IReadOnlyList EmitTsAssertion(TypeShape type, string accessor, string pathExpression) + { + var lines = 
new List(); + switch (type) + { + case PrimitiveShape primitive when primitive.Kind == PrimitiveKind.String: + lines.AddRange(EmitStringAssertion(primitive, accessor, pathExpression)); + break; + case PrimitiveShape primitive when primitive.Kind == PrimitiveKind.Number || primitive.Kind == PrimitiveKind.Integer: + lines.Add("if (typeof " + accessor + " !== 'number') {"); + lines.Add(" throw new Error(`${pathString(" + pathExpression + ")} must be a number.`);"); + lines.Add("}"); + if (primitive.Minimum.HasValue) + { + lines.Add("if (" + accessor + " < " + primitive.Minimum.Value.ToString(System.Globalization.CultureInfo.InvariantCulture) + ") {"); + lines.Add(" throw new Error(`${pathString(" + pathExpression + ")} must be >= " + primitive.Minimum.Value.ToString(System.Globalization.CultureInfo.InvariantCulture) + "`);"); + lines.Add("}"); + } + if (primitive.Maximum.HasValue) + { + lines.Add("if (" + accessor + " > " + primitive.Maximum.Value.ToString(System.Globalization.CultureInfo.InvariantCulture) + ") {"); + lines.Add(" throw new Error(`${pathString(" + pathExpression + ")} must be <= " + primitive.Maximum.Value.ToString(System.Globalization.CultureInfo.InvariantCulture) + "`);"); + lines.Add("}"); + } + break; + case PrimitiveShape primitive when primitive.Kind == PrimitiveKind.Boolean: + lines.Add("if (typeof " + accessor + " !== 'boolean') {"); + lines.Add(" throw new Error(`${pathString(" + pathExpression + ")} must be a boolean.`);"); + lines.Add("}"); + break; + case EnumShape enumShape: + lines.Add("if (!" 
+ enumShape.Enum.Name + "Values.includes(" + accessor + " as " + enumShape.Enum.Name + ")) {"); + lines.Add(" throw new Error(`${pathString(" + pathExpression + ")} must be one of ${" + enumShape.Enum.Name + "Values.join(', ')}`);"); + lines.Add("}"); + break; + case ObjectShape objectShape: + lines.Add("assert" + objectShape.Object.Name + "(" + accessor + ", " + pathExpression + ");"); + break; + case ArrayShape arrayShape: + lines.AddRange(EmitArrayAssertion(arrayShape, accessor, pathExpression)); + break; + default: + lines.Add("// Unsupported type encountered during validation."); + break; + } + + return lines; + } + + private static IReadOnlyList EmitStringAssertion(PrimitiveShape primitive, string accessor, string pathExpression) + { + var lines = new List + { + "if (typeof " + accessor + " !== 'string') {", + " throw new Error(`${pathString(" + pathExpression + ")} must be a string.`);", + "}" + }; + + if (!string.IsNullOrWhiteSpace(primitive.ConstValue)) + { + var escapedConst = primitive.ConstValue!.Replace("'", "\\'"); + lines.Add("if (" + accessor + " !== '" + escapedConst + "') {"); + lines.Add(" throw new Error(`${pathString(" + pathExpression + ")} must equal '" + escapedConst + "'.`);"); + lines.Add("}"); + } + + if (!string.IsNullOrWhiteSpace(primitive.Pattern)) + { + lines.Add("if (!/" + primitive.Pattern + "/.test(" + accessor + ")) {"); + lines.Add(" throw new Error(`${pathString(" + pathExpression + ")} does not match expected format.`);"); + lines.Add("}"); + } + + return lines; + } + + private static IReadOnlyList EmitArrayAssertion(ArrayShape arrayShape, string accessor, string pathExpression) + { + var lines = new List + { + "if (!Array.isArray(" + accessor + ")) {", + " throw new Error(`${pathString(" + pathExpression + ")} must be an array.`);", + "}" + }; + + if (arrayShape.MinItems.HasValue && arrayShape.MinItems.Value > 0) + { + lines.Add("if (" + accessor + ".length < " + arrayShape.MinItems.Value + ") {"); + lines.Add(" throw new 
Error(`${pathString(" + pathExpression + ")} must contain at least " + arrayShape.MinItems.Value + " item(s).`);"); + lines.Add("}"); + } + + lines.Add("for (let i = 0; i < " + accessor + ".length; i += 1) {"); + var childLines = EmitTsAssertion(arrayShape.Item, accessor + "[i]", "[..." + pathExpression + ", String(i)]"); + foreach (var line in childLines) + { + lines.Add(" " + line); + } + lines.Add("}"); + + return lines; + } + + private static string ToTsType(TypeShape type) + => type switch + { + PrimitiveShape primitive when primitive.Kind == PrimitiveKind.String && !string.IsNullOrWhiteSpace(primitive.ConstValue) + => $"'{primitive.ConstValue}'", + PrimitiveShape { Kind: PrimitiveKind.String } => "string", + PrimitiveShape { Kind: PrimitiveKind.Number } => "number", + PrimitiveShape { Kind: PrimitiveKind.Integer } => "number", + PrimitiveShape { Kind: PrimitiveKind.Boolean } => "boolean", + EnumShape enumShape => enumShape.Enum.Name, + ObjectShape objectShape => objectShape.Object.Name, + ArrayShape array => $"Array<{ToTsType(array.Item)}>", + _ => "unknown" + }; +} + +internal static class GoEmitter +{ + public static string Emit(TypeRegistry registry) + { + var builder = new StringBuilder(); + AppendLine(builder, 0, "// Code generated by StellaOps.Attestor.Types.Generator. 
DO NOT EDIT."); + AppendLine(builder, 0, "package attesttypes"); + builder.AppendLine(); + + AppendLine(builder, 0, "import ("); + AppendLine(builder, 1, "\"encoding/json\""); + AppendLine(builder, 1, "\"errors\""); + AppendLine(builder, 1, "\"fmt\""); + AppendLine(builder, 0, ")"); + builder.AppendLine(); + + foreach (var enumSpec in registry.Enums.Values.OrderBy(e => e.Name, StringComparer.Ordinal)) + { + EmitEnum(builder, enumSpec); + builder.AppendLine(); + } + + foreach (var root in registry.RootObjects.OrderBy(o => o.Name, StringComparer.Ordinal)) + { + if (!string.IsNullOrEmpty(root.QualifiedVersion)) + { + AppendLine(builder, 0, $"const {root.Name}SchemaVersion = \"{root.QualifiedVersion}\""); + builder.AppendLine(); + } + } + + var orderedObjects = registry.Objects.Values.OrderBy(o => o.Name, StringComparer.Ordinal).ToList(); + foreach (var obj in orderedObjects) + { + EmitStruct(builder, obj); + builder.AppendLine(); + EmitValidateMethod(builder, obj); + builder.AppendLine(); + } + + foreach (var root in registry.RootObjects.OrderBy(r => r.Name, StringComparer.Ordinal)) + { + EmitCanonicalFunction(builder, root.Name); + builder.AppendLine(); + } + + return builder.ToString(); + } + + private static void EmitEnum(StringBuilder builder, EnumSpec enumSpec) + { + AppendLine(builder, 0, $"type {enumSpec.Name} string"); + builder.AppendLine(); + AppendLine(builder, 0, "const ("); + foreach (var value in enumSpec.Values) + { + AppendLine(builder, 1, $"{enumSpec.Name}{ToExported(value)} {enumSpec.Name} = \"{value}\""); + } + AppendLine(builder, 0, ")"); + builder.AppendLine(); + AppendLine(builder, 0, $"func (v {enumSpec.Name}) Validate() error {{"); + AppendLine(builder, 1, "switch v {"); + var cases = string.Join(", ", enumSpec.Values.Select(value => $"{enumSpec.Name}{ToExported(value)}")); + AppendLine(builder, 1, $"case {cases}:"); + AppendLine(builder, 2, "return nil"); + AppendLine(builder, 1, "default:"); + AppendLine(builder, 2, $"return 
fmt.Errorf(\"invalid value for {enumSpec.Name}: %s\", string(v))"); + AppendLine(builder, 1, "}"); + AppendLine(builder, 0, "}"); + } + + private static void EmitStruct(StringBuilder builder, ObjectSpec obj) + { + AppendLine(builder, 0, $"type {obj.Name} struct {{"); + foreach (var property in obj.Properties) + { + var fieldName = ToExported(property.Name); + AppendLine(builder, 1, $"{fieldName} {GoFieldType(property)} `json:\"{JsonTag(property)}\"`"); + } + AppendLine(builder, 0, "}"); + } + + private static void EmitValidateMethod(StringBuilder builder, ObjectSpec obj) + { + AppendLine(builder, 0, $"func (value *{obj.Name}) Validate() error {{"); + AppendLine(builder, 1, "if value == nil {"); + AppendLine(builder, 2, $"return errors.New(\"{obj.Name} is nil\")"); + AppendLine(builder, 1, "}"); + + foreach (var property in obj.Properties) + { + EmitPropertyValidation(builder, property, $"value.{ToExported(property.Name)}", $"{obj.Name}.{ToExported(property.Name)}", 1); + } + + AppendLine(builder, 1, "return nil"); + AppendLine(builder, 0, "}"); + } + + private static void EmitCanonicalFunction(StringBuilder builder, string typeName) + { + AppendLine(builder, 0, $"func (value *{typeName}) CanonicalJSON() ([]byte, error) {{"); + AppendLine(builder, 1, "if err := value.Validate(); err != nil {"); + AppendLine(builder, 2, "return nil, err"); + AppendLine(builder, 1, "}"); + AppendLine(builder, 1, "buf, err := json.Marshal(value)"); + AppendLine(builder, 1, "if err != nil {"); + AppendLine(builder, 2, $"return nil, fmt.Errorf(\"failed to marshal {typeName}: %w\", err)"); + AppendLine(builder, 1, "}"); + AppendLine(builder, 1, "return buf, nil"); + AppendLine(builder, 0, "}"); + } + + private static void EmitPropertyValidation(StringBuilder builder, PropertySpec property, string accessor, string path, int indent) + { + switch (property.Type) + { + case PrimitiveShape primitive: + EmitPrimitiveValidation(builder, primitive, accessor, path, property.Required, indent); + 
break; + case EnumShape enumShape: + EmitEnumValidation(builder, enumShape, accessor, path, property.Required, indent); + break; + case ObjectShape objectShape: + EmitObjectValidation(builder, objectShape, accessor, path, property.Required, indent); + break; + case ArrayShape arrayShape: + EmitArrayValidation(builder, arrayShape, accessor, path, indent); + break; + } + } + + private static void EmitPrimitiveValidation(StringBuilder builder, PrimitiveShape primitive, string accessor, string path, bool required, int indent) + { + var pointer = UsesPointer(primitive, required); + if (!TryBuildPrimitiveChecks(primitive, pointer ? $"*{accessor}" : accessor, path, out var lines)) + { + return; + } + + if (pointer) + { + AppendLine(builder, indent, $"if {accessor} != nil {{"); + foreach (var line in lines) + { + AppendLine(builder, indent + 1, line); + } + AppendLine(builder, indent, "}"); + } + else + { + foreach (var line in lines) + { + AppendLine(builder, indent, line); + } + } + } + + private static bool TryBuildPrimitiveChecks(PrimitiveShape primitive, string target, string path, out List lines) + { + lines = new List(); + + if (primitive.Kind == PrimitiveKind.String && !string.IsNullOrEmpty(primitive.ConstValue)) + { + lines.Add($"if {target} != \"{primitive.ConstValue}\" {{"); + lines.Add($"\treturn fmt.Errorf(\"{path} must equal {primitive.ConstValue}\")"); + lines.Add("}"); + } + + if ((primitive.Kind == PrimitiveKind.Number || primitive.Kind == PrimitiveKind.Integer) && primitive.Minimum.HasValue) + { + lines.Add($"if {target} < {primitive.Minimum.Value} {{"); + lines.Add($"\treturn fmt.Errorf(\"{path} must be >= {primitive.Minimum.Value}\")"); + lines.Add("}"); + } + + if ((primitive.Kind == PrimitiveKind.Number || primitive.Kind == PrimitiveKind.Integer) && primitive.Maximum.HasValue) + { + lines.Add($"if {target} > {primitive.Maximum.Value} {{"); + lines.Add($"\treturn fmt.Errorf(\"{path} must be <= {primitive.Maximum.Value}\")"); + lines.Add("}"); + } + + 
// No pattern validation for now. + if (lines.Count == 0) + { + return false; + } + + return true; + } + + private static void EmitEnumValidation(StringBuilder builder, EnumShape enumShape, string accessor, string path, bool required, int indent) + { + if (UsesPointer(enumShape, required)) + { + AppendLine(builder, indent, $"if {accessor} != nil {{"); + AppendLine(builder, indent + 1, $"if err := (*{accessor}).Validate(); err != nil {{"); + AppendLine(builder, indent + 2, $"return fmt.Errorf(\"invalid {path}: %w\", err)"); + AppendLine(builder, indent + 1, "}"); + AppendLine(builder, indent, "}"); + } + else + { + AppendLine(builder, indent, $"if err := {accessor}.Validate(); err != nil {{"); + AppendLine(builder, indent + 1, $"return fmt.Errorf(\"invalid {path}: %w\", err)"); + AppendLine(builder, indent, "}"); + } + } + + private static void EmitObjectValidation(StringBuilder builder, ObjectShape objectShape, string accessor, string path, bool required, int indent) + { + if (UsesPointer(objectShape, required)) + { + AppendLine(builder, indent, $"if {accessor} != nil {{"); + AppendLine(builder, indent + 1, $"if err := {accessor}.Validate(); err != nil {{"); + AppendLine(builder, indent + 2, $"return fmt.Errorf(\"invalid {path}: %w\", err)"); + AppendLine(builder, indent + 1, "}"); + AppendLine(builder, indent, "}"); + } + else + { + AppendLine(builder, indent, $"if err := {accessor}.Validate(); err != nil {{"); + AppendLine(builder, indent + 1, $"return fmt.Errorf(\"invalid {path}: %w\", err)"); + AppendLine(builder, indent, "}"); + } + } + + private static void EmitArrayValidation(StringBuilder builder, ArrayShape arrayShape, string accessor, string path, int indent) + { + if (arrayShape.MinItems.HasValue && arrayShape.MinItems.Value > 0) + { + AppendLine(builder, indent, $"if len({accessor}) < {arrayShape.MinItems.Value} {{"); + AppendLine(builder, indent + 1, $"return fmt.Errorf(\"{path} must contain at least {arrayShape.MinItems.Value} item(s)\")"); + 
AppendLine(builder, indent, "}"); + } + + if (NeedsArrayItemValidation(arrayShape.Item)) + { + AppendLine(builder, indent, $"for i := range {accessor} {{"); + EmitArrayItemValidation(builder, arrayShape.Item, $"{accessor}[i]", path, "i", indent + 1); + AppendLine(builder, indent, "}"); + } + } + + private static void EmitArrayItemValidation(StringBuilder builder, TypeShape itemType, string accessor, string path, string indexVar, int indent) + { + switch (itemType) + { + case PrimitiveShape primitive: + EmitArrayPrimitiveValidation(builder, primitive, accessor, path, indexVar, indent); + break; + case EnumShape: + AppendLine(builder, indent, $"if err := {accessor}.Validate(); err != nil {{"); + AppendLine(builder, indent + 1, $"return fmt.Errorf(\"invalid {path}[%d]: %w\", {indexVar}, err)"); + AppendLine(builder, indent, "}"); + break; + case ObjectShape: + AppendLine(builder, indent, $"if err := {accessor}.Validate(); err != nil {{"); + AppendLine(builder, indent + 1, $"return fmt.Errorf(\"invalid {path}[%d]: %w\", {indexVar}, err)"); + AppendLine(builder, indent, "}"); + break; + case ArrayShape nestedArray: + EmitArrayValidation(builder, nestedArray, accessor, $"{path}[%d]", indent); + break; + } + } + + private static bool NeedsPrimitiveValidation(PrimitiveShape primitive) + => !string.IsNullOrEmpty(primitive.ConstValue) + || primitive.Minimum.HasValue + || primitive.Maximum.HasValue; + + private static bool NeedsArrayItemValidation(TypeShape itemType) + => itemType switch + { + PrimitiveShape primitive => NeedsPrimitiveValidation(primitive), + EnumShape => true, + ObjectShape => true, + ArrayShape => true, + _ => false + }; + + private static void EmitArrayPrimitiveValidation(StringBuilder builder, PrimitiveShape primitive, string accessor, string path, string indexVar, int indent) + { + if (primitive.Kind == PrimitiveKind.String && !string.IsNullOrEmpty(primitive.ConstValue)) + { + AppendLine(builder, indent, $"if {accessor} != \"{primitive.ConstValue}\" 
{{"); + AppendLine(builder, indent + 1, $"return fmt.Errorf(\"{path}[%d] must equal {primitive.ConstValue}\", {indexVar})"); + AppendLine(builder, indent, "}"); + } + + if ((primitive.Kind == PrimitiveKind.Number || primitive.Kind == PrimitiveKind.Integer) && primitive.Minimum.HasValue) + { + AppendLine(builder, indent, $"if {accessor} < {primitive.Minimum.Value} {{"); + AppendLine(builder, indent + 1, $"return fmt.Errorf(\"{path}[%d] must be >= {primitive.Minimum.Value}\", {indexVar})"); + AppendLine(builder, indent, "}"); + } + + if ((primitive.Kind == PrimitiveKind.Number || primitive.Kind == PrimitiveKind.Integer) && primitive.Maximum.HasValue) + { + AppendLine(builder, indent, $"if {accessor} > {primitive.Maximum.Value} {{"); + AppendLine(builder, indent + 1, $"return fmt.Errorf(\"{path}[%d] must be <= {primitive.Maximum.Value}\", {indexVar})"); + AppendLine(builder, indent, "}"); + } + } + + private static string GoFieldType(PropertySpec property) + { + return property.Type switch + { + PrimitiveShape primitive when primitive.Kind == PrimitiveKind.String && !property.Required => "*string", + PrimitiveShape primitive when (primitive.Kind == PrimitiveKind.Number || primitive.Kind == PrimitiveKind.Integer) && !property.Required => "*float64", + PrimitiveShape primitive when primitive.Kind == PrimitiveKind.Boolean && !property.Required => "*bool", + PrimitiveShape primitive when primitive.Kind == PrimitiveKind.String => "string", + PrimitiveShape primitive when primitive.Kind == PrimitiveKind.Number || primitive.Kind == PrimitiveKind.Integer => "float64", + PrimitiveShape primitive when primitive.Kind == PrimitiveKind.Boolean => "bool", + EnumShape enumShape when property.Required => enumShape.Enum.Name, + EnumShape enumShape => "*" + enumShape.Enum.Name, + ObjectShape objectShape when property.Required => objectShape.Object.Name, + ObjectShape objectShape => "*" + objectShape.Object.Name, + ArrayShape arrayShape => $"[]{GoElementType(arrayShape.Item)}", + _ => 
"interface{}" + }; + } + + private static string GoElementType(TypeShape itemType) + => itemType switch + { + PrimitiveShape primitive when primitive.Kind == PrimitiveKind.String => "string", + PrimitiveShape primitive when primitive.Kind == PrimitiveKind.Number || primitive.Kind == PrimitiveKind.Integer => "float64", + PrimitiveShape primitive when primitive.Kind == PrimitiveKind.Boolean => "bool", + EnumShape enumShape => enumShape.Enum.Name, + ObjectShape objectShape => objectShape.Object.Name, + ArrayShape nested => $"[]{GoElementType(nested.Item)}", + _ => "interface{}" + }; + + private static bool UsesPointer(TypeShape type, bool required) + => type switch + { + PrimitiveShape => !required, + EnumShape => !required, + ObjectShape => !required, + _ => false + }; + + private static void AppendLine(StringBuilder builder, int indent, string text) + { + builder.Append(new string('\t', indent)); + builder.AppendLine(text); + } + + private static string JsonTag(PropertySpec property) + => property.Required ? 
property.Name : $"{property.Name},omitempty"; + + private static string ToExported(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return string.Empty; + } + + var builder = new StringBuilder(); + var span = value.AsSpan(); + + void AppendSegment(ReadOnlySpan segment) + { + if (segment.IsEmpty) + { + return; + } + + var cleaned = new List(segment.Length); + for (var i = 0; i < segment.Length; i++) + { + var ch = segment[i]; + if (char.IsLetterOrDigit(ch)) + { + cleaned.Add(ch); + } + } + + if (cleaned.Count == 0) + { + return; + } + + builder.Append(char.ToUpperInvariant(cleaned[0])); + for (var i = 1; i < cleaned.Count; i++) + { + builder.Append(char.ToLowerInvariant(cleaned[i])); + } + } + + var start = 0; + for (var i = 0; i < span.Length; i++) + { + var ch = span[i]; + if (ch == '-' || ch == '_' || ch == ' ') + { + AppendSegment(span[start..i]); + start = i + 1; + continue; + } + + if (i > start && char.IsUpper(ch) && char.IsLower(span[i - 1])) + { + AppendSegment(span[start..i]); + start = i; + } + } + + AppendSegment(span[start..]); + + return builder.Length == 0 ? 
"Value" : builder.ToString(); + } +} diff --git a/src/Attestor/StellaOps.Attestor.Types/Tools/StellaOps.Attestor.Types.Generator/StellaOps.Attestor.Types.Generator.csproj b/src/Attestor/StellaOps.Attestor.Types/Tools/StellaOps.Attestor.Types.Generator/StellaOps.Attestor.Types.Generator.csproj new file mode 100644 index 00000000..dab6122f --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/Tools/StellaOps.Attestor.Types.Generator/StellaOps.Attestor.Types.Generator.csproj @@ -0,0 +1,9 @@ + + + Exe + net10.0 + enable + enable + true + + diff --git a/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/build-provenance.sample.json b/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/build-provenance.sample.json new file mode 100644 index 00000000..c860c2c4 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/build-provenance.sample.json @@ -0,0 +1,107 @@ +{ + "schemaVersion": "1.0.0", + "predicateType": "StellaOps.BuildProvenance@1", + "subject": [ + { + "subjectKind": "container-image", + "name": "registry.stella-ops.internal/scan/api", + "digest": { + "sha256": "5f4d4b1e9c2f3a1d7a4e5b6c7d8e9f00112233445566778899aabbccddeeff00" + }, + "imageDigest": "sha256:5f4d4b1e9c2f3a1d7a4e5b6c7d8e9f00112233445566778899aabbccddeeff00", + "mediaType": "application/vnd.docker.distribution.manifest.v2+json" + } + ], + "issuer": { + "issuerType": "service", + "id": "urn:stellaops:svc:builder", + "tenantId": "tenant-alpha", + "displayName": "StellaOps Build Service", + "workload": { + "service": "builder-web", + "cluster": "prod-us-east", + "namespace": "build-system" + }, + "signingKey": { + "keyId": "builder-key-01", + "mode": "kms", + "algorithm": "ed25519", + "issuer": "vault.kms.internal" + } + }, + "issuedAt": "2025-10-31T18:21:04Z", + "materials": [ + { + "uri": "git+https://git.stella-ops.org/scanner.git@refs/heads/main", + "digest": { + "sha1": "a1b2c3d4e5f6a7b8c9d00112233445566778899a" + }, + "role": "source" + }, + { + "uri": 
"oci://registry.stella-ops.internal/base/node:20-bullseye", + "digest": { + "sha256": "ab40d8d0734c28f3b60df1e6a4ed3f2c1b5d7e9f0a1b2c3d4e5f66778899aabb" + }, + "role": "base-image" + } + ], + "transparency": [ + { + "logId": "rekor-primary", + "logUrl": "https://rekor.stella-ops.internal", + "uuid": "cb2a6f2e-353e-4a62-8504-18f741fa0010", + "index": 128943, + "checkpoint": { + "origin": "rekor-primary", + "size": 155000, + "rootHash": "3rJcAM1b9x1Pcjwo8y9zKg2v1nX8/oe3mY4HhE2bY0g=", + "timestamp": "2025-10-31T18:21:06Z" + }, + "witnessed": true + } + ], + "build": { + "buildType": "stellaops:buildkit@v1", + "builder": { + "id": "urn:stellaops:builder:buildkit", + "version": "1.9.2", + "displayName": "BuildKit Runner" + }, + "invocation": { + "configSource": { + "uri": "git+https://git.stella-ops.org/scanner.git//.stella/build.yaml", + "digest": { + "sha256": "1f7e26d668d9fd6bae1a5d0a7a27bf3cdf8b4dd0d9775ad911e6cef0e1edf1d2" + } + }, + "parameters": { + "target": "release", + "platform": "linux/amd64" + }, + "environment": { + "GIT_SHA": "9f3e7ad1", + "CI_PIPELINE_ID": "build-2045" + }, + "entryPoint": "ci/scripts/build-image.sh" + }, + "metadata": { + "startedAt": "2025-10-31T18:19:11Z", + "finishedAt": "2025-10-31T18:20:52Z", + "reproducible": true, + "buildDurationSeconds": 101 + }, + "outputs": [ + { + "subjectKind": "artifact", + "name": "dist/scanner-api.tar", + "digest": { + "sha256": "cfe4b9b77b4a90d63ba6c2e5b40e6d9b9724f9a3e0d5b6c7f8e9d0a1b2c3d4e5" + }, + "mediaType": "application/x-tar", + "sizeBytes": 31457280 + } + ] + }, + "slsaLevel": "slsa3.0" +} diff --git a/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/custom-evidence.sample.json b/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/custom-evidence.sample.json new file mode 100644 index 00000000..b26e1584 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/custom-evidence.sample.json @@ -0,0 +1,39 @@ +{ + "schemaVersion": "1.0.0", + "predicateType": "StellaOps.CustomEvidence@1", + 
"subject": [ + { + "subjectKind": "artifact", + "name": "registry.stella-ops.internal/runtime/api@sha256:d2c3b4a5f6e7d8c9b0a1f2e3d4c5b6a79876543210fedcba9876543210fedcba", + "digest": { + "sha256": "f3b4c5d6e7f8091a2b3c4d5e6f708192a3b4c5d6e7f8091a2b3c4d5e6f708192" + } + } + ], + "issuer": { + "issuerType": "automation", + "id": "urn:stellaops:automation:evidence-uploader", + "tenantId": "tenant-alpha", + "signingKey": { + "keyId": "automation-key-17", + "mode": "offline", + "algorithm": "ed25519" + } + }, + "issuedAt": "2025-10-31T05:32:28Z", + "customSchema": { + "uri": "https://schemas.stella-ops.org/custom/runtime-evidence/v1.json", + "digest": { + "sha256": "aa11bb22cc33dd44ee55ff66aa77bb88cc99ddeeff0011223344556677889900" + }, + "version": "1.0" + }, + "payload": { + "controlId": "OPS-RUN-102", + "controlStatus": "passed", + "auditedBy": "auditor@example.org", + "evidenceUri": "s3://compliance-artifacts/runtime/api/2025-10-31/report.pdf", + "notes": "Manual security review completed for release 3.14.0." + }, + "notes": "Custom evidence uploaded by compliance automation workflow." 
+} diff --git a/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/policy-evaluation.sample.json b/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/policy-evaluation.sample.json new file mode 100644 index 00000000..b099b2dd --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/policy-evaluation.sample.json @@ -0,0 +1,77 @@ +{ + "schemaVersion": "1.0.0", + "predicateType": "StellaOps.PolicyEvaluation@1", + "subject": [ + { + "subjectKind": "policy-report", + "name": "policy-eval/runtime-api@sha256:5f4d4b1e9c2f3a1d7a4e5b6c7d8e9f00112233445566778899aabbccddeeff00", + "digest": { + "sha256": "21f4b8d7c6e5a4f3b2c1d0e9f8a7b6c5d4e3f2a1b0c9d8e7f6a5b4c3d2e1f0a9" + } + } + ], + "issuer": { + "issuerType": "service", + "id": "urn:stellaops:svc:policy-engine", + "tenantId": "tenant-alpha", + "signingKey": { + "keyId": "policy-engine-key", + "mode": "hsm", + "algorithm": "ed25519", + "issuer": "yubi-hsm" + } + }, + "issuedAt": "2025-10-31T02:44:09Z", + "policy": { + "policyId": "runtime-enforce", + "policyVersion": "2025.10.1", + "revisionDigest": { + "sha256": "aa55bb66cc77dd88ee99ff00112233445566778899aabbccddeeff0011223344" + }, + "mode": "enforce" + }, + "result": { + "status": "fail", + "summary": "Policy runtime-enforce failed: 1 blocking rule violation.", + "violations": [ + { + "ruleId": "RULE-RUNTIME-001", + "severity": "high", + "message": "Critical KEV vulnerabilities detected without waiver.", + "evidence": [ + { + "type": "scan", + "id": "CVE-2025-10001" + } + ], + "suggestedRemediation": "Apply patched base image or configure approved waiver." 
+ } + ], + "waiversApplied": [ + "WAIVER-LICENSE-123" + ] + }, + "explain": [ + { + "id": "trace-node-1", + "type": "rule", + "message": "Evaluated RULE-RUNTIME-001 on scan results" + }, + { + "id": "trace-node-1.1", + "type": "binding", + "message": "Matched vulnerability CVE-2025-10001 with severity critical" + } + ], + "metrics": { + "rulesEvaluated": 12, + "rulesPassed": 11, + "rulesFailed": 1, + "evaluationDurationMs": 84 + }, + "policyContext": { + "policyId": "runtime-enforce", + "policyVersion": "2025.10.1", + "mode": "enforce" + } +} diff --git a/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/risk-profile-evidence.sample.json b/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/risk-profile-evidence.sample.json new file mode 100644 index 00000000..0a52ed8d --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/risk-profile-evidence.sample.json @@ -0,0 +1,68 @@ +{ + "schemaVersion": "1.0.0", + "predicateType": "StellaOps.RiskProfileEvidence@1", + "subject": [ + { + "subjectKind": "risk-profile", + "name": "runtime-api@sha256:d2c3b4a5f6e7d8c9b0a1f2e3d4c5b6a79876543210fedcba9876543210fedcba", + "digest": { + "sha256": "f3c2b1a0e9d8c7b6a5f4e3d2c1b0a9876543210fedcba9876543210fedcba987" + } + } + ], + "issuer": { + "issuerType": "service", + "id": "urn:stellaops:svc:risk-engine", + "tenantId": "tenant-alpha", + "signingKey": { + "keyId": "risk-engine-key", + "mode": "kms", + "algorithm": "ed25519" + } + }, + "issuedAt": "2025-10-31T04:00:00Z", + "window": { + "startedAt": "2025-10-30T04:00:00Z", + "endedAt": "2025-10-31T04:00:00Z" + }, + "riskScores": { + "overall": 0.62, + "exploitability": 0.74, + "impact": 0.51, + "epss98Percentile": 0.92, + "kevCount": 1 + }, + "exposure": { + "internetFacing": true, + "runtimeEnforced": false, + "criticality": "mission-critical", + "deployments": 48 + }, + "controls": { + "sbomAttested": true, + "vexCoverage": "partial", + "policyStatus": "fail", + "lastPolicyEvaluation": "2025-10-31T02:44:09Z" + }, + 
"findings": [ + { + "category": "vulnerability", + "severity": "critical", + "summary": "KEV-listed OpenSSL vulnerability present without compensating control.", + "detail": "CVE-2025-10001 remained open in production deployments for >24h.", + "evidence": [ + "scan:CVE-2025-10001", + "policy:RULE-RUNTIME-001" + ] + }, + { + "category": "runtime", + "severity": "medium", + "summary": "No runtime admission control for critical namespaces.", + "detail": "Zastava webhook disabled on cluster prod-us-east due to maintenance.", + "evidence": [ + "zastava:event:2025-10-30T21:41Z" + ] + } + ] +} diff --git a/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/sbom-attestation.sample.json b/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/sbom-attestation.sample.json new file mode 100644 index 00000000..ecbbb076 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/sbom-attestation.sample.json @@ -0,0 +1,80 @@ +{ + "schemaVersion": "1.0.0", + "predicateType": "StellaOps.SBOMAttestation@1", + "subject": [ + { + "subjectKind": "container-image", + "name": "registry.stella-ops.internal/policy/engine", + "digest": { + "sha256": "4d7c3a1b2f9e0d6c5b4a3f2e1d0c9b8a7766554433221100ffaabbccddeeff12" + }, + "imageDigest": "sha256:4d7c3a1b2f9e0d6c5b4a3f2e1d0c9b8a7766554433221100ffaabbccddeeff12" + } + ], + "issuer": { + "issuerType": "service", + "id": "urn:stellaops:svc:scanner", + "tenantId": "tenant-alpha", + "signingKey": { + "keyId": "scanner-key-01", + "mode": "keyless", + "algorithm": "ecdsa-p256", + "issuer": "fulcio.internal", + "certificateChain": [ + "-----BEGIN CERTIFICATE-----MIIB...==-----END CERTIFICATE-----" + ] + } + }, + "issuedAt": "2025-10-30T14:05:18Z", + "materials": [ + { + "uri": "oci://registry.stella-ops.internal/scanner/sbom-indexer@sha256:1122aa55bb66cc77dd88ee99ff00112233445566778899aabbccddeeff001122", + "role": "scanner-runtime" + } + ], + "transparency": [ + { + "logId": "rekor-primary", + "logUrl": "https://rekor.stella-ops.internal", + 
"uuid": "11111111-2222-3333-4444-555555555555", + "index": 567890 + } + ], + "sbom": { + "format": "cyclonedx-json", + "specVersion": "1.6", + "digest": { + "sha256": "9a7b6c5d4e3f2a1b0c9d8e7f6a5b4c3d2e1f0a9b8c7d6e5f4a3b2c1d0e9f8a7b" + }, + "contentUri": "cas://sbom/blobs/9a7b6c5d4e3f2a1b0c9d8e7f6a5b4c3d2e1f0a9b8c7d6e5f4a3b2c1d0e9f8a7b", + "contentMediaType": "application/vnd.cyclonedx+json;version=1.6", + "sizeBytes": 48213, + "descriptor": { + "bomRef": "urn:uuid:fa8706c2-2d3e-4e74-bc3e-337ca0fdf2f7", + "componentName": "policy-engine", + "componentVersion": "1.12.0" + }, + "componentCounts": { + "packages": 215, + "dependencies": 214, + "services": 0, + "vulnerabilities": 14 + } + }, + "coverage": { + "layers": [ + "sha256:aa11bb22cc33dd44ee55ff66aa77bb88cc99ddeeff00112233445566778899aa", + "sha256:bb22cc33dd44ee55ff66aa77bb88cc99ddeeff00112233445566778899aabbcc" + ], + "packagesIncluded": true, + "licenseScanEnabled": true + }, + "generator": { + "name": "StellaOps Scanner", + "version": "2.4.3", + "buildId": "scanner-build-8897", + "configurationDigest": { + "sha256": "abc1239f7e6d5c4b3a29181706f5e4d3c2b1a0f99887766554433221100ffeedd" + } + } +} diff --git a/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/scan-results.sample.json b/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/scan-results.sample.json new file mode 100644 index 00000000..c9c445d2 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/scan-results.sample.json @@ -0,0 +1,126 @@ +{ + "schemaVersion": "1.0.0", + "predicateType": "StellaOps.ScanResults@1", + "subject": [ + { + "subjectKind": "scan-report", + "name": "registry.stella-ops.internal/runtime/api@sha256:d2c3b4a5f6e7d8c9b0a1f2e3d4c5b6a79876543210fedcba9876543210fedcba", + "digest": { + "sha256": "deafbeefdeafbeefdeafbeefdeafbeefdeafbeefdeafbeefdeafbeefdeafbeef" + }, + "imageDigest": "sha256:d2c3b4a5f6e7d8c9b0a1f2e3d4c5b6a79876543210fedcba9876543210fedcba" + } + ], + "issuer": { + "issuerType": "service", + "id": 
"urn:stellaops:svc:scanner.worker", + "tenantId": "tenant-alpha", + "signingKey": { + "keyId": "scanner-worker-key", + "mode": "keyless", + "algorithm": "ed25519", + "issuer": "fulcio.internal" + } + }, + "issuedAt": "2025-10-29T06:14:45Z", + "materials": [ + { + "uri": "git+https://git.stella-ops.org/runtime/api.git@refs/tags/v3.14.0", + "role": "source" + } + ], + "transparency": [ + { + "logId": "rekor-primary", + "logUrl": "https://rekor.stella-ops.internal", + "uuid": "33333333-4444-5555-6666-777777777777", + "index": 778899 + } + ], + "scanner": { + "name": "StellaOps Scanner", + "version": "2.4.3", + "runId": "scan-20251029-0614", + "configurationDigest": { + "sha256": "f1c2d3e4a5b60718293a4b5c6d7e8f90123456789abcdef0123456789abcdef0" + }, + "mode": "inventory" + }, + "summary": { + "totalFindings": 6, + "newFindings": 2, + "kevFindings": 1, + "fixableFindings": 4, + "severityCounts": { + "critical": 1, + "high": 2, + "medium": 2, + "low": 1, + "informational": 0 + } + }, + "policyContext": { + "policyId": "default-runtime-policy", + "policyVersion": "42", + "mode": "enforce" + }, + "findings": [ + { + "vulnerabilityId": "CVE-2025-10001", + "severity": "critical", + "status": "detected", + "kev": true, + "package": { + "name": "openssl", + "version": "3.0.12-3.el9", + "purl": "pkg:rpm/redhat/openssl@3.0.12-3.el9", + "type": "rpm" + }, + "fixedVersion": "3.0.13-1.el9", + "introducedIn": "sha256:aa99887766554433221100ffeeddccbbaa99887766554433221100ffeeddccbb", + "evidence": { + "source": "os-packages", + "paths": [ + "/usr/lib64/libssl.so.3" + ], + "callers": [ + "policy-engine" + ] + } + }, + { + "vulnerabilityId": "GHSA-1234-abcd-5678", + "severity": "high", + "status": "detected", + "kev": false, + "package": { + "name": "lodash", + "version": "4.17.21", + "purl": "pkg:npm/lodash@4.17.21", + "type": "npm" + }, + "fixedVersion": "4.17.22", + "evidence": { + "source": "application-lockfile", + "paths": [ + "/app/package-lock.json" + ] + }, + "notes": "Used 
by metrics exporter." + }, + { + "vulnerabilityId": "CVE-2024-50010", + "severity": "medium", + "status": "remediated", + "kev": false, + "package": { + "name": "glibc", + "version": "2.36-60.el9", + "purl": "pkg:rpm/redhat/glibc@2.36-60.el9", + "type": "rpm" + }, + "fixedVersion": "2.36-62.el9", + "notes": "Patched in base image refresh." + } + ] +} diff --git a/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/vex-attestation.sample.json b/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/vex-attestation.sample.json new file mode 100644 index 00000000..98450c8a --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/fixtures/v1/vex-attestation.sample.json @@ -0,0 +1,75 @@ +{ + "schemaVersion": "1.0.0", + "predicateType": "StellaOps.VEXAttestation@1", + "subject": [ + { + "subjectKind": "vex-statement", + "name": "registry.stella-ops.internal/runtime/api@sha256:d2c3b4a5f6e7d8c9b0a1f2e3d4c5b6a79876543210fedcba9876543210fedcba", + "digest": { + "sha256": "8f6e5d4c3b2a190817263544554433221100ffeeddaabbccddeeff0011223344" + } + } + ], + "issuer": { + "issuerType": "service", + "id": "urn:stellaops:svc:excitor", + "tenantId": "tenant-alpha", + "signingKey": { + "keyId": "vex-service-key", + "mode": "kms", + "algorithm": "ed25519", + "issuer": "kms.attestor.internal" + } + }, + "issuedAt": "2025-10-30T09:12:03Z", + "vexStandard": "openvex-1.0", + "generator": { + "name": "StellaOps Excititor", + "version": "1.8.0" + }, + "statements": [ + { + "id": "stmt-001", + "vulnerabilityId": "CVE-2025-10001", + "status": "not_affected", + "statementType": "analysis", + "timestamp": "2025-10-30T09:11:40Z", + "justification": "Component not present in the deployed runtime closure.", + "impactStatement": "The affected OpenSSL module is unused by the runtime API image entrypoint chain.", + "products": [ + { + "productId": "registry.stella-ops.internal/runtime/api@sha256:d2c3b4...", + "name": "runtime-api", + "version": "3.14.0", + "purl": 
"pkg:oci/runtime-api@sha256:d2c3b4a5f6e7d8c9b0a1f2e3d4c5b6a79876543210fedcba9876543210fedcba" + } + ], + "supplier": { + "name": "StellaOps Runtime Guild", + "id": "urn:stellaops:guild:runtime" + }, + "references": [ + "https://kb.stella-ops.org/vex/CVE-2025-10001" + ] + }, + { + "id": "stmt-002", + "vulnerabilityId": "GHSA-1234-abcd-5678", + "status": "affected", + "statementType": "remediation", + "timestamp": "2025-10-30T09:11:55Z", + "impactStatement": "Lodash is present in the telemetry plug-in; exploitation requires UID 0 inside the container.", + "actionStatement": "Upgrade telemetry plug-in to v2.1.5 or apply policy waiver until patch window.", + "products": [ + { + "productId": "registry.stella-ops.internal/runtime/api@sha256:d2c3b4...", + "name": "runtime-api", + "version": "3.14.0" + } + ], + "references": [ + "https://github.com/lodash/lodash/security/advisory" + ] + } + ] +} diff --git a/src/Attestor/StellaOps.Attestor.Types/generated/go/go.mod b/src/Attestor/StellaOps.Attestor.Types/generated/go/go.mod new file mode 100644 index 00000000..29b16c5d --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/generated/go/go.mod @@ -0,0 +1,3 @@ +module github.com/stella-ops/attestor/types + +go 1.22 diff --git a/src/Attestor/StellaOps.Attestor.Types/generated/go/types.go b/src/Attestor/StellaOps.Attestor.Types/generated/go/types.go new file mode 100644 index 00000000..78b6721e --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/generated/go/types.go @@ -0,0 +1,628 @@ +// Code generated by StellaOps.Attestor.Types.Generator. DO NOT EDIT. 
+package attesttypes + +import ( + "encoding/json" + "errors" + "fmt" +) + +type FindingStatus string + +const ( + FindingStatusDetected FindingStatus = "detected" + FindingStatusConfirmed FindingStatus = "confirmed" + FindingStatusFixed FindingStatus = "fixed" + FindingStatusNotAffected FindingStatus = "not_affected" +) + +func (v FindingStatus) Validate() error { + switch v { + case FindingStatusDetected, FindingStatusConfirmed, FindingStatusFixed, FindingStatusNotAffected: + return nil + default: + return fmt.Errorf("invalid value for FindingStatus: %s", string(v)) + } +} + +type PolicyEffect string + +const ( + PolicyEffectAllow PolicyEffect = "allow" + PolicyEffectDeny PolicyEffect = "deny" + PolicyEffectWarn PolicyEffect = "warn" +) + +func (v PolicyEffect) Validate() error { + switch v { + case PolicyEffectAllow, PolicyEffectDeny, PolicyEffectWarn: + return nil + default: + return fmt.Errorf("invalid value for PolicyEffect: %s", string(v)) + } +} + +type PolicyOutcome string + +const ( + PolicyOutcomePass PolicyOutcome = "pass" + PolicyOutcomeFail PolicyOutcome = "fail" + PolicyOutcomeWaived PolicyOutcome = "waived" +) + +func (v PolicyOutcome) Validate() error { + switch v { + case PolicyOutcomePass, PolicyOutcomeFail, PolicyOutcomeWaived: + return nil + default: + return fmt.Errorf("invalid value for PolicyOutcome: %s", string(v)) + } +} + +type RiskLevel string + +const ( + RiskLevelCritical RiskLevel = "critical" + RiskLevelHigh RiskLevel = "high" + RiskLevelMedium RiskLevel = "medium" + RiskLevelLow RiskLevel = "low" + RiskLevelInformational RiskLevel = "informational" +) + +func (v RiskLevel) Validate() error { + switch v { + case RiskLevelCritical, RiskLevelHigh, RiskLevelMedium, RiskLevelLow, RiskLevelInformational: + return nil + default: + return fmt.Errorf("invalid value for RiskLevel: %s", string(v)) + } +} + +type SbomFormat string + +const ( + SbomFormatCycloneDx16 SbomFormat = "CycloneDX-1.6" + SbomFormatSbom300 SbomFormat = "SBOM-3.0.0" +) + 
+func (v SbomFormat) Validate() error { + switch v { + case SbomFormatCycloneDx16, SbomFormatSbom300: + return nil + default: + return fmt.Errorf("invalid value for SbomFormat: %s", string(v)) + } +} + +type Severity string + +const ( + SeverityCritical Severity = "critical" + SeverityHigh Severity = "high" + SeverityMedium Severity = "medium" + SeverityLow Severity = "low" + SeverityInfo Severity = "info" +) + +func (v Severity) Validate() error { + switch v { + case SeverityCritical, SeverityHigh, SeverityMedium, SeverityLow, SeverityInfo: + return nil + default: + return fmt.Errorf("invalid value for Severity: %s", string(v)) + } +} + +type VexStatus string + +const ( + VexStatusNotAffected VexStatus = "not_affected" + VexStatusAffected VexStatus = "affected" + VexStatusUnderInvestigation VexStatus = "under_investigation" + VexStatusFixed VexStatus = "fixed" +) + +func (v VexStatus) Validate() error { + switch v { + case VexStatusNotAffected, VexStatusAffected, VexStatusUnderInvestigation, VexStatusFixed: + return nil + default: + return fmt.Errorf("invalid value for VexStatus: %s", string(v)) + } +} + +const BuildProvenanceSchemaVersion = "StellaOps.BuildProvenance@1" + +const CustomEvidenceSchemaVersion = "StellaOps.CustomEvidence@1" + +const PolicyEvaluationSchemaVersion = "StellaOps.PolicyEvaluation@1" + +const RiskProfileEvidenceSchemaVersion = "StellaOps.RiskProfileEvidence@1" + +const SbomAttestationSchemaVersion = "StellaOps.SBOMAttestation@1" + +const ScanResultsSchemaVersion = "StellaOps.ScanResults@1" + +const VexAttestationSchemaVersion = "StellaOps.VEXAttestation@1" + +type BuildMetadata struct { + BuildStartedOn string `json:"buildStartedOn"` + BuildFinishedOn string `json:"buildFinishedOn"` + Reproducible *bool `json:"reproducible,omitempty"` + BuildInvocationId *string `json:"buildInvocationId,omitempty"` +} + +func (value *BuildMetadata) Validate() error { + if value == nil { + return errors.New("BuildMetadata is nil") + } + return nil +} + 
+type BuildProvenance struct { + SchemaVersion string `json:"schemaVersion"` + BuildType string `json:"buildType"` + Builder BuilderIdentity `json:"builder"` + Materials []MaterialReference `json:"materials"` + Metadata BuildMetadata `json:"metadata"` + Environment *EnvironmentMetadata `json:"environment,omitempty"` +} + +func (value *BuildProvenance) Validate() error { + if value == nil { + return errors.New("BuildProvenance is nil") + } + if value.SchemaVersion != "StellaOps.BuildProvenance@1" { + return fmt.Errorf("BuildProvenance.SchemaVersion must equal StellaOps.BuildProvenance@1") + } + if err := value.Builder.Validate(); err != nil { + return fmt.Errorf("invalid BuildProvenance.Builder: %w", err) + } + if len(value.Materials) < 1 { + return fmt.Errorf("BuildProvenance.Materials must contain at least 1 item(s)") + } + for i := range value.Materials { + if err := value.Materials[i].Validate(); err != nil { + return fmt.Errorf("invalid BuildProvenance.Materials[%d]: %w", i, err) + } + } + if err := value.Metadata.Validate(); err != nil { + return fmt.Errorf("invalid BuildProvenance.Metadata: %w", err) + } + if value.Environment != nil { + if err := value.Environment.Validate(); err != nil { + return fmt.Errorf("invalid BuildProvenance.Environment: %w", err) + } + } + return nil +} + +type BuilderIdentity struct { + Id string `json:"id"` + Version *string `json:"version,omitempty"` + Platform *string `json:"platform,omitempty"` +} + +func (value *BuilderIdentity) Validate() error { + if value == nil { + return errors.New("BuilderIdentity is nil") + } + return nil +} + +type CustomEvidence struct { + SchemaVersion string `json:"schemaVersion"` + SubjectDigest string `json:"subjectDigest"` + Kind string `json:"kind"` + GeneratedAt string `json:"generatedAt"` + Properties []CustomProperty `json:"properties,omitempty"` +} + +func (value *CustomEvidence) Validate() error { + if value == nil { + return errors.New("CustomEvidence is nil") + } + if value.SchemaVersion 
!= "StellaOps.CustomEvidence@1" { + return fmt.Errorf("CustomEvidence.SchemaVersion must equal StellaOps.CustomEvidence@1") + } + for i := range value.Properties { + if err := value.Properties[i].Validate(); err != nil { + return fmt.Errorf("invalid CustomEvidence.Properties[%d]: %w", i, err) + } + } + return nil +} + +type CustomProperty struct { + Key string `json:"key"` + Value string `json:"value"` +} + +func (value *CustomProperty) Validate() error { + if value == nil { + return errors.New("CustomProperty is nil") + } + return nil +} + +type DigestReference struct { + Algorithm string `json:"algorithm"` + Value string `json:"value"` +} + +func (value *DigestReference) Validate() error { + if value == nil { + return errors.New("DigestReference is nil") + } + return nil +} + +type EnvironmentMetadata struct { + Platform *string `json:"platform,omitempty"` + ImageDigest *DigestReference `json:"imageDigest,omitempty"` +} + +func (value *EnvironmentMetadata) Validate() error { + if value == nil { + return errors.New("EnvironmentMetadata is nil") + } + if value.ImageDigest != nil { + if err := value.ImageDigest.Validate(); err != nil { + return fmt.Errorf("invalid EnvironmentMetadata.ImageDigest: %w", err) + } + } + return nil +} + +type MaterialReference struct { + Uri string `json:"uri"` + Digests []DigestReference `json:"digests"` + Note *string `json:"note,omitempty"` +} + +func (value *MaterialReference) Validate() error { + if value == nil { + return errors.New("MaterialReference is nil") + } + if len(value.Digests) < 1 { + return fmt.Errorf("MaterialReference.Digests must contain at least 1 item(s)") + } + for i := range value.Digests { + if err := value.Digests[i].Validate(); err != nil { + return fmt.Errorf("invalid MaterialReference.Digests[%d]: %w", i, err) + } + } + return nil +} + +type PolicyDecision struct { + PolicyId string `json:"policyId"` + RuleId string `json:"ruleId"` + Effect PolicyEffect `json:"effect"` + Reason *string 
`json:"reason,omitempty"` + Remediation *string `json:"remediation,omitempty"` +} + +func (value *PolicyDecision) Validate() error { + if value == nil { + return errors.New("PolicyDecision is nil") + } + if err := value.Effect.Validate(); err != nil { + return fmt.Errorf("invalid PolicyDecision.Effect: %w", err) + } + return nil +} + +type PolicyEvaluation struct { + SchemaVersion string `json:"schemaVersion"` + SubjectDigest string `json:"subjectDigest"` + PolicyVersion string `json:"policyVersion"` + EvaluatedAt string `json:"evaluatedAt"` + Outcome PolicyOutcome `json:"outcome"` + Decisions []PolicyDecision `json:"decisions"` +} + +func (value *PolicyEvaluation) Validate() error { + if value == nil { + return errors.New("PolicyEvaluation is nil") + } + if value.SchemaVersion != "StellaOps.PolicyEvaluation@1" { + return fmt.Errorf("PolicyEvaluation.SchemaVersion must equal StellaOps.PolicyEvaluation@1") + } + if err := value.Outcome.Validate(); err != nil { + return fmt.Errorf("invalid PolicyEvaluation.Outcome: %w", err) + } + for i := range value.Decisions { + if err := value.Decisions[i].Validate(); err != nil { + return fmt.Errorf("invalid PolicyEvaluation.Decisions[%d]: %w", i, err) + } + } + return nil +} + +type RiskFactor struct { + Name string `json:"name"` + Weight float64 `json:"weight"` + Description *string `json:"description,omitempty"` +} + +func (value *RiskFactor) Validate() error { + if value == nil { + return errors.New("RiskFactor is nil") + } + if value.Weight < 0 { + return fmt.Errorf("RiskFactor.Weight must be >= 0") + } + if value.Weight > 1 { + return fmt.Errorf("RiskFactor.Weight must be <= 1") + } + return nil +} + +type RiskProfileEvidence struct { + SchemaVersion string `json:"schemaVersion"` + SubjectDigest string `json:"subjectDigest"` + GeneratedAt string `json:"generatedAt"` + RiskScore float64 `json:"riskScore"` + RiskLevel RiskLevel `json:"riskLevel"` + Factors []RiskFactor `json:"factors"` +} + +func (value *RiskProfileEvidence) 
Validate() error { + if value == nil { + return errors.New("RiskProfileEvidence is nil") + } + if value.SchemaVersion != "StellaOps.RiskProfileEvidence@1" { + return fmt.Errorf("RiskProfileEvidence.SchemaVersion must equal StellaOps.RiskProfileEvidence@1") + } + if value.RiskScore < 0 { + return fmt.Errorf("RiskProfileEvidence.RiskScore must be >= 0") + } + if value.RiskScore > 100 { + return fmt.Errorf("RiskProfileEvidence.RiskScore must be <= 100") + } + if err := value.RiskLevel.Validate(); err != nil { + return fmt.Errorf("invalid RiskProfileEvidence.RiskLevel: %w", err) + } + for i := range value.Factors { + if err := value.Factors[i].Validate(); err != nil { + return fmt.Errorf("invalid RiskProfileEvidence.Factors[%d]: %w", i, err) + } + } + return nil +} + +type SbomAttestation struct { + SchemaVersion string `json:"schemaVersion"` + SubjectDigest string `json:"subjectDigest"` + SbomFormat SbomFormat `json:"sbomFormat"` + SbomDigest DigestReference `json:"sbomDigest"` + SbomUri *string `json:"sbomUri,omitempty"` + ComponentCount float64 `json:"componentCount"` + Packages []SbomPackage `json:"packages,omitempty"` +} + +func (value *SbomAttestation) Validate() error { + if value == nil { + return errors.New("SbomAttestation is nil") + } + if value.SchemaVersion != "StellaOps.SBOMAttestation@1" { + return fmt.Errorf("SbomAttestation.SchemaVersion must equal StellaOps.SBOMAttestation@1") + } + if err := value.SbomFormat.Validate(); err != nil { + return fmt.Errorf("invalid SbomAttestation.SbomFormat: %w", err) + } + if err := value.SbomDigest.Validate(); err != nil { + return fmt.Errorf("invalid SbomAttestation.SbomDigest: %w", err) + } + if value.ComponentCount < 0 { + return fmt.Errorf("SbomAttestation.ComponentCount must be >= 0") + } + for i := range value.Packages { + if err := value.Packages[i].Validate(); err != nil { + return fmt.Errorf("invalid SbomAttestation.Packages[%d]: %w", i, err) + } + } + return nil +} + +type SbomPackage struct { + Purl string 
`json:"purl"` + Version *string `json:"version,omitempty"` + Licenses []string `json:"licenses,omitempty"` +} + +func (value *SbomPackage) Validate() error { + if value == nil { + return errors.New("SbomPackage is nil") + } + if len(value.Licenses) < 1 { + return fmt.Errorf("SbomPackage.Licenses must contain at least 1 item(s)") + } + return nil +} + +type ScanFinding struct { + Id string `json:"id"` + Severity Severity `json:"severity"` + Status FindingStatus `json:"status"` + PackageName string `json:"packageName"` + PackageVersion *string `json:"packageVersion,omitempty"` + CvssScore *float64 `json:"cvssScore,omitempty"` + Description *string `json:"description,omitempty"` + References []string `json:"references,omitempty"` +} + +func (value *ScanFinding) Validate() error { + if value == nil { + return errors.New("ScanFinding is nil") + } + if err := value.Severity.Validate(); err != nil { + return fmt.Errorf("invalid ScanFinding.Severity: %w", err) + } + if err := value.Status.Validate(); err != nil { + return fmt.Errorf("invalid ScanFinding.Status: %w", err) + } + if value.CvssScore != nil { + if *value.CvssScore < 0 { + return fmt.Errorf("ScanFinding.CvssScore must be >= 0") + } + if *value.CvssScore > 10 { + return fmt.Errorf("ScanFinding.CvssScore must be <= 10") + } + } + if len(value.References) < 1 { + return fmt.Errorf("ScanFinding.References must contain at least 1 item(s)") + } + return nil +} + +type ScanResults struct { + SchemaVersion string `json:"schemaVersion"` + SubjectDigest string `json:"subjectDigest"` + ScannerName string `json:"scannerName"` + ScannerVersion string `json:"scannerVersion"` + GeneratedAt string `json:"generatedAt"` + Findings []ScanFinding `json:"findings"` +} + +func (value *ScanResults) Validate() error { + if value == nil { + return errors.New("ScanResults is nil") + } + if value.SchemaVersion != "StellaOps.ScanResults@1" { + return fmt.Errorf("ScanResults.SchemaVersion must equal StellaOps.ScanResults@1") + } + for i := 
range value.Findings { + if err := value.Findings[i].Validate(); err != nil { + return fmt.Errorf("invalid ScanResults.Findings[%d]: %w", i, err) + } + } + return nil +} + +type VexAttestation struct { + SchemaVersion string `json:"schemaVersion"` + SubjectDigest string `json:"subjectDigest"` + GeneratedAt string `json:"generatedAt"` + Statements []VexStatement `json:"statements"` +} + +func (value *VexAttestation) Validate() error { + if value == nil { + return errors.New("VexAttestation is nil") + } + if value.SchemaVersion != "StellaOps.VEXAttestation@1" { + return fmt.Errorf("VexAttestation.SchemaVersion must equal StellaOps.VEXAttestation@1") + } + if len(value.Statements) < 1 { + return fmt.Errorf("VexAttestation.Statements must contain at least 1 item(s)") + } + for i := range value.Statements { + if err := value.Statements[i].Validate(); err != nil { + return fmt.Errorf("invalid VexAttestation.Statements[%d]: %w", i, err) + } + } + return nil +} + +type VexStatement struct { + VulnerabilityId string `json:"vulnerabilityId"` + Status VexStatus `json:"status"` + Timestamp string `json:"timestamp"` + Justification *string `json:"justification,omitempty"` + ImpactStatement *string `json:"impactStatement,omitempty"` + ActionStatement *string `json:"actionStatement,omitempty"` + References []string `json:"references,omitempty"` +} + +func (value *VexStatement) Validate() error { + if value == nil { + return errors.New("VexStatement is nil") + } + if err := value.Status.Validate(); err != nil { + return fmt.Errorf("invalid VexStatement.Status: %w", err) + } + if len(value.References) < 1 { + return fmt.Errorf("VexStatement.References must contain at least 1 item(s)") + } + return nil +} + +func (value *BuildProvenance) CanonicalJSON() ([]byte, error) { + if err := value.Validate(); err != nil { + return nil, err + } + buf, err := json.Marshal(value) + if err != nil { + return nil, fmt.Errorf("failed to marshal BuildProvenance: %w", err) + } + return buf, nil +} + 
+func (value *CustomEvidence) CanonicalJSON() ([]byte, error) { + if err := value.Validate(); err != nil { + return nil, err + } + buf, err := json.Marshal(value) + if err != nil { + return nil, fmt.Errorf("failed to marshal CustomEvidence: %w", err) + } + return buf, nil +} + +func (value *PolicyEvaluation) CanonicalJSON() ([]byte, error) { + if err := value.Validate(); err != nil { + return nil, err + } + buf, err := json.Marshal(value) + if err != nil { + return nil, fmt.Errorf("failed to marshal PolicyEvaluation: %w", err) + } + return buf, nil +} + +func (value *RiskProfileEvidence) CanonicalJSON() ([]byte, error) { + if err := value.Validate(); err != nil { + return nil, err + } + buf, err := json.Marshal(value) + if err != nil { + return nil, fmt.Errorf("failed to marshal RiskProfileEvidence: %w", err) + } + return buf, nil +} + +func (value *SbomAttestation) CanonicalJSON() ([]byte, error) { + if err := value.Validate(); err != nil { + return nil, err + } + buf, err := json.Marshal(value) + if err != nil { + return nil, fmt.Errorf("failed to marshal SbomAttestation: %w", err) + } + return buf, nil +} + +func (value *ScanResults) CanonicalJSON() ([]byte, error) { + if err := value.Validate(); err != nil { + return nil, err + } + buf, err := json.Marshal(value) + if err != nil { + return nil, fmt.Errorf("failed to marshal ScanResults: %w", err) + } + return buf, nil +} + +func (value *VexAttestation) CanonicalJSON() ([]byte, error) { + if err := value.Validate(); err != nil { + return nil, err + } + buf, err := json.Marshal(value) + if err != nil { + return nil, fmt.Errorf("failed to marshal VexAttestation: %w", err) + } + return buf, nil +} + diff --git a/src/Attestor/StellaOps.Attestor.Types/generated/go/types_test.go b/src/Attestor/StellaOps.Attestor.Types/generated/go/types_test.go new file mode 100644 index 00000000..97ef191f --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/generated/go/types_test.go @@ -0,0 +1,239 @@ +package attesttypes + 
+import ( + "fmt" + "testing" +) + +func hexString(ch byte) string { + buf := make([]byte, 64) + for i := range buf { + buf[i] = ch + } + return string(buf) +} + +func sampleBuildProvenance() BuildProvenance { + return BuildProvenance{ + SchemaVersion: BuildProvenanceSchemaVersion, + BuildType: "docker/buildx", + Builder: BuilderIdentity{ + Id: "builder://stellaops/ci", + Version: stringPtr("2025.10.31"), + Platform: stringPtr("linux/amd64"), + }, + Materials: []MaterialReference{ + { + Uri: "git+https://git.stella-ops.org/org/repo@refs/heads/main", + Digests: []DigestReference{ + {Algorithm: "sha256", Value: hexString('a')}, + }, + Note: stringPtr("Source repository"), + }, + }, + Metadata: BuildMetadata{ + BuildStartedOn: "2025-10-31T12:00:00Z", + BuildFinishedOn: "2025-10-31T12:05:00Z", + Reproducible: boolPtr(true), + BuildInvocationId: stringPtr("invocations/123"), + }, + Environment: &EnvironmentMetadata{ + Platform: stringPtr("linux/amd64"), + ImageDigest: &DigestReference{Algorithm: "sha256", Value: hexString('b')}, + }, + } +} + +func sampleSbomAttestation() SbomAttestation { + return SbomAttestation{ + SchemaVersion: SbomAttestationSchemaVersion, + SubjectDigest: fmt.Sprintf("sha256:%s", hexString('c')), + SbomFormat: "CycloneDX-1.6", + SbomDigest: DigestReference{Algorithm: "sha256", Value: hexString('d')}, + SbomUri: stringPtr("https://example.invalid/sbom.json"), + ComponentCount: 2, + Packages: []SbomPackage{ + { + Purl: "pkg:npm/%40stellaops/example@1.0.0", + Version: stringPtr("1.0.0"), + Licenses: []string{"MIT"}, + }, + }, + } +} + +func sampleVexAttestation() VexAttestation { + return VexAttestation{ + SchemaVersion: VexAttestationSchemaVersion, + SubjectDigest: fmt.Sprintf("sha256:%s", hexString('e')), + GeneratedAt: "2025-10-31T12:10:00Z", + Statements: []VexStatement{ + { + VulnerabilityId: "CVE-2025-1234", + Status: VexStatusNotAffected, + Timestamp: "2025-10-31T12:10:00Z", + Justification: stringPtr("component_not_present"), + References: 
[]string{"https://example.invalid/advisory"}, + }, + }, + } +} + +func sampleScanResults() ScanResults { + return ScanResults{ + SchemaVersion: ScanResultsSchemaVersion, + SubjectDigest: fmt.Sprintf("sha256:%s", hexString('f')), + ScannerName: "stellaops/scanner", + ScannerVersion: "2025.10.31", + GeneratedAt: "2025-10-31T12:15:00Z", + Findings: []ScanFinding{ + { + Id: "FIND-001", + Severity: SeverityMedium, + Status: FindingStatusDetected, + PackageName: "libexample", + PackageVersion: stringPtr("1.2.3"), + CvssScore: floatPtr(7.5), + Description: stringPtr("Example vulnerability"), + References: []string{"https://example.invalid/CVE-2025-1234"}, + }, + }, + } +} + +func samplePolicyEvaluation() PolicyEvaluation { + return PolicyEvaluation{ + SchemaVersion: PolicyEvaluationSchemaVersion, + SubjectDigest: fmt.Sprintf("sha256:%s", hexString('1')), + PolicyVersion: "2025.10.31", + EvaluatedAt: "2025-10-31T12:18:00Z", + Outcome: PolicyOutcomePass, + Decisions: []PolicyDecision{ + { + PolicyId: "policy/access-control", + RuleId: "rule/allow-latest", + Effect: PolicyEffectAllow, + Reason: stringPtr("No blocking findings"), + }, + }, + } +} + +func sampleRiskProfile() RiskProfileEvidence { + return RiskProfileEvidence{ + SchemaVersion: RiskProfileEvidenceSchemaVersion, + SubjectDigest: fmt.Sprintf("sha256:%s", hexString('2')), + GeneratedAt: "2025-10-31T12:20:00Z", + RiskScore: 42.5, + RiskLevel: RiskLevelMedium, + Factors: []RiskFactor{ + { + Name: "exploitability", + Weight: 0.6, + Description: stringPtr("No known exploits published"), + }, + }, + } +} + +func sampleCustomEvidence() CustomEvidence { + return CustomEvidence{ + SchemaVersion: CustomEvidenceSchemaVersion, + SubjectDigest: fmt.Sprintf("sha256:%s", hexString('3')), + Kind: "org.stellaops.demo/custom", + GeneratedAt: "2025-10-31T12:25:00Z", + Properties: []CustomProperty{ + {Key: "note", Value: "Custom attestation payload"}, + }, + } +} + +func TestBuildProvenanceRoundTrip(t *testing.T) { + sample := 
sampleBuildProvenance() + if err := sample.Validate(); err != nil { + t.Fatalf("validate: %v", err) + } + a, err := sample.CanonicalJSON() + if err != nil { + t.Fatalf("canonical JSON: %v", err) + } + b, err := sample.CanonicalJSON() + if err != nil { + t.Fatalf("canonical JSON repeat: %v", err) + } + if string(a) != string(b) { + t.Fatalf("canonical output mismatch:\n%s\n%s", string(a), string(b)) + } +} + +func TestSbomAttestationRoundTrip(t *testing.T) { + sample := sampleSbomAttestation() + if err := sample.Validate(); err != nil { + t.Fatalf("validate: %v", err) + } + if _, err := sample.CanonicalJSON(); err != nil { + t.Fatalf("canonical JSON: %v", err) + } +} + +func TestVexAttestationRoundTrip(t *testing.T) { + sample := sampleVexAttestation() + if err := sample.Validate(); err != nil { + t.Fatalf("validate: %v", err) + } + if _, err := sample.CanonicalJSON(); err != nil { + t.Fatalf("canonical JSON: %v", err) + } +} + +func TestScanResultsRoundTrip(t *testing.T) { + sample := sampleScanResults() + if err := sample.Validate(); err != nil { + t.Fatalf("validate: %v", err) + } + if _, err := sample.CanonicalJSON(); err != nil { + t.Fatalf("canonical JSON: %v", err) + } +} + +func TestPolicyEvaluationRoundTrip(t *testing.T) { + sample := samplePolicyEvaluation() + if err := sample.Validate(); err != nil { + t.Fatalf("validate: %v", err) + } + if _, err := sample.CanonicalJSON(); err != nil { + t.Fatalf("canonical JSON: %v", err) + } +} + +func TestRiskProfileRoundTrip(t *testing.T) { + sample := sampleRiskProfile() + if err := sample.Validate(); err != nil { + t.Fatalf("validate: %v", err) + } + if _, err := sample.CanonicalJSON(); err != nil { + t.Fatalf("canonical JSON: %v", err) + } +} + +func TestCustomEvidenceRoundTrip(t *testing.T) { + sample := sampleCustomEvidence() + if err := sample.Validate(); err != nil { + t.Fatalf("validate: %v", err) + } + if _, err := sample.CanonicalJSON(); err != nil { + t.Fatalf("canonical JSON: %v", err) + } +} + +func 
TestCustomEvidenceSchemaVersionMismatch(t *testing.T) { + sample := sampleCustomEvidence() + sample.SchemaVersion = "StellaOps.CustomEvidence@9" + if err := sample.Validate(); err == nil { + t.Fatal("expected schemaVersion mismatch to fail validation") + } +} + +func stringPtr(v string) *string { return &v } +func boolPtr(v bool) *bool { return &v } +func floatPtr(v float64) *float64 { return &v } diff --git a/src/Attestor/StellaOps.Attestor.Types/generated/ts/index.test.ts b/src/Attestor/StellaOps.Attestor.Types/generated/ts/index.test.ts new file mode 100644 index 00000000..ecb6ef68 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/generated/ts/index.test.ts @@ -0,0 +1,195 @@ +import assert from 'node:assert/strict'; +import test from 'node:test'; +import { + canonicalizeBuildProvenance, + canonicalizeCustomEvidence, + canonicalizePolicyEvaluation, + canonicalizeRiskProfileEvidence, + canonicalizeSbomAttestation, + canonicalizeScanResults, + canonicalizeVexAttestation, + validateBuildProvenance, + validateCustomEvidence, + validatePolicyEvaluation, + validateRiskProfileEvidence, + validateSbomAttestation, + validateScanResults, + validateVexAttestation +} from './index.js'; + +const hex = (char: string): string => char.repeat(64); + +const buildProvenanceSample = { + schemaVersion: 'StellaOps.BuildProvenance@1' as const, + buildType: 'docker/buildx', + builder: { + id: 'builder://stellaops/ci', + version: '2025.10.31', + platform: 'linux/amd64' + }, + materials: [ + { + uri: 'git+https://git.stella-ops.org/org/repo@refs/heads/main', + digests: [ + { algorithm: 'sha256', value: hex('a') } + ], + note: 'Source repository' + } + ], + metadata: { + buildStartedOn: '2025-10-31T12:00:00Z', + buildFinishedOn: '2025-10-31T12:05:00Z', + reproducible: true, + buildInvocationId: 'invocations/123' + }, + environment: { + platform: 'linux/amd64', + imageDigest: { algorithm: 'sha256', value: hex('b') } + } +}; + +const sbomAttestationSample = { + schemaVersion: 
'StellaOps.SBOMAttestation@1' as const, + subjectDigest: `sha256:${hex('c')}`, + sbomFormat: 'CycloneDX-1.6' as const, + sbomDigest: { algorithm: 'sha256', value: hex('d') }, + sbomUri: 'https://example.invalid/sbom.json', + componentCount: 2, + packages: [ + { + purl: 'pkg:npm/%40stellaops/example@1.0.0', + version: '1.0.0', + licenses: ['MIT'] + } + ] +}; + +const vexAttestationSample = { + schemaVersion: 'StellaOps.VEXAttestation@1' as const, + subjectDigest: `sha256:${hex('e')}`, + generatedAt: '2025-10-31T12:10:00Z', + statements: [ + { + vulnerabilityId: 'CVE-2025-1234', + status: 'not_affected' as const, + timestamp: '2025-10-31T12:10:00Z', + justification: 'component_not_present', + references: ['https://example.invalid/advisory'] + } + ] +}; + +const scanResultsSample = { + schemaVersion: 'StellaOps.ScanResults@1' as const, + subjectDigest: `sha256:${hex('f')}`, + scannerName: 'stellaops/scanner', + scannerVersion: '2025.10.31', + generatedAt: '2025-10-31T12:15:00Z', + findings: [ + { + id: 'FIND-001', + severity: 'medium' as const, + status: 'detected' as const, + packageName: 'libexample', + packageVersion: '1.2.3', + cvssScore: 7.5, + description: 'Example vulnerability', + references: ['https://example.invalid/CVE-2025-1234'] + } + ] +}; + +const policyEvaluationSample = { + schemaVersion: 'StellaOps.PolicyEvaluation@1' as const, + subjectDigest: `sha256:${hex('1')}`, + policyVersion: '2025.10.31', + evaluatedAt: '2025-10-31T12:18:00Z', + outcome: 'pass' as const, + decisions: [ + { + policyId: 'policy/access-control', + ruleId: 'rule/allow-latest', + effect: 'allow' as const, + reason: 'No blocking findings' + } + ] +}; + +const riskProfileSample = { + schemaVersion: 'StellaOps.RiskProfileEvidence@1' as const, + subjectDigest: `sha256:${hex('2')}`, + generatedAt: '2025-10-31T12:20:00Z', + riskScore: 42.5, + riskLevel: 'medium' as const, + factors: [ + { + name: 'exploitability', + weight: 0.6, + description: 'No known exploits published' + } + ] +}; + 
+const customEvidenceSample = { + schemaVersion: 'StellaOps.CustomEvidence@1' as const, + subjectDigest: `sha256:${hex('3')}`, + kind: 'org.stellaops.demo/custom', + generatedAt: '2025-10-31T12:25:00Z', + properties: [ + { key: 'note', value: 'Custom attestation payload' } + ] +}; + +test('BuildProvenance round-trip', () => { + const validated = validateBuildProvenance(structuredClone(buildProvenanceSample)); + assert.deepEqual(validated, buildProvenanceSample); + const canonical = canonicalizeBuildProvenance(buildProvenanceSample); + assert.equal(canonical, canonicalizeBuildProvenance(structuredClone(buildProvenanceSample))); +}); + +test('BuildProvenance validation failure on missing materials', () => { + const invalid = structuredClone(buildProvenanceSample); + (invalid.materials as unknown[]) = []; + assert.throws(() => validateBuildProvenance(invalid as unknown), /must contain at least 1 item/); +}); + +test('SBOMAttestation round-trip', () => { + const validated = validateSbomAttestation(structuredClone(sbomAttestationSample)); + assert.deepEqual(validated, sbomAttestationSample); + assert.equal(canonicalizeSbomAttestation(sbomAttestationSample), canonicalizeSbomAttestation(structuredClone(sbomAttestationSample))); +}); + +test('VexAttestation round-trip', () => { + const validated = validateVexAttestation(structuredClone(vexAttestationSample)); + assert.deepEqual(validated, vexAttestationSample); + assert.equal(canonicalizeVexAttestation(vexAttestationSample), canonicalizeVexAttestation(structuredClone(vexAttestationSample))); +}); + +test('ScanResults round-trip', () => { + const validated = validateScanResults(structuredClone(scanResultsSample)); + assert.deepEqual(validated, scanResultsSample); + assert.equal(canonicalizeScanResults(scanResultsSample), canonicalizeScanResults(structuredClone(scanResultsSample))); +}); + +test('PolicyEvaluation round-trip', () => { + const validated = validatePolicyEvaluation(structuredClone(policyEvaluationSample)); + 
assert.deepEqual(validated, policyEvaluationSample); + assert.equal(canonicalizePolicyEvaluation(policyEvaluationSample), canonicalizePolicyEvaluation(structuredClone(policyEvaluationSample))); +}); + +test('RiskProfileEvidence round-trip', () => { + const validated = validateRiskProfileEvidence(structuredClone(riskProfileSample)); + assert.deepEqual(validated, riskProfileSample); + assert.equal(canonicalizeRiskProfileEvidence(riskProfileSample), canonicalizeRiskProfileEvidence(structuredClone(riskProfileSample))); +}); + +test('CustomEvidence round-trip', () => { + const validated = validateCustomEvidence(structuredClone(customEvidenceSample)); + assert.deepEqual(validated, customEvidenceSample); + assert.equal(canonicalizeCustomEvidence(customEvidenceSample), canonicalizeCustomEvidence(structuredClone(customEvidenceSample))); +}); + +test('CustomEvidence fails when schema version mismatches', () => { + const invalid = { ...customEvidenceSample, schemaVersion: 'StellaOps.CustomEvidence@9' as const }; + assert.throws(() => validateCustomEvidence(invalid as unknown), /must equal 'StellaOps.CustomEvidence@1'/); +}); diff --git a/src/Attestor/StellaOps.Attestor.Types/generated/ts/index.ts b/src/Attestor/StellaOps.Attestor.Types/generated/ts/index.ts new file mode 100644 index 00000000..ec906840 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/generated/ts/index.ts @@ -0,0 +1,945 @@ +// +// Generated by StellaOps.Attestor.Types.Generator +/* eslint-disable */ +/* prettier-ignore */ + +export const FindingStatusValues = Object.freeze(['detected', 'confirmed', 'fixed', 'not_affected'] as const); +export type FindingStatus = typeof FindingStatusValues[number]; + +export const PolicyEffectValues = Object.freeze(['allow', 'deny', 'warn'] as const); +export type PolicyEffect = typeof PolicyEffectValues[number]; + +export const PolicyOutcomeValues = Object.freeze(['pass', 'fail', 'waived'] as const); +export type PolicyOutcome = typeof PolicyOutcomeValues[number]; + 
+export const RiskLevelValues = Object.freeze(['critical', 'high', 'medium', 'low', 'informational'] as const); +export type RiskLevel = typeof RiskLevelValues[number]; + +export const SbomFormatValues = Object.freeze(['CycloneDX-1.6', 'SBOM-3.0.0'] as const); +export type SbomFormat = typeof SbomFormatValues[number]; + +export const SeverityValues = Object.freeze(['critical', 'high', 'medium', 'low', 'info'] as const); +export type Severity = typeof SeverityValues[number]; + +export const VexStatusValues = Object.freeze(['not_affected', 'affected', 'under_investigation', 'fixed'] as const); +export type VexStatus = typeof VexStatusValues[number]; + +export interface BuildMetadata { + buildStartedOn: string; + buildFinishedOn: string; + reproducible?: boolean; + buildInvocationId?: string; +} + +export interface BuildProvenance { + schemaVersion: 'StellaOps.BuildProvenance@1'; + buildType: string; + builder: BuilderIdentity; + materials: Array; + metadata: BuildMetadata; + environment?: EnvironmentMetadata; +} + +export interface BuilderIdentity { + id: string; + version?: string; + platform?: string; +} + +export interface CustomEvidence { + schemaVersion: 'StellaOps.CustomEvidence@1'; + subjectDigest: string; + kind: string; + generatedAt: string; + properties?: Array; +} + +export interface CustomProperty { + key: string; + value: string; +} + +export interface DigestReference { + algorithm: string; + value: string; +} + +export interface EnvironmentMetadata { + platform?: string; + imageDigest?: DigestReference; +} + +export interface MaterialReference { + uri: string; + digests: Array; + note?: string; +} + +export interface PolicyDecision { + policyId: string; + ruleId: string; + effect: PolicyEffect; + reason?: string; + remediation?: string; +} + +export interface PolicyEvaluation { + schemaVersion: 'StellaOps.PolicyEvaluation@1'; + subjectDigest: string; + policyVersion: string; + evaluatedAt: string; + outcome: PolicyOutcome; + decisions: Array; +} + 
+export interface RiskFactor { + name: string; + weight: number; + description?: string; +} + +export interface RiskProfileEvidence { + schemaVersion: 'StellaOps.RiskProfileEvidence@1'; + subjectDigest: string; + generatedAt: string; + riskScore: number; + riskLevel: RiskLevel; + factors: Array; +} + +export interface SbomAttestation { + schemaVersion: 'StellaOps.SBOMAttestation@1'; + subjectDigest: string; + sbomFormat: SbomFormat; + sbomDigest: DigestReference; + sbomUri?: string; + componentCount: number; + packages?: Array; +} + +export interface SbomPackage { + purl: string; + version?: string; + licenses?: Array; +} + +export interface ScanFinding { + id: string; + severity: Severity; + status: FindingStatus; + packageName: string; + packageVersion?: string; + cvssScore?: number; + description?: string; + references?: Array; +} + +export interface ScanResults { + schemaVersion: 'StellaOps.ScanResults@1'; + subjectDigest: string; + scannerName: string; + scannerVersion: string; + generatedAt: string; + findings: Array; +} + +export interface VexAttestation { + schemaVersion: 'StellaOps.VEXAttestation@1'; + subjectDigest: string; + generatedAt: string; + statements: Array; +} + +export interface VexStatement { + vulnerabilityId: string; + status: VexStatus; + timestamp: string; + justification?: string; + impactStatement?: string; + actionStatement?: string; + references?: Array; +} + +function isRecord(value: unknown): value is Record { + return typeof value === 'object' && value !== null && !Array.isArray(value); +} + +function pathString(path: string[]): string { + return path.length === 0 ? 
'value' : `value.${path.join('.')}`; +} + +function assertBuildMetadata(value: unknown, path: string[]): asserts value is BuildMetadata { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.buildStartedOn === undefined) { + throw new Error(`${pathString([...path, 'buildStartedOn'])} is required.`); + } + if (typeof value.buildStartedOn !== 'string') { + throw new Error(`${pathString([...path, 'buildStartedOn'])} must be a string.`); + } + if (value.buildFinishedOn === undefined) { + throw new Error(`${pathString([...path, 'buildFinishedOn'])} is required.`); + } + if (typeof value.buildFinishedOn !== 'string') { + throw new Error(`${pathString([...path, 'buildFinishedOn'])} must be a string.`); + } + if (value.reproducible !== undefined) { + if (typeof value.reproducible !== 'boolean') { + throw new Error(`${pathString([...path, 'reproducible'])} must be a boolean.`); + } + } + if (value.buildInvocationId !== undefined) { + if (typeof value.buildInvocationId !== 'string') { + throw new Error(`${pathString([...path, 'buildInvocationId'])} must be a string.`); + } + } +} + +function assertBuildProvenance(value: unknown, path: string[]): asserts value is BuildProvenance { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.schemaVersion === undefined) { + throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`); + } + if (typeof value.schemaVersion !== 'string') { + throw new Error(`${pathString([...path, 'schemaVersion'])} must be a string.`); + } + if (value.schemaVersion !== 'StellaOps.BuildProvenance@1') { + throw new Error(`${pathString([...path, 'schemaVersion'])} must equal 'StellaOps.BuildProvenance@1'.`); + } + if (value.buildType === undefined) { + throw new Error(`${pathString([...path, 'buildType'])} is required.`); + } + if (typeof value.buildType !== 'string') { + throw new Error(`${pathString([...path, 'buildType'])} must be a 
string.`); + } + if (value.builder === undefined) { + throw new Error(`${pathString([...path, 'builder'])} is required.`); + } + assertBuilderIdentity(value.builder, [...path, 'builder']); + if (value.materials === undefined) { + throw new Error(`${pathString([...path, 'materials'])} is required.`); + } + if (!Array.isArray(value.materials)) { + throw new Error(`${pathString([...path, 'materials'])} must be an array.`); + } + if (value.materials.length < 1) { + throw new Error(`${pathString([...path, 'materials'])} must contain at least 1 item(s).`); + } + for (let i = 0; i < value.materials.length; i += 1) { + assertMaterialReference(value.materials[i], [...[...path, 'materials'], String(i)]); + } + if (value.metadata === undefined) { + throw new Error(`${pathString([...path, 'metadata'])} is required.`); + } + assertBuildMetadata(value.metadata, [...path, 'metadata']); + if (value.environment !== undefined) { + assertEnvironmentMetadata(value.environment, [...path, 'environment']); + } +} + +function assertBuilderIdentity(value: unknown, path: string[]): asserts value is BuilderIdentity { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.id === undefined) { + throw new Error(`${pathString([...path, 'id'])} is required.`); + } + if (typeof value.id !== 'string') { + throw new Error(`${pathString([...path, 'id'])} must be a string.`); + } + if (value.version !== undefined) { + if (typeof value.version !== 'string') { + throw new Error(`${pathString([...path, 'version'])} must be a string.`); + } + } + if (value.platform !== undefined) { + if (typeof value.platform !== 'string') { + throw new Error(`${pathString([...path, 'platform'])} must be a string.`); + } + } +} + +function assertCustomEvidence(value: unknown, path: string[]): asserts value is CustomEvidence { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.schemaVersion === undefined) { + throw new 
Error(`${pathString([...path, 'schemaVersion'])} is required.`); + } + if (typeof value.schemaVersion !== 'string') { + throw new Error(`${pathString([...path, 'schemaVersion'])} must be a string.`); + } + if (value.schemaVersion !== 'StellaOps.CustomEvidence@1') { + throw new Error(`${pathString([...path, 'schemaVersion'])} must equal 'StellaOps.CustomEvidence@1'.`); + } + if (value.subjectDigest === undefined) { + throw new Error(`${pathString([...path, 'subjectDigest'])} is required.`); + } + if (typeof value.subjectDigest !== 'string') { + throw new Error(`${pathString([...path, 'subjectDigest'])} must be a string.`); + } + if (!/^sha256:[A-Fa-f0-9]{64}$/.test(value.subjectDigest)) { + throw new Error(`${pathString([...path, 'subjectDigest'])} does not match expected format.`); + } + if (value.kind === undefined) { + throw new Error(`${pathString([...path, 'kind'])} is required.`); + } + if (typeof value.kind !== 'string') { + throw new Error(`${pathString([...path, 'kind'])} must be a string.`); + } + if (value.generatedAt === undefined) { + throw new Error(`${pathString([...path, 'generatedAt'])} is required.`); + } + if (typeof value.generatedAt !== 'string') { + throw new Error(`${pathString([...path, 'generatedAt'])} must be a string.`); + } + if (value.properties !== undefined) { + if (!Array.isArray(value.properties)) { + throw new Error(`${pathString([...path, 'properties'])} must be an array.`); + } + for (let i = 0; i < value.properties.length; i += 1) { + assertCustomProperty(value.properties[i], [...[...path, 'properties'], String(i)]); + } + } +} + +function assertCustomProperty(value: unknown, path: string[]): asserts value is CustomProperty { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.key === undefined) { + throw new Error(`${pathString([...path, 'key'])} is required.`); + } + if (typeof value.key !== 'string') { + throw new Error(`${pathString([...path, 'key'])} must be a string.`); + } 
+ if (value.value === undefined) { + throw new Error(`${pathString([...path, 'value'])} is required.`); + } + if (typeof value.value !== 'string') { + throw new Error(`${pathString([...path, 'value'])} must be a string.`); + } +} + +function assertDigestReference(value: unknown, path: string[]): asserts value is DigestReference { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.algorithm === undefined) { + throw new Error(`${pathString([...path, 'algorithm'])} is required.`); + } + if (typeof value.algorithm !== 'string') { + throw new Error(`${pathString([...path, 'algorithm'])} must be a string.`); + } + if (value.value === undefined) { + throw new Error(`${pathString([...path, 'value'])} is required.`); + } + if (typeof value.value !== 'string') { + throw new Error(`${pathString([...path, 'value'])} must be a string.`); + } + if (!/^[A-Fa-f0-9]{64}$/.test(value.value)) { + throw new Error(`${pathString([...path, 'value'])} does not match expected format.`); + } +} + +function assertEnvironmentMetadata(value: unknown, path: string[]): asserts value is EnvironmentMetadata { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.platform !== undefined) { + if (typeof value.platform !== 'string') { + throw new Error(`${pathString([...path, 'platform'])} must be a string.`); + } + } + if (value.imageDigest !== undefined) { + assertDigestReference(value.imageDigest, [...path, 'imageDigest']); + } +} + +function assertMaterialReference(value: unknown, path: string[]): asserts value is MaterialReference { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.uri === undefined) { + throw new Error(`${pathString([...path, 'uri'])} is required.`); + } + if (typeof value.uri !== 'string') { + throw new Error(`${pathString([...path, 'uri'])} must be a string.`); + } + if (value.digests === undefined) { + throw new 
Error(`${pathString([...path, 'digests'])} is required.`); + } + if (!Array.isArray(value.digests)) { + throw new Error(`${pathString([...path, 'digests'])} must be an array.`); + } + if (value.digests.length < 1) { + throw new Error(`${pathString([...path, 'digests'])} must contain at least 1 item(s).`); + } + for (let i = 0; i < value.digests.length; i += 1) { + assertDigestReference(value.digests[i], [...[...path, 'digests'], String(i)]); + } + if (value.note !== undefined) { + if (typeof value.note !== 'string') { + throw new Error(`${pathString([...path, 'note'])} must be a string.`); + } + } +} + +function assertPolicyDecision(value: unknown, path: string[]): asserts value is PolicyDecision { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.policyId === undefined) { + throw new Error(`${pathString([...path, 'policyId'])} is required.`); + } + if (typeof value.policyId !== 'string') { + throw new Error(`${pathString([...path, 'policyId'])} must be a string.`); + } + if (value.ruleId === undefined) { + throw new Error(`${pathString([...path, 'ruleId'])} is required.`); + } + if (typeof value.ruleId !== 'string') { + throw new Error(`${pathString([...path, 'ruleId'])} must be a string.`); + } + if (value.effect === undefined) { + throw new Error(`${pathString([...path, 'effect'])} is required.`); + } + if (!PolicyEffectValues.includes(value.effect as PolicyEffect)) { + throw new Error(`${pathString([...path, 'effect'])} must be one of ${PolicyEffectValues.join(', ')}`); + } + if (value.reason !== undefined) { + if (typeof value.reason !== 'string') { + throw new Error(`${pathString([...path, 'reason'])} must be a string.`); + } + } + if (value.remediation !== undefined) { + if (typeof value.remediation !== 'string') { + throw new Error(`${pathString([...path, 'remediation'])} must be a string.`); + } + } +} + +function assertPolicyEvaluation(value: unknown, path: string[]): asserts value is PolicyEvaluation { 
+ if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.schemaVersion === undefined) { + throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`); + } + if (typeof value.schemaVersion !== 'string') { + throw new Error(`${pathString([...path, 'schemaVersion'])} must be a string.`); + } + if (value.schemaVersion !== 'StellaOps.PolicyEvaluation@1') { + throw new Error(`${pathString([...path, 'schemaVersion'])} must equal 'StellaOps.PolicyEvaluation@1'.`); + } + if (value.subjectDigest === undefined) { + throw new Error(`${pathString([...path, 'subjectDigest'])} is required.`); + } + if (typeof value.subjectDigest !== 'string') { + throw new Error(`${pathString([...path, 'subjectDigest'])} must be a string.`); + } + if (!/^sha256:[A-Fa-f0-9]{64}$/.test(value.subjectDigest)) { + throw new Error(`${pathString([...path, 'subjectDigest'])} does not match expected format.`); + } + if (value.policyVersion === undefined) { + throw new Error(`${pathString([...path, 'policyVersion'])} is required.`); + } + if (typeof value.policyVersion !== 'string') { + throw new Error(`${pathString([...path, 'policyVersion'])} must be a string.`); + } + if (value.evaluatedAt === undefined) { + throw new Error(`${pathString([...path, 'evaluatedAt'])} is required.`); + } + if (typeof value.evaluatedAt !== 'string') { + throw new Error(`${pathString([...path, 'evaluatedAt'])} must be a string.`); + } + if (value.outcome === undefined) { + throw new Error(`${pathString([...path, 'outcome'])} is required.`); + } + if (!PolicyOutcomeValues.includes(value.outcome as PolicyOutcome)) { + throw new Error(`${pathString([...path, 'outcome'])} must be one of ${PolicyOutcomeValues.join(', ')}`); + } + if (value.decisions === undefined) { + throw new Error(`${pathString([...path, 'decisions'])} is required.`); + } + if (!Array.isArray(value.decisions)) { + throw new Error(`${pathString([...path, 'decisions'])} must be an array.`); + } + for 
(let i = 0; i < value.decisions.length; i += 1) { + assertPolicyDecision(value.decisions[i], [...[...path, 'decisions'], String(i)]); + } +} + +function assertRiskFactor(value: unknown, path: string[]): asserts value is RiskFactor { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.name === undefined) { + throw new Error(`${pathString([...path, 'name'])} is required.`); + } + if (typeof value.name !== 'string') { + throw new Error(`${pathString([...path, 'name'])} must be a string.`); + } + if (value.weight === undefined) { + throw new Error(`${pathString([...path, 'weight'])} is required.`); + } + if (typeof value.weight !== 'number') { + throw new Error(`${pathString([...path, 'weight'])} must be a number.`); + } + if (value.weight < 0) { + throw new Error(`${pathString([...path, 'weight'])} must be >= 0`); + } + if (value.weight > 1) { + throw new Error(`${pathString([...path, 'weight'])} must be <= 1`); + } + if (value.description !== undefined) { + if (typeof value.description !== 'string') { + throw new Error(`${pathString([...path, 'description'])} must be a string.`); + } + } +} + +function assertRiskProfileEvidence(value: unknown, path: string[]): asserts value is RiskProfileEvidence { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.schemaVersion === undefined) { + throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`); + } + if (typeof value.schemaVersion !== 'string') { + throw new Error(`${pathString([...path, 'schemaVersion'])} must be a string.`); + } + if (value.schemaVersion !== 'StellaOps.RiskProfileEvidence@1') { + throw new Error(`${pathString([...path, 'schemaVersion'])} must equal 'StellaOps.RiskProfileEvidence@1'.`); + } + if (value.subjectDigest === undefined) { + throw new Error(`${pathString([...path, 'subjectDigest'])} is required.`); + } + if (typeof value.subjectDigest !== 'string') { + throw new 
Error(`${pathString([...path, 'subjectDigest'])} must be a string.`); + } + if (!/^sha256:[A-Fa-f0-9]{64}$/.test(value.subjectDigest)) { + throw new Error(`${pathString([...path, 'subjectDigest'])} does not match expected format.`); + } + if (value.generatedAt === undefined) { + throw new Error(`${pathString([...path, 'generatedAt'])} is required.`); + } + if (typeof value.generatedAt !== 'string') { + throw new Error(`${pathString([...path, 'generatedAt'])} must be a string.`); + } + if (value.riskScore === undefined) { + throw new Error(`${pathString([...path, 'riskScore'])} is required.`); + } + if (typeof value.riskScore !== 'number') { + throw new Error(`${pathString([...path, 'riskScore'])} must be a number.`); + } + if (value.riskScore < 0) { + throw new Error(`${pathString([...path, 'riskScore'])} must be >= 0`); + } + if (value.riskScore > 100) { + throw new Error(`${pathString([...path, 'riskScore'])} must be <= 100`); + } + if (value.riskLevel === undefined) { + throw new Error(`${pathString([...path, 'riskLevel'])} is required.`); + } + if (!RiskLevelValues.includes(value.riskLevel as RiskLevel)) { + throw new Error(`${pathString([...path, 'riskLevel'])} must be one of ${RiskLevelValues.join(', ')}`); + } + if (value.factors === undefined) { + throw new Error(`${pathString([...path, 'factors'])} is required.`); + } + if (!Array.isArray(value.factors)) { + throw new Error(`${pathString([...path, 'factors'])} must be an array.`); + } + for (let i = 0; i < value.factors.length; i += 1) { + assertRiskFactor(value.factors[i], [...[...path, 'factors'], String(i)]); + } +} + +function assertSbomAttestation(value: unknown, path: string[]): asserts value is SbomAttestation { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.schemaVersion === undefined) { + throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`); + } + if (typeof value.schemaVersion !== 'string') { + throw new 
Error(`${pathString([...path, 'schemaVersion'])} must be a string.`); + } + if (value.schemaVersion !== 'StellaOps.SBOMAttestation@1') { + throw new Error(`${pathString([...path, 'schemaVersion'])} must equal 'StellaOps.SBOMAttestation@1'.`); + } + if (value.subjectDigest === undefined) { + throw new Error(`${pathString([...path, 'subjectDigest'])} is required.`); + } + if (typeof value.subjectDigest !== 'string') { + throw new Error(`${pathString([...path, 'subjectDigest'])} must be a string.`); + } + if (!/^sha256:[A-Fa-f0-9]{64}$/.test(value.subjectDigest)) { + throw new Error(`${pathString([...path, 'subjectDigest'])} does not match expected format.`); + } + if (value.sbomFormat === undefined) { + throw new Error(`${pathString([...path, 'sbomFormat'])} is required.`); + } + if (!SbomFormatValues.includes(value.sbomFormat as SbomFormat)) { + throw new Error(`${pathString([...path, 'sbomFormat'])} must be one of ${SbomFormatValues.join(', ')}`); + } + if (value.sbomDigest === undefined) { + throw new Error(`${pathString([...path, 'sbomDigest'])} is required.`); + } + assertDigestReference(value.sbomDigest, [...path, 'sbomDigest']); + if (value.sbomUri !== undefined) { + if (typeof value.sbomUri !== 'string') { + throw new Error(`${pathString([...path, 'sbomUri'])} must be a string.`); + } + } + if (value.componentCount === undefined) { + throw new Error(`${pathString([...path, 'componentCount'])} is required.`); + } + if (typeof value.componentCount !== 'number') { + throw new Error(`${pathString([...path, 'componentCount'])} must be a number.`); + } + if (value.componentCount < 0) { + throw new Error(`${pathString([...path, 'componentCount'])} must be >= 0`); + } + if (value.packages !== undefined) { + if (!Array.isArray(value.packages)) { + throw new Error(`${pathString([...path, 'packages'])} must be an array.`); + } + for (let i = 0; i < value.packages.length; i += 1) { + assertSbomPackage(value.packages[i], [...[...path, 'packages'], String(i)]); + } + } +} 
+ +function assertSbomPackage(value: unknown, path: string[]): asserts value is SbomPackage { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.purl === undefined) { + throw new Error(`${pathString([...path, 'purl'])} is required.`); + } + if (typeof value.purl !== 'string') { + throw new Error(`${pathString([...path, 'purl'])} must be a string.`); + } + if (value.version !== undefined) { + if (typeof value.version !== 'string') { + throw new Error(`${pathString([...path, 'version'])} must be a string.`); + } + } + if (value.licenses !== undefined) { + if (!Array.isArray(value.licenses)) { + throw new Error(`${pathString([...path, 'licenses'])} must be an array.`); + } + if (value.licenses.length < 1) { + throw new Error(`${pathString([...path, 'licenses'])} must contain at least 1 item(s).`); + } + for (let i = 0; i < value.licenses.length; i += 1) { + if (typeof value.licenses[i] !== 'string') { + throw new Error(`${pathString([...[...path, 'licenses'], String(i)])} must be a string.`); + } + } + } +} + +function assertScanFinding(value: unknown, path: string[]): asserts value is ScanFinding { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.id === undefined) { + throw new Error(`${pathString([...path, 'id'])} is required.`); + } + if (typeof value.id !== 'string') { + throw new Error(`${pathString([...path, 'id'])} must be a string.`); + } + if (value.severity === undefined) { + throw new Error(`${pathString([...path, 'severity'])} is required.`); + } + if (!SeverityValues.includes(value.severity as Severity)) { + throw new Error(`${pathString([...path, 'severity'])} must be one of ${SeverityValues.join(', ')}`); + } + if (value.status === undefined) { + throw new Error(`${pathString([...path, 'status'])} is required.`); + } + if (!FindingStatusValues.includes(value.status as FindingStatus)) { + throw new Error(`${pathString([...path, 'status'])} must be 
one of ${FindingStatusValues.join(', ')}`); + } + if (value.packageName === undefined) { + throw new Error(`${pathString([...path, 'packageName'])} is required.`); + } + if (typeof value.packageName !== 'string') { + throw new Error(`${pathString([...path, 'packageName'])} must be a string.`); + } + if (value.packageVersion !== undefined) { + if (typeof value.packageVersion !== 'string') { + throw new Error(`${pathString([...path, 'packageVersion'])} must be a string.`); + } + } + if (value.cvssScore !== undefined) { + if (typeof value.cvssScore !== 'number') { + throw new Error(`${pathString([...path, 'cvssScore'])} must be a number.`); + } + if (value.cvssScore < 0) { + throw new Error(`${pathString([...path, 'cvssScore'])} must be >= 0`); + } + if (value.cvssScore > 10) { + throw new Error(`${pathString([...path, 'cvssScore'])} must be <= 10`); + } + } + if (value.description !== undefined) { + if (typeof value.description !== 'string') { + throw new Error(`${pathString([...path, 'description'])} must be a string.`); + } + } + if (value.references !== undefined) { + if (!Array.isArray(value.references)) { + throw new Error(`${pathString([...path, 'references'])} must be an array.`); + } + if (value.references.length < 1) { + throw new Error(`${pathString([...path, 'references'])} must contain at least 1 item(s).`); + } + for (let i = 0; i < value.references.length; i += 1) { + if (typeof value.references[i] !== 'string') { + throw new Error(`${pathString([...[...path, 'references'], String(i)])} must be a string.`); + } + } + } +} + +function assertScanResults(value: unknown, path: string[]): asserts value is ScanResults { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.schemaVersion === undefined) { + throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`); + } + if (typeof value.schemaVersion !== 'string') { + throw new Error(`${pathString([...path, 'schemaVersion'])} must be a string.`); 
+ } + if (value.schemaVersion !== 'StellaOps.ScanResults@1') { + throw new Error(`${pathString([...path, 'schemaVersion'])} must equal 'StellaOps.ScanResults@1'.`); + } + if (value.subjectDigest === undefined) { + throw new Error(`${pathString([...path, 'subjectDigest'])} is required.`); + } + if (typeof value.subjectDigest !== 'string') { + throw new Error(`${pathString([...path, 'subjectDigest'])} must be a string.`); + } + if (!/^sha256:[A-Fa-f0-9]{64}$/.test(value.subjectDigest)) { + throw new Error(`${pathString([...path, 'subjectDigest'])} does not match expected format.`); + } + if (value.scannerName === undefined) { + throw new Error(`${pathString([...path, 'scannerName'])} is required.`); + } + if (typeof value.scannerName !== 'string') { + throw new Error(`${pathString([...path, 'scannerName'])} must be a string.`); + } + if (value.scannerVersion === undefined) { + throw new Error(`${pathString([...path, 'scannerVersion'])} is required.`); + } + if (typeof value.scannerVersion !== 'string') { + throw new Error(`${pathString([...path, 'scannerVersion'])} must be a string.`); + } + if (value.generatedAt === undefined) { + throw new Error(`${pathString([...path, 'generatedAt'])} is required.`); + } + if (typeof value.generatedAt !== 'string') { + throw new Error(`${pathString([...path, 'generatedAt'])} must be a string.`); + } + if (value.findings === undefined) { + throw new Error(`${pathString([...path, 'findings'])} is required.`); + } + if (!Array.isArray(value.findings)) { + throw new Error(`${pathString([...path, 'findings'])} must be an array.`); + } + for (let i = 0; i < value.findings.length; i += 1) { + assertScanFinding(value.findings[i], [...[...path, 'findings'], String(i)]); + } +} + +function assertVexAttestation(value: unknown, path: string[]): asserts value is VexAttestation { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.schemaVersion === undefined) { + throw new 
Error(`${pathString([...path, 'schemaVersion'])} is required.`); + } + if (typeof value.schemaVersion !== 'string') { + throw new Error(`${pathString([...path, 'schemaVersion'])} must be a string.`); + } + if (value.schemaVersion !== 'StellaOps.VEXAttestation@1') { + throw new Error(`${pathString([...path, 'schemaVersion'])} must equal 'StellaOps.VEXAttestation@1'.`); + } + if (value.subjectDigest === undefined) { + throw new Error(`${pathString([...path, 'subjectDigest'])} is required.`); + } + if (typeof value.subjectDigest !== 'string') { + throw new Error(`${pathString([...path, 'subjectDigest'])} must be a string.`); + } + if (!/^sha256:[A-Fa-f0-9]{64}$/.test(value.subjectDigest)) { + throw new Error(`${pathString([...path, 'subjectDigest'])} does not match expected format.`); + } + if (value.generatedAt === undefined) { + throw new Error(`${pathString([...path, 'generatedAt'])} is required.`); + } + if (typeof value.generatedAt !== 'string') { + throw new Error(`${pathString([...path, 'generatedAt'])} must be a string.`); + } + if (value.statements === undefined) { + throw new Error(`${pathString([...path, 'statements'])} is required.`); + } + if (!Array.isArray(value.statements)) { + throw new Error(`${pathString([...path, 'statements'])} must be an array.`); + } + if (value.statements.length < 1) { + throw new Error(`${pathString([...path, 'statements'])} must contain at least 1 item(s).`); + } + for (let i = 0; i < value.statements.length; i += 1) { + assertVexStatement(value.statements[i], [...[...path, 'statements'], String(i)]); + } +} + +function assertVexStatement(value: unknown, path: string[]): asserts value is VexStatement { + if (!isRecord(value)) { + throw new Error(`${pathString(path)} must be an object.`); + } + if (value.vulnerabilityId === undefined) { + throw new Error(`${pathString([...path, 'vulnerabilityId'])} is required.`); + } + if (typeof value.vulnerabilityId !== 'string') { + throw new Error(`${pathString([...path, 
'vulnerabilityId'])} must be a string.`); + } + if (value.status === undefined) { + throw new Error(`${pathString([...path, 'status'])} is required.`); + } + if (!VexStatusValues.includes(value.status as VexStatus)) { + throw new Error(`${pathString([...path, 'status'])} must be one of ${VexStatusValues.join(', ')}`); + } + if (value.timestamp === undefined) { + throw new Error(`${pathString([...path, 'timestamp'])} is required.`); + } + if (typeof value.timestamp !== 'string') { + throw new Error(`${pathString([...path, 'timestamp'])} must be a string.`); + } + if (value.justification !== undefined) { + if (typeof value.justification !== 'string') { + throw new Error(`${pathString([...path, 'justification'])} must be a string.`); + } + } + if (value.impactStatement !== undefined) { + if (typeof value.impactStatement !== 'string') { + throw new Error(`${pathString([...path, 'impactStatement'])} must be a string.`); + } + } + if (value.actionStatement !== undefined) { + if (typeof value.actionStatement !== 'string') { + throw new Error(`${pathString([...path, 'actionStatement'])} must be a string.`); + } + } + if (value.references !== undefined) { + if (!Array.isArray(value.references)) { + throw new Error(`${pathString([...path, 'references'])} must be an array.`); + } + if (value.references.length < 1) { + throw new Error(`${pathString([...path, 'references'])} must contain at least 1 item(s).`); + } + for (let i = 0; i < value.references.length; i += 1) { + if (typeof value.references[i] !== 'string') { + throw new Error(`${pathString([...[...path, 'references'], String(i)])} must be a string.`); + } + } + } +} + +export function validateBuildProvenance(value: unknown): BuildProvenance { + assertBuildProvenance(value, []); + return value as BuildProvenance; +} + +export function canonicalizeBuildProvenance(value: BuildProvenance): string { + assertBuildProvenance(value, []); + return canonicalStringify(value); +} + +export function validateCustomEvidence(value: 
unknown): CustomEvidence { + assertCustomEvidence(value, []); + return value as CustomEvidence; +} + +export function canonicalizeCustomEvidence(value: CustomEvidence): string { + assertCustomEvidence(value, []); + return canonicalStringify(value); +} + +export function validatePolicyEvaluation(value: unknown): PolicyEvaluation { + assertPolicyEvaluation(value, []); + return value as PolicyEvaluation; +} + +export function canonicalizePolicyEvaluation(value: PolicyEvaluation): string { + assertPolicyEvaluation(value, []); + return canonicalStringify(value); +} + +export function validateRiskProfileEvidence(value: unknown): RiskProfileEvidence { + assertRiskProfileEvidence(value, []); + return value as RiskProfileEvidence; +} + +export function canonicalizeRiskProfileEvidence(value: RiskProfileEvidence): string { + assertRiskProfileEvidence(value, []); + return canonicalStringify(value); +} + +export function validateSbomAttestation(value: unknown): SbomAttestation { + assertSbomAttestation(value, []); + return value as SbomAttestation; +} + +export function canonicalizeSbomAttestation(value: SbomAttestation): string { + assertSbomAttestation(value, []); + return canonicalStringify(value); +} + +export function validateScanResults(value: unknown): ScanResults { + assertScanResults(value, []); + return value as ScanResults; +} + +export function canonicalizeScanResults(value: ScanResults): string { + assertScanResults(value, []); + return canonicalStringify(value); +} + +export function validateVexAttestation(value: unknown): VexAttestation { + assertVexAttestation(value, []); + return value as VexAttestation; +} + +export function canonicalizeVexAttestation(value: VexAttestation): string { + assertVexAttestation(value, []); + return canonicalStringify(value); +} + +function canonicalStringify(input: unknown): string { + return JSON.stringify(sortValue(input)); +} + +function sortValue(value: unknown): unknown { + if (Array.isArray(value)) { + return 
value.map(sortValue); + } + if (isRecord(value)) { + const ordered: Record<string, unknown> = {}; + const keys = Object.keys(value).sort(); + for (const key of keys) { + ordered[key] = sortValue(value[key]); + } + return ordered; + } + return value; +} + diff --git a/src/Attestor/StellaOps.Attestor.Types/generated/ts/package-lock.json b/src/Attestor/StellaOps.Attestor.Types/generated/ts/package-lock.json new file mode 100644 index 00000000..77e33fa9 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/generated/ts/package-lock.json @@ -0,0 +1,237 @@ +{ + "name": "@stellaops/attestor-types", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@stellaops/attestor-types", + "version": "0.1.0", + "devDependencies": { + "@types/node": "^22.7.4", + "ts-node": "^10.9.2", + "typescript": "^5.6.3" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { 
"version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", + "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "22.18.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.13.tgz", + "integrity": "sha512-Bo45YKIjnmFtv6I1TuC8AaHBbqXtIo+Om5fE4QiU1Tj8QR/qt+8O3BAtOimG5IFmwaWiPmB3Mv3jtYzBA4Us2A==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": 
"https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", + "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true, + "license": "MIT" + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": 
"sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "peer": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": 
"sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true, + "license": "MIT" + }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + } + } +} diff --git a/src/Attestor/StellaOps.Attestor.Types/generated/ts/package.json b/src/Attestor/StellaOps.Attestor.Types/generated/ts/package.json new file mode 100644 index 00000000..412e6665 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/generated/ts/package.json @@ -0,0 +1,16 @@ +{ + "name": "@stellaops/attestor-types", + "version": "0.1.0", + "private": true, + "type": "module", + "scripts": { + "build": "tsc --project tsconfig.json", + "clean": "rm -rf dist", + "test": "npm run build && node --test dist/index.test.js" + }, + "devDependencies": { + "@types/node": "^22.7.4", + "ts-node": "^10.9.2", + "typescript": "^5.6.3" + } +} diff --git a/src/Attestor/StellaOps.Attestor.Types/generated/ts/tsconfig.json b/src/Attestor/StellaOps.Attestor.Types/generated/ts/tsconfig.json new file mode 100644 index 00000000..b0b7a85c --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/generated/ts/tsconfig.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ES2022", + "moduleResolution": "node", + "outDir": "dist", + "declaration": true, + "declarationMap": false, + "sourceMap": false, + "strict": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "skipLibCheck": true + }, + "include": [ + "index.ts", + "index.test.ts" + ] +} diff --git a/src/Attestor/StellaOps.Attestor.Types/samples/README.md b/src/Attestor/StellaOps.Attestor.Types/samples/README.md new file mode 100644 index 00000000..024fb61d --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/samples/README.md @@ 
-0,0 +1,23 @@ +# StellaOps Attestation Golden Samples + +This directory contains deterministic JSON fixtures used across unit tests, regression checks, and documentation snippets for the Attestor predicate family. + +| File | Predicate | Notes | +| --- | --- | --- | +| `build-provenance.v1.json` | `StellaOps.BuildProvenance@1` | Mirrors SLSA provenance fields with StellaOps metadata extensions. | +| `sbom-attestation.v1.json` | `StellaOps.SBOMAttestation@1` | Links to CycloneDX 1.6 BOM data and dependency relationships. | +| `scan-results.v1.json` | `StellaOps.ScanResults@1` | Captures multi-type findings (vulnerability, policy) with lattice scores. | +| `policy-evaluation.v1.json` | `StellaOps.PolicyEvaluation@1` | Documents deny verdict trace with digests for inputs and policies. | +| `vex-attestation.v1.json` | `StellaOps.VEXAttestation@1` | Provides OpenVEX-compatible statements for the Scanner image. | +| `risk-profile-evidence.v1.json` | `StellaOps.RiskProfileEvidence@1` | Compresses risk matrix outputs from the Risk Engine. | +| `custom-evidence.v1.json` | `StellaOps.CustomEvidence@1` | Shows a Zastava runtime hardening checklist embedded as custom evidence. | + +All fixtures share the same `subject` digest (`ghcr.io/stellaops/scanner@sha256:d5f5…`) so they can be chained together inside multi-artefact verification tests. Keys are alphabetically ordered in every object to keep canonical JSON generation straightforward. + +When adding or updating fixtures: + +1. Preserve field ordering and avoid timestamps without explicit values. +2. Update `AttestationGoldenSamplesTests.cs` to assert any new invariants. +3. Reference the fixture in the relevant module documentation so downstream users can discover it. + +Run `npm run docs:attestor:validate` from the repository root to confirm fixtures, schemas, and generated SDKs remain in lock-step. 
diff --git a/src/Attestor/StellaOps.Attestor.Types/samples/build-provenance.v1.json b/src/Attestor/StellaOps.Attestor.Types/samples/build-provenance.v1.json new file mode 100644 index 00000000..845f04c5 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/samples/build-provenance.v1.json @@ -0,0 +1,71 @@ +{ + "_type": "https://in-toto.io/Statement/v1", + "predicate": { + "buildDefinition": { + "buildType": "https://stella-ops.org/build/docker-image@v1", + "builder": "stellaops://pipelines/scanner-ci", + "environment": { + "image": "ghcr.io/stellaops/buildkit:2025.10", + "platform": "linux/amd64", + "workerPool": "us-west-2/buildfarm/a" + }, + "source": { + "configDigest": { + "sha256": "f135b1eb59b38c46b7c9f95abfa23d8739d9a71717f7ceebf2e0fdf75c9b8d63" + }, + "entryPoint": "scanner/Dockerfile", + "uri": "git+https://git.stella-ops.org/stellaops/scanner.git@refs/heads/main" + } + }, + "materials": [ + { + "digest": { + "sha256": "61e7a7d1d0a64e788d4a32294e7255f2667ef45ef8d358b3f6262e2de3b2c13d" + }, + "uri": "git+https://git.stella-ops.org/stellaops/scanner.git#src" + }, + { + "digest": { + "sha256": "3f2dc5d957bbf9570da0e7af0fde61ebc9e32b0ad156bea6f3cfa4d94cda740a" + }, + "uri": "oci://ghcr.io/stellaops/base-images/builder:2025.09" + } + ], + "metadata": { + "builtOn": "2025-10-29T18:22:14Z", + "durationSeconds": 186, + "invocationId": "urn:uuid:7ac7b9b4-0f6a-4ad1-9d42-9e0df7bd4a94", + "licenseBundle": "stellaops://licenses/enterprise@2025-10" + }, + "runDetails": { + "arguments": [ + "buildx", + "bake", + "--file", + "scanner/docker-bake.hcl", + "--set", + "sbom.mode=strict" + ], + "builderImageDigest": "sha256:6ee70a4014258f9c0a40e10b5f5a4b1c6497e5d6b9cd848771c3d79f7f0d91da", + "exitCode": 0, + "logs": [ + { + "digest": { + "sha256": "4b4fd1f46633f475aa1ed58ec0d95af0ed3e4015d9d41fce7883c0e650f730fb" + }, + "uri": "s3://attestor-logs/scanner-ci/2025-10-29/build.log" + } + ] + } + }, + "predicateType": "StellaOps.BuildProvenance@1", + "predicateVersion": 
"1.0.0", + "subject": [ + { + "digest": { + "sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + }, + "name": "ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + } + ] +} diff --git a/src/Attestor/StellaOps.Attestor.Types/samples/custom-evidence.v1.json b/src/Attestor/StellaOps.Attestor.Types/samples/custom-evidence.v1.json new file mode 100644 index 00000000..48b4c731 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/samples/custom-evidence.v1.json @@ -0,0 +1,48 @@ +{ + "_type": "https://in-toto.io/Statement/v1", + "predicate": { + "contentType": "application/json", + "description": "Runtime hardening checklist exported from Kubernetes admission controller.", + "payload": { + "controlResults": [ + { + "control": "NSA-CISA-Kubernetes-1.2.1", + "evidence": "pod security baseline profile enforced", + "status": "pass" + }, + { + "control": "NSA-CISA-Kubernetes-1.3.4", + "evidence": "imagePullPolicy set to Always", + "status": "pass" + }, + { + "control": "NSA-CISA-Kubernetes-1.4.2", + "evidence": "no container runs as privileged", + "status": "pass" + }, + { + "control": "NSA-CISA-Kubernetes-1.5.1", + "evidence": "hostPath mounts limited to /var/run/secrets", + "status": "warn" + } + ], + "generator": "stellaops://zastava/observer@2025.10.0", + "scopedNamespace": "scanner-prod" + }, + "schema": "https://stella-ops.org/custom/nsacisa-runtime-checklist@v1", + "tags": [ + "runtime-hardening", + "zastava" + ] + }, + "predicateType": "StellaOps.CustomEvidence@1", + "predicateVersion": "1.0.0", + "subject": [ + { + "digest": { + "sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + }, + "name": "ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + } + ] +} diff --git a/src/Attestor/StellaOps.Attestor.Types/samples/policy-evaluation.v1.json b/src/Attestor/StellaOps.Attestor.Types/samples/policy-evaluation.v1.json new 
file mode 100644 index 00000000..a47d86de --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/samples/policy-evaluation.v1.json @@ -0,0 +1,69 @@ +{ + "_type": "https://in-toto.io/Statement/v1", + "predicate": { + "decision": "deny", + "explain": { + "failingAssertions": [ + { + "message": "critical vulnerability CVE-2025-30104 lacks mitigating VEX statement", + "path": "controls/critical-vulns.rego#L42" + }, + { + "message": "policy `runtime-tls` requires TLS 1.3 or higher", + "path": "controls/runtime-tls.rego#L18" + } + ], + "inputs": { + "policyDigest": "sha256:73acd8cb50ea52b9b98534fada689755fbdb0a12f8bd4f6069276e7ce21a0bd8", + "sbomDigest": "sha256:7c8d89e9c5f7ca1be3f75653d3cb4dd511ee1f65c1b9bc606cd19c578b57e9c2", + "vexDigest": "sha256:72b3ead4c7de0f65a3be9a9d73b7bc2840f1494de3d1666d4010e23bb1b82768" + }, + "trace": [ + { + "decision": "deny", + "policy": "controls/critical-vulns.rego", + "rule": "require_vex_or_patch" + }, + { + "decision": "warn", + "policy": "controls/license.rego", + "rule": "allow_mit_or_apache" + } + ] + }, + "policyDigest": "sha256:73acd8cb50ea52b9b98534fada689755fbdb0a12f8bd4f6069276e7ce21a0bd8", + "policyVersion": "2025.10.0", + "rules": [ + { + "id": "controls/critical-vulns", + "outcome": "deny", + "type": "rego" + }, + { + "id": "controls/runtime-tls", + "outcome": "deny", + "type": "rego" + }, + { + "id": "controls/license", + "outcome": "warn", + "type": "rego" + } + ], + "summary": { + "decision": "deny", + "evaluatedAt": "2025-10-29T18:24:10Z", + "inputDigest": "sha256:8f9047f8cb22dafeb8fff2e8e75ef4e93a005f619210d4c4282c86aa2addc81e" + } + }, + "predicateType": "StellaOps.PolicyEvaluation@1", + "predicateVersion": "1.0.0", + "subject": [ + { + "digest": { + "sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + }, + "name": "ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + } + ] +} diff --git 
a/src/Attestor/StellaOps.Attestor.Types/samples/risk-profile-evidence.v1.json b/src/Attestor/StellaOps.Attestor.Types/samples/risk-profile-evidence.v1.json new file mode 100644 index 00000000..21e96e27 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/samples/risk-profile-evidence.v1.json @@ -0,0 +1,50 @@ +{ + "_type": "https://in-toto.io/Statement/v1", + "predicate": { + "baseline": { + "profile": "stellaops://risk/profiles/container-runtime@2025.10", + "source": "RiskEngine 4.2.0", + "timestamp": "2025-10-29T18:24:12Z" + }, + "riskMatrix": { + "categories": [ + { + "dimension": "attack_surface", + "level": "medium", + "rationale": "External TCP services restricted to TLS; ephemeral build pods." + }, + { + "dimension": "supply_chain", + "level": "low", + "rationale": "All dependencies pinned with SBOM attestations." + }, + { + "dimension": "runtime_controls", + "level": "medium", + "rationale": "Admission policy enforced; runtime drift monitoring pending rollout." + } + ] + }, + "scoring": { + "method": "stellaops://risk/models/v2", + "score": 682, + "tier": "moderate" + }, + "vectors": { + "cisaKevCoverage": 1.0, + "exploitMaturity": "poc_available", + "patchLatencyDays": 2, + "unknownServiceRatio": 0.08 + } + }, + "predicateType": "StellaOps.RiskProfileEvidence@1", + "predicateVersion": "1.0.0", + "subject": [ + { + "digest": { + "sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + }, + "name": "ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + } + ] +} diff --git a/src/Attestor/StellaOps.Attestor.Types/samples/sbom-attestation.v1.json b/src/Attestor/StellaOps.Attestor.Types/samples/sbom-attestation.v1.json new file mode 100644 index 00000000..b961085a --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/samples/sbom-attestation.v1.json @@ -0,0 +1,80 @@ +{ + "_type": "https://in-toto.io/Statement/v1", + "predicate": { + "document": { + "bomRef": 
"urn:uuid:0cda40b3-4e89-4a89-8d94-65011fac1cb1", + "created": "2025-10-29T18:22:33Z", + "digest": { + "sha256": "7c8d89e9c5f7ca1be3f75653d3cb4dd511ee1f65c1b9bc606cd19c578b57e9c2" + }, + "name": "scanner-webservice", + "version": "2025.10.29+build.186" + }, + "packages": [ + { + "bomRef": "pkg:docker/alpine@3.19.2?arch=amd64", + "licenses": [ + { + "expression": "MIT" + } + ], + "name": "alpine", + "purl": "pkg:alpine/alpine-base@3.19.2?arch=amd64", + "version": "3.19.2" + }, + { + "bomRef": "pkg:nuget/MongoDB.Driver@2.23.0", + "licenses": [ + { + "expression": "Apache-2.0" + } + ], + "name": "MongoDB.Driver", + "purl": "pkg:nuget/MongoDB.Driver@2.23.0", + "version": "2.23.0" + }, + { + "bomRef": "pkg:nuget/Serilog@3.1.0", + "licenses": [ + { + "expression": "Apache-2.0" + } + ], + "name": "Serilog", + "purl": "pkg:nuget/Serilog@3.1.0", + "version": "3.1.0" + } + ], + "relationships": [ + { + "source": "pkg:docker/alpine@3.19.2?arch=amd64", + "target": "pkg:nuget/MongoDB.Driver@2.23.0", + "type": "contains" + }, + { + "source": "pkg:docker/alpine@3.19.2?arch=amd64", + "target": "pkg:nuget/Serilog@3.1.0", + "type": "contains" + } + ], + "sbomFormat": "CycloneDX", + "sbomSpecVersion": "1.6.0", + "summary": { + "componentCount": 143, + "dependencyEdges": 212, + "metadataDigest": { + "sha256": "ba7a324b42d9da408d6c03119f6355bf25cb9f7e1a23355b9ab29cbe588b6e9f" + } + } + }, + "predicateType": "StellaOps.SBOMAttestation@1", + "predicateVersion": "1.0.0", + "subject": [ + { + "digest": { + "sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + }, + "name": "ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + } + ] +} diff --git a/src/Attestor/StellaOps.Attestor.Types/samples/scan-results.v1.json b/src/Attestor/StellaOps.Attestor.Types/samples/scan-results.v1.json new file mode 100644 index 00000000..92233027 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/samples/scan-results.v1.json @@ -0,0 
+1,102 @@ +{ + "_type": "https://in-toto.io/Statement/v1", + "predicate": { + "artifacts": [ + { + "digest": { + "sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + }, + "name": "ghcr.io/stellaops/scanner:2025.10.29", + "sbomRef": "urn:uuid:0cda40b3-4e89-4a89-8d94-65011fac1cb1" + } + ], + "environment": { + "platform": "linux/amd64", + "runtime": "stellaops/scanner-worker@2025.10", + "tenant": "acme-industries" + }, + "findings": [ + { + "analysis": { + "evidence": "library reachable from entry trace", + "exploitability": "high", + "normalizedScore": 9.1 + }, + "id": "CVE-2025-30104", + "package": { + "name": "openssl", + "purl": "pkg:apk/alpine/openssl@3.2.2-r2", + "version": "3.2.2-r2" + }, + "references": [ + "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2025-30104" + ], + "severity": "critical", + "status": "affected", + "type": "vulnerability" + }, + { + "analysis": { + "evidence": "policy requires minimum TLS 1.3 support", + "exploitability": "medium", + "normalizedScore": 6.3 + }, + "id": "STELLAOPS-POLICY-2025-0007", + "package": { + "name": "runtime-config", + "purl": "pkg:generic/config/runtime@2025.10.29", + "version": "2025.10.29" + }, + "references": [ + "stellaops://policy/controls/tls-min-version" + ], + "severity": "medium", + "status": "affected", + "type": "policy" + }, + { + "analysis": { + "evidence": "package unused according to entry trace", + "exploitability": "none", + "normalizedScore": 0.0 + }, + "id": "CVE-2023-99999", + "package": { + "name": "curl", + "purl": "pkg:apk/alpine/curl@8.0.1-r0", + "version": "8.0.1-r0" + }, + "references": [ + "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-99999" + ], + "severity": "low", + "status": "not_affected", + "type": "vulnerability" + } + ], + "scanner": { + "name": "StellaOps.Scanner", + "rulesCommit": "a6dfc9b1f4e64d5e82b6f54b0f2dd1e8b13d1f8a", + "version": "2025.10.0" + }, + "summary": { + "affected": 2, + "critical": 1, + "high": 0, + "info": 7, + 
"low": 1, + "medium": 1, + "timestamp": "2025-10-29T18:24:07Z" + } + }, + "predicateType": "StellaOps.ScanResults@1", + "predicateVersion": "1.0.0", + "subject": [ + { + "digest": { + "sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + }, + "name": "ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + } + ] +} diff --git a/src/Attestor/StellaOps.Attestor.Types/samples/vex-attestation.v1.json b/src/Attestor/StellaOps.Attestor.Types/samples/vex-attestation.v1.json new file mode 100644 index 00000000..96498fd6 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/samples/vex-attestation.v1.json @@ -0,0 +1,67 @@ +{ + "_type": "https://in-toto.io/Statement/v1", + "predicate": { + "context": { + "generator": "stellaops://vex/excititor@2025.10", + "issuedAt": "2025-10-30T04:05:19Z", + "supplier": "StellaOps Security Research" + }, + "statements": [ + { + "analysis": { + "detail": "Package not reachable from configured entry trace.", + "impact": "not_affected_usage_scope", + "lastReviewed": "2025-10-30T03:50:02Z" + }, + "expires": "2026-01-30T00:00:00Z", + "justification": "vex:component_not_present", + "product": { + "cpe": "cpe:2.3:a:stellaops:scanner_webservice:2025.10.29:*:*:*:*:*:*:*", + "purl": "pkg:docker/ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + }, + "status": "not_affected", + "subcomponents": [ + { + "path": "/app/bin/StellaOps.Scanner.dll", + "ref": "pkg:nuget/Serilog@3.1.0" + } + ], + "timestamp": "2025-10-30T04:05:19Z", + "vulnerability": "CVE-2025-21901" + }, + { + "analysis": { + "detail": "Patched in base image digest specified in SBOM.", + "impact": "no_known_exploit", + "lastReviewed": "2025-10-29T23:11:44Z" + }, + "expires": "2025-12-15T00:00:00Z", + "justification": "vex:fix_available", + "product": { + "cpe": "cpe:2.3:a:stellaops:scanner_webservice:2025.10.29:*:*:*:*:*:*:*", + "purl": 
"pkg:docker/ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + }, + "status": "affected", + "subcomponents": [ + { + "path": "/usr/lib/libssl.so.3", + "ref": "pkg:apk/alpine/openssl@3.2.2-r2" + } + ], + "timestamp": "2025-10-29T23:09:31Z", + "vulnerability": "CVE-2025-30104" + } + ], + "vexVersion": "OpenVEX-1.0.0" + }, + "predicateType": "StellaOps.VEXAttestation@1", + "predicateVersion": "1.0.0", + "subject": [ + { + "digest": { + "sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + }, + "name": "ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72" + } + ] +} diff --git a/src/Attestor/StellaOps.Attestor.Types/schemas/attestation-common.v1.schema.json b/src/Attestor/StellaOps.Attestor.Types/schemas/attestation-common.v1.schema.json new file mode 100644 index 00000000..aec3d2d5 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/schemas/attestation-common.v1.schema.json @@ -0,0 +1,372 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://schemas.stella-ops.org/attestations/common/v1", + "title": "StellaOps Attestation Common Definitions v1", + "type": "object", + "description": "Shared schema components reused across StellaOps attestation predicates.", + "$defs": { + "schemaVersion": { + "type": "string", + "pattern": "^1\\.0\\.\\d+$", + "description": "Semantic version identifier for predicate schema revisions. Initial release is 1.0.x." 
+ }, + "digestSet": { + "type": "object", + "description": "Map of hashing algorithm to lowercase hexadecimal digest.", + "minProperties": 1, + "maxProperties": 4, + "patternProperties": { + "^[A-Za-z0-9]+$": { + "type": "string", + "pattern": "^[a-f0-9]{32,128}$" + } + }, + "additionalProperties": false + }, + "subject": { + "type": "object", + "additionalProperties": false, + "required": [ + "subjectKind", + "digest" + ], + "properties": { + "subjectKind": { + "type": "string", + "enum": [ + "container-image", + "sbom", + "scan-report", + "policy-report", + "vex-statement", + "risk-profile", + "artifact" + ] + }, + "name": { + "type": "string", + "minLength": 1, + "maxLength": 512 + }, + "uri": { + "type": "string", + "format": "uri" + }, + "digest": { + "$ref": "#/$defs/digestSet" + }, + "annotations": { + "type": "object", + "additionalProperties": { + "type": "string", + "maxLength": 256 + } + }, + "imageDigest": { + "type": "string", + "pattern": "^sha256:[a-f0-9]{64}$" + }, + "mediaType": { + "type": "string", + "maxLength": 128 + }, + "sizeBytes": { + "type": "integer", + "minimum": 0 + } + } + }, + "subjectList": { + "type": "array", + "minItems": 1, + "uniqueItems": true, + "items": { + "$ref": "#/$defs/subject" + } + }, + "issuer": { + "type": "object", + "description": "Identity metadata describing the signer of the attestation predicate.", + "additionalProperties": false, + "required": [ + "issuerType", + "id", + "signingKey" + ], + "properties": { + "issuerType": { + "type": "string", + "enum": [ + "service", + "user", + "automation", + "device" + ] + }, + "id": { + "type": "string", + "minLength": 4, + "maxLength": 256 + }, + "tenantId": { + "type": "string", + "minLength": 1, + "maxLength": 128 + }, + "displayName": { + "type": "string", + "maxLength": 256 + }, + "email": { + "type": "string", + "format": "email" + }, + "workload": { + "type": "object", + "additionalProperties": false, + "required": [ + "service" + ], + "properties": { + "service": 
{ + "type": "string", + "maxLength": 128 + }, + "cluster": { + "type": "string", + "maxLength": 128 + }, + "namespace": { + "type": "string", + "maxLength": 128 + }, + "region": { + "type": "string", + "maxLength": 64 + } + } + }, + "signingKey": { + "type": "object", + "additionalProperties": false, + "required": [ + "keyId", + "mode", + "algorithm" + ], + "properties": { + "keyId": { + "type": "string", + "maxLength": 256 + }, + "mode": { + "type": "string", + "enum": [ + "keyless", + "kms", + "hsm", + "fido2", + "offline" + ] + }, + "algorithm": { + "type": "string", + "maxLength": 64 + }, + "issuer": { + "type": "string", + "maxLength": 256 + }, + "certificateChain": { + "type": "array", + "maxItems": 5, + "items": { + "type": "string", + "minLength": 1 + } + }, + "proof": { + "type": "object", + "additionalProperties": false, + "properties": { + "fulcioIdentity": { + "type": "string", + "maxLength": 256 + }, + "hardwareClass": { + "type": "string", + "maxLength": 128 + } + } + } + } + } + } + }, + "material": { + "type": "object", + "additionalProperties": false, + "required": [ + "uri" + ], + "properties": { + "uri": { + "type": "string", + "minLength": 1, + "maxLength": 512 + }, + "digest": { + "$ref": "#/$defs/digestSet" + }, + "mediaType": { + "type": "string", + "maxLength": 128 + }, + "role": { + "type": "string", + "maxLength": 64 + }, + "annotations": { + "type": "object", + "additionalProperties": { + "type": "string", + "maxLength": 128 + } + } + } + }, + "transparencyLog": { + "type": "object", + "additionalProperties": false, + "required": [ + "logId", + "logUrl", + "uuid" + ], + "properties": { + "logId": { + "type": "string", + "maxLength": 128 + }, + "logUrl": { + "type": "string", + "format": "uri" + }, + "uuid": { + "type": "string", + "maxLength": 128 + }, + "index": { + "type": "integer", + "minimum": 0 + }, + "checkpoint": { + "type": "object", + "additionalProperties": false, + "required": [ + "origin", + "size", + "rootHash", + 
"timestamp" + ], + "properties": { + "origin": { + "type": "string", + "maxLength": 128 + }, + "size": { + "type": "integer", + "minimum": 0 + }, + "rootHash": { + "type": "string", + "pattern": "^[A-Za-z0-9\\+/=]{16,128}$" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + } + }, + "witnessed": { + "type": "boolean" + } + } + }, + "transparencyLogList": { + "type": "array", + "items": { + "$ref": "#/$defs/transparencyLog" + } + }, + "policyContext": { + "type": "object", + "additionalProperties": false, + "properties": { + "policyId": { + "type": "string", + "maxLength": 128 + }, + "policyVersion": { + "type": "string", + "maxLength": 32 + }, + "revisionDigest": { + "$ref": "#/$defs/digestSet" + }, + "mode": { + "type": "string", + "enum": [ + "enforce", + "dry-run" + ] + } + } + }, + "vexStatus": { + "type": "string", + "enum": [ + "not_affected", + "affected", + "fixed", + "under_investigation" + ] + }, + "severity": { + "type": "string", + "enum": [ + "critical", + "high", + "medium", + "low", + "informational" + ] + }, + "explainReference": { + "type": "object", + "additionalProperties": false, + "required": [ + "id", + "type" + ], + "properties": { + "id": { + "type": "string", + "maxLength": 128 + }, + "type": { + "type": "string", + "enum": [ + "rule", + "step", + "binding" + ] + }, + "message": { + "type": "string", + "maxLength": 2048 + } + } + } + } +} diff --git a/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-build-provenance.v1.schema.json b/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-build-provenance.v1.schema.json new file mode 100644 index 00000000..8c770c7a --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-build-provenance.v1.schema.json @@ -0,0 +1,160 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://stella-ops.org/schemas/attestor/stellaops-build-provenance.v1.json", + "title": "Build provenance evidence capturing builder inputs and outputs.", 
+ "type": "object", + "additionalProperties": false, + "required": [ + "schemaVersion", + "buildType", + "builder", + "materials", + "metadata" + ], + "properties": { + "schemaVersion": { + "type": "string", + "const": "StellaOps.BuildProvenance@1", + "description": "Schema version identifier." + }, + "buildType": { + "type": "string", + "description": "Build type or workflow identifier." + }, + "builder": { + "$ref": "#/$defs/BuilderIdentity", + "description": "Builder identity metadata." + }, + "materials": { + "type": "array", + "items": { + "$ref": "#/$defs/MaterialReference" + }, + "minItems": 1, + "description": "Materials consumed during the build." + }, + "metadata": { + "$ref": "#/$defs/BuildMetadata", + "description": "Build metadata information." + }, + "environment": { + "$ref": "#/$defs/EnvironmentMetadata", + "description": "Optional environment details for the build context." + } + }, + "$defs": { + "BuilderIdentity": { + "type": "object", + "additionalProperties": false, + "description": "Identifies the builder that produced the artifact.", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "string", + "description": "Unique builder identity (URI or name)." + }, + "version": { + "type": "string", + "description": "Builder version identifier." + }, + "platform": { + "type": "string", + "description": "Execution platform for the build." + } + } + }, + "DigestReference": { + "type": "object", + "additionalProperties": false, + "description": "Normalized digest entry containing algorithm and value.", + "required": [ + "algorithm", + "value" + ], + "properties": { + "algorithm": { + "type": "string", + "description": "Digest algorithm identifier (e.g., sha256)." + }, + "value": { + "type": "string", + "pattern": "^[A-Fa-f0-9]{64}$", + "description": "Hex-encoded digest value." 
+ } + } + }, + "MaterialReference": { + "type": "object", + "additionalProperties": false, + "description": "Inputs used during build or analysis.", + "required": [ + "uri", + "digests" + ], + "properties": { + "uri": { + "type": "string", + "description": "Material location or identifier." + }, + "digests": { + "type": "array", + "items": { + "$ref": "#/$defs/DigestReference" + }, + "minItems": 1, + "description": "Digests associated with the material." + }, + "note": { + "type": "string", + "description": "Optional annotation about the material." + } + } + }, + "BuildMetadata": { + "type": "object", + "additionalProperties": false, + "description": "Metadata describing build timing and reproducibility.", + "required": [ + "buildStartedOn", + "buildFinishedOn" + ], + "properties": { + "buildStartedOn": { + "type": "string", + "format": "date-time", + "description": "UTC timestamp for build start." + }, + "buildFinishedOn": { + "type": "string", + "format": "date-time", + "description": "UTC timestamp for build completion." + }, + "reproducible": { + "type": "boolean", + "description": "Indicates whether the build is reproducible." + }, + "buildInvocationId": { + "type": "string", + "description": "Unique identifier for the build invocation." + } + } + }, + "EnvironmentMetadata": { + "type": "object", + "additionalProperties": false, + "description": "Optional environment metadata for build context.", + "properties": { + "platform": { + "type": "string", + "description": "Execution platform or runtime." + }, + "imageDigest": { + "$ref": "#/$defs/DigestReference", + "description": "Digest for the environment image." 
+ } + } + } + } +} \ No newline at end of file diff --git a/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-custom-evidence.v1.schema.json b/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-custom-evidence.v1.schema.json new file mode 100644 index 00000000..e218134c --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-custom-evidence.v1.schema.json @@ -0,0 +1,63 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://stella-ops.org/schemas/attestor/stellaops-custom-evidence.v1.json", + "title": "Generic evidence payload for bespoke attestations.", + "type": "object", + "additionalProperties": false, + "required": [ + "schemaVersion", + "subjectDigest", + "kind", + "generatedAt" + ], + "properties": { + "schemaVersion": { + "type": "string", + "const": "StellaOps.CustomEvidence@1", + "description": "Schema version identifier." + }, + "subjectDigest": { + "type": "string", + "pattern": "^sha256:[A-Fa-f0-9]{64}$", + "description": "Artifact digest this evidence references." + }, + "kind": { + "type": "string", + "description": "Custom evidence kind identifier." + }, + "generatedAt": { + "type": "string", + "format": "date-time", + "description": "Timestamp when the evidence was generated." + }, + "properties": { + "type": "array", + "items": { + "$ref": "#/$defs/CustomProperty" + }, + "minItems": 0, + "description": "Optional key/value properties for additional context." + } + }, + "$defs": { + "CustomProperty": { + "type": "object", + "additionalProperties": false, + "description": "Key/value entry for custom evidence.", + "required": [ + "key", + "value" + ], + "properties": { + "key": { + "type": "string", + "description": "Property key." + }, + "value": { + "type": "string", + "description": "Property value serialized as string." 
+ } + } + } + } +} \ No newline at end of file diff --git a/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-policy-evaluation.v1.schema.json b/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-policy-evaluation.v1.schema.json new file mode 100644 index 00000000..506ea70c --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-policy-evaluation.v1.schema.json @@ -0,0 +1,100 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://stella-ops.org/schemas/attestor/stellaops-policy-evaluation.v1.json", + "title": "Policy evaluation outcome for an artifact.", + "type": "object", + "additionalProperties": false, + "required": [ + "schemaVersion", + "subjectDigest", + "policyVersion", + "evaluatedAt", + "outcome", + "decisions" + ], + "properties": { + "schemaVersion": { + "type": "string", + "const": "StellaOps.PolicyEvaluation@1", + "description": "Schema version identifier." + }, + "subjectDigest": { + "type": "string", + "pattern": "^sha256:[A-Fa-f0-9]{64}$", + "description": "Artifact digest that was evaluated." + }, + "policyVersion": { + "type": "string", + "description": "Policy bundle version applied." + }, + "evaluatedAt": { + "type": "string", + "format": "date-time", + "description": "Timestamp when policy evaluation was executed." + }, + "outcome": { + "$ref": "#/$defs/PolicyOutcome", + "description": "Overall evaluation outcome." + }, + "decisions": { + "type": "array", + "items": { + "$ref": "#/$defs/PolicyDecision" + }, + "minItems": 0, + "description": "Detailed rule-level decisions." 
+ } + }, + "$defs": { + "PolicyOutcome": { + "type": "string", + "description": "Policy evaluation outcome values.", + "enum": [ + "pass", + "fail", + "waived" + ] + }, + "PolicyEffect": { + "type": "string", + "description": "Policy rule effect values.", + "enum": [ + "allow", + "deny", + "warn" + ] + }, + "PolicyDecision": { + "type": "object", + "additionalProperties": false, + "description": "Outcome of an individual policy rule evaluation.", + "required": [ + "policyId", + "ruleId", + "effect" + ], + "properties": { + "policyId": { + "type": "string", + "description": "Policy identifier." + }, + "ruleId": { + "type": "string", + "description": "Specific rule identifier." + }, + "effect": { + "$ref": "#/$defs/PolicyEffect", + "description": "Resulting effect of the rule." + }, + "reason": { + "type": "string", + "description": "Explanation for the effect." + }, + "remediation": { + "type": "string", + "description": "Suggested remediation action." + } + } + } + } +} \ No newline at end of file diff --git a/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-risk-profile.v1.schema.json b/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-risk-profile.v1.schema.json new file mode 100644 index 00000000..4a5f6bfe --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-risk-profile.v1.schema.json @@ -0,0 +1,88 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://stella-ops.org/schemas/attestor/stellaops-risk-profile.v1.json", + "title": "Risk scoring evidence summarising exposure for an artifact.", + "type": "object", + "additionalProperties": false, + "required": [ + "schemaVersion", + "subjectDigest", + "generatedAt", + "riskScore", + "riskLevel", + "factors" + ], + "properties": { + "schemaVersion": { + "type": "string", + "const": "StellaOps.RiskProfileEvidence@1", + "description": "Schema version identifier." 
+ }, + "subjectDigest": { + "type": "string", + "pattern": "^sha256:[A-Fa-f0-9]{64}$", + "description": "Artifact digest that the risk profile describes." + }, + "generatedAt": { + "type": "string", + "format": "date-time", + "description": "Timestamp when scoring was performed." + }, + "riskScore": { + "type": "number", + "minimum": 0, + "maximum": 100, + "description": "Normalized risk score between 0 and 100." + }, + "riskLevel": { + "$ref": "#/$defs/RiskLevel", + "description": "Risk level classification." + }, + "factors": { + "type": "array", + "items": { + "$ref": "#/$defs/RiskFactor" + }, + "minItems": 0, + "description": "Factors contributing to the total risk." + } + }, + "$defs": { + "RiskLevel": { + "type": "string", + "description": "Risk level indicators.", + "enum": [ + "critical", + "high", + "medium", + "low", + "informational" + ] + }, + "RiskFactor": { + "type": "object", + "additionalProperties": false, + "description": "Factor contributing to risk calculation.", + "required": [ + "name", + "weight" + ], + "properties": { + "name": { + "type": "string", + "description": "Risk factor name." + }, + "weight": { + "type": "number", + "minimum": 0, + "maximum": 1, + "description": "Relative weight between 0 and 1." + }, + "description": { + "type": "string", + "description": "Additional context for the factor." 
+ } + } + } + } +} \ No newline at end of file diff --git a/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-sbom-attestation.v1.schema.json b/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-sbom-attestation.v1.schema.json new file mode 100644 index 00000000..c7a10cc5 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-sbom-attestation.v1.schema.json @@ -0,0 +1,107 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://stella-ops.org/schemas/attestor/stellaops-sbom-attestation.v1.json", + "title": "SBOM attestation linking an SBOM document to an artifact.", + "type": "object", + "additionalProperties": false, + "required": [ + "schemaVersion", + "subjectDigest", + "sbomFormat", + "sbomDigest", + "componentCount" + ], + "properties": { + "schemaVersion": { + "type": "string", + "const": "StellaOps.SBOMAttestation@1", + "description": "Schema version identifier." + }, + "subjectDigest": { + "type": "string", + "pattern": "^sha256:[A-Fa-f0-9]{64}$", + "description": "Artifact digest referenced by the SBOM." + }, + "sbomFormat": { + "$ref": "#/$defs/SbomFormat", + "description": "SBOM format identifier." + }, + "sbomDigest": { + "$ref": "#/$defs/DigestReference", + "description": "Digest of the SBOM document." + }, + "sbomUri": { + "type": "string", + "description": "Location where the SBOM can be retrieved." + }, + "componentCount": { + "type": "integer", + "minimum": 0, + "description": "Number of components described by the SBOM." + }, + "packages": { + "type": "array", + "items": { + "$ref": "#/$defs/SbomPackage" + }, + "minItems": 0, + "description": "Optional package listing for quick lookups." 
+ } + }, + "$defs": { + "SbomFormat": { + "type": "string", + "description": "Supported SBOM formats.", + "enum": [ + "CycloneDX-1.6", + "SBOM-3.0.0" + ] + }, + "DigestReference": { + "type": "object", + "additionalProperties": false, + "description": "Normalized digest entry containing algorithm and value.", + "required": [ + "algorithm", + "value" + ], + "properties": { + "algorithm": { + "type": "string", + "description": "Digest algorithm identifier (e.g., sha256)." + }, + "value": { + "type": "string", + "pattern": "^[A-Fa-f0-9]{64}$", + "description": "Hex-encoded digest value." + } + } + }, + "SbomPackage": { + "type": "object", + "additionalProperties": false, + "description": "SBOM package entry.", + "required": [ + "purl" + ], + "properties": { + "purl": { + "type": "string", + "description": "Package URL reference." + }, + "version": { + "type": "string", + "description": "Resolved package version." + }, + "licenses": { + "type": "array", + "items": { + "type": "string" + }, + "minItems": 1, + "description": "Associated license identifiers." 
+ } + } + } + } +} \ No newline at end of file diff --git a/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-scan-results.v1.schema.json b/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-scan-results.v1.schema.json new file mode 100644 index 00000000..cc04c797 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-scan-results.v1.schema.json @@ -0,0 +1,122 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://stella-ops.org/schemas/attestor/stellaops-scan-results.v1.json", + "title": "Scanner findings for an artifact at a point in time.", + "type": "object", + "additionalProperties": false, + "required": [ + "schemaVersion", + "subjectDigest", + "scannerName", + "scannerVersion", + "generatedAt", + "findings" + ], + "properties": { + "schemaVersion": { + "type": "string", + "const": "StellaOps.ScanResults@1", + "description": "Schema version identifier." + }, + "subjectDigest": { + "type": "string", + "pattern": "^sha256:[A-Fa-f0-9]{64}$", + "description": "Artifact digest that was scanned." + }, + "scannerName": { + "type": "string", + "description": "Name of the scanner that produced the findings." + }, + "scannerVersion": { + "type": "string", + "description": "Scanner version string." + }, + "generatedAt": { + "type": "string", + "format": "date-time", + "description": "Timestamp when the scan results were generated." + }, + "findings": { + "type": "array", + "items": { + "$ref": "#/$defs/ScanFinding" + }, + "minItems": 0, + "description": "List of findings captured during the scan." 
+ } + }, + "$defs": { + "Severity": { + "type": "string", + "description": "Finding severity scale.", + "enum": [ + "critical", + "high", + "medium", + "low", + "info" + ] + }, + "FindingStatus": { + "type": "string", + "description": "Finding lifecycle status.", + "enum": [ + "detected", + "confirmed", + "fixed", + "not_affected" + ] + }, + "ScanFinding": { + "type": "object", + "additionalProperties": false, + "description": "Individual finding from a scan.", + "required": [ + "id", + "severity", + "status", + "packageName" + ], + "properties": { + "id": { + "type": "string", + "description": "Scanner-issued identifier." + }, + "severity": { + "$ref": "#/$defs/Severity", + "description": "Severity classification." + }, + "status": { + "$ref": "#/$defs/FindingStatus", + "description": "Lifecycle state of the finding." + }, + "packageName": { + "type": "string", + "description": "Affected package name." + }, + "packageVersion": { + "type": "string", + "description": "Affected package version." + }, + "cvssScore": { + "type": "number", + "minimum": 0, + "maximum": 10, + "description": "CVSS base score if available." + }, + "description": { + "type": "string", + "description": "Human-readable description of the finding." + }, + "references": { + "type": "array", + "items": { + "type": "string" + }, + "minItems": 1, + "description": "Reference links or advisory identifiers." 
+ } + } + } + } +} \ No newline at end of file diff --git a/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-vex-attestation.v1.schema.json b/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-vex-attestation.v1.schema.json new file mode 100644 index 00000000..6edb2b27 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Types/schemas/stellaops-vex-attestation.v1.schema.json @@ -0,0 +1,95 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://stella-ops.org/schemas/attestor/stellaops-vex-attestation.v1.json", + "title": "VEX attestation describing vulnerability status for an artifact.", + "type": "object", + "additionalProperties": false, + "required": [ + "schemaVersion", + "subjectDigest", + "generatedAt", + "statements" + ], + "properties": { + "schemaVersion": { + "type": "string", + "const": "StellaOps.VEXAttestation@1", + "description": "Schema version identifier." + }, + "subjectDigest": { + "type": "string", + "pattern": "^sha256:[A-Fa-f0-9]{64}$", + "description": "Artifact digest covered by the VEX statements." + }, + "generatedAt": { + "type": "string", + "format": "date-time", + "description": "Timestamp when the VEX attestation was generated." + }, + "statements": { + "type": "array", + "items": { + "$ref": "#/$defs/VexStatement" + }, + "minItems": 1, + "description": "Collection of VEX statements." + } + }, + "$defs": { + "VexStatus": { + "type": "string", + "description": "VEX statement status values.", + "enum": [ + "not_affected", + "affected", + "under_investigation", + "fixed" + ] + }, + "VexStatement": { + "type": "object", + "additionalProperties": false, + "description": "Single VEX statement covering a vulnerability and status.", + "required": [ + "vulnerabilityId", + "status", + "timestamp" + ], + "properties": { + "vulnerabilityId": { + "type": "string", + "description": "Vulnerability identifier (e.g., CVE)." + }, + "status": { + "$ref": "#/$defs/VexStatus", + "description": "VEX status value." 
+ }, + "timestamp": { + "type": "string", + "format": "date-time", + "description": "UTC timestamp for statement issuance." + }, + "justification": { + "type": "string", + "description": "Justification for the chosen status." + }, + "impactStatement": { + "type": "string", + "description": "Impact description for affected systems." + }, + "actionStatement": { + "type": "string", + "description": "Recommended remediation or action." + }, + "references": { + "type": "array", + "items": { + "type": "string" + }, + "minItems": 1, + "description": "Supporting reference URLs." + } + } + } + } +} \ No newline at end of file diff --git a/src/Attestor/StellaOps.Attestor.Verify/AttestorVerificationEngine.cs b/src/Attestor/StellaOps.Attestor.Verify/AttestorVerificationEngine.cs new file mode 100644 index 00000000..52073025 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Verify/AttestorVerificationEngine.cs @@ -0,0 +1,960 @@ +using System.Buffers.Binary; +using System.Collections.Immutable; +using System.IO; +using System.Linq; +using System.Security.Cryptography; +using System.Security.Cryptography.X509Certificates; +using System.Text; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Storage; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Core.Verification; + +namespace StellaOps.Attestor.Verify; + +public sealed class AttestorVerificationEngine : IAttestorVerificationEngine +{ + private readonly IDsseCanonicalizer _canonicalizer; + private readonly AttestorOptions _options; + private readonly ILogger _logger; + + public AttestorVerificationEngine( + IDsseCanonicalizer canonicalizer, + IOptions options, + ILogger logger) + { + _canonicalizer = canonicalizer ?? throw new ArgumentNullException(nameof(canonicalizer)); + _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task EvaluateAsync( + AttestorEntry entry, + AttestorSubmissionRequest.SubmissionBundle? bundle, + DateTimeOffset evaluationTime, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(entry); + + var signatureIssuer = await EvaluateSignatureAndIssuerAsync(entry, bundle, cancellationToken).ConfigureAwait(false); + var freshness = EvaluateFreshness(entry, evaluationTime); + var transparency = EvaluateTransparency(entry); + var policy = EvaluatePolicy(entry, signatureIssuer.Signatures, signatureIssuer.Issuer, freshness, transparency, bundle is not null); + + return new VerificationReport(policy, signatureIssuer.Issuer, freshness, signatureIssuer.Signatures, transparency); + } + + private async Task<(SignatureEvaluationResult Signatures, IssuerEvaluationResult Issuer)> EvaluateSignatureAndIssuerAsync( + AttestorEntry entry, + AttestorSubmissionRequest.SubmissionBundle? bundle, + CancellationToken cancellationToken) + { + var signatureIssues = new List(); + var issuerIssues = new List(); + + if (bundle is null) + { + var issuerFromEntry = entry.SignerIdentity; + return ( + new SignatureEvaluationResult + { + Status = VerificationSectionStatus.Skipped, + BundleProvided = false, + TotalSignatures = 0, + VerifiedSignatures = 0, + RequiredSignatures = Math.Max(1, _options.Verification.MinimumSignatures), + Issues = Array.Empty() + }, + new IssuerEvaluationResult + { + Status = VerificationSectionStatus.Skipped, + Mode = issuerFromEntry.Mode ?? 
"unknown", + Issuer = issuerFromEntry.Issuer, + SubjectAlternativeName = issuerFromEntry.SubjectAlternativeName, + KeyId = issuerFromEntry.KeyId, + Issues = Array.Empty() + }); + } + + var canonicalRequest = new AttestorSubmissionRequest + { + Bundle = bundle, + Meta = new AttestorSubmissionRequest.SubmissionMeta + { + Artifact = new AttestorSubmissionRequest.ArtifactInfo + { + Sha256 = entry.Artifact.Sha256, + Kind = entry.Artifact.Kind, + ImageDigest = entry.Artifact.ImageDigest, + SubjectUri = entry.Artifact.SubjectUri + }, + BundleSha256 = entry.BundleSha256 + } + }; + + byte[] canonicalBundle; + try + { + canonicalBundle = await _canonicalizer.CanonicalizeAsync(canonicalRequest, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (ex is CryptographicException or FormatException) + { + signatureIssues.Add("bundle_canonicalize_failed"); + _logger.LogWarning(ex, "Failed to canonicalize DSSE bundle for {Uuid}", entry.RekorUuid); + + var issuerFromEntry = entry.SignerIdentity; + return ( + new SignatureEvaluationResult + { + Status = VerificationSectionStatus.Fail, + BundleProvided = true, + TotalSignatures = bundle.Dsse.Signatures.Count, + VerifiedSignatures = 0, + RequiredSignatures = Math.Max(1, _options.Verification.MinimumSignatures), + Issues = signatureIssues.ToArray() + }, + new IssuerEvaluationResult + { + Status = VerificationSectionStatus.Warn, + Mode = issuerFromEntry.Mode ?? (bundle.Mode ?? "unknown"), + Issuer = issuerFromEntry.Issuer, + SubjectAlternativeName = issuerFromEntry.SubjectAlternativeName, + KeyId = issuerFromEntry.KeyId, + Issues = new[] { "issuer_verification_skipped" } + }); + } + + var computedHash = Convert.ToHexString(SHA256.HashData(canonicalBundle)).ToLowerInvariant(); + if (!string.Equals(computedHash, entry.BundleSha256, StringComparison.OrdinalIgnoreCase)) + { + signatureIssues.Add("bundle_hash_mismatch"); + } + + var mode = (entry.SignerIdentity.Mode ?? bundle.Mode ?? 
"unknown").ToLowerInvariant(); + var requiredSignatures = Math.Max(1, _options.Verification.MinimumSignatures); + var totalSignatures = bundle.Dsse.Signatures.Count; + var verifiedSignatures = 0; + string? subjectAlternativeName = null; + + if (!TryDecodeBase64(bundle.Dsse.PayloadBase64, out var payloadBytes)) + { + signatureIssues.Add("bundle_payload_invalid_base64"); + + return ( + new SignatureEvaluationResult + { + Status = VerificationSectionStatus.Fail, + BundleProvided = true, + TotalSignatures = bundle.Dsse.Signatures.Count, + VerifiedSignatures = 0, + RequiredSignatures = requiredSignatures, + Issues = signatureIssues.ToArray() + }, + new IssuerEvaluationResult + { + Status = VerificationSectionStatus.Warn, + Mode = mode, + Issuer = entry.SignerIdentity.Issuer, + SubjectAlternativeName = entry.SignerIdentity.SubjectAlternativeName, + KeyId = entry.SignerIdentity.KeyId, + Issues = issuerIssues.ToArray() + }); + } + + var preAuth = ComputePreAuthEncoding(bundle.Dsse.PayloadType, payloadBytes); + + switch (mode) + { + case "kms": + verifiedSignatures = EvaluateKmsSignature(bundle, preAuth, signatureIssues, issuerIssues); + break; + + case "keyless": + var keylessResult = EvaluateKeylessSignature(entry, bundle, preAuth, signatureIssues, issuerIssues); + verifiedSignatures = keylessResult.VerifiedSignatures; + subjectAlternativeName = keylessResult.SubjectAlternativeName; + break; + + default: + issuerIssues.Add(string.IsNullOrWhiteSpace(mode) ? 
"signer_mode_unknown" : $"signer_mode_unsupported:{mode}"); + break; + } + + var signatureStatus = DetermineSignatureStatus(signatureIssues, verifiedSignatures, requiredSignatures, totalSignatures); + var issuerStatus = DetermineIssuerStatus(issuerIssues, mode, verifiedSignatures > 0); + + return ( + new SignatureEvaluationResult + { + Status = signatureStatus, + BundleProvided = true, + TotalSignatures = totalSignatures, + VerifiedSignatures = verifiedSignatures, + RequiredSignatures = requiredSignatures, + Issues = signatureIssues.ToArray() + }, + new IssuerEvaluationResult + { + Status = issuerStatus, + Mode = mode, + Issuer = entry.SignerIdentity.Issuer, + SubjectAlternativeName = subjectAlternativeName ?? entry.SignerIdentity.SubjectAlternativeName, + KeyId = entry.SignerIdentity.KeyId, + Issues = issuerIssues.ToArray() + }); + } + + private int EvaluateKmsSignature( + AttestorSubmissionRequest.SubmissionBundle bundle, + byte[] preAuthEncoding, + List signatureIssues, + List issuerIssues) + { + if (_options.Security.SignerIdentity.KmsKeys.Count == 0) + { + issuerIssues.Add("kms_key_missing"); + return 0; + } + + var signatures = new List(); + foreach (var signature in bundle.Dsse.Signatures) + { + if (!TryDecodeBase64(signature.Signature, out var signatureBytes)) + { + signatureIssues.Add("signature_invalid_base64"); + return 0; + } + + signatures.Add(signatureBytes); + } + + var verified = 0; + + foreach (var secret in _options.Security.SignerIdentity.KmsKeys) + { + if (!TryDecodeSecret(secret, out var secretBytes)) + { + continue; + } + + using var hmac = new HMACSHA256(secretBytes); + var computed = hmac.ComputeHash(preAuthEncoding); + + foreach (var candidate in signatures) + { + if (CryptographicOperations.FixedTimeEquals(computed, candidate)) + { + verified++; + } + } + } + + if (verified == 0) + { + signatureIssues.Add("signature_invalid"); + } + + return verified; + } + + private (int VerifiedSignatures, string? 
SubjectAlternativeName) EvaluateKeylessSignature( + AttestorEntry entry, + AttestorSubmissionRequest.SubmissionBundle bundle, + byte[] preAuthEncoding, + List signatureIssues, + List issuerIssues) + { + if (bundle.CertificateChain.Count == 0) + { + issuerIssues.Add("certificate_chain_missing"); + return (0, null); + } + + var certificates = new List(); + try + { + foreach (var pem in bundle.CertificateChain) + { + certificates.Add(X509Certificate2.CreateFromPem(pem)); + } + } + catch (Exception ex) when (ex is CryptographicException or ArgumentException) + { + issuerIssues.Add("certificate_chain_invalid"); + _logger.LogWarning(ex, "Failed to parse certificate chain for {Uuid}", entry.RekorUuid); + return (0, null); + } + + var leafCertificate = certificates[0]; + var subjectAltName = GetSubjectAlternativeNames(leafCertificate).FirstOrDefault(); + + if (_options.Security.SignerIdentity.FulcioRoots.Count > 0) + { + using var chain = new X509Chain + { + ChainPolicy = + { + RevocationMode = X509RevocationMode.NoCheck, + VerificationFlags = X509VerificationFlags.NoFlag, + TrustMode = X509ChainTrustMode.CustomRootTrust + } + }; + + foreach (var rootPath in _options.Security.SignerIdentity.FulcioRoots) + { + try + { + if (File.Exists(rootPath)) + { + var rootCertificate = X509CertificateLoader.LoadCertificateFromFile(rootPath); + chain.ChainPolicy.CustomTrustStore.Add(rootCertificate); + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to load Fulcio root {Root}", rootPath); + } + } + + if (!chain.Build(leafCertificate)) + { + var status = string.Join(";", chain.ChainStatus.Select(s => s.StatusInformation.Trim())).Trim(';'); + issuerIssues.Add(string.IsNullOrEmpty(status) ? 
"certificate_chain_untrusted" : $"certificate_chain_untrusted:{status}"); + } + } + + if (_options.Security.SignerIdentity.AllowedSans.Count > 0) + { + var sans = GetSubjectAlternativeNames(leafCertificate); + if (!sans.Any(san => _options.Security.SignerIdentity.AllowedSans.Contains(san, StringComparer.OrdinalIgnoreCase))) + { + issuerIssues.Add("certificate_san_untrusted"); + } + } + + var verified = 0; + foreach (var signature in bundle.Dsse.Signatures) + { + if (!TryDecodeBase64(signature.Signature, out var signatureBytes)) + { + signatureIssues.Add("signature_invalid_base64"); + return (0, subjectAltName); + } + + if (TryVerifyWithCertificate(leafCertificate, preAuthEncoding, signatureBytes)) + { + verified++; + } + } + + if (verified == 0) + { + signatureIssues.Add("signature_invalid"); + } + + return (verified, subjectAltName); + } + + private FreshnessEvaluationResult EvaluateFreshness(AttestorEntry entry, DateTimeOffset evaluationTime) + { + if (entry.CreatedAt == default) + { + return new FreshnessEvaluationResult + { + Status = VerificationSectionStatus.Warn, + CreatedAt = entry.CreatedAt, + EvaluatedAt = evaluationTime, + Age = TimeSpan.Zero, + MaxAge = null, + Issues = new[] { "freshness_unknown" } + }; + } + + var age = evaluationTime - entry.CreatedAt; + var maxAgeMinutes = _options.Verification.FreshnessMaxAgeMinutes; + var warnAgeMinutes = _options.Verification.FreshnessWarnAgeMinutes; + + if (maxAgeMinutes is null) + { + return new FreshnessEvaluationResult + { + Status = VerificationSectionStatus.Skipped, + CreatedAt = entry.CreatedAt, + EvaluatedAt = evaluationTime, + Age = age, + MaxAge = null, + Issues = Array.Empty() + }; + } + + var maxAge = TimeSpan.FromMinutes(maxAgeMinutes.Value); + VerificationSectionStatus status; + var issues = new List(); + + if (age > maxAge) + { + status = VerificationSectionStatus.Fail; + issues.Add("freshness_stale"); + } + else if (warnAgeMinutes is not null && age > TimeSpan.FromMinutes(warnAgeMinutes.Value)) + 
{ + status = VerificationSectionStatus.Warn; + issues.Add("freshness_warning"); + } + else + { + status = VerificationSectionStatus.Pass; + } + + return new FreshnessEvaluationResult + { + Status = status, + CreatedAt = entry.CreatedAt, + EvaluatedAt = evaluationTime, + Age = age, + MaxAge = maxAge, + Issues = issues.ToArray() + }; + } + + private TransparencyEvaluationResult EvaluateTransparency(AttestorEntry entry) + { + var issues = new List(); + + TransparencyEvaluationResult Finalize(VerificationSectionStatus finalStatus, bool proofPresent, bool checkpointPresent, bool inclusionPresent) + { + var witness = entry.Witness; + var witnessPresent = witness is not null; + var witnessMatches = false; + var witnessAggregator = witness?.Aggregator; + var witnessStatus = witness?.Status ?? "missing"; + + if (witness is null) + { + issues.Add("witness_missing"); + if (_options.Verification.RequireWitnessEndorsement) + { + finalStatus = VerificationSectionStatus.Fail; + } + else if (finalStatus != VerificationSectionStatus.Fail) + { + finalStatus = VerificationSectionStatus.Warn; + } + } + else + { + var normalizedStatus = string.IsNullOrWhiteSpace(witness.Status) ? 
"unknown" : witness.Status!; + if (!string.Equals(normalizedStatus, "endorsed", StringComparison.OrdinalIgnoreCase)) + { + issues.Add("witness_status_" + normalizedStatus.ToLowerInvariant()); + if (_options.Verification.RequireWitnessEndorsement) + { + finalStatus = VerificationSectionStatus.Fail; + } + else if (finalStatus != VerificationSectionStatus.Fail) + { + finalStatus = VerificationSectionStatus.Warn; + } + } + + if (!string.IsNullOrWhiteSpace(witness.RootHash) && entry.Proof?.Checkpoint?.RootHash is not null) + { + if (string.Equals(witness.RootHash, entry.Proof.Checkpoint.RootHash, StringComparison.OrdinalIgnoreCase)) + { + witnessMatches = true; + } + else + { + issues.Add("witness_root_mismatch"); + if (_options.Verification.RequireWitnessEndorsement) + { + finalStatus = VerificationSectionStatus.Fail; + } + else if (finalStatus != VerificationSectionStatus.Fail) + { + finalStatus = VerificationSectionStatus.Warn; + } + } + } + } + + return BuildTransparencyResult(finalStatus, issues, proofPresent, checkpointPresent, inclusionPresent, witnessPresent, witnessMatches, witnessAggregator, witnessStatus); + } + + if (entry.Proof is null) + { + issues.Add("proof_missing"); + var finalStatus = _options.Verification.RequireTransparencyInclusion ? VerificationSectionStatus.Fail : VerificationSectionStatus.Warn; + return Finalize(finalStatus, false, false, false); + } + + if (!TryDecodeHash(entry.BundleSha256, out var bundleHash)) + { + issues.Add("bundle_hash_decode_failed"); + return Finalize(VerificationSectionStatus.Fail, true, entry.Proof.Checkpoint is not null, entry.Proof.Inclusion is not null); + } + + if (entry.Proof.Inclusion is null) + { + issues.Add("proof_inclusion_missing"); + var finalStatus = _options.Verification.RequireTransparencyInclusion ? 
VerificationSectionStatus.Fail : VerificationSectionStatus.Warn; + return Finalize(finalStatus, true, entry.Proof.Checkpoint is not null, false); + } + + if (entry.Proof.Inclusion.LeafHash is not null) + { + if (!TryDecodeHash(entry.Proof.Inclusion.LeafHash, out var proofLeaf)) + { + issues.Add("proof_leafhash_decode_failed"); + return Finalize(VerificationSectionStatus.Fail, true, entry.Proof.Checkpoint is not null, true); + } + + if (!CryptographicOperations.FixedTimeEquals(bundleHash, proofLeaf)) + { + issues.Add("proof_leafhash_mismatch"); + } + } + + var current = bundleHash; + var inclusionNodesPresent = entry.Proof.Inclusion.Path.Count > 0; + + if (inclusionNodesPresent) + { + var nodes = new List(); + foreach (var element in entry.Proof.Inclusion.Path) + { + if (!ProofPathNode.TryParse(element, out var node)) + { + issues.Add("proof_path_decode_failed"); + return Finalize(VerificationSectionStatus.Fail, true, entry.Proof.Checkpoint is not null, true); + } + + if (!node.HasOrientation) + { + issues.Add("proof_path_orientation_missing"); + return Finalize(VerificationSectionStatus.Fail, true, entry.Proof.Checkpoint is not null, true); + } + + nodes.Add(node); + } + + foreach (var node in nodes) + { + current = node.Left ? HashInternal(node.Hash, current) : HashInternal(current, node.Hash); + } + } + + if (entry.Proof.Checkpoint is null) + { + issues.Add("checkpoint_missing"); + var finalStatus = _options.Verification.RequireCheckpoint ? VerificationSectionStatus.Fail : VerificationSectionStatus.Warn; + return Finalize(finalStatus, true, false, inclusionNodesPresent); + } + + if (!TryDecodeHash(entry.Proof.Checkpoint.RootHash, out var rootHash)) + { + issues.Add("checkpoint_root_decode_failed"); + return Finalize(VerificationSectionStatus.Fail, true, true, inclusionNodesPresent); + } + + if (!CryptographicOperations.FixedTimeEquals(current, rootHash)) + { + issues.Add("proof_root_mismatch"); + } + + var status = issues.Count == 0 ? 
VerificationSectionStatus.Pass : VerificationSectionStatus.Fail; + return Finalize(status, true, true, inclusionNodesPresent); + } + + private PolicyEvaluationResult EvaluatePolicy( + AttestorEntry entry, + SignatureEvaluationResult signatures, + IssuerEvaluationResult issuer, + FreshnessEvaluationResult freshness, + TransparencyEvaluationResult transparency, + bool bundleProvided) + { + var issues = new List(); + var status = VerificationSectionStatus.Pass; + + if (!string.Equals(entry.Status, "included", StringComparison.OrdinalIgnoreCase)) + { + issues.Add($"log_status_{entry.Status.ToLowerInvariant()}"); + status = VerificationSectionStatus.Fail; + } + + if (_options.Verification.RequireBundleForSignatureValidation && !bundleProvided) + { + issues.Add("bundle_required"); + status = VerificationSectionStatus.Fail; + } + + status = CombinePolicyStatus(status, signatures.Status, "signatures", issues); + status = CombinePolicyStatus(status, issuer.Status, "issuer", issues); + status = CombinePolicyStatus(status, freshness.Status, "freshness", issues, warnOnly: true); + status = CombinePolicyStatus(status, transparency.Status, "transparency", issues); + + var verdict = status switch + { + VerificationSectionStatus.Fail => "fail", + VerificationSectionStatus.Warn => "warn", + VerificationSectionStatus.Pass => "pass", + _ => "unknown" + }; + + var attributes = ImmutableDictionary.Empty + .Add("status", entry.Status ?? "unknown") + .Add("logBackend", entry.Log.Backend ?? "primary") + .Add("logUrl", entry.Log.Url ?? 
string.Empty); + + if (entry.Index.HasValue) + { + attributes = attributes.Add("index", entry.Index.Value.ToString()); + } + + if (entry.Proof?.Checkpoint?.Timestamp is not null) + { + attributes = attributes.Add("checkpointTs", entry.Proof.Checkpoint.Timestamp.Value.ToString("O")); + } + + return new PolicyEvaluationResult + { + Status = status, + PolicyId = _options.Verification.PolicyId, + PolicyVersion = _options.Verification.PolicyVersion, + Verdict = verdict, + Issues = issues.Distinct(StringComparer.OrdinalIgnoreCase).ToArray(), + Attributes = attributes + }; + } + + private static VerificationSectionStatus DetermineSignatureStatus( + IReadOnlyCollection issues, + int verified, + int required, + int total) + { + if (total == 0) + { + return VerificationSectionStatus.Fail; + } + + if (issues.Count > 0) + { + return issues.Contains("signature_invalid", StringComparer.OrdinalIgnoreCase) + || issues.Contains("bundle_payload_invalid_base64", StringComparer.OrdinalIgnoreCase) + || issues.Contains("bundle_hash_mismatch", StringComparer.OrdinalIgnoreCase) + ? VerificationSectionStatus.Fail + : VerificationSectionStatus.Warn; + } + + return verified >= required ? VerificationSectionStatus.Pass : VerificationSectionStatus.Fail; + } + + private static VerificationSectionStatus DetermineIssuerStatus( + IReadOnlyCollection issues, + string mode, + bool signatureVerified) + { + if (issues.Count == 0) + { + return signatureVerified ? 
VerificationSectionStatus.Pass : VerificationSectionStatus.Warn; + } + + if (issues.Any(issue => issue.StartsWith("certificate_", StringComparison.OrdinalIgnoreCase) || issue.StartsWith("kms_", StringComparison.OrdinalIgnoreCase))) + { + return VerificationSectionStatus.Fail; + } + + if (issues.Any(issue => issue.StartsWith("signer_mode", StringComparison.OrdinalIgnoreCase))) + { + return VerificationSectionStatus.Fail; + } + + return VerificationSectionStatus.Warn; + } + + private static VerificationSectionStatus CombinePolicyStatus( + VerificationSectionStatus current, + VerificationSectionStatus next, + string component, + List issues, + bool warnOnly = false) + { + if (next == VerificationSectionStatus.Fail) + { + issues.Add($"policy_blocked:{component}"); + return VerificationSectionStatus.Fail; + } + + if (next == VerificationSectionStatus.Warn && !warnOnly) + { + issues.Add($"policy_warn:{component}"); + return current == VerificationSectionStatus.Fail ? current : VerificationSectionStatus.Warn; + } + + if (next == VerificationSectionStatus.Warn && warnOnly) + { + issues.Add($"policy_warn:{component}"); + return current; + } + + return current; + } + + private static TransparencyEvaluationResult BuildTransparencyResult( + VerificationSectionStatus status, + List issues, + bool proofPresent, + bool checkpointPresent, + bool inclusionPresent, + bool witnessPresent, + bool witnessMatches, + string? 
witnessAggregator, + string witnessStatus) + { + return new TransparencyEvaluationResult + { + Status = status, + ProofPresent = proofPresent, + CheckpointPresent = checkpointPresent, + InclusionPathPresent = inclusionPresent, + WitnessPresent = witnessPresent, + WitnessMatchesRoot = witnessMatches, + WitnessAggregator = witnessAggregator, + WitnessStatus = witnessStatus, + Issues = issues.ToArray() + }; + } + + private static bool TryVerifyWithCertificate(X509Certificate2 certificate, byte[] preAuthEncoding, byte[] signature) + { + try + { + var ecdsa = certificate.GetECDsaPublicKey(); + if (ecdsa is not null) + { + using (ecdsa) + { + if (ecdsa.VerifyData(preAuthEncoding, signature, HashAlgorithmName.SHA256)) + { + return true; + } + } + } + + var rsa = certificate.GetRSAPublicKey(); + if (rsa is not null) + { + using (rsa) + { + if (rsa.VerifyData(preAuthEncoding, signature, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1)) + { + return true; + } + } + } + } + catch (CryptographicException) + { + return false; + } + + return false; + } + + private static IEnumerable GetSubjectAlternativeNames(X509Certificate2 certificate) + { + foreach (var extension in certificate.Extensions) + { + if (string.Equals(extension.Oid?.Value, "2.5.29.17", StringComparison.Ordinal)) + { + var formatted = extension.Format(true); + var lines = formatted.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries); + foreach (var line in lines) + { + var parts = line.Split('='); + if (parts.Length == 2) + { + yield return parts[1].Trim(); + } + } + } + } + } + + private static byte[] ComputePreAuthEncoding(string payloadType, byte[] payload) + { + var headerBytes = Encoding.UTF8.GetBytes(payloadType ?? 
string.Empty); + var buffer = new byte[6 + 8 + headerBytes.Length + 8 + payload.Length]; + var offset = 0; + + Encoding.ASCII.GetBytes("DSSEv1", 0, 6, buffer, offset); + offset += 6; + + BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, 8), (ulong)headerBytes.Length); + offset += 8; + Buffer.BlockCopy(headerBytes, 0, buffer, offset, headerBytes.Length); + offset += headerBytes.Length; + + BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, 8), (ulong)payload.Length); + offset += 8; + Buffer.BlockCopy(payload, 0, buffer, offset, payload.Length); + + return buffer; + } + + private static byte[] HashInternal(byte[] left, byte[] right) + { + using var sha = SHA256.Create(); + var buffer = new byte[1 + left.Length + right.Length]; + buffer[0] = 0x01; + Buffer.BlockCopy(left, 0, buffer, 1, left.Length); + Buffer.BlockCopy(right, 0, buffer, 1 + left.Length, right.Length); + return sha.ComputeHash(buffer); + } + + private static bool TryDecodeSecret(string value, out byte[] bytes) + { + if (string.IsNullOrWhiteSpace(value)) + { + bytes = Array.Empty(); + return false; + } + + value = value.Trim(); + + if (value.StartsWith("base64:", StringComparison.OrdinalIgnoreCase)) + { + return TryDecodeBase64(value[7..], out bytes); + } + + if (value.StartsWith("hex:", StringComparison.OrdinalIgnoreCase)) + { + return TryDecodeHex(value[4..], out bytes); + } + + if (TryDecodeBase64(value, out bytes)) + { + return true; + } + + if (TryDecodeHex(value, out bytes)) + { + return true; + } + + bytes = Array.Empty(); + return false; + } + + private static bool TryDecodeBase64(string value, out byte[] bytes) + { + try + { + bytes = Convert.FromBase64String(value); + return true; + } + catch (FormatException) + { + bytes = Array.Empty(); + return false; + } + } + + private static bool TryDecodeHex(string value, out byte[] bytes) + { + try + { + bytes = Convert.FromHexString(value); + return true; + } + catch (FormatException) + { + bytes = Array.Empty(); + return false; + } 
+ } + + private static bool TryDecodeHash(string? value, out byte[] bytes) + { + bytes = Array.Empty(); + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + var trimmed = value.Trim(); + + if (TryDecodeHex(trimmed, out bytes)) + { + return true; + } + + if (TryDecodeBase64(trimmed, out bytes)) + { + return true; + } + + bytes = Array.Empty(); + return false; + } + + private readonly struct ProofPathNode + { + private ProofPathNode(bool hasOrientation, bool left, byte[] hash) + { + HasOrientation = hasOrientation; + Left = left; + Hash = hash; + } + + public bool HasOrientation { get; } + + public bool Left { get; } + + public byte[] Hash { get; } + + public static bool TryParse(string value, out ProofPathNode node) + { + node = default; + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + var trimmed = value.Trim(); + var parts = trimmed.Split(':', 2); + bool hasOrientation = false; + bool left = false; + string hashPart = trimmed; + + if (parts.Length == 2) + { + var prefix = parts[0].Trim().ToLowerInvariant(); + if (prefix is "l" or "left") + { + hasOrientation = true; + left = true; + } + else if (prefix is "r" or "right") + { + hasOrientation = true; + left = false; + } + + hashPart = parts[1].Trim(); + } + + if (!TryDecodeHash(hashPart, out var hash)) + { + return false; + } + + node = new ProofPathNode(hasOrientation, left, hash); + return true; + } + } +} diff --git a/src/Attestor/StellaOps.Attestor.Verify/IAttestorVerificationEngine.cs b/src/Attestor/StellaOps.Attestor.Verify/IAttestorVerificationEngine.cs new file mode 100644 index 00000000..f711fbdd --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Verify/IAttestorVerificationEngine.cs @@ -0,0 +1,14 @@ +using StellaOps.Attestor.Core.Storage; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Core.Verification; + +namespace StellaOps.Attestor.Verify; + +public interface IAttestorVerificationEngine +{ + Task EvaluateAsync( + AttestorEntry entry, + 
AttestorSubmissionRequest.SubmissionBundle? bundle, + DateTimeOffset evaluationTime, + CancellationToken cancellationToken = default); +} diff --git a/src/Attestor/StellaOps.Attestor.Verify/StellaOps.Attestor.Verify.csproj b/src/Attestor/StellaOps.Attestor.Verify/StellaOps.Attestor.Verify.csproj new file mode 100644 index 00000000..641e2afe --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.Verify/StellaOps.Attestor.Verify.csproj @@ -0,0 +1,12 @@ + + + net10.0 + preview + enable + enable + true + + + + + diff --git a/src/Attestor/StellaOps.Attestor.Verify/TASKS.md b/src/Attestor/StellaOps.Attestor.Verify/TASKS.md index 5fc5af25..e7816c37 100644 --- a/src/Attestor/StellaOps.Attestor.Verify/TASKS.md +++ b/src/Attestor/StellaOps.Attestor.Verify/TASKS.md @@ -3,11 +3,11 @@ ## Sprint 73 – Policy Integration | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| ATTEST-VERIFY-73-001 | TODO | Verification Guild, Policy Guild | VERPOL-73-001, ATTESTOR-73-002 | Implement verification engine: policy evaluation, issuer trust resolution, freshness, signature count, transparency checks; produce structured reports. | Engine returns report DTOs; policy rules honored; unit tests cover pass/fail scenarios. | -| ATTEST-VERIFY-73-002 | TODO | Verification Guild | ATTEST-VERIFY-73-001 | Add caching layer keyed by `(subject, envelope_id, policy_version)` with TTL and invalidation on new evidence. | Cache reduces repeated verification cost; tests cover cache hits/misses. | +| ATTEST-VERIFY-73-001 | DONE | Verification Guild, Policy Guild | VERPOL-73-001, ATTESTOR-73-002 | Implement verification engine: policy evaluation, issuer trust resolution, freshness, signature count, transparency checks; produce structured reports. | Engine returns report DTOs; policy rules honored; unit tests cover pass/fail scenarios. 
| +| ATTEST-VERIFY-73-002 | DONE | Verification Guild | ATTEST-VERIFY-73-001 | Add caching layer keyed by `(subject, envelope_id, policy_version)` with TTL and invalidation on new evidence. | Cache reduces repeated verification cost; tests cover cache hits/misses. | ## Sprint 74 – Explainability & Observability | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| ATTEST-VERIFY-74-001 | TODO | Verification Guild, Observability Guild | ATTEST-VERIFY-73-001 | Emit telemetry (spans/metrics) tagged by subject, issuer, policy, result; integrate with dashboards. | Metrics visible; spans present; SLO thresholds defined. | -| ATTEST-VERIFY-74-002 | TODO | Verification Guild, Docs Guild | ATTEST-VERIFY-73-001 | Document verification report schema and explainability in `/docs/modules/attestor/workflows.md`. | Documentation merged; examples verified via tests. | +| ATTEST-VERIFY-74-001 | DONE | Verification Guild, Observability Guild | ATTEST-VERIFY-73-001 | Emit telemetry (spans/metrics) tagged by subject, issuer, policy, result; integrate with dashboards. | Metrics visible; spans present; SLO thresholds defined. | +| ATTEST-VERIFY-74-002 | DONE (2025-11-01) | Verification Guild, Docs Guild | ATTEST-VERIFY-73-001 | Document verification report schema and explainability in `/docs/modules/attestor/workflows.md`. | Documentation merged; examples verified via tests. 
| diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Bulk/BulkVerificationModels.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Bulk/BulkVerificationModels.cs new file mode 100644 index 00000000..0b97f3d2 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Bulk/BulkVerificationModels.cs @@ -0,0 +1,94 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using StellaOps.Attestor.Core.Verification; + +namespace StellaOps.Attestor.Core.Bulk; + +public sealed class BulkVerificationJob +{ + public string Id { get; set; } = Guid.NewGuid().ToString("N"); + + public int Version { get; set; } + + public BulkVerificationJobStatus Status { get; set; } = BulkVerificationJobStatus.Queued; + + public DateTimeOffset CreatedAt { get; set; } = DateTimeOffset.UtcNow; + + public DateTimeOffset? StartedAt { get; set; } + + public DateTimeOffset? CompletedAt { get; set; } + + public BulkVerificationJobContext Context { get; set; } = new(); + + public IList Items { get; set; } = new List(); + + public int ProcessedCount { get; set; } + + public int SucceededCount { get; set; } + + public int FailedCount { get; set; } + + public string? FailureReason { get; set; } + + public bool AllCompleted => Items.Count > 0 && Items.All(i => i.Status is BulkVerificationItemStatus.Succeeded or BulkVerificationItemStatus.Failed); +} + +public sealed class BulkVerificationJobItem +{ + public int Index { get; set; } + + public BulkVerificationItemRequest Request { get; set; } = new(); + + public BulkVerificationItemStatus Status { get; set; } = BulkVerificationItemStatus.Pending; + + public DateTimeOffset? StartedAt { get; set; } + + public DateTimeOffset? CompletedAt { get; set; } + + public AttestorVerificationResult? Result { get; set; } + + public string? Error { get; set; } +} + +public sealed class BulkVerificationItemRequest +{ + public string? Uuid { get; set; } + + public string? 
ArtifactSha256 { get; set; } + + public string? Subject { get; set; } + + public string? EnvelopeId { get; set; } + + public string? PolicyVersion { get; set; } + + public bool RefreshProof { get; set; } +} + +public sealed class BulkVerificationJobContext +{ + public string? Tenant { get; set; } + + public string? RequestedBy { get; set; } + + public string? ClientId { get; set; } + + public IList Scopes { get; set; } = new List(); +} + +public enum BulkVerificationJobStatus +{ + Queued, + Running, + Completed, + Failed +} + +public enum BulkVerificationItemStatus +{ + Pending, + Running, + Succeeded, + Failed +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Bulk/IBulkVerificationJobStore.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Bulk/IBulkVerificationJobStore.cs new file mode 100644 index 00000000..1431c4eb --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Bulk/IBulkVerificationJobStore.cs @@ -0,0 +1,18 @@ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Attestor.Core.Bulk; + +public interface IBulkVerificationJobStore +{ + Task CreateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default); + + Task GetAsync(string jobId, CancellationToken cancellationToken = default); + + Task TryAcquireAsync(CancellationToken cancellationToken = default); + + Task TryUpdateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default); + + Task CountQueuedAsync(CancellationToken cancellationToken = default); +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorActivitySource.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorActivitySource.cs new file mode 100644 index 00000000..6a72bca2 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorActivitySource.cs @@ -0,0 +1,59 @@ +using System; +using 
System.Collections.Generic; +using System.Diagnostics; + +namespace StellaOps.Attestor.Core.Observability; + +public sealed class AttestorActivitySource : IDisposable +{ + public const string Name = "StellaOps.Attestor"; + + private readonly ActivitySource _source = new(Name); + private bool _disposed; + + public Activity? StartVerification(string subject, string issuer, string policy) + { + var tags = new ActivityTagsCollection + { + { AttestorTelemetryTags.Subject, subject }, + { AttestorTelemetryTags.Issuer, issuer }, + { AttestorTelemetryTags.Policy, policy } + }; + + return _source.StartActivity("attestor.verify", ActivityKind.Internal, default(ActivityContext), tags); + } + + public Activity? StartProofRefresh(string backend, string policy) + { + var tags = new ActivityTagsCollection + { + { "attestor.log.backend", backend }, + { AttestorTelemetryTags.Policy, policy } + }; + + return _source.StartActivity("attestor.verify.refresh_proof", ActivityKind.Internal, default(ActivityContext), tags); + } + + public Activity? StartWitnessFetch(string aggregator) + { + var tags = new ActivityTagsCollection + { + { AttestorTelemetryTags.WitnessAggregator, string.IsNullOrWhiteSpace(aggregator) ? 
"unknown" : aggregator } + }; + + return _source.StartActivity("attestor.verify.fetch_witness", ActivityKind.Internal, default(ActivityContext), tags); + } + + public ActivitySource Source => _source; + + public void Dispose() + { + if (_disposed) + { + return; + } + + _source.Dispose(); + _disposed = true; + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorMetrics.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorMetrics.cs index 2a605e95..bba220d2 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorMetrics.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorMetrics.cs @@ -1,45 +1,75 @@ -using System.Diagnostics.Metrics; - -namespace StellaOps.Attestor.Core.Observability; - -public sealed class AttestorMetrics : IDisposable -{ - public const string MeterName = "StellaOps.Attestor"; - - private readonly Meter _meter; - private bool _disposed; - - public AttestorMetrics() - { - _meter = new Meter(MeterName); - SubmitTotal = _meter.CreateCounter("attestor.submit_total", description: "Total submission attempts grouped by result and backend."); - SubmitLatency = _meter.CreateHistogram("attestor.submit_latency_seconds", unit: "s", description: "Submission latency in seconds per backend."); - ProofFetchTotal = _meter.CreateCounter("attestor.proof_fetch_total", description: "Proof fetch attempts grouped by result."); - VerifyTotal = _meter.CreateCounter("attestor.verify_total", description: "Verification attempts grouped by result."); - DedupeHitsTotal = _meter.CreateCounter("attestor.dedupe_hits_total", description: "Number of dedupe hits by outcome."); - ErrorTotal = _meter.CreateCounter("attestor.errors_total", description: "Total errors grouped by type."); - } - - public Counter SubmitTotal { get; } - - public Histogram SubmitLatency { get; } - - public Counter ProofFetchTotal { get; } - - public Counter VerifyTotal 
{ get; } - - public Counter DedupeHitsTotal { get; } - - public Counter ErrorTotal { get; } - - public void Dispose() - { - if (_disposed) - { - return; - } - - _meter.Dispose(); - _disposed = true; - } -} +using System.Diagnostics.Metrics; + +namespace StellaOps.Attestor.Core.Observability; + +public sealed class AttestorMetrics : IDisposable +{ + public const string MeterName = "StellaOps.Attestor"; + + private readonly Meter _meter; + private bool _disposed; + + public AttestorMetrics() + { + _meter = new Meter(MeterName); + SubmitTotal = _meter.CreateCounter("attestor.submit_total", description: "Total submission attempts grouped by result and backend."); + SubmitLatency = _meter.CreateHistogram("attestor.submit_latency_seconds", unit: "s", description: "Submission latency in seconds per backend."); + SignTotal = _meter.CreateCounter("attestor.sign_total", description: "Total signing attempts grouped by result/algorithm/provider."); + SignLatency = _meter.CreateHistogram("attestor.sign_latency_seconds", unit: "s", description: "Signing latency in seconds grouped by algorithm/provider."); + ProofFetchTotal = _meter.CreateCounter("attestor.proof_fetch_total", description: "Proof fetch attempts grouped by result."); + WitnessFetchTotal = _meter.CreateCounter("attestor.witness_fetch_total", description: "Transparency witness fetch attempts grouped by result and aggregator."); + WitnessFetchLatency = _meter.CreateHistogram("attestor.witness_fetch_latency_seconds", unit: "s", description: "Transparency witness fetch latency grouped by aggregator."); + VerifyTotal = _meter.CreateCounter("attestor.verify_total", description: "Verification attempts grouped by subject, issuer, policy, and result."); + VerifyLatency = _meter.CreateHistogram("attestor.verify_latency_seconds", unit: "s", description: "Verification latency in seconds grouped by subject, issuer, policy, and result."); + VerifyCacheLookupTotal = _meter.CreateCounter("attestor.verify_cache_lookup_total", 
description: "Verification cache lookups."); + VerifyCacheHitTotal = _meter.CreateCounter("attestor.verify_cache_hit_total", description: "Verification cache hits."); + DedupeHitsTotal = _meter.CreateCounter("attestor.dedupe_hits_total", description: "Number of dedupe hits by outcome."); + BulkJobsTotal = _meter.CreateCounter("attestor.bulk_jobs_total", description: "Bulk verification jobs processed grouped by status."); + BulkItemsTotal = _meter.CreateCounter("attestor.bulk_items_total", description: "Bulk verification items processed grouped by result."); + BulkJobDuration = _meter.CreateHistogram("attestor.bulk_job_duration_seconds", unit: "s", description: "Bulk verification job duration in seconds grouped by status."); + ErrorTotal = _meter.CreateCounter("attestor.errors_total", description: "Total errors grouped by type."); + } + + public Counter SubmitTotal { get; } + + public Histogram SubmitLatency { get; } + + public Counter SignTotal { get; } + + public Histogram SignLatency { get; } + + public Counter ProofFetchTotal { get; } + + public Counter WitnessFetchTotal { get; } + + public Histogram WitnessFetchLatency { get; } + + public Counter VerifyTotal { get; } + + public Histogram VerifyLatency { get; } + + public Counter VerifyCacheLookupTotal { get; } + + public Counter VerifyCacheHitTotal { get; } + + public Counter DedupeHitsTotal { get; } + + public Counter BulkJobsTotal { get; } + + public Counter BulkItemsTotal { get; } + + public Histogram BulkJobDuration { get; } + + public Counter ErrorTotal { get; } + + public void Dispose() + { + if (_disposed) + { + return; + } + + _meter.Dispose(); + _disposed = true; + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorTelemetryTags.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorTelemetryTags.cs new file mode 100644 index 00000000..21a5b3e8 --- /dev/null +++ 
b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorTelemetryTags.cs @@ -0,0 +1,10 @@ +namespace StellaOps.Attestor.Core.Observability; + +public static class AttestorTelemetryTags +{ + public const string Subject = "attestor.subject"; + public const string Issuer = "attestor.issuer"; + public const string Policy = "attestor.policy"; + public const string Result = "result"; + public const string WitnessAggregator = "attestor.witness.aggregator"; +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Offline/AttestorOfflineBundle.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Offline/AttestorOfflineBundle.cs new file mode 100644 index 00000000..13ddb3a5 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Offline/AttestorOfflineBundle.cs @@ -0,0 +1,74 @@ +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.Core.Offline; + +public static class AttestorBundleVersions +{ + public const string V1 = "stellaops.attestor.bundle/1"; + + public const string Current = V1; +} + +public sealed class AttestorBundlePackage +{ + public string Version { get; init; } = AttestorBundleVersions.Current; + + public DateTimeOffset GeneratedAt { get; init; } + + public IReadOnlyList Items { get; init; } = Array.Empty(); + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ContinuationToken { get; init; } +} + +public sealed class AttestorBundleItem +{ + public AttestorEntry Entry { get; init; } = new(); + + /// + /// Canonical DSSE envelope encoded as base64 (UTF-8 JSON). + /// + public string CanonicalBundle { get; init; } = string.Empty; + + /// + /// Optional Rekor proof payload encoded as base64 (UTF-8 JSON). + /// + public string? Proof { get; init; } + + public IReadOnlyDictionary? 
Metadata { get; init; } +} + +public sealed class AttestorBundleExportRequest +{ + public IReadOnlyList Uuids { get; init; } = Array.Empty(); + + public string? Subject { get; init; } + + public string? Type { get; init; } + + public string? Issuer { get; init; } + + public string? Scope { get; init; } + + public DateTimeOffset? CreatedAfter { get; init; } + + public DateTimeOffset? CreatedBefore { get; init; } + + public int? Limit { get; init; } + + public string? ContinuationToken { get; init; } +} + +public sealed class AttestorBundleImportResult +{ + public int Imported { get; init; } + + public int Updated { get; init; } + + public int Skipped { get; init; } + + public IReadOnlyList Issues { get; init; } = Array.Empty(); +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Offline/IAttestorBundleService.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Offline/IAttestorBundleService.cs new file mode 100644 index 00000000..be463dff --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Offline/IAttestorBundleService.cs @@ -0,0 +1,11 @@ +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Attestor.Core.Offline; + +public interface IAttestorBundleService +{ + Task ExportAsync(AttestorBundleExportRequest request, CancellationToken cancellationToken = default); + + Task ImportAsync(AttestorBundlePackage package, CancellationToken cancellationToken = default); +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Options/AttestorOptions.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Options/AttestorOptions.cs index 59c0b40f..3254c984 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Options/AttestorOptions.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Options/AttestorOptions.cs @@ -1,148 +1,301 @@ -using System.Collections.Generic; - -namespace StellaOps.Attestor.Core.Options; - -/// -/// Strongly typed configuration for 
the Attestor service. -/// -public sealed class AttestorOptions -{ - public string Listen { get; set; } = "https://0.0.0.0:8444"; - - public SecurityOptions Security { get; set; } = new(); - - public RekorOptions Rekor { get; set; } = new(); - - public MongoOptions Mongo { get; set; } = new(); - - public RedisOptions Redis { get; set; } = new(); - - public S3Options S3 { get; set; } = new(); - - public QuotaOptions Quotas { get; set; } = new(); - - public TelemetryOptions Telemetry { get; set; } = new(); - - public sealed class SecurityOptions - { - public MtlsOptions Mtls { get; set; } = new(); - - public AuthorityOptions Authority { get; set; } = new(); - - public SignerIdentityOptions SignerIdentity { get; set; } = new(); - } - - public sealed class MtlsOptions - { - public bool RequireClientCertificate { get; set; } = true; - - public string? CaBundle { get; set; } - - public IList AllowedSubjects { get; set; } = new List(); - - public IList AllowedThumbprints { get; set; } = new List(); - } - - public sealed class AuthorityOptions - { - public string? Issuer { get; set; } - - public string? JwksUrl { get; set; } - - public string? RequireSenderConstraint { get; set; } - - public bool RequireHttpsMetadata { get; set; } = true; - - public IList Audiences { get; set; } = new List(); - - public IList RequiredScopes { get; set; } = new List(); - } - - public sealed class SignerIdentityOptions - { - public IList Mode { get; set; } = new List { "keyless", "kms" }; - - public IList FulcioRoots { get; set; } = new List(); - - public IList AllowedSans { get; set; } = new List(); - - public IList KmsKeys { get; set; } = new List(); - } - - public sealed class RekorOptions - { - public RekorBackendOptions Primary { get; set; } = new(); - - public RekorMirrorOptions Mirror { get; set; } = new(); - } - - public class RekorBackendOptions - { - public string? 
Url { get; set; } - - public int ProofTimeoutMs { get; set; } = 15_000; - - public int PollIntervalMs { get; set; } = 250; - - public int MaxAttempts { get; set; } = 60; - } - - public sealed class RekorMirrorOptions : RekorBackendOptions - { - public bool Enabled { get; set; } - } - - public sealed class MongoOptions - { - public string? Uri { get; set; } - - public string Database { get; set; } = "attestor"; - - public string EntriesCollection { get; set; } = "entries"; - - public string DedupeCollection { get; set; } = "dedupe"; - - public string AuditCollection { get; set; } = "audit"; - } - - public sealed class RedisOptions - { - public string? Url { get; set; } - - public string? DedupePrefix { get; set; } = "attestor:dedupe:"; - } - - public sealed class S3Options - { - public bool Enabled { get; set; } - - public string? Endpoint { get; set; } - - public string? Bucket { get; set; } - - public string? Prefix { get; set; } - - public string? ObjectLockMode { get; set; } - - public bool UseTls { get; set; } = true; - } - - public sealed class QuotaOptions - { - public PerCallerQuotaOptions PerCaller { get; set; } = new(); - } - - public sealed class PerCallerQuotaOptions - { - public int Qps { get; set; } = 50; - - public int Burst { get; set; } = 100; - } - - public sealed class TelemetryOptions - { - public bool EnableLogging { get; set; } = true; - - public bool EnableTracing { get; set; } = false; - } -} +using System.Collections.Generic; +using StellaOps.Cryptography; + +namespace StellaOps.Attestor.Core.Options; + +/// +/// Strongly typed configuration for the Attestor service. 
+/// +public sealed class AttestorOptions +{ + public string Listen { get; set; } = "https://0.0.0.0:8444"; + + public SecurityOptions Security { get; set; } = new(); + + public RekorOptions Rekor { get; set; } = new(); + + public SigningOptions Signing { get; set; } = new(); + + public MongoOptions Mongo { get; set; } = new(); + + public RedisOptions Redis { get; set; } = new(); + + public S3Options S3 { get; set; } = new(); + + public QuotaOptions Quotas { get; set; } = new(); + + public BulkVerificationOptions BulkVerification { get; set; } = new(); + + public CacheOptions Cache { get; set; } = new(); + + public TelemetryOptions Telemetry { get; set; } = new(); + public TransparencyWitnessOptions TransparencyWitness { get; set; } = new(); + public VerificationOptions Verification { get; set; } = new(); + + + public sealed class SecurityOptions + { + public MtlsOptions Mtls { get; set; } = new(); + + public AuthorityOptions Authority { get; set; } = new(); + + public SignerIdentityOptions SignerIdentity { get; set; } = new(); + + public SubmissionLimitOptions SubmissionLimits { get; set; } = new(); + } + + public sealed class MtlsOptions + { + public bool RequireClientCertificate { get; set; } = true; + + public string? CaBundle { get; set; } + + public IList AllowedSubjects { get; set; } = new List(); + + public IList AllowedThumbprints { get; set; } = new List(); + } + + public sealed class AuthorityOptions + { + public string? Issuer { get; set; } + + public string? JwksUrl { get; set; } + + public string? 
RequireSenderConstraint { get; set; } + + public bool RequireHttpsMetadata { get; set; } = true; + + public IList Audiences { get; set; } = new List(); + + public IList RequiredScopes { get; set; } = new List(); + } + + public sealed class SignerIdentityOptions + { + public IList Mode { get; set; } = new List { "keyless", "kms" }; + + public IList FulcioRoots { get; set; } = new List(); + + public IList AllowedSans { get; set; } = new List(); + + public IList KmsKeys { get; set; } = new List(); + } + + public sealed class SubmissionLimitOptions + { + /// + /// Maximum allowed DSSE payload size, in bytes, after base64 decoding. + /// + public int MaxPayloadBytes { get; set; } = 2 * 1024 * 1024; + + /// + /// Maximum number of DSSE signatures accepted per submission. + /// + public int MaxSignatures { get; set; } = 6; + + /// + /// Maximum number of certificates allowed in the leaf-to-root chain. + /// + public int MaxCertificateChainEntries { get; set; } = 6; + } + + public sealed class RekorOptions + { + public RekorBackendOptions Primary { get; set; } = new(); + + public RekorMirrorOptions Mirror { get; set; } = new(); + } + + public class RekorBackendOptions + { + public string? Url { get; set; } + + public int ProofTimeoutMs { get; set; } = 15_000; + + public int PollIntervalMs { get; set; } = 250; + + public int MaxAttempts { get; set; } = 60; + } + + public sealed class RekorMirrorOptions : RekorBackendOptions + { + public bool Enabled { get; set; } + } + + public sealed class MongoOptions + { + public string? Uri { get; set; } + + public string Database { get; set; } = "attestor"; + + public string EntriesCollection { get; set; } = "entries"; + + public string DedupeCollection { get; set; } = "dedupe"; + + public string AuditCollection { get; set; } = "audit"; + + public string BulkJobsCollection { get; set; } = "bulk_jobs"; + } + + public sealed class RedisOptions + { + public string? Url { get; set; } + + public string? 
DedupePrefix { get; set; } = "attestor:dedupe:"; + } + + public sealed class S3Options + { + public bool Enabled { get; set; } + + public string? Endpoint { get; set; } + + public string? Bucket { get; set; } + + public string? Prefix { get; set; } + + public string? ObjectLockMode { get; set; } + + public bool UseTls { get; set; } = true; + } + + public sealed class QuotaOptions + { + public PerCallerQuotaOptions PerCaller { get; set; } = new(); + + public BulkVerificationQuotaOptions Bulk { get; set; } = new(); + } + + public sealed class PerCallerQuotaOptions + { + public int Qps { get; set; } = 50; + + public int Burst { get; set; } = 100; + } + + public sealed class BulkVerificationQuotaOptions + { + public int RequestsPerMinute { get; set; } = 6; + + public int MaxItemsPerJob { get; set; } = 100; + + public int MaxQueuedJobs { get; set; } = 20; + + public int MaxConcurrentJobs { get; set; } = 1; + } + + public sealed class CacheOptions + { + public VerificationCacheOptions Verification { get; set; } = new(); + } + + public sealed class VerificationCacheOptions + { + public bool Enabled { get; set; } = true; + + public int TtlSeconds { get; set; } = 300; + } + + public sealed class TelemetryOptions + { + public bool EnableLogging { get; set; } = true; + + public bool EnableTracing { get; set; } = false; + } + + public sealed class BulkVerificationOptions + { + public int WorkerPollSeconds { get; set; } = 1; + + public int ItemDelayMilliseconds { get; set; } = 10; + + public int MaxAttemptsPerItem { get; set; } = 1; + } + + public sealed class VerificationOptions + { + public string PolicyId { get; set; } = "default"; + + public string PolicyVersion { get; set; } = "1.0.0"; + + public int MinimumSignatures { get; set; } = 1; + + public int? FreshnessMaxAgeMinutes { get; set; } + + public int? 
FreshnessWarnAgeMinutes { get; set; } + + public bool RequireTransparencyInclusion { get; set; } = true; + + public bool RequireCheckpoint { get; set; } = true; + + public bool RequireBundleForSignatureValidation { get; set; } = false; + + public bool RequireWitnessEndorsement { get; set; } = false; + } + + public sealed class TransparencyWitnessOptions + { + public bool Enabled { get; set; } + + public string? BaseUrl { get; set; } + + public string? ApiKey { get; set; } + + public int RequestTimeoutMs { get; set; } = 15_000; + + public int CacheTtlSeconds { get; set; } = 900; + + public string? AggregatorId { get; set; } + } + + public sealed class SigningOptions + { + public IList PreferredProviders { get; set; } = new List(); + + public IList Keys { get; set; } = new List(); + + public SigningKmsOptions? Kms { get; set; } + } + + public sealed class SigningKmsOptions + { + public bool Enabled { get; set; } = true; + + public string? RootPath { get; set; } + + public string? Password { get; set; } + + public string Algorithm { get; set; } = "ES256K"; + + public int? KeyDerivationIterations { get; set; } + } + + public sealed class SigningKeyOptions + { + public bool Enabled { get; set; } = true; + + public string KeyId { get; set; } = string.Empty; + + public string? ProviderKeyId { get; set; } + + public string? Provider { get; set; } + + public string? Mode { get; set; } + + public string? Algorithm { get; set; } + + public string? MaterialFormat { get; set; } + + public string? Material { get; set; } + + public string? MaterialPath { get; set; } + + public string? MaterialPassphrase { get; set; } + + public string? KmsKey { get; set; } + + public string? 
KmsVersionId { get; set; } + + public IList CertificateChain { get; set; } = new List(); + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/AttestationSignRequest.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/AttestationSignRequest.cs new file mode 100644 index 00000000..e2efc445 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/AttestationSignRequest.cs @@ -0,0 +1,50 @@ +using System.Collections.Generic; +using StellaOps.Attestor.Core.Submission; + +namespace StellaOps.Attestor.Core.Signing; + +/// +/// Input contract for attestation signing requests. +/// +public sealed class AttestationSignRequest +{ + /// + /// Identifier of the signing key to use. + /// + public string KeyId { get; set; } = string.Empty; + + /// + /// DSSE payload type (MIME). + /// + public string PayloadType { get; set; } = string.Empty; + + /// + /// Base64 encoded payload. + /// + public string PayloadBase64 { get; set; } = string.Empty; + + /// + /// Optional signing mode override (e.g. keyless, kms). + /// + public string? Mode { get; set; } + + /// + /// Optional certificate chain for keyless signatures. + /// + public IList CertificateChain { get; set; } = new List(); + + /// + /// Artifact metadata that will be embedded in the submission meta. + /// + public AttestorSubmissionRequest.ArtifactInfo Artifact { get; set; } = new(); + + /// + /// Preferred transparency log backend ("primary", "mirror", "both"). + /// + public string LogPreference { get; set; } = "primary"; + + /// + /// Whether the resulting bundle should be archived. 
+ /// + public bool Archive { get; set; } = true; +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/AttestationSignResult.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/AttestationSignResult.cs new file mode 100644 index 00000000..4e8a0ab5 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/AttestationSignResult.cs @@ -0,0 +1,24 @@ +using System; +using StellaOps.Attestor.Core.Submission; + +namespace StellaOps.Attestor.Core.Signing; + +/// +/// Represents the signed DSSE bundle ready for Rekor submission. +/// +public sealed class AttestationSignResult +{ + public AttestorSubmissionRequest.SubmissionBundle Bundle { get; init; } = new(); + + public AttestorSubmissionRequest.SubmissionMeta Meta { get; init; } = new(); + + public string KeyId { get; init; } = string.Empty; + + public string Algorithm { get; init; } = string.Empty; + + public string Mode { get; init; } = string.Empty; + + public string Provider { get; init; } = string.Empty; + + public DateTimeOffset SignedAt { get; init; } = DateTimeOffset.UtcNow; +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/AttestorSigningException.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/AttestorSigningException.cs new file mode 100644 index 00000000..b7f14879 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/AttestorSigningException.cs @@ -0,0 +1,20 @@ +using System; + +namespace StellaOps.Attestor.Core.Signing; + +public sealed class AttestorSigningException : Exception +{ + public AttestorSigningException(string code, string message) + : base(message) + { + Code = string.IsNullOrWhiteSpace(code) ? "signing_error" : code; + } + + public AttestorSigningException(string code, string message, Exception innerException) + : base(message, innerException) + { + Code = string.IsNullOrWhiteSpace(code) ? 
"signing_error" : code; + } + + public string Code { get; } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/DssePreAuthenticationEncoding.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/DssePreAuthenticationEncoding.cs new file mode 100644 index 00000000..cf15163e --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/DssePreAuthenticationEncoding.cs @@ -0,0 +1,36 @@ +using System; +using System.Buffers.Binary; +using System.Text; + +namespace StellaOps.Attestor.Core.Signing; + +/// +/// Computes DSSE pre-authentication encoding (PAE) for payload signing. +/// +public static class DssePreAuthenticationEncoding +{ + private static readonly byte[] Prefix = Encoding.ASCII.GetBytes("DSSEv1"); + + public static byte[] Compute(string payloadType, ReadOnlySpan payload) + { + var header = Encoding.UTF8.GetBytes(payloadType ?? string.Empty); + var buffer = new byte[Prefix.Length + sizeof(long) + header.Length + sizeof(long) + payload.Length]; + var offset = 0; + + Prefix.CopyTo(buffer, offset); + offset += Prefix.Length; + + BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, sizeof(long)), (ulong)header.Length); + offset += sizeof(long); + + header.CopyTo(buffer, offset); + offset += header.Length; + + BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, sizeof(long)), (ulong)payload.Length); + offset += sizeof(long); + + payload.CopyTo(buffer.AsSpan(offset)); + + return buffer; + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/IAttestationSigningService.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/IAttestationSigningService.cs new file mode 100644 index 00000000..afbd6b8f --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Signing/IAttestationSigningService.cs @@ -0,0 +1,13 @@ +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Attestor.Core.Submission; + +namespace 
StellaOps.Attestor.Core.Signing; + +public interface IAttestationSigningService +{ + Task SignAsync( + AttestationSignRequest request, + SubmissionContext context, + CancellationToken cancellationToken = default); +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/StellaOps.Attestor.Core.csproj b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/StellaOps.Attestor.Core.csproj index 2b6aadf4..825e4013 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/StellaOps.Attestor.Core.csproj +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/StellaOps.Attestor.Core.csproj @@ -1,9 +1,13 @@ - - - net10.0 - preview - enable - enable - true - - + + + net10.0 + preview + enable + enable + true + + + + + + diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntry.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntry.cs index 9ec4b0e3..3fbe8730 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntry.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntry.cs @@ -1,105 +1,128 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Attestor.Core.Storage; - -/// -/// Canonical representation of a Rekor entry persisted in Mongo. -/// -public sealed class AttestorEntry -{ - public string RekorUuid { get; init; } = string.Empty; - - public ArtifactDescriptor Artifact { get; init; } = new(); - - public string BundleSha256 { get; init; } = string.Empty; - - public long? Index { get; init; } - - public ProofDescriptor? Proof { get; init; } - - public LogDescriptor Log { get; init; } = new(); - - public DateTimeOffset CreatedAt { get; init; } - - public string Status { get; init; } = "pending"; - - public SignerIdentityDescriptor SignerIdentity { get; init; } = new(); - - public LogReplicaDescriptor? 
Mirror { get; init; } - - public sealed class ArtifactDescriptor - { - public string Sha256 { get; init; } = string.Empty; - - public string Kind { get; init; } = string.Empty; - - public string? ImageDigest { get; init; } - - public string? SubjectUri { get; init; } - } - - public sealed class ProofDescriptor - { - public CheckpointDescriptor? Checkpoint { get; init; } - - public InclusionDescriptor? Inclusion { get; init; } - } - - public sealed class CheckpointDescriptor - { - public string? Origin { get; init; } - - public long Size { get; init; } - - public string? RootHash { get; init; } - - public DateTimeOffset? Timestamp { get; init; } - } - - public sealed class InclusionDescriptor - { - public string? LeafHash { get; init; } - - public IReadOnlyList Path { get; init; } = Array.Empty(); - } - - public sealed class LogDescriptor - { - public string Backend { get; init; } = "primary"; - - public string Url { get; init; } = string.Empty; - - public string? LogId { get; init; } - } - - public sealed class SignerIdentityDescriptor - { - public string Mode { get; init; } = string.Empty; - - public string? Issuer { get; init; } - - public string? SubjectAlternativeName { get; init; } - - public string? KeyId { get; init; } - } - - public sealed class LogReplicaDescriptor - { - public string Backend { get; init; } = string.Empty; - - public string Url { get; init; } = string.Empty; - - public string? Uuid { get; init; } - - public long? Index { get; init; } - - public string Status { get; init; } = "pending"; - - public ProofDescriptor? Proof { get; init; } - - public string? LogId { get; init; } - - public string? Error { get; init; } - } -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Attestor.Core.Storage; + +/// +/// Canonical representation of a Rekor entry persisted in Mongo. 
+/// +public sealed class AttestorEntry +{ + public string RekorUuid { get; init; } = string.Empty; + + public ArtifactDescriptor Artifact { get; init; } = new(); + + public string BundleSha256 { get; init; } = string.Empty; + + public long? Index { get; init; } + + public ProofDescriptor? Proof { get; init; } + + public WitnessDescriptor? Witness { get; init; } + + public LogDescriptor Log { get; init; } = new(); + + public DateTimeOffset CreatedAt { get; init; } + + public string Status { get; init; } = "pending"; + + public SignerIdentityDescriptor SignerIdentity { get; init; } = new(); + + public LogReplicaDescriptor? Mirror { get; init; } + + public sealed class ArtifactDescriptor + { + public string Sha256 { get; init; } = string.Empty; + + public string Kind { get; init; } = string.Empty; + + public string? ImageDigest { get; init; } + + public string? SubjectUri { get; init; } + } + + public sealed class ProofDescriptor + { + public CheckpointDescriptor? Checkpoint { get; init; } + + public InclusionDescriptor? Inclusion { get; init; } + } + + public sealed class WitnessDescriptor + { + public string Aggregator { get; init; } = string.Empty; + + public string Status { get; init; } = "unknown"; + + public string? RootHash { get; init; } + + public DateTimeOffset RetrievedAt { get; init; } + + public string? Statement { get; init; } + + public string? Signature { get; init; } + + public string? KeyId { get; init; } + + public string? Error { get; init; } + } + + public sealed class CheckpointDescriptor + { + public string? Origin { get; init; } + + public long Size { get; init; } + + public string? RootHash { get; init; } + + public DateTimeOffset? Timestamp { get; init; } + } + + public sealed class InclusionDescriptor + { + public string? 
LeafHash { get; init; } + + public IReadOnlyList Path { get; init; } = Array.Empty(); + } + + public sealed class LogDescriptor + { + public string Backend { get; init; } = "primary"; + + public string Url { get; init; } = string.Empty; + + public string? LogId { get; init; } + } + + public sealed class SignerIdentityDescriptor + { + public string Mode { get; init; } = string.Empty; + + public string? Issuer { get; init; } + + public string? SubjectAlternativeName { get; init; } + + public string? KeyId { get; init; } + } + + public sealed class LogReplicaDescriptor + { + public string Backend { get; init; } = string.Empty; + + public string Url { get; init; } = string.Empty; + + public string? Uuid { get; init; } + + public long? Index { get; init; } + + public string Status { get; init; } = "pending"; + + public ProofDescriptor? Proof { get; init; } + + public string? LogId { get; init; } + + public string? Error { get; init; } + + public WitnessDescriptor? Witness { get; init; } + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntryContinuationToken.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntryContinuationToken.cs new file mode 100644 index 00000000..9b9de69d --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntryContinuationToken.cs @@ -0,0 +1,83 @@ +using System; +using System.Globalization; +using System.Text; + +namespace StellaOps.Attestor.Core.Storage; + +/// +/// Encodes and decodes pagination state for attestor entry listings. 
+/// +public static class AttestorEntryContinuationToken +{ + private const char Separator = '|'; + + public readonly record struct Cursor(DateTimeOffset CreatedAt, string RekorUuid); + + public static string Encode(DateTimeOffset createdAt, string rekorUuid) + { + ArgumentException.ThrowIfNullOrEmpty(rekorUuid); + + var ticksText = createdAt.UtcTicks.ToString(CultureInfo.InvariantCulture); + var payload = string.Concat(ticksText, Separator, rekorUuid); + return Convert.ToBase64String(Encoding.UTF8.GetBytes(payload)); + } + + public static Cursor Parse(string token) + { + if (!TryParse(token, out var cursor)) + { + throw new FormatException("Invalid attestor continuation token."); + } + + return cursor; + } + + public static bool TryParse(string? token, out Cursor cursor) + { + cursor = default; + if (string.IsNullOrWhiteSpace(token)) + { + return false; + } + + byte[] data; + try + { + data = Convert.FromBase64String(token); + } + catch (FormatException) + { + return false; + } + + var decoded = Encoding.UTF8.GetString(data); + var separatorIndex = decoded.IndexOf(Separator, StringComparison.Ordinal); + if (separatorIndex <= 0 || separatorIndex == decoded.Length - 1) + { + return false; + } + + var ticksSpan = decoded.AsSpan(0, separatorIndex); + if (!long.TryParse(ticksSpan, NumberStyles.Integer, CultureInfo.InvariantCulture, out var ticks)) + { + return false; + } + + var uuid = decoded[(separatorIndex + 1)..]; + if (string.IsNullOrEmpty(uuid)) + { + return false; + } + + try + { + var createdAt = new DateTimeOffset(ticks, TimeSpan.Zero); + cursor = new Cursor(createdAt, uuid); + return true; + } + catch (ArgumentOutOfRangeException) + { + return false; + } + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntryQuery.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntryQuery.cs new file mode 100644 index 00000000..c5e80fcf --- /dev/null +++ 
b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntryQuery.cs @@ -0,0 +1,36 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Attestor.Core.Storage; + +/// +/// Query parameters for listing attestor entries. +/// +public sealed class AttestorEntryQuery +{ + public string? Subject { get; init; } + + public string? Type { get; init; } + + public string? Issuer { get; init; } + + public string? Scope { get; init; } + + public DateTimeOffset? CreatedAfter { get; init; } + + public DateTimeOffset? CreatedBefore { get; init; } + + public int PageSize { get; init; } = 50; + + public string? ContinuationToken { get; init; } +} + +/// +/// Represents a paginated page of attestor entries. +/// +public sealed class AttestorEntryQueryResult +{ + public IReadOnlyList Items { get; init; } = Array.Empty(); + + public string? ContinuationToken { get; init; } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorArchiveStore.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorArchiveStore.cs index d3e670f6..afe9117e 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorArchiveStore.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorArchiveStore.cs @@ -3,7 +3,9 @@ using System.Threading.Tasks; namespace StellaOps.Attestor.Core.Storage; -public interface IAttestorArchiveStore -{ - Task ArchiveBundleAsync(AttestorArchiveBundle bundle, CancellationToken cancellationToken = default); -} +public interface IAttestorArchiveStore +{ + Task ArchiveBundleAsync(AttestorArchiveBundle bundle, CancellationToken cancellationToken = default); + + Task GetBundleAsync(string bundleSha256, string rekorUuid, CancellationToken cancellationToken = default); +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorEntryRepository.cs 
b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorEntryRepository.cs index 64e09494..b3c402c1 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorEntryRepository.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorEntryRepository.cs @@ -8,9 +8,11 @@ public interface IAttestorEntryRepository { Task<AttestorEntry?> GetByBundleShaAsync(string bundleSha256, CancellationToken cancellationToken = default); - Task<AttestorEntry?> GetByUuidAsync(string rekorUuid, CancellationToken cancellationToken = default); - - Task<IReadOnlyList<AttestorEntry>> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default); - - Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default); -} + Task<AttestorEntry?> GetByUuidAsync(string rekorUuid, CancellationToken cancellationToken = default); + + Task<IReadOnlyList<AttestorEntry>> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default); + + Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default); + + Task<AttestorEntryQueryResult> QueryAsync(AttestorEntryQuery query, CancellationToken cancellationToken = default); +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionResult.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionResult.cs index 89fba827..293b6f62 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionResult.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionResult.cs @@ -1,83 +1,116 @@ -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace StellaOps.Attestor.Core.Submission; - -/// <summary> -/// Result returned to callers after processing a submission. -/// </summary> -public sealed class AttestorSubmissionResult -{ - [JsonPropertyName("uuid")] - public string? Uuid { get; set; } - - [JsonPropertyName("index")] - public long?
Index { get; set; } - - [JsonPropertyName("proof")] - public RekorProof? Proof { get; set; } - - [JsonPropertyName("logURL")] - public string? LogUrl { get; set; } - - [JsonPropertyName("status")] - public string Status { get; set; } = "pending"; - - [JsonPropertyName("mirror")] - public MirrorLog? Mirror { get; set; } - - public sealed class RekorProof - { - [JsonPropertyName("checkpoint")] - public Checkpoint? Checkpoint { get; set; } - - [JsonPropertyName("inclusion")] - public InclusionProof? Inclusion { get; set; } - } - - public sealed class Checkpoint - { - [JsonPropertyName("origin")] - public string? Origin { get; set; } - - [JsonPropertyName("size")] - public long Size { get; set; } - - [JsonPropertyName("rootHash")] - public string? RootHash { get; set; } - - [JsonPropertyName("timestamp")] - public string? Timestamp { get; set; } - } - - public sealed class InclusionProof - { - [JsonPropertyName("leafHash")] - public string? LeafHash { get; set; } - - [JsonPropertyName("path")] - public IReadOnlyList Path { get; init; } = Array.Empty(); - } - - public sealed class MirrorLog - { - [JsonPropertyName("uuid")] - public string? Uuid { get; set; } - - [JsonPropertyName("index")] - public long? Index { get; set; } - - [JsonPropertyName("logURL")] - public string? LogUrl { get; set; } - - [JsonPropertyName("status")] - public string Status { get; set; } = "pending"; - - [JsonPropertyName("proof")] - public RekorProof? Proof { get; set; } - - [JsonPropertyName("error")] - public string? Error { get; set; } - } -} +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Attestor.Core.Submission; + +/// +/// Result returned to callers after processing a submission. +/// +public sealed class AttestorSubmissionResult +{ + [JsonPropertyName("uuid")] + public string? Uuid { get; set; } + + [JsonPropertyName("index")] + public long? Index { get; set; } + + [JsonPropertyName("proof")] + public RekorProof? 
Proof { get; set; } + + [JsonPropertyName("logURL")] + public string? LogUrl { get; set; } + + [JsonPropertyName("status")] + public string Status { get; set; } = "pending"; + + [JsonPropertyName("mirror")] + public MirrorLog? Mirror { get; set; } + + [JsonPropertyName("witness")] + public WitnessStatement? Witness { get; set; } + + public sealed class RekorProof + { + [JsonPropertyName("checkpoint")] + public Checkpoint? Checkpoint { get; set; } + + [JsonPropertyName("inclusion")] + public InclusionProof? Inclusion { get; set; } + } + + public sealed class Checkpoint + { + [JsonPropertyName("origin")] + public string? Origin { get; set; } + + [JsonPropertyName("size")] + public long Size { get; set; } + + [JsonPropertyName("rootHash")] + public string? RootHash { get; set; } + + [JsonPropertyName("timestamp")] + public string? Timestamp { get; set; } + } + + public sealed class InclusionProof + { + [JsonPropertyName("leafHash")] + public string? LeafHash { get; set; } + + [JsonPropertyName("path")] + public IReadOnlyList Path { get; init; } = Array.Empty(); + } + + public sealed class MirrorLog + { + [JsonPropertyName("uuid")] + public string? Uuid { get; set; } + + [JsonPropertyName("index")] + public long? Index { get; set; } + + [JsonPropertyName("logURL")] + public string? LogUrl { get; set; } + + [JsonPropertyName("status")] + public string Status { get; set; } = "pending"; + + [JsonPropertyName("proof")] + public RekorProof? Proof { get; set; } + + [JsonPropertyName("error")] + public string? Error { get; set; } + + [JsonPropertyName("witness")] + public WitnessStatement? Witness { get; set; } + } + + public sealed class WitnessStatement + { + [JsonPropertyName("aggregator")] + public string? Aggregator { get; set; } + + [JsonPropertyName("status")] + public string Status { get; set; } = "unknown"; + + [JsonPropertyName("rootHash")] + public string? RootHash { get; set; } + + [JsonPropertyName("retrievedAt")] + public string? 
RetrievedAt { get; set; } + + [JsonPropertyName("statement")] + public string? Statement { get; set; } + + [JsonPropertyName("signature")] + public string? Signature { get; set; } + + [JsonPropertyName("keyId")] + public string? KeyId { get; set; } + + [JsonPropertyName("error")] + public string? Error { get; set; } + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidator.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidator.cs index 71ce511f..c0ff7da5 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidator.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidator.cs @@ -12,19 +12,24 @@ public sealed class AttestorSubmissionValidator private static readonly string[] AllowedKinds = ["sbom", "report", "vex-export"]; private readonly IDsseCanonicalizer _canonicalizer; - private readonly HashSet _allowedModes; - - public AttestorSubmissionValidator(IDsseCanonicalizer canonicalizer, IEnumerable? allowedModes = null) - { - _canonicalizer = canonicalizer ?? throw new ArgumentNullException(nameof(canonicalizer)); - _allowedModes = allowedModes is null - ? new HashSet(StringComparer.OrdinalIgnoreCase) - : new HashSet(allowedModes, StringComparer.OrdinalIgnoreCase); - } - - public async Task ValidateAsync(AttestorSubmissionRequest request, CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); + private readonly HashSet _allowedModes; + private readonly AttestorSubmissionConstraints _constraints; + + public AttestorSubmissionValidator( + IDsseCanonicalizer canonicalizer, + IEnumerable? allowedModes = null, + AttestorSubmissionConstraints? constraints = null) + { + _canonicalizer = canonicalizer ?? throw new ArgumentNullException(nameof(canonicalizer)); + _allowedModes = allowedModes is null + ? 
new HashSet(StringComparer.OrdinalIgnoreCase) + : new HashSet(allowedModes, StringComparer.OrdinalIgnoreCase); + _constraints = constraints ?? AttestorSubmissionConstraints.Default; + } + + public async Task ValidateAsync(AttestorSubmissionRequest request, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); if (request.Bundle is null) { @@ -42,20 +47,25 @@ public sealed class AttestorSubmissionValidator } if (string.IsNullOrWhiteSpace(request.Bundle.Dsse.PayloadBase64)) - { - throw new AttestorValidationException("payload_missing", "DSSE payload must be provided."); - } - - if (request.Bundle.Dsse.Signatures.Count == 0) - { - throw new AttestorValidationException("signature_missing", "At least one DSSE signature is required."); - } - - if (_allowedModes.Count > 0 && !string.IsNullOrWhiteSpace(request.Bundle.Mode) && !_allowedModes.Contains(request.Bundle.Mode)) - { - throw new AttestorValidationException("mode_not_allowed", $"Submission mode '{request.Bundle.Mode}' is not permitted."); - } - + { + throw new AttestorValidationException("payload_missing", "DSSE payload must be provided."); + } + + if (request.Bundle.Dsse.Signatures.Count == 0) + { + throw new AttestorValidationException("signature_missing", "At least one DSSE signature is required."); + } + + if (request.Bundle.Dsse.Signatures.Count > _constraints.MaxSignatures) + { + throw new AttestorValidationException("signature_limit_exceeded", $"A maximum of {_constraints.MaxSignatures} DSSE signatures is permitted per submission."); + } + + if (_allowedModes.Count > 0 && !string.IsNullOrWhiteSpace(request.Bundle.Mode) && !_allowedModes.Contains(request.Bundle.Mode)) + { + throw new AttestorValidationException("mode_not_allowed", $"Submission mode '{request.Bundle.Mode}' is not permitted."); + } + if (request.Meta is null) { throw new AttestorValidationException("meta_missing", "Submission metadata is required."); @@ -86,21 +96,31 @@ public sealed class 
AttestorSubmissionValidator throw new AttestorValidationException("bundle_sha_invalid", "bundleSha256 must be a 64-character hex string."); } - if (Array.IndexOf(AllowedKinds, request.Meta.Artifact.Kind) < 0) - { - throw new AttestorValidationException("artifact_kind_invalid", $"Artifact kind '{request.Meta.Artifact.Kind}' is not supported."); - } - - if (!Base64UrlDecode(request.Bundle.Dsse.PayloadBase64, out _)) - { - throw new AttestorValidationException("payload_invalid_base64", "DSSE payload must be valid base64."); - } - - var canonical = await _canonicalizer.CanonicalizeAsync(request, cancellationToken).ConfigureAwait(false); - Span hash = stackalloc byte[32]; - if (!SHA256.TryHashData(canonical, hash, out _)) - { - throw new AttestorValidationException("bundle_sha_failure", "Failed to compute canonical bundle hash."); + if (Array.IndexOf(AllowedKinds, request.Meta.Artifact.Kind) < 0) + { + throw new AttestorValidationException("artifact_kind_invalid", $"Artifact kind '{request.Meta.Artifact.Kind}' is not supported."); + } + + if (request.Bundle.CertificateChain.Count > _constraints.MaxCertificateChainEntries) + { + throw new AttestorValidationException("certificate_chain_too_long", $"Certificate chain length exceeds {_constraints.MaxCertificateChainEntries} entries."); + } + + if (!Base64UrlDecode(request.Bundle.Dsse.PayloadBase64, out var payloadBytes)) + { + throw new AttestorValidationException("payload_invalid_base64", "DSSE payload must be valid base64."); + } + + if (payloadBytes.Length > _constraints.MaxPayloadBytes) + { + throw new AttestorValidationException("payload_too_large", $"DSSE payload exceeds {_constraints.MaxPayloadBytes} bytes limit."); + } + + var canonical = await _canonicalizer.CanonicalizeAsync(request, cancellationToken).ConfigureAwait(false); + Span hash = stackalloc byte[32]; + if (!SHA256.TryHashData(canonical, hash, out _)) + { + throw new AttestorValidationException("bundle_sha_failure", "Failed to compute canonical bundle 
hash."); } var hashHex = Convert.ToHexString(hash).ToLowerInvariant(); @@ -172,5 +192,41 @@ public sealed class AttestorSubmissionValidator } return value; - } -} + } +} + +public sealed class AttestorSubmissionConstraints +{ + public static AttestorSubmissionConstraints Default { get; } = new(); + + public AttestorSubmissionConstraints( + int maxPayloadBytes = 2 * 1024 * 1024, + int maxSignatures = 6, + int maxCertificateChainEntries = 6) + { + if (maxPayloadBytes <= 0) + { + throw new ArgumentOutOfRangeException(nameof(maxPayloadBytes), "Max payload bytes must be positive."); + } + + if (maxSignatures <= 0) + { + throw new ArgumentOutOfRangeException(nameof(maxSignatures), "Max signatures must be positive."); + } + + if (maxCertificateChainEntries <= 0) + { + throw new ArgumentOutOfRangeException(nameof(maxCertificateChainEntries), "Max certificate chain entries must be positive."); + } + + MaxPayloadBytes = maxPayloadBytes; + MaxSignatures = maxSignatures; + MaxCertificateChainEntries = maxCertificateChainEntries; + } + + public int MaxPayloadBytes { get; } + + public int MaxSignatures { get; } + + public int MaxCertificateChainEntries { get; } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Transparency/ITransparencyWitnessClient.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Transparency/ITransparencyWitnessClient.cs new file mode 100644 index 00000000..08d809d3 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Transparency/ITransparencyWitnessClient.cs @@ -0,0 +1,9 @@ +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Attestor.Core.Transparency; + +public interface ITransparencyWitnessClient +{ + Task GetObservationAsync(TransparencyWitnessRequest request, CancellationToken cancellationToken = default); +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Transparency/TransparencyWitnessObservation.cs 
b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Transparency/TransparencyWitnessObservation.cs new file mode 100644 index 00000000..4aaf147b --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Transparency/TransparencyWitnessObservation.cs @@ -0,0 +1,22 @@ +using System; + +namespace StellaOps.Attestor.Core.Transparency; + +public sealed class TransparencyWitnessObservation +{ + public string Aggregator { get; init; } = string.Empty; + + public string Status { get; init; } = "unknown"; + + public string? RootHash { get; init; } + + public DateTimeOffset RetrievedAt { get; init; } = DateTimeOffset.UtcNow; + + public string? Statement { get; init; } + + public string? Signature { get; init; } + + public string? KeyId { get; init; } + + public string? Error { get; init; } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Transparency/TransparencyWitnessRequest.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Transparency/TransparencyWitnessRequest.cs new file mode 100644 index 00000000..83b1bb87 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Transparency/TransparencyWitnessRequest.cs @@ -0,0 +1,9 @@ +using System; + +namespace StellaOps.Attestor.Core.Transparency; + +public sealed record TransparencyWitnessRequest( + string Uuid, + string Backend, + Uri BackendUrl, + string? 
CheckpointRootHash); diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationRequest.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationRequest.cs index 2845196c..6b200dba 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationRequest.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationRequest.cs @@ -5,11 +5,23 @@ namespace StellaOps.Attestor.Core.Verification; /// public sealed class AttestorVerificationRequest { - public string? Uuid { get; set; } - - public Submission.AttestorSubmissionRequest.SubmissionBundle? Bundle { get; set; } - - public string? ArtifactSha256 { get; set; } - - public bool RefreshProof { get; set; } -} + public string? Uuid { get; set; } + + public Submission.AttestorSubmissionRequest.SubmissionBundle? Bundle { get; set; } + + public string? ArtifactSha256 { get; set; } + + public string? Subject { get; set; } + + public string? EnvelopeId { get; set; } + + public string? PolicyVersion { get; set; } + + public bool RefreshProof { get; set; } + + /// + /// When true, verification does not attempt to contact external transparency logs and + /// surfaces issues for missing proofs instead. + /// + public bool Offline { get; set; } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationResult.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationResult.cs index b9c95025..0e092fd0 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationResult.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationResult.cs @@ -11,11 +11,13 @@ public sealed class AttestorVerificationResult public long? Index { get; init; } - public string? 
LogUrl { get; init; } - - public DateTimeOffset CheckedAt { get; init; } = DateTimeOffset.UtcNow; - - public string Status { get; init; } = "unknown"; - - public IReadOnlyList<string> Issues { get; init; } = Array.Empty<string>(); -} + public string? LogUrl { get; init; } + + public DateTimeOffset CheckedAt { get; init; } = DateTimeOffset.UtcNow; + + public string Status { get; init; } = "unknown"; + + public IReadOnlyList<string> Issues { get; init; } = Array.Empty<string>(); + + public VerificationReport? Report { get; init; } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/IAttestorVerificationCache.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/IAttestorVerificationCache.cs new file mode 100644 index 00000000..a8b50743 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/IAttestorVerificationCache.cs @@ -0,0 +1,13 @@ +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Attestor.Core.Verification; + +public interface IAttestorVerificationCache +{ + Task<AttestorVerificationResult?> GetAsync(string subject, string envelopeId, string policyVersion, CancellationToken cancellationToken = default); + + Task SetAsync(string subject, string envelopeId, string policyVersion, AttestorVerificationResult result, CancellationToken cancellationToken = default); + + Task InvalidateSubjectAsync(string subject, CancellationToken cancellationToken = default); +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/VerificationReport.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/VerificationReport.cs new file mode 100644 index 00000000..711f5120 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/VerificationReport.cs @@ -0,0 +1,185 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; + +namespace StellaOps.Attestor.Core.Verification; + +public sealed class
PolicyEvaluationResult +{ + public VerificationSectionStatus Status { get; init; } = VerificationSectionStatus.Skipped; + + public string PolicyId { get; init; } = "default"; + + public string PolicyVersion { get; init; } = "0.0.0"; + + public string Verdict { get; init; } = "unknown"; + + public IReadOnlyList Issues { get; init; } = Array.Empty(); + + public IReadOnlyDictionary Attributes { get; init; } = + ImmutableDictionary.Empty; +} + +public sealed class IssuerEvaluationResult +{ + public VerificationSectionStatus Status { get; init; } = VerificationSectionStatus.Skipped; + + public string Mode { get; init; } = "unknown"; + + public string? Issuer { get; init; } + + public string? SubjectAlternativeName { get; init; } + + public string? KeyId { get; init; } + + public IReadOnlyList Issues { get; init; } = Array.Empty(); +} + +public sealed class FreshnessEvaluationResult +{ + public VerificationSectionStatus Status { get; init; } = VerificationSectionStatus.Skipped; + + public DateTimeOffset CreatedAt { get; init; } + + public DateTimeOffset EvaluatedAt { get; init; } + + public TimeSpan Age { get; init; } + + public TimeSpan? 
MaxAge { get; init; } + + public IReadOnlyList Issues { get; init; } = Array.Empty(); +} + +public sealed class SignatureEvaluationResult +{ + public VerificationSectionStatus Status { get; init; } = VerificationSectionStatus.Skipped; + + public bool BundleProvided { get; init; } + + public int TotalSignatures { get; init; } + + public int VerifiedSignatures { get; init; } + + public int RequiredSignatures { get; init; } + + public IReadOnlyList Issues { get; init; } = Array.Empty(); +} + +public sealed class TransparencyEvaluationResult +{ + public VerificationSectionStatus Status { get; init; } = VerificationSectionStatus.Skipped; + + public bool ProofPresent { get; init; } + + public bool CheckpointPresent { get; init; } + + public bool InclusionPathPresent { get; init; } + + public bool WitnessPresent { get; init; } + + public bool WitnessMatchesRoot { get; init; } + + public string? WitnessAggregator { get; init; } + + public string WitnessStatus { get; init; } = "missing"; + + public IReadOnlyList Issues { get; init; } = Array.Empty(); +} + +public sealed class VerificationReport +{ + public VerificationSectionStatus OverallStatus { get; } + + public PolicyEvaluationResult Policy { get; } + + public IssuerEvaluationResult Issuer { get; } + + public FreshnessEvaluationResult Freshness { get; } + + public SignatureEvaluationResult Signatures { get; } + + public TransparencyEvaluationResult Transparency { get; } + + public IReadOnlyList Issues { get; } + + public VerificationReport( + PolicyEvaluationResult policy, + IssuerEvaluationResult issuer, + FreshnessEvaluationResult freshness, + SignatureEvaluationResult signatures, + TransparencyEvaluationResult transparency) + { + Policy = policy ?? throw new ArgumentNullException(nameof(policy)); + Issuer = issuer ?? throw new ArgumentNullException(nameof(issuer)); + Freshness = freshness ?? throw new ArgumentNullException(nameof(freshness)); + Signatures = signatures ?? 
throw new ArgumentNullException(nameof(signatures)); + Transparency = transparency ?? throw new ArgumentNullException(nameof(transparency)); + + OverallStatus = DetermineOverallStatus(policy, issuer, freshness, signatures, transparency); + Issues = AggregateIssues(policy, issuer, freshness, signatures, transparency); + } + + public bool Succeeded => OverallStatus == VerificationSectionStatus.Pass || OverallStatus == VerificationSectionStatus.Warn; + + private static VerificationSectionStatus DetermineOverallStatus(params object[] sections) + { + var statuses = sections + .OfType() + .Select(section => section switch + { + PolicyEvaluationResult p => p.Status, + IssuerEvaluationResult i => i.Status, + FreshnessEvaluationResult f => f.Status, + SignatureEvaluationResult s => s.Status, + TransparencyEvaluationResult t => t.Status, + _ => VerificationSectionStatus.Skipped + }) + .ToArray(); + + if (statuses.Any(status => status == VerificationSectionStatus.Fail)) + { + return VerificationSectionStatus.Fail; + } + + if (statuses.Any(status => status == VerificationSectionStatus.Warn)) + { + return VerificationSectionStatus.Warn; + } + + if (statuses.All(status => status == VerificationSectionStatus.Skipped)) + { + return VerificationSectionStatus.Skipped; + } + + return VerificationSectionStatus.Pass; + } + + private static IReadOnlyList AggregateIssues(params object[] sections) + { + var set = new HashSet(StringComparer.OrdinalIgnoreCase); + + foreach (var section in sections) + { + var issues = section switch + { + PolicyEvaluationResult p => p.Issues, + IssuerEvaluationResult i => i.Issues, + FreshnessEvaluationResult f => f.Issues, + SignatureEvaluationResult s => s.Issues, + TransparencyEvaluationResult t => t.Issues, + _ => Array.Empty() + }; + + foreach (var issue in issues) + { + if (!string.IsNullOrWhiteSpace(issue)) + { + set.Add(issue); + } + } + } + + return set.ToArray(); + } +} diff --git 
a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/VerificationSectionStatus.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/VerificationSectionStatus.cs new file mode 100644 index 00000000..2f55e4f9 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/VerificationSectionStatus.cs @@ -0,0 +1,12 @@ +namespace StellaOps.Attestor.Core.Verification; + +/// +/// Represents the evaluation status of an individual verification section. +/// +public enum VerificationSectionStatus +{ + Pass, + Warn, + Fail, + Skipped +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Bulk/BulkVerificationWorker.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Bulk/BulkVerificationWorker.cs new file mode 100644 index 00000000..dfda49f1 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Bulk/BulkVerificationWorker.cs @@ -0,0 +1,240 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Bulk; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Observability; +using StellaOps.Attestor.Core.Verification; + +namespace StellaOps.Attestor.Infrastructure.Bulk; + +internal sealed class BulkVerificationWorker : BackgroundService +{ + private readonly IBulkVerificationJobStore _jobStore; + private readonly IAttestorVerificationService _verificationService; + private readonly AttestorMetrics _metrics; + private readonly AttestorOptions _options; + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + + public BulkVerificationWorker( + IBulkVerificationJobStore jobStore, + IAttestorVerificationService verificationService, + AttestorMetrics metrics, + IOptions options, + 
TimeProvider timeProvider, + ILogger logger) + { + _jobStore = jobStore ?? throw new ArgumentNullException(nameof(jobStore)); + _verificationService = verificationService ?? throw new ArgumentNullException(nameof(verificationService)); + _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics)); + _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + var pollDelay = TimeSpan.FromSeconds(Math.Max(1, _options.BulkVerification.WorkerPollSeconds)); + + while (!stoppingToken.IsCancellationRequested) + { + try + { + var job = await _jobStore.TryAcquireAsync(stoppingToken).ConfigureAwait(false); + if (job is null) + { + await Task.Delay(pollDelay, stoppingToken).ConfigureAwait(false); + continue; + } + + await ProcessJobAsync(job, stoppingToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + break; + } + catch (Exception ex) + { + _logger.LogError(ex, "Bulk verification worker loop failed."); + await Task.Delay(pollDelay, stoppingToken).ConfigureAwait(false); + } + } + } + + internal async Task ProcessJobAsync(BulkVerificationJob job, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(job); + + _logger.LogInformation("Processing bulk verification job {JobId} with {ItemCount} items.", job.Id, job.Items.Count); + + job.StartedAt ??= _timeProvider.GetUtcNow(); + if (!await PersistAsync(job, cancellationToken).ConfigureAwait(false)) + { + _logger.LogWarning("Failed to persist initial state for job {JobId}.", job.Id); + } + + var itemDelay = _options.BulkVerification.ItemDelayMilliseconds > 0 + ? 
TimeSpan.FromMilliseconds(_options.BulkVerification.ItemDelayMilliseconds) + : TimeSpan.Zero; + + foreach (var item in job.Items.OrderBy(i => i.Index)) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (item.Status is not BulkVerificationItemStatus.Pending) + { + continue; + } + + await ExecuteItemAsync(job, item, cancellationToken).ConfigureAwait(false); + + if (itemDelay > TimeSpan.Zero) + { + try + { + await Task.Delay(itemDelay, cancellationToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + break; + } + } + } + + job.CompletedAt = _timeProvider.GetUtcNow(); + job.Status = job.FailureReason is null ? BulkVerificationJobStatus.Completed : BulkVerificationJobStatus.Failed; + + if (!await PersistAsync(job, cancellationToken).ConfigureAwait(false)) + { + _logger.LogWarning("Failed to persist completion state for job {JobId}.", job.Id); + } + + var durationSeconds = (job.CompletedAt - job.StartedAt)?.TotalSeconds ?? 0; + var statusTag = job.Status == BulkVerificationJobStatus.Completed && job.FailedCount == 0 ? "succeeded" : + job.Status == BulkVerificationJobStatus.Completed ? "completed_with_failures" : "failed"; + + _metrics.BulkJobsTotal.Add(1, new KeyValuePair("status", statusTag)); + _metrics.BulkJobDuration.Record(durationSeconds, new KeyValuePair("status", statusTag)); + + _logger.LogInformation("Finished bulk verification job {JobId}. 
Ran {Processed} items (success: {Success}, failed: {Failed}).", + job.Id, job.ProcessedCount, job.SucceededCount, job.FailedCount); + } + + private async Task ExecuteItemAsync(BulkVerificationJob job, BulkVerificationJobItem item, CancellationToken cancellationToken) + { + item.Status = BulkVerificationItemStatus.Running; + item.StartedAt = _timeProvider.GetUtcNow(); + await PersistAsync(job, cancellationToken).ConfigureAwait(false); + + var statusTag = "failed"; + try + { + var request = new AttestorVerificationRequest + { + Uuid = item.Request.Uuid, + ArtifactSha256 = item.Request.ArtifactSha256, + Subject = item.Request.Subject, + EnvelopeId = item.Request.EnvelopeId, + PolicyVersion = item.Request.PolicyVersion, + RefreshProof = item.Request.RefreshProof + }; + + var result = await _verificationService.VerifyAsync(request, cancellationToken).ConfigureAwait(false); + item.Result = result; + item.CompletedAt = _timeProvider.GetUtcNow(); + item.Status = result.Ok ? BulkVerificationItemStatus.Succeeded : BulkVerificationItemStatus.Failed; + statusTag = item.Status == BulkVerificationItemStatus.Succeeded ? 
"succeeded" : "verification_failed"; + + job.ProcessedCount++; + if (item.Status == BulkVerificationItemStatus.Succeeded) + { + job.SucceededCount++; + } + else + { + job.FailedCount++; + } + } + catch (AttestorVerificationException verificationEx) + { + item.CompletedAt = _timeProvider.GetUtcNow(); + item.Status = BulkVerificationItemStatus.Failed; + item.Error = $"{verificationEx.Code}:{verificationEx.Message}"; + job.ProcessedCount++; + job.FailedCount++; + job.FailureReason ??= "item_failure"; + statusTag = "verification_error"; + } + catch (Exception ex) + { + item.CompletedAt = _timeProvider.GetUtcNow(); + item.Status = BulkVerificationItemStatus.Failed; + item.Error = ex.Message; + job.ProcessedCount++; + job.FailedCount++; + job.FailureReason ??= "worker_exception"; + _logger.LogError(ex, "Bulk verification item {ItemIndex} failed for job {JobId}.", item.Index, job.Id); + statusTag = "exception"; + } + + if (!await PersistAsync(job, cancellationToken).ConfigureAwait(false)) + { + _logger.LogWarning("Failed to persist progress for job {JobId} item {ItemIndex}.", job.Id, item.Index); + } + + _metrics.BulkItemsTotal.Add(1, new KeyValuePair("status", statusTag)); + } + + private async Task PersistAsync(BulkVerificationJob job, CancellationToken cancellationToken) + { + for (var attempt = 0; attempt < 3; attempt++) + { + if (await _jobStore.TryUpdateAsync(job, cancellationToken).ConfigureAwait(false)) + { + return true; + } + + var refreshed = await _jobStore.GetAsync(job.Id, cancellationToken).ConfigureAwait(false); + if (refreshed is null) + { + return false; + } + + Synchronize(job, refreshed); + } + + return false; + } + + private static void Synchronize(BulkVerificationJob target, BulkVerificationJob source) + { + target.Version = source.Version; + target.Status = source.Status; + target.CreatedAt = source.CreatedAt; + target.StartedAt = source.StartedAt; + target.CompletedAt = source.CompletedAt; + target.ProcessedCount = source.ProcessedCount; + 
target.SucceededCount = source.SucceededCount; + target.FailedCount = source.FailedCount; + target.FailureReason = source.FailureReason; + + var sourceItems = source.Items.ToDictionary(i => i.Index); + foreach (var item in target.Items) + { + if (sourceItems.TryGetValue(item.Index, out var updated)) + { + item.Status = updated.Status; + item.StartedAt = updated.StartedAt; + item.CompletedAt = updated.CompletedAt; + item.Result = updated.Result; + item.Error = updated.Error; + } + } + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Bulk/MongoBulkVerificationJobStore.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Bulk/MongoBulkVerificationJobStore.cs new file mode 100644 index 00000000..af322f14 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Bulk/MongoBulkVerificationJobStore.cs @@ -0,0 +1,343 @@ +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; +using MongoDB.Driver; +using StellaOps.Attestor.Core.Bulk; +using StellaOps.Attestor.Core.Verification; + +namespace StellaOps.Attestor.Infrastructure.Bulk; + +internal sealed class MongoBulkVerificationJobStore : IBulkVerificationJobStore +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); + + private readonly IMongoCollection _collection; + + public MongoBulkVerificationJobStore(IMongoCollection collection) + { + _collection = collection ?? 
throw new ArgumentNullException(nameof(collection)); + } + + public async Task CreateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(job); + + job.Version = 0; + var document = JobDocument.FromDomain(job, SerializerOptions); + await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + job.Version = document.Version; + return job; + } + + public async Task GetAsync(string jobId, CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(jobId)) + { + return null; + } + + var filter = Builders.Filter.Eq(doc => doc.Id, jobId); + var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document?.ToDomain(SerializerOptions); + } + + public async Task TryAcquireAsync(CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq(doc => doc.Status, BulkVerificationJobStatus.Queued); + var update = Builders.Update + .Set(doc => doc.Status, BulkVerificationJobStatus.Running) + .Set(doc => doc.StartedAt, DateTimeOffset.UtcNow.UtcDateTime) + .Inc(doc => doc.Version, 1); + + var options = new FindOneAndUpdateOptions + { + Sort = Builders.Sort.Ascending(doc => doc.CreatedAt), + ReturnDocument = ReturnDocument.After + }; + + var document = await _collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false); + return document?.ToDomain(SerializerOptions); + } + + public async Task TryUpdateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(job); + + var currentVersion = job.Version; + var replacement = JobDocument.FromDomain(job, SerializerOptions); + replacement.Version = currentVersion + 1; + + var filter = Builders.Filter.Where(doc => doc.Id == job.Id && doc.Version == currentVersion); + var result = await _collection.ReplaceOneAsync(filter, replacement, 
cancellationToken: cancellationToken).ConfigureAwait(false); + + if (result.ModifiedCount == 0) + { + return false; + } + + job.Version = replacement.Version; + return true; + } + + public async Task CountQueuedAsync(CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq(doc => doc.Status, BulkVerificationJobStatus.Queued); + var count = await _collection.CountDocumentsAsync(filter, cancellationToken: cancellationToken).ConfigureAwait(false); + return Convert.ToInt32(count); + } + + internal sealed class JobDocument + { + [BsonId] + [BsonElement("_id")] + public string Id { get; set; } = string.Empty; + + [BsonElement("version")] + public int Version { get; set; } + + [BsonElement("status")] + [BsonRepresentation(BsonType.String)] + public BulkVerificationJobStatus Status { get; set; } + + [BsonElement("createdAt")] + public DateTime CreatedAt { get; set; } + + [BsonElement("startedAt")] + [BsonIgnoreIfNull] + public DateTime? StartedAt { get; set; } + + [BsonElement("completedAt")] + [BsonIgnoreIfNull] + public DateTime? CompletedAt { get; set; } + + [BsonElement("context")] + public JobContextDocument Context { get; set; } = new(); + + [BsonElement("items")] + public List Items { get; set; } = new(); + + [BsonElement("processed")] + public int ProcessedCount { get; set; } + + [BsonElement("succeeded")] + public int SucceededCount { get; set; } + + [BsonElement("failed")] + public int FailedCount { get; set; } + + [BsonElement("failureReason")] + [BsonIgnoreIfNull] + public string? 
FailureReason { get; set; } + + public static JobDocument FromDomain(BulkVerificationJob job, JsonSerializerOptions serializerOptions) + { + return new JobDocument + { + Id = job.Id, + Version = job.Version, + Status = job.Status, + CreatedAt = job.CreatedAt.UtcDateTime, + StartedAt = job.StartedAt?.UtcDateTime, + CompletedAt = job.CompletedAt?.UtcDateTime, + Context = JobContextDocument.FromDomain(job.Context), + Items = JobItemDocument.FromDomain(job.Items, serializerOptions), + ProcessedCount = job.ProcessedCount, + SucceededCount = job.SucceededCount, + FailedCount = job.FailedCount, + FailureReason = job.FailureReason + }; + } + + public BulkVerificationJob ToDomain(JsonSerializerOptions serializerOptions) + { + return new BulkVerificationJob + { + Id = Id, + Version = Version, + Status = Status, + CreatedAt = DateTime.SpecifyKind(CreatedAt, DateTimeKind.Utc), + StartedAt = StartedAt is null ? null : DateTime.SpecifyKind(StartedAt.Value, DateTimeKind.Utc), + CompletedAt = CompletedAt is null ? null : DateTime.SpecifyKind(CompletedAt.Value, DateTimeKind.Utc), + Context = Context.ToDomain(), + Items = JobItemDocument.ToDomain(Items, serializerOptions), + ProcessedCount = ProcessedCount, + SucceededCount = SucceededCount, + FailedCount = FailedCount, + FailureReason = FailureReason + }; + } + } + + internal sealed class JobContextDocument + { + [BsonElement("tenant")] + [BsonIgnoreIfNull] + public string? Tenant { get; set; } + + [BsonElement("requestedBy")] + [BsonIgnoreIfNull] + public string? RequestedBy { get; set; } + + [BsonElement("clientId")] + [BsonIgnoreIfNull] + public string? 
ClientId { get; set; } + + [BsonElement("scopes")] + public List Scopes { get; set; } = new(); + + public static JobContextDocument FromDomain(BulkVerificationJobContext context) + { + return new JobContextDocument + { + Tenant = context.Tenant, + RequestedBy = context.RequestedBy, + ClientId = context.ClientId, + Scopes = new List(context.Scopes) + }; + } + + public BulkVerificationJobContext ToDomain() + { + return new BulkVerificationJobContext + { + Tenant = Tenant, + RequestedBy = RequestedBy, + ClientId = ClientId, + Scopes = new List(Scopes ?? new List()) + }; + } + } + + internal sealed class JobItemDocument + { + [BsonElement("index")] + public int Index { get; set; } + + [BsonElement("request")] + public ItemRequestDocument Request { get; set; } = new(); + + [BsonElement("status")] + [BsonRepresentation(BsonType.String)] + public BulkVerificationItemStatus Status { get; set; } + + [BsonElement("startedAt")] + [BsonIgnoreIfNull] + public DateTime? StartedAt { get; set; } + + [BsonElement("completedAt")] + [BsonIgnoreIfNull] + public DateTime? CompletedAt { get; set; } + + [BsonElement("result")] + [BsonIgnoreIfNull] + public string? ResultJson { get; set; } + + [BsonElement("error")] + [BsonIgnoreIfNull] + public string? Error { get; set; } + + public static List FromDomain(IEnumerable items, JsonSerializerOptions serializerOptions) + { + var list = new List(); + + foreach (var item in items) + { + list.Add(new JobItemDocument + { + Index = item.Index, + Request = ItemRequestDocument.FromDomain(item.Request), + Status = item.Status, + StartedAt = item.StartedAt?.UtcDateTime, + CompletedAt = item.CompletedAt?.UtcDateTime, + ResultJson = item.Result is null ? 
null : JsonSerializer.Serialize(item.Result, serializerOptions), + Error = item.Error + }); + } + + return list; + } + + public static IList ToDomain(IEnumerable documents, JsonSerializerOptions serializerOptions) + { + var list = new List(); + + foreach (var document in documents) + { + AttestorVerificationResult? result = null; + if (!string.IsNullOrWhiteSpace(document.ResultJson)) + { + result = JsonSerializer.Deserialize(document.ResultJson, serializerOptions); + } + + list.Add(new BulkVerificationJobItem + { + Index = document.Index, + Request = document.Request.ToDomain(), + Status = document.Status, + StartedAt = document.StartedAt is null ? null : DateTime.SpecifyKind(document.StartedAt.Value, DateTimeKind.Utc), + CompletedAt = document.CompletedAt is null ? null : DateTime.SpecifyKind(document.CompletedAt.Value, DateTimeKind.Utc), + Result = result, + Error = document.Error + }); + } + + return list; + } + } + + internal sealed class ItemRequestDocument + { + [BsonElement("uuid")] + [BsonIgnoreIfNull] + public string? Uuid { get; set; } + + [BsonElement("artifactSha256")] + [BsonIgnoreIfNull] + public string? ArtifactSha256 { get; set; } + + [BsonElement("subject")] + [BsonIgnoreIfNull] + public string? Subject { get; set; } + + [BsonElement("envelopeId")] + [BsonIgnoreIfNull] + public string? EnvelopeId { get; set; } + + [BsonElement("policyVersion")] + [BsonIgnoreIfNull] + public string? 
PolicyVersion { get; set; } + + [BsonElement("refreshProof")] + public bool RefreshProof { get; set; } + + public static ItemRequestDocument FromDomain(BulkVerificationItemRequest request) + { + return new ItemRequestDocument + { + Uuid = request.Uuid, + ArtifactSha256 = request.ArtifactSha256, + Subject = request.Subject, + EnvelopeId = request.EnvelopeId, + PolicyVersion = request.PolicyVersion, + RefreshProof = request.RefreshProof + }; + } + + public BulkVerificationItemRequest ToDomain() + { + return new BulkVerificationItemRequest + { + Uuid = Uuid, + ArtifactSha256 = ArtifactSha256, + Subject = Subject, + EnvelopeId = EnvelopeId, + PolicyVersion = PolicyVersion, + RefreshProof = RefreshProof + }; + } + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Offline/AttestorBundleService.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Offline/AttestorBundleService.cs new file mode 100644 index 00000000..e7a90d3c --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Offline/AttestorBundleService.cs @@ -0,0 +1,269 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Security.Cryptography; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Offline; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Storage; +using StellaOps.Attestor.Infrastructure.Storage; + +namespace StellaOps.Attestor.Infrastructure.Offline; + +internal sealed class AttestorBundleService : IAttestorBundleService +{ + private readonly IAttestorEntryRepository _repository; + private readonly IAttestorArchiveStore _archiveStore; + private readonly TimeProvider _timeProvider; + private readonly AttestorOptions _options; + private readonly ILogger _logger; + + public AttestorBundleService( + IAttestorEntryRepository repository, + IAttestorArchiveStore 
archiveStore, + TimeProvider timeProvider, + IOptions options, + ILogger logger) + { + _repository = repository; + _archiveStore = archiveStore; + _timeProvider = timeProvider; + _options = options.Value; + _logger = logger; + } + + public async Task ExportAsync(AttestorBundleExportRequest request, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + var (entries, continuationToken) = await ResolveEntriesAsync(request, cancellationToken).ConfigureAwait(false); + var items = new List(entries.Count); + + foreach (var entry in entries + .OrderBy(e => e.CreatedAt) + .ThenBy(e => e.RekorUuid, StringComparer.Ordinal)) + { + var archiveBundle = await _archiveStore.GetBundleAsync(entry.BundleSha256, entry.RekorUuid, cancellationToken).ConfigureAwait(false); + if (archiveBundle is null) + { + _logger.LogWarning("Archive bundle for {Uuid} ({BundleSha}) unavailable; exporting metadata only.", entry.RekorUuid, entry.BundleSha256); + items.Add(new AttestorBundleItem + { + Entry = entry, + CanonicalBundle = string.Empty, + Metadata = new Dictionary + { + ["archive.missing"] = "true" + } + }); + + continue; + } + + var metadata = archiveBundle.Metadata ?? new Dictionary(); + if (!metadata.ContainsKey("logUrl")) + { + metadata = new Dictionary(metadata) + { + ["logUrl"] = entry.Log.Url + }; + } + + items.Add(new AttestorBundleItem + { + Entry = entry, + CanonicalBundle = Convert.ToBase64String(archiveBundle.CanonicalBundleJson), + Proof = archiveBundle.ProofJson.Length > 0 ? 
Convert.ToBase64String(archiveBundle.ProofJson) : null, + Metadata = metadata + }); + } + + return new AttestorBundlePackage + { + Version = AttestorBundleVersions.Current, + GeneratedAt = _timeProvider.GetUtcNow(), + Items = items, + ContinuationToken = continuationToken + }; + } + + public async Task ImportAsync(AttestorBundlePackage package, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(package); + + if (!_options.S3.Enabled || _archiveStore is NullAttestorArchiveStore) + { + var skippedCount = package.Items?.Count ?? 0; + _logger.LogWarning("Attestor archive store disabled; skipping import for {Count} bundle item(s).", skippedCount); + return new AttestorBundleImportResult + { + Imported = 0, + Updated = 0, + Skipped = skippedCount, + Issues = new[] { "archive_disabled" } + }; + } + + if (package.Items is null || package.Items.Count == 0) + { + return new AttestorBundleImportResult + { + Imported = 0, + Updated = 0, + Skipped = 0, + Issues = Array.Empty() + }; + } + + var imported = 0; + var updated = 0; + var skipped = 0; + var issues = new List(); + + foreach (var item in package.Items) + { + if (item.Entry is null) + { + skipped++; + issues.Add("entry_missing"); + continue; + } + + if (string.IsNullOrWhiteSpace(item.Entry.RekorUuid)) + { + skipped++; + issues.Add("uuid_missing"); + continue; + } + + if (string.IsNullOrWhiteSpace(item.Entry.BundleSha256)) + { + skipped++; + issues.Add($"bundle_sha_missing:{item.Entry.RekorUuid}"); + continue; + } + + if (string.IsNullOrWhiteSpace(item.CanonicalBundle)) + { + skipped++; + issues.Add($"bundle_payload_missing:{item.Entry.RekorUuid}"); + continue; + } + + byte[] canonicalBytes; + try + { + canonicalBytes = Convert.FromBase64String(item.CanonicalBundle); + } + catch (FormatException) + { + skipped++; + issues.Add($"bundle_payload_invalid_base64:{item.Entry.RekorUuid}"); + continue; + } + + var computedSha = 
Convert.ToHexString(SHA256.HashData(canonicalBytes)).ToLowerInvariant(); + if (!string.Equals(computedSha, item.Entry.BundleSha256, StringComparison.OrdinalIgnoreCase)) + { + skipped++; + issues.Add($"bundle_hash_mismatch:{item.Entry.RekorUuid}"); + continue; + } + + byte[] proofBytes = Array.Empty(); + if (!string.IsNullOrEmpty(item.Proof)) + { + try + { + proofBytes = Convert.FromBase64String(item.Proof); + } + catch (FormatException) + { + issues.Add($"proof_invalid_base64:{item.Entry.RekorUuid}"); + } + } + + var archiveBundle = new AttestorArchiveBundle + { + RekorUuid = item.Entry.RekorUuid, + ArtifactSha256 = item.Entry.Artifact.Sha256, + BundleSha256 = item.Entry.BundleSha256, + CanonicalBundleJson = canonicalBytes, + ProofJson = proofBytes, + Metadata = item.Metadata ?? new Dictionary() + }; + + await _archiveStore.ArchiveBundleAsync(archiveBundle, cancellationToken).ConfigureAwait(false); + + var existing = await _repository.GetByUuidAsync(item.Entry.RekorUuid, cancellationToken).ConfigureAwait(false); + if (existing is null) + { + imported++; + } + else + { + updated++; + } + + await _repository.SaveAsync(item.Entry, cancellationToken).ConfigureAwait(false); + } + + return new AttestorBundleImportResult + { + Imported = imported, + Updated = updated, + Skipped = skipped, + Issues = issues + }; + } + + private async Task<(List Entries, string? 
ContinuationToken)> ResolveEntriesAsync(AttestorBundleExportRequest request, CancellationToken cancellationToken) + { + var entries = new List(); + + if (request.Uuids is { Count: > 0 }) + { + foreach (var uuid in request.Uuids.Where(u => !string.IsNullOrWhiteSpace(u)).Distinct(StringComparer.OrdinalIgnoreCase)) + { + var entry = await _repository.GetByUuidAsync(uuid, cancellationToken).ConfigureAwait(false); + if (entry is null) + { + _logger.LogWarning("Attestation {Uuid} not found; skipping export entry.", uuid); + continue; + } + + entries.Add(entry); + } + + return (entries, null); + } + + var limit = request.Limit.HasValue + ? Math.Clamp(request.Limit.Value, 1, 200) + : 100; + + var query = new AttestorEntryQuery + { + Subject = request.Subject, + Type = request.Type, + Issuer = request.Issuer, + Scope = request.Scope, + CreatedAfter = request.CreatedAfter, + CreatedBefore = request.CreatedBefore, + PageSize = limit, + ContinuationToken = request.ContinuationToken + }; + + var result = await _repository.QueryAsync(query, cancellationToken).ConfigureAwait(false); + if (result.Items.Count == 0) + { + _logger.LogInformation("No attestor entries matched export query."); + } + + entries.AddRange(result.Items.Take(limit)); + return (entries, result.ContinuationToken); + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/ServiceCollectionExtensions.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/ServiceCollectionExtensions.cs index 5b7e4cf1..2a4d01f1 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/ServiceCollectionExtensions.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/ServiceCollectionExtensions.cs @@ -1,21 +1,24 @@ using System; -using Amazon.Runtime; -using Amazon.S3; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using MongoDB.Driver; -using StackExchange.Redis; -using 
StellaOps.Attestor.Core.Options; -using StellaOps.Attestor.Core.Observability; -using StellaOps.Attestor.Core.Rekor; -using StellaOps.Attestor.Core.Storage; -using StellaOps.Attestor.Core.Submission; -using StellaOps.Attestor.Infrastructure.Rekor; -using StellaOps.Attestor.Infrastructure.Storage; -using StellaOps.Attestor.Infrastructure.Submission; -using StellaOps.Attestor.Core.Verification; -using StellaOps.Attestor.Infrastructure.Verification; +using Amazon.Runtime; +using Amazon.S3; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using StackExchange.Redis; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Observability; +using StellaOps.Attestor.Core.Rekor; +using StellaOps.Attestor.Core.Storage; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Core.Transparency; +using StellaOps.Attestor.Core.Verification; +using StellaOps.Attestor.Infrastructure.Rekor; +using StellaOps.Attestor.Infrastructure.Storage; +using StellaOps.Attestor.Infrastructure.Submission; +using StellaOps.Attestor.Infrastructure.Transparency; +using StellaOps.Attestor.Infrastructure.Verification; namespace StellaOps.Attestor.Infrastructure; @@ -23,7 +26,9 @@ public static class ServiceCollectionExtensions { public static IServiceCollection AddAttestorInfrastructure(this IServiceCollection services) { - services.AddSingleton(); + services.AddMemoryCache(); + + services.AddSingleton(); services.AddSingleton(sp => { var canonicalizer = sp.GetRequiredService(); @@ -33,11 +38,34 @@ public static class ServiceCollectionExtensions services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); - services.AddHttpClient(client => - { - client.Timeout = TimeSpan.FromSeconds(30); - }); - services.AddSingleton(sp => sp.GetRequiredService()); + services.AddHttpClient(client => + { + client.Timeout = 
TimeSpan.FromSeconds(30); + }); + services.AddSingleton(sp => sp.GetRequiredService()); + + services.AddHttpClient((sp, client) => + { + var options = sp.GetRequiredService>().Value; + var timeoutMs = options.TransparencyWitness.RequestTimeoutMs; + if (timeoutMs <= 0) + { + timeoutMs = 15_000; + } + + client.Timeout = TimeSpan.FromMilliseconds(timeoutMs); + }); + + services.AddSingleton(sp => + { + var options = sp.GetRequiredService>().Value; + if (!options.TransparencyWitness.Enabled || string.IsNullOrWhiteSpace(options.TransparencyWitness.BaseUrl)) + { + return new NullTransparencyWitnessClient(); + } + + return sp.GetRequiredService(); + }); services.AddSingleton(sp => { diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningKeyRegistry.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningKeyRegistry.cs new file mode 100644 index 00000000..72926219 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningKeyRegistry.cs @@ -0,0 +1,347 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Security.Cryptography; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Org.BouncyCastle.Crypto.Parameters; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Signing; +using StellaOps.Cryptography; +using StellaOps.Cryptography.Kms; +using StellaOps.Cryptography.Plugin.BouncyCastle; + +namespace StellaOps.Attestor.Infrastructure.Signing; + +internal sealed class AttestorSigningKeyRegistry : IDisposable +{ + private readonly Dictionary _keys; + private readonly FileKmsClient? _kmsClient; + private readonly ILogger _logger; + + public AttestorSigningKeyRegistry( + IOptions options, + TimeProvider timeProvider, + ILogger logger) + { + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + var attestorOptions = options?.Value ?? new AttestorOptions(); + var signingOptions = attestorOptions.Signing ?? new AttestorOptions.SigningOptions(); + + var providers = new List(); + var providerMap = new Dictionary(StringComparer.OrdinalIgnoreCase); + + void RegisterProvider(ICryptoProvider provider) + { + providers.Add(provider); + providerMap[provider.Name] = provider; + } + + var defaultProvider = new DefaultCryptoProvider(); + RegisterProvider(defaultProvider); + + var edProvider = new BouncyCastleEd25519CryptoProvider(); + RegisterProvider(edProvider); + + KmsCryptoProvider? kmsProvider = null; + if (RequiresKms(signingOptions)) + { + var kmsOptions = signingOptions.Kms ?? throw new InvalidOperationException("attestor.signing.kms is required when a signing key declares mode 'kms'."); + if (string.IsNullOrWhiteSpace(kmsOptions.RootPath)) + { + throw new InvalidOperationException("attestor.signing.kms.rootPath must be provided when using KMS-backed signing keys."); + } + + if (string.IsNullOrWhiteSpace(kmsOptions.Password)) + { + throw new InvalidOperationException("attestor.signing.kms.password must be provided when using KMS-backed signing keys."); + } + + var fileOptions = new FileKmsOptions + { + RootPath = Path.GetFullPath(kmsOptions.RootPath!), + Password = kmsOptions.Password!, + Algorithm = kmsOptions.Algorithm, + KeyDerivationIterations = kmsOptions.KeyDerivationIterations ?? 600_000 + }; + + _kmsClient = new FileKmsClient(fileOptions); + kmsProvider = new KmsCryptoProvider(_kmsClient); + RegisterProvider(kmsProvider); + } + + Registry = new CryptoProviderRegistry(providers, signingOptions.PreferredProviders); + _keys = new Dictionary(StringComparer.OrdinalIgnoreCase); + + foreach (var key in signingOptions.Keys ?? 
Array.Empty()) + { + if (key is null || !key.Enabled) + { + continue; + } + + var entry = CreateEntry( + key, + providerMap, + defaultProvider, + edProvider, + kmsProvider, + _kmsClient, + timeProvider); + + if (_keys.ContainsKey(entry.KeyId)) + { + throw new InvalidOperationException($"Duplicate signing key id '{entry.KeyId}' configured."); + } + + _keys[entry.KeyId] = entry; + _logger.LogInformation("Registered attestor signing key {KeyId} using provider {Provider} and algorithm {Algorithm}.", entry.KeyId, entry.ProviderName, entry.Algorithm); + } + } + + public ICryptoProviderRegistry Registry { get; } + + public SigningKeyEntry GetRequired(string keyId) + { + if (string.IsNullOrWhiteSpace(keyId)) + { + throw new AttestorSigningException("key_missing", "Signing key id must be provided."); + } + + if (_keys.TryGetValue(keyId, out var entry)) + { + return entry; + } + + throw new AttestorSigningException("key_not_found", $"Signing key '{keyId}' is not configured."); + } + + public void Dispose() + { + _kmsClient?.Dispose(); + } + + private static bool RequiresKms(AttestorOptions.SigningOptions signingOptions) + => signingOptions.Keys?.Any(static key => + string.Equals(key?.Mode, "kms", StringComparison.OrdinalIgnoreCase)) == true; + + private SigningKeyEntry CreateEntry( + AttestorOptions.SigningKeyOptions key, + IReadOnlyDictionary providers, + DefaultCryptoProvider defaultProvider, + BouncyCastleEd25519CryptoProvider edProvider, + KmsCryptoProvider? kmsProvider, + FileKmsClient? kmsClient, + TimeProvider timeProvider) + { + var providerName = ResolveProviderName(key); + if (!providers.TryGetValue(providerName, out var provider)) + { + throw new InvalidOperationException($"Signing provider '{providerName}' is not registered for key '{key.KeyId}'."); + } + + var providerKeyId = string.IsNullOrWhiteSpace(key.ProviderKeyId) ? 
key.KeyId : key.ProviderKeyId!; + if (string.IsNullOrWhiteSpace(providerKeyId)) + { + throw new InvalidOperationException($"Signing key '{key.KeyId}' must specify a provider key identifier."); + } + + var now = timeProvider.GetUtcNow(); + var normalizedAlgorithm = NormalizeAlgorithm(key.Algorithm ?? string.Empty); + + if (string.Equals(providerName, "kms", StringComparison.OrdinalIgnoreCase)) + { + if (kmsProvider is null || kmsClient is null) + { + throw new InvalidOperationException($"KMS signing provider is not configured but signing key '{key.KeyId}' requests mode 'kms'."); + } + + var versionId = key.KmsVersionId; + if (string.IsNullOrWhiteSpace(versionId)) + { + throw new InvalidOperationException($"Signing key '{key.KeyId}' must specify kmsVersionId when using mode 'kms'."); + } + + var material = kmsClient.ExportAsync(providerKeyId, versionId, default).GetAwaiter().GetResult(); + var parameters = new ECParameters + { + Curve = ECCurve.NamedCurves.nistP256, + D = material.D, + Q = new ECPoint + { + X = material.Qx, + Y = material.Qy + } + }; + + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["kms.version"] = material.VersionId + }; + + var signingKey = new CryptoSigningKey( + new CryptoKeyReference(providerKeyId, providerName), + normalizedAlgorithm, + in parameters, + now, + expiresAt: null, + metadata: metadata); + + kmsProvider.UpsertSigningKey(signingKey); + } + else if (string.Equals(providerName, "bouncycastle.ed25519", StringComparison.OrdinalIgnoreCase)) + { + var privateKeyBytes = LoadPrivateKeyBytes(key); + var privateKeyParameters = new Ed25519PrivateKeyParameters(privateKeyBytes, 0); + var publicKeyBytes = privateKeyParameters.GeneratePublicKey().GetEncoded(); + + var signingKey = new CryptoSigningKey( + new CryptoKeyReference(providerKeyId, providerName), + normalizedAlgorithm, + privateKeyBytes, + now, + publicKey: publicKeyBytes); + + edProvider.UpsertSigningKey(signingKey); + } + else + { + var parameters = 
LoadEcParameters(key); + var signingKey = new CryptoSigningKey( + new CryptoKeyReference(providerKeyId, providerName), + normalizedAlgorithm, + in parameters, + now); + + defaultProvider.UpsertSigningKey(signingKey); + } + + var mode = string.IsNullOrWhiteSpace(key.Mode) + ? (string.Equals(providerName, "kms", StringComparison.OrdinalIgnoreCase) ? "kms" : "keyful") + : key.Mode!; + + var certificateChain = key.CertificateChain?.Count > 0 + ? key.CertificateChain.ToArray() + : Array.Empty(); + + return new SigningKeyEntry( + key.KeyId, + providerKeyId, + providerName, + normalizedAlgorithm, + mode, + certificateChain); + } + + private static string ResolveProviderName(AttestorOptions.SigningKeyOptions key) + { + if (!string.IsNullOrWhiteSpace(key.Provider)) + { + return key.Provider!; + } + + if (string.Equals(key.Mode, "kms", StringComparison.OrdinalIgnoreCase)) + { + return "kms"; + } + + if (string.Equals(key.Algorithm, SignatureAlgorithms.Ed25519, StringComparison.OrdinalIgnoreCase) || + string.Equals(key.Algorithm, SignatureAlgorithms.EdDsa, StringComparison.OrdinalIgnoreCase)) + { + return "bouncycastle.ed25519"; + } + + return "default"; + } + + private static string NormalizeAlgorithm(string algorithm) + { + if (string.IsNullOrWhiteSpace(algorithm)) + { + return SignatureAlgorithms.Es256; + } + + if (string.Equals(algorithm, SignatureAlgorithms.EdDsa, StringComparison.OrdinalIgnoreCase)) + { + return SignatureAlgorithms.Ed25519; + } + + return algorithm.ToUpperInvariant(); + } + + private static byte[] LoadPrivateKeyBytes(AttestorOptions.SigningKeyOptions key) + { + var material = ReadMaterial(key); + return key.MaterialFormat?.ToLowerInvariant() switch + { + "base64" or null => Convert.FromBase64String(material), + "hex" => Convert.FromHexString(material), + _ => throw new InvalidOperationException($"Unsupported materialFormat '{key.MaterialFormat}' for Ed25519 signing key '{key.KeyId}'. 
Supported formats: base64, hex.") + }; + } + + private static ECParameters LoadEcParameters(AttestorOptions.SigningKeyOptions key) + { + var material = ReadMaterial(key); + using var ecdsa = ECDsa.Create(); + + switch (key.MaterialFormat?.ToLowerInvariant()) + { + case null: + case "pem": + ecdsa.ImportFromPem(material); + break; + case "base64": + { + var pkcs8 = Convert.FromBase64String(material); + ecdsa.ImportPkcs8PrivateKey(pkcs8, out _); + break; + } + case "hex": + { + var pkcs8 = Convert.FromHexString(material); + ecdsa.ImportPkcs8PrivateKey(pkcs8, out _); + break; + } + default: + throw new InvalidOperationException($"Unsupported materialFormat '{key.MaterialFormat}' for signing key '{key.KeyId}'. Supported formats: pem, base64, hex."); + } + + return ecdsa.ExportParameters(true); + } + + private static string ReadMaterial(AttestorOptions.SigningKeyOptions key) + { + if (!string.IsNullOrWhiteSpace(key.MaterialPassphrase)) + { + throw new InvalidOperationException($"Signing key '{key.KeyId}' specifies a materialPassphrase but encrypted keys are not yet supported."); + } + + if (!string.IsNullOrWhiteSpace(key.Material)) + { + return key.Material.Trim(); + } + + if (!string.IsNullOrWhiteSpace(key.MaterialPath)) + { + var path = Path.GetFullPath(key.MaterialPath); + if (!File.Exists(path)) + { + throw new InvalidOperationException($"Signing key material file '{path}' for key '{key.KeyId}' does not exist."); + } + + return File.ReadAllText(path).Trim(); + } + + throw new InvalidOperationException($"Signing key '{key.KeyId}' must provide either inline material or a materialPath."); + } + + internal sealed record SigningKeyEntry( + string KeyId, + string ProviderKeyId, + string ProviderName, + string Algorithm, + string Mode, + IReadOnlyList CertificateChain); +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningService.cs 
b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningService.cs new file mode 100644 index 00000000..6c16b8a4 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningService.cs @@ -0,0 +1,260 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Security.Cryptography; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Attestor.Core.Audit; +using StellaOps.Attestor.Core.Observability; +using StellaOps.Attestor.Core.Signing; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Cryptography; + +namespace StellaOps.Attestor.Infrastructure.Signing; + +internal sealed class AttestorSigningService : IAttestationSigningService +{ + private readonly AttestorSigningKeyRegistry _registry; + private readonly IDsseCanonicalizer _canonicalizer; + private readonly StellaOps.Attestor.Core.Storage.IAttestorAuditSink _auditSink; + private readonly AttestorMetrics _metrics; + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + + public AttestorSigningService( + AttestorSigningKeyRegistry registry, + IDsseCanonicalizer canonicalizer, + StellaOps.Attestor.Core.Storage.IAttestorAuditSink auditSink, + AttestorMetrics metrics, + ILogger logger, + TimeProvider timeProvider) + { + _registry = registry ?? throw new ArgumentNullException(nameof(registry)); + _canonicalizer = canonicalizer ?? throw new ArgumentNullException(nameof(canonicalizer)); + _auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink)); + _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? 
TimeProvider.System; + } + + public async Task SignAsync( + AttestationSignRequest request, + SubmissionContext context, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(context); + + var stopwatch = Stopwatch.StartNew(); + try + { + if (string.IsNullOrWhiteSpace(request.KeyId)) + { + throw new AttestorSigningException("key_missing", "Signing key id is required."); + } + + if (string.IsNullOrWhiteSpace(request.PayloadType)) + { + throw new AttestorSigningException("payload_type_missing", "payloadType must be provided."); + } + + if (string.IsNullOrWhiteSpace(request.PayloadBase64)) + { + throw new AttestorSigningException("payload_missing", "payload must be provided as base64."); + } + + var entry = _registry.GetRequired(request.KeyId); + byte[] payloadBytes; + try + { + payloadBytes = Convert.FromBase64String(request.PayloadBase64.Trim()); + } + catch (FormatException) + { + throw new AttestorSigningException("payload_invalid_base64", "payload must be valid base64."); + } + + var normalizedPayload = Convert.ToBase64String(payloadBytes); + var preAuth = DssePreAuthenticationEncoding.Compute(request.PayloadType, payloadBytes); + + var signerResolution = _registry.Registry.ResolveSigner( + CryptoCapability.Signing, + entry.Algorithm, + new CryptoKeyReference(entry.ProviderKeyId, entry.ProviderName), + entry.ProviderName); + + var signatureBytes = await signerResolution.Signer.SignAsync(preAuth, cancellationToken).ConfigureAwait(false); + var signatureBase64 = Convert.ToBase64String(signatureBytes); + + var bundle = BuildBundle(request, entry, normalizedPayload, signatureBase64); + var meta = BuildMeta(request); + + var canonicalRequest = new AttestorSubmissionRequest + { + Bundle = bundle, + Meta = meta + }; + + var canonical = await _canonicalizer.CanonicalizeAsync(canonicalRequest, cancellationToken).ConfigureAwait(false); + meta.BundleSha256 = 
Convert.ToHexString(SHA256.HashData(canonical)).ToLowerInvariant(); + + var elapsedSeconds = stopwatch.Elapsed.TotalSeconds; + RecordSuccessMetrics(entry, elapsedSeconds); + await WriteAuditAsync(context, entry, meta, elapsedSeconds, result: "signed", error: null, cancellationToken).ConfigureAwait(false); + + return new AttestationSignResult + { + Bundle = bundle, + Meta = meta, + KeyId = request.KeyId, + Algorithm = entry.Algorithm, + Mode = bundle.Mode, + Provider = entry.ProviderName, + SignedAt = _timeProvider.GetUtcNow() + }; + } + catch (AttestorSigningException) + { + var elapsedSeconds = stopwatch.Elapsed.TotalSeconds; + RecordFailureMetrics(elapsedSeconds); + await WriteAuditAsync(context, null, null, elapsedSeconds, "failed", error: "validation", cancellationToken).ConfigureAwait(false); + throw; + } + catch (Exception ex) + { + var elapsedSeconds = stopwatch.Elapsed.TotalSeconds; + RecordFailureMetrics(elapsedSeconds); + _metrics.ErrorTotal.Add(1, new KeyValuePair("type", "sign")); + _logger.LogError(ex, "Unexpected error while signing attestation."); + await WriteAuditAsync(context, null, null, elapsedSeconds, "failed", error: "unexpected", cancellationToken).ConfigureAwait(false); + throw new AttestorSigningException("signing_failed", "Signing failed due to an internal error.", ex); + } + } + + private static AttestorSubmissionRequest.SubmissionBundle BuildBundle( + AttestationSignRequest request, + AttestorSigningKeyRegistry.SigningKeyEntry entry, + string normalizedPayload, + string signatureBase64) + { + var mode = string.IsNullOrWhiteSpace(request.Mode) ? entry.Mode : request.Mode!; + var certificateChain = new List(entry.CertificateChain.Count + (request.CertificateChain?.Count ?? 
0)); + certificateChain.AddRange(entry.CertificateChain); + if (request.CertificateChain is not null) + { + foreach (var cert in request.CertificateChain) + { + if (!string.IsNullOrWhiteSpace(cert) && + !certificateChain.Contains(cert, StringComparer.Ordinal)) + { + certificateChain.Add(cert); + } + } + } + + var bundle = new AttestorSubmissionRequest.SubmissionBundle + { + Mode = mode, + Dsse = new AttestorSubmissionRequest.DsseEnvelope + { + PayloadType = request.PayloadType, + PayloadBase64 = normalizedPayload, + Signatures = + { + new AttestorSubmissionRequest.DsseSignature + { + KeyId = request.KeyId, + Signature = signatureBase64 + } + } + }, + CertificateChain = certificateChain + }; + + return bundle; + } + + private static AttestorSubmissionRequest.SubmissionMeta BuildMeta(AttestationSignRequest request) + { + var artifact = request.Artifact ?? new AttestorSubmissionRequest.ArtifactInfo(); + return new AttestorSubmissionRequest.SubmissionMeta + { + Artifact = new AttestorSubmissionRequest.ArtifactInfo + { + Sha256 = artifact.Sha256, + Kind = artifact.Kind, + ImageDigest = artifact.ImageDigest, + SubjectUri = artifact.SubjectUri + }, + Archive = request.Archive, + LogPreference = string.IsNullOrWhiteSpace(request.LogPreference) + ? 
"primary" + : request.LogPreference.Trim() + }; + } + + private void RecordSuccessMetrics(AttestorSigningKeyRegistry.SigningKeyEntry entry, double elapsedSeconds) + { + _metrics.SignTotal.Add(1, + new KeyValuePair("result", "success"), + new KeyValuePair("algorithm", entry.Algorithm), + new KeyValuePair("provider", entry.ProviderName)); + + _metrics.SignLatency.Record(elapsedSeconds, + new KeyValuePair("algorithm", entry.Algorithm), + new KeyValuePair("provider", entry.ProviderName)); + } + + private void RecordFailureMetrics(double elapsedSeconds) + { + _metrics.SignTotal.Add(1, new KeyValuePair("result", "failure")); + _metrics.SignLatency.Record(elapsedSeconds); + } + + private async Task WriteAuditAsync( + SubmissionContext context, + AttestorSigningKeyRegistry.SigningKeyEntry? entry, + AttestorSubmissionRequest.SubmissionMeta? meta, + double elapsedSeconds, + string result, + string? error, + CancellationToken cancellationToken) + { + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase); + if (entry is not null) + { + metadata["algorithm"] = entry.Algorithm; + metadata["provider"] = entry.ProviderName; + metadata["mode"] = entry.Mode; + metadata["keyId"] = entry.KeyId; + } + + if (!string.IsNullOrWhiteSpace(error)) + { + metadata["error"] = error!; + } + + var record = new AttestorAuditRecord + { + Action = "sign", + Result = result, + ArtifactSha256 = meta?.Artifact?.Sha256 ?? string.Empty, + BundleSha256 = meta?.BundleSha256 ?? string.Empty, + Backend = entry?.ProviderName ?? 
string.Empty, + LatencyMs = (long)(elapsedSeconds * 1000), + Timestamp = _timeProvider.GetUtcNow(), + Caller = new AttestorAuditRecord.CallerDescriptor + { + Subject = context.CallerSubject, + Audience = context.CallerAudience, + ClientId = context.CallerClientId, + MtlsThumbprint = context.MtlsThumbprint, + Tenant = context.CallerTenant + }, + Metadata = metadata + }; + + await _auditSink.WriteAsync(record, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/StellaOps.Attestor.Infrastructure.csproj b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/StellaOps.Attestor.Infrastructure.csproj index 69ea4f38..65e9eee0 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/StellaOps.Attestor.Infrastructure.csproj +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/StellaOps.Attestor.Infrastructure.csproj @@ -1,21 +1,28 @@ - - - net10.0 - preview - enable - enable - true - - - - - - - - - - - - - - + + + net10.0 + preview + enable + enable + true + + + + + + + + + + + + + + + + + + + + + diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/CachingAttestorDedupeStore.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/CachingAttestorDedupeStore.cs new file mode 100644 index 00000000..7c277ee6 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/CachingAttestorDedupeStore.cs @@ -0,0 +1,56 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.Infrastructure.Storage; + +internal sealed class CachingAttestorDedupeStore : IAttestorDedupeStore +{ + private readonly IAttestorDedupeStore _cache; + private readonly IAttestorDedupeStore _inner; + private readonly ILogger _logger; + + public CachingAttestorDedupeStore( + 
IAttestorDedupeStore cache, + IAttestorDedupeStore inner, + ILogger logger) + { + _cache = cache; + _inner = inner; + _logger = logger; + } + + public async Task TryGetExistingAsync(string bundleSha256, CancellationToken cancellationToken = default) + { + try + { + var cached = await _cache.TryGetExistingAsync(bundleSha256, cancellationToken).ConfigureAwait(false); + if (!string.IsNullOrWhiteSpace(cached)) + { + return cached; + } + } + catch (Exception ex) + { + _logger.LogDebug(ex, "Dedupe cache lookup failed for bundle {BundleSha}", bundleSha256); + } + + return await _inner.TryGetExistingAsync(bundleSha256, cancellationToken).ConfigureAwait(false); + } + + public async Task SetAsync(string bundleSha256, string rekorUuid, TimeSpan ttl, CancellationToken cancellationToken = default) + { + await _inner.SetAsync(bundleSha256, rekorUuid, ttl, cancellationToken).ConfigureAwait(false); + + try + { + await _cache.SetAsync(bundleSha256, rekorUuid, ttl, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to update dedupe cache for bundle {BundleSha}", bundleSha256); + } + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorAuditSink.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorAuditSink.cs index 3d1b7763..3c49e1a8 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorAuditSink.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorAuditSink.cs @@ -9,20 +9,36 @@ using StellaOps.Attestor.Core.Storage; namespace StellaOps.Attestor.Infrastructure.Storage; -internal sealed class MongoAttestorAuditSink : IAttestorAuditSink -{ - private readonly IMongoCollection _collection; - - public MongoAttestorAuditSink(IMongoCollection collection) - { - _collection = collection; - } - - public Task WriteAsync(AttestorAuditRecord record, 
CancellationToken cancellationToken = default) - { - var document = AttestorAuditDocument.FromRecord(record); - return _collection.InsertOneAsync(document, cancellationToken: cancellationToken); - } +internal sealed class MongoAttestorAuditSink : IAttestorAuditSink +{ + private readonly IMongoCollection _collection; + private static int _indexesInitialized; + + public MongoAttestorAuditSink(IMongoCollection collection) + { + _collection = collection; + EnsureIndexes(); + } + + public Task WriteAsync(AttestorAuditRecord record, CancellationToken cancellationToken = default) + { + var document = AttestorAuditDocument.FromRecord(record); + return _collection.InsertOneAsync(document, cancellationToken: cancellationToken); + } + + private void EnsureIndexes() + { + if (Interlocked.Exchange(ref _indexesInitialized, 1) == 1) + { + return; + } + + var index = new CreateIndexModel( + Builders.IndexKeys.Descending(x => x.Timestamp), + new CreateIndexOptions { Name = "ts_desc" }); + + _collection.Indexes.CreateOne(index); + } internal sealed class AttestorAuditDocument { diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorDedupeStore.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorDedupeStore.cs new file mode 100644 index 00000000..a63ab457 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorDedupeStore.cs @@ -0,0 +1,111 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; +using MongoDB.Driver; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.Infrastructure.Storage; + +internal sealed class MongoAttestorDedupeStore : IAttestorDedupeStore +{ + private readonly IMongoCollection _collection; + private readonly TimeProvider _timeProvider; + private static int _indexesInitialized; + + public MongoAttestorDedupeStore( + 
IMongoCollection collection, + TimeProvider timeProvider) + { + _collection = collection; + _timeProvider = timeProvider; + EnsureIndexes(); + } + + public async Task TryGetExistingAsync(string bundleSha256, CancellationToken cancellationToken = default) + { + var key = BuildKey(bundleSha256); + var now = _timeProvider.GetUtcNow().UtcDateTime; + var filter = Builders.Filter.Eq(x => x.Key, key); + + var document = await _collection + .Find(filter) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + if (document is null) + { + return null; + } + + if (document.TtlAt <= now) + { + await _collection.DeleteOneAsync(filter, cancellationToken).ConfigureAwait(false); + return null; + } + + return document.RekorUuid; + } + + public Task SetAsync(string bundleSha256, string rekorUuid, TimeSpan ttl, CancellationToken cancellationToken = default) + { + var now = _timeProvider.GetUtcNow().UtcDateTime; + var expiresAt = now.Add(ttl); + var key = BuildKey(bundleSha256); + var filter = Builders.Filter.Eq(x => x.Key, key); + + var update = Builders.Update + .SetOnInsert(x => x.Key, key) + .Set(x => x.RekorUuid, rekorUuid) + .Set(x => x.CreatedAt, now) + .Set(x => x.TtlAt, expiresAt); + + return _collection.UpdateOneAsync( + filter, + update, + new UpdateOptions { IsUpsert = true }, + cancellationToken); + } + + private static string BuildKey(string bundleSha256) => $"bundle:{bundleSha256}"; + + private void EnsureIndexes() + { + if (Interlocked.Exchange(ref _indexesInitialized, 1) == 1) + { + return; + } + + var indexes = new[] + { + new CreateIndexModel( + Builders.IndexKeys.Ascending(x => x.Key), + new CreateIndexOptions { Unique = true, Name = "dedupe_key_unique" }), + new CreateIndexModel( + Builders.IndexKeys.Ascending(x => x.TtlAt), + new CreateIndexOptions { ExpireAfter = TimeSpan.Zero, Name = "dedupe_ttl" }) + }; + + _collection.Indexes.CreateMany(indexes); + } + + [BsonIgnoreExtraElements] + internal sealed class AttestorDedupeDocument + { + [BsonId] + 
public ObjectId Id { get; set; } + + [BsonElement("key")] + public string Key { get; set; } = string.Empty; + + [BsonElement("rekorUuid")] + public string RekorUuid { get; set; } = string.Empty; + + [BsonElement("createdAt")] + public DateTime CreatedAt { get; set; } + + [BsonElement("ttlAt")] + public DateTime TtlAt { get; set; } + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorEntryRepository.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorEntryRepository.cs index 2673d71e..e759130d 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorEntryRepository.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorEntryRepository.cs @@ -1,342 +1,609 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using MongoDB.Bson; -using MongoDB.Bson.Serialization.Attributes; -using MongoDB.Driver; -using StellaOps.Attestor.Core.Storage; - -namespace StellaOps.Attestor.Infrastructure.Storage; - -internal sealed class MongoAttestorEntryRepository : IAttestorEntryRepository -{ - private readonly IMongoCollection _entries; - - public MongoAttestorEntryRepository(IMongoCollection entries) - { - _entries = entries; - } - - public async Task GetByBundleShaAsync(string bundleSha256, CancellationToken cancellationToken = default) - { - var filter = Builders.Filter.Eq(x => x.BundleSha256, bundleSha256); - var document = await _entries.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return document?.ToDomain(); - } - - public async Task GetByUuidAsync(string rekorUuid, CancellationToken cancellationToken = default) - { - var filter = Builders.Filter.Eq(x => x.Id, rekorUuid); - var document = await _entries.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return document?.ToDomain(); - } - - 
public async Task> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default) - { - var filter = Builders.Filter.Eq(x => x.Artifact.Sha256, artifactSha256); - var documents = await _entries.Find(filter).ToListAsync(cancellationToken).ConfigureAwait(false); - return documents.ConvertAll(static doc => doc.ToDomain()); - } - - public async Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default) - { - var document = AttestorEntryDocument.FromDomain(entry); - var filter = Builders.Filter.Eq(x => x.Id, document.Id); - await _entries.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false); - } - - [BsonIgnoreExtraElements] - internal sealed class AttestorEntryDocument - { - [BsonId] - public string Id { get; set; } = string.Empty; - - [BsonElement("artifact")] - public ArtifactDocument Artifact { get; set; } = new(); - - [BsonElement("bundleSha256")] - public string BundleSha256 { get; set; } = string.Empty; - - [BsonElement("index")] - public long? Index { get; set; } - - [BsonElement("proof")] - public ProofDocument? Proof { get; set; } - - [BsonElement("log")] - public LogDocument Log { get; set; } = new(); - - [BsonElement("createdAt")] - public BsonDateTime CreatedAt { get; set; } = BsonDateTime.Create(System.DateTimeOffset.UtcNow); - - [BsonElement("status")] - public string Status { get; set; } = "pending"; - - [BsonElement("signerIdentity")] - public SignerIdentityDocument SignerIdentity { get; set; } = new(); - - [BsonElement("mirror")] - public MirrorDocument? 
Mirror { get; set; } - - public static AttestorEntryDocument FromDomain(AttestorEntry entry) - { - return new AttestorEntryDocument - { - Id = entry.RekorUuid, - Artifact = new ArtifactDocument - { - Sha256 = entry.Artifact.Sha256, - Kind = entry.Artifact.Kind, - ImageDigest = entry.Artifact.ImageDigest, - SubjectUri = entry.Artifact.SubjectUri - }, - BundleSha256 = entry.BundleSha256, - Index = entry.Index, - Proof = entry.Proof is null ? null : new ProofDocument - { - Checkpoint = entry.Proof.Checkpoint is null ? null : new CheckpointDocument - { - Origin = entry.Proof.Checkpoint.Origin, - Size = entry.Proof.Checkpoint.Size, - RootHash = entry.Proof.Checkpoint.RootHash, - Timestamp = entry.Proof.Checkpoint.Timestamp is null - ? null - : BsonDateTime.Create(entry.Proof.Checkpoint.Timestamp.Value) - }, - Inclusion = entry.Proof.Inclusion is null ? null : new InclusionDocument - { - LeafHash = entry.Proof.Inclusion.LeafHash, - Path = entry.Proof.Inclusion.Path - } - }, - Log = new LogDocument - { - Backend = entry.Log.Backend, - Url = entry.Log.Url, - LogId = entry.Log.LogId - }, - CreatedAt = BsonDateTime.Create(entry.CreatedAt.UtcDateTime), - Status = entry.Status, - SignerIdentity = new SignerIdentityDocument - { - Mode = entry.SignerIdentity.Mode, - Issuer = entry.SignerIdentity.Issuer, - SubjectAlternativeName = entry.SignerIdentity.SubjectAlternativeName, - KeyId = entry.SignerIdentity.KeyId - }, - Mirror = entry.Mirror is null ? null : MirrorDocument.FromDomain(entry.Mirror) - }; - } - - public AttestorEntry ToDomain() - { - return new AttestorEntry - { - RekorUuid = Id, - Artifact = new AttestorEntry.ArtifactDescriptor - { - Sha256 = Artifact.Sha256, - Kind = Artifact.Kind, - ImageDigest = Artifact.ImageDigest, - SubjectUri = Artifact.SubjectUri - }, - BundleSha256 = BundleSha256, - Index = Index, - Proof = Proof is null ? null : new AttestorEntry.ProofDescriptor - { - Checkpoint = Proof.Checkpoint is null ? 
null : new AttestorEntry.CheckpointDescriptor - { - Origin = Proof.Checkpoint.Origin, - Size = Proof.Checkpoint.Size, - RootHash = Proof.Checkpoint.RootHash, - Timestamp = Proof.Checkpoint.Timestamp?.ToUniversalTime() - }, - Inclusion = Proof.Inclusion is null ? null : new AttestorEntry.InclusionDescriptor - { - LeafHash = Proof.Inclusion.LeafHash, - Path = Proof.Inclusion.Path - } - }, - Log = new AttestorEntry.LogDescriptor - { - Backend = Log.Backend, - Url = Log.Url, - LogId = Log.LogId - }, - CreatedAt = CreatedAt.ToUniversalTime(), - Status = Status, - SignerIdentity = new AttestorEntry.SignerIdentityDescriptor - { - Mode = SignerIdentity.Mode, - Issuer = SignerIdentity.Issuer, - SubjectAlternativeName = SignerIdentity.SubjectAlternativeName, - KeyId = SignerIdentity.KeyId - }, - Mirror = Mirror?.ToDomain() - }; - } - - internal sealed class ArtifactDocument - { - [BsonElement("sha256")] - public string Sha256 { get; set; } = string.Empty; - - [BsonElement("kind")] - public string Kind { get; set; } = string.Empty; - - [BsonElement("imageDigest")] - public string? ImageDigest { get; set; } - - [BsonElement("subjectUri")] - public string? SubjectUri { get; set; } - } - - internal sealed class ProofDocument - { - [BsonElement("checkpoint")] - public CheckpointDocument? Checkpoint { get; set; } - - [BsonElement("inclusion")] - public InclusionDocument? Inclusion { get; set; } - } - - internal sealed class CheckpointDocument - { - [BsonElement("origin")] - public string? Origin { get; set; } - - [BsonElement("size")] - public long Size { get; set; } - - [BsonElement("rootHash")] - public string? RootHash { get; set; } - - [BsonElement("timestamp")] - public BsonDateTime? Timestamp { get; set; } - } - - internal sealed class InclusionDocument - { - [BsonElement("leafHash")] - public string? 
LeafHash { get; set; } - - [BsonElement("path")] - public IReadOnlyList Path { get; set; } = System.Array.Empty(); - } - - internal sealed class LogDocument - { - [BsonElement("backend")] - public string Backend { get; set; } = "primary"; - - [BsonElement("url")] - public string Url { get; set; } = string.Empty; - - [BsonElement("logId")] - public string? LogId { get; set; } - } - - internal sealed class SignerIdentityDocument - { - [BsonElement("mode")] - public string Mode { get; set; } = string.Empty; - - [BsonElement("issuer")] - public string? Issuer { get; set; } - - [BsonElement("san")] - public string? SubjectAlternativeName { get; set; } - - [BsonElement("kid")] - public string? KeyId { get; set; } - } - - internal sealed class MirrorDocument - { - [BsonElement("backend")] - public string Backend { get; set; } = string.Empty; - - [BsonElement("url")] - public string Url { get; set; } = string.Empty; - - [BsonElement("uuid")] - public string? Uuid { get; set; } - - [BsonElement("index")] - public long? Index { get; set; } - - [BsonElement("status")] - public string Status { get; set; } = "pending"; - - [BsonElement("proof")] - public ProofDocument? Proof { get; set; } - - [BsonElement("logId")] - public string? LogId { get; set; } - - [BsonElement("error")] - public string? Error { get; set; } - - public static MirrorDocument FromDomain(AttestorEntry.LogReplicaDescriptor mirror) - { - return new MirrorDocument - { - Backend = mirror.Backend, - Url = mirror.Url, - Uuid = mirror.Uuid, - Index = mirror.Index, - Status = mirror.Status, - Proof = mirror.Proof is null ? null : new ProofDocument - { - Checkpoint = mirror.Proof.Checkpoint is null ? null : new CheckpointDocument - { - Origin = mirror.Proof.Checkpoint.Origin, - Size = mirror.Proof.Checkpoint.Size, - RootHash = mirror.Proof.Checkpoint.RootHash, - Timestamp = mirror.Proof.Checkpoint.Timestamp is null - ? 
null - : BsonDateTime.Create(mirror.Proof.Checkpoint.Timestamp.Value) - }, - Inclusion = mirror.Proof.Inclusion is null ? null : new InclusionDocument - { - LeafHash = mirror.Proof.Inclusion.LeafHash, - Path = mirror.Proof.Inclusion.Path - } - }, - LogId = mirror.LogId, - Error = mirror.Error - }; - } - - public AttestorEntry.LogReplicaDescriptor ToDomain() - { - return new AttestorEntry.LogReplicaDescriptor - { - Backend = Backend, - Url = Url, - Uuid = Uuid, - Index = Index, - Status = Status, - Proof = Proof is null ? null : new AttestorEntry.ProofDescriptor - { - Checkpoint = Proof.Checkpoint is null ? null : new AttestorEntry.CheckpointDescriptor - { - Origin = Proof.Checkpoint.Origin, - Size = Proof.Checkpoint.Size, - RootHash = Proof.Checkpoint.RootHash, - Timestamp = Proof.Checkpoint.Timestamp?.ToUniversalTime() - }, - Inclusion = Proof.Inclusion is null ? null : new AttestorEntry.InclusionDescriptor - { - LeafHash = Proof.Inclusion.LeafHash, - Path = Proof.Inclusion.Path - } - }, - LogId = LogId, - Error = Error - }; - } - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Bson.Serialization.Attributes; +using MongoDB.Driver; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.Infrastructure.Storage; + +internal sealed class MongoAttestorEntryRepository : IAttestorEntryRepository +{ + private const int DefaultPageSize = 50; + private const int MaxPageSize = 200; + + private readonly IMongoCollection _entries; + + public MongoAttestorEntryRepository(IMongoCollection entries) + { + _entries = entries ?? 
throw new ArgumentNullException(nameof(entries)); + EnsureIndexes(); + } + + public async Task GetByBundleShaAsync(string bundleSha256, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq(x => x.BundleSha256, bundleSha256); + var document = await _entries.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document?.ToDomain(); + } + + public async Task GetByUuidAsync(string rekorUuid, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq(x => x.Id, rekorUuid); + var document = await _entries.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document?.ToDomain(); + } + + public async Task> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq(x => x.Artifact.Sha256, artifactSha256); + var documents = await _entries.Find(filter) + .Sort(Builders.Sort.Descending(x => x.CreatedAt)) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return documents.ConvertAll(static doc => doc.ToDomain()); + } + + public async Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(entry); + + var document = AttestorEntryDocument.FromDomain(entry); + var filter = Builders.Filter.Eq(x => x.Id, document.Id); + await _entries.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false); + } + + public async Task QueryAsync(AttestorEntryQuery query, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(query); + + var pageSize = query.PageSize <= 0 ? 
DefaultPageSize : Math.Min(query.PageSize, MaxPageSize); + var filterBuilder = Builders.Filter; + var filter = filterBuilder.Empty; + + if (!string.IsNullOrWhiteSpace(query.Subject)) + { + var subject = query.Subject; + var subjectFilter = filterBuilder.Or( + filterBuilder.Eq(x => x.Artifact.Sha256, subject), + filterBuilder.Eq(x => x.Artifact.ImageDigest, subject), + filterBuilder.Eq(x => x.Artifact.SubjectUri, subject)); + filter &= subjectFilter; + } + + if (!string.IsNullOrWhiteSpace(query.Type)) + { + filter &= filterBuilder.Eq(x => x.Artifact.Kind, query.Type); + } + + if (!string.IsNullOrWhiteSpace(query.Issuer)) + { + filter &= filterBuilder.Eq(x => x.SignerIdentity.SubjectAlternativeName, query.Issuer); + } + + if (!string.IsNullOrWhiteSpace(query.Scope)) + { + filter &= filterBuilder.Eq(x => x.SignerIdentity.Issuer, query.Scope); + } + + if (query.CreatedAfter is { } createdAfter) + { + filter &= filterBuilder.Gte(x => x.CreatedAt, createdAfter.UtcDateTime); + } + + if (query.CreatedBefore is { } createdBefore) + { + filter &= filterBuilder.Lte(x => x.CreatedAt, createdBefore.UtcDateTime); + } + + if (!string.IsNullOrWhiteSpace(query.ContinuationToken)) + { + if (!AttestorEntryContinuationToken.TryParse(query.ContinuationToken, out var cursor)) + { + throw new FormatException("Invalid continuation token."); + } + + var cursorInstant = cursor.CreatedAt.UtcDateTime; + var continuationFilter = filterBuilder.Or( + filterBuilder.Lt(x => x.CreatedAt, cursorInstant), + filterBuilder.And( + filterBuilder.Eq(x => x.CreatedAt, cursorInstant), + filterBuilder.Gt(x => x.Id, cursor.RekorUuid))); + + filter &= continuationFilter; + } + + var sort = Builders.Sort + .Descending(x => x.CreatedAt) + .Ascending(x => x.Id); + + var documents = await _entries.Find(filter) + .Sort(sort) + .Limit(pageSize + 1) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + string? 
continuation = null; + if (documents.Count > pageSize) + { + var cursorDocument = documents[pageSize]; + var nextCreatedAt = DateTime.SpecifyKind(cursorDocument.CreatedAt, DateTimeKind.Utc); + continuation = AttestorEntryContinuationToken.Encode(new DateTimeOffset(nextCreatedAt), cursorDocument.Id); + + documents.RemoveRange(pageSize, documents.Count - pageSize); + } + + var items = documents.ConvertAll(static doc => doc.ToDomain()); + + return new AttestorEntryQueryResult + { + Items = items, + ContinuationToken = continuation + }; + } + + private void EnsureIndexes() + { + var keys = Builders.IndexKeys; + + var models = new[] + { + new CreateIndexModel( + keys.Ascending(x => x.BundleSha256), + new CreateIndexOptions { Name = "bundle_sha_unique", Unique = true }), + new CreateIndexModel( + keys.Descending(x => x.CreatedAt).Ascending(x => x.Id), + new CreateIndexOptions { Name = "created_at_uuid" }), + new CreateIndexModel( + keys.Ascending(x => x.Artifact.Sha256), + new CreateIndexOptions { Name = "artifact_sha" }), + new CreateIndexModel( + keys.Ascending(x => x.Artifact.ImageDigest), + new CreateIndexOptions { Name = "artifact_image_digest" }), + new CreateIndexModel( + keys.Ascending(x => x.Artifact.SubjectUri), + new CreateIndexOptions { Name = "artifact_subject_uri" }), + new CreateIndexModel( + keys.Ascending(x => x.SignerIdentity.Issuer) + .Ascending(x => x.Artifact.Kind) + .Descending(x => x.CreatedAt) + .Ascending(x => x.Id), + new CreateIndexOptions { Name = "scope_kind_created_at" }), + new CreateIndexModel( + keys.Ascending(x => x.SignerIdentity.SubjectAlternativeName), + new CreateIndexOptions { Name = "issuer_san" }) + }; + + _entries.Indexes.CreateMany(models); + } + + [BsonIgnoreExtraElements] + internal sealed class AttestorEntryDocument + { + [BsonId] + public string Id { get; set; } = string.Empty; + + [BsonElement("artifact")] + public ArtifactDocument Artifact { get; set; } = new(); + + [BsonElement("bundleSha256")] + public string 
BundleSha256 { get; set; } = string.Empty; + + [BsonElement("index")] + public long? Index { get; set; } + + [BsonElement("proof")] + public ProofDocument? Proof { get; set; } + + [BsonElement("witness")] + public WitnessDocument? Witness { get; set; } + + [BsonElement("log")] + public LogDocument Log { get; set; } = new(); + + [BsonElement("createdAt")] + [BsonDateTimeOptions(Kind = DateTimeKind.Utc)] + public DateTime CreatedAt { get; set; } + + [BsonElement("status")] + public string Status { get; set; } = "pending"; + + [BsonElement("signer")] + public SignerIdentityDocument SignerIdentity { get; set; } = new(); + + [BsonElement("mirror")] + public MirrorDocument? Mirror { get; set; } + + public static AttestorEntryDocument FromDomain(AttestorEntry entry) + { + ArgumentNullException.ThrowIfNull(entry); + + return new AttestorEntryDocument + { + Id = entry.RekorUuid, + Artifact = ArtifactDocument.FromDomain(entry.Artifact), + BundleSha256 = entry.BundleSha256, + Index = entry.Index, + Proof = ProofDocument.FromDomain(entry.Proof), + Witness = WitnessDocument.FromDomain(entry.Witness), + Log = LogDocument.FromDomain(entry.Log), + CreatedAt = entry.CreatedAt.UtcDateTime, + Status = entry.Status, + SignerIdentity = SignerIdentityDocument.FromDomain(entry.SignerIdentity), + Mirror = MirrorDocument.FromDomain(entry.Mirror) + }; + } + + public AttestorEntry ToDomain() + { + var createdAtUtc = DateTime.SpecifyKind(CreatedAt, DateTimeKind.Utc); + + return new AttestorEntry + { + RekorUuid = Id, + Artifact = Artifact.ToDomain(), + BundleSha256 = BundleSha256, + Index = Index, + Proof = Proof?.ToDomain(), + Witness = Witness?.ToDomain(), + Log = Log.ToDomain(), + CreatedAt = new DateTimeOffset(createdAtUtc), + Status = Status, + SignerIdentity = SignerIdentity.ToDomain(), + Mirror = Mirror?.ToDomain() + }; + } + } + + internal sealed class ArtifactDocument + { + [BsonElement("sha256")] + public string Sha256 { get; set; } = string.Empty; + + [BsonElement("kind")] + public 
string Kind { get; set; } = string.Empty; + + [BsonElement("imageDigest")] + public string? ImageDigest { get; set; } + + [BsonElement("subjectUri")] + public string? SubjectUri { get; set; } + + public static ArtifactDocument FromDomain(AttestorEntry.ArtifactDescriptor artifact) + { + ArgumentNullException.ThrowIfNull(artifact); + + return new ArtifactDocument + { + Sha256 = artifact.Sha256, + Kind = artifact.Kind, + ImageDigest = artifact.ImageDigest, + SubjectUri = artifact.SubjectUri + }; + } + + public AttestorEntry.ArtifactDescriptor ToDomain() + { + return new AttestorEntry.ArtifactDescriptor + { + Sha256 = Sha256, + Kind = Kind, + ImageDigest = ImageDigest, + SubjectUri = SubjectUri + }; + } + } + + internal sealed class ProofDocument + { + [BsonElement("checkpoint")] + public CheckpointDocument? Checkpoint { get; set; } + + [BsonElement("inclusion")] + public InclusionDocument? Inclusion { get; set; } + + public static ProofDocument? FromDomain(AttestorEntry.ProofDescriptor? proof) + { + if (proof is null) + { + return null; + } + + return new ProofDocument + { + Checkpoint = CheckpointDocument.FromDomain(proof.Checkpoint), + Inclusion = InclusionDocument.FromDomain(proof.Inclusion) + }; + } + + public AttestorEntry.ProofDescriptor ToDomain() + { + return new AttestorEntry.ProofDescriptor + { + Checkpoint = Checkpoint?.ToDomain(), + Inclusion = Inclusion?.ToDomain() + }; + } + } + + internal sealed class WitnessDocument + { + [BsonElement("aggregator")] + public string? Aggregator { get; set; } + + [BsonElement("status")] + public string Status { get; set; } = "unknown"; + + [BsonElement("rootHash")] + public string? RootHash { get; set; } + + [BsonElement("retrievedAt")] + [BsonDateTimeOptions(Kind = DateTimeKind.Utc)] + public DateTime RetrievedAt { get; set; } + + [BsonElement("statement")] + public string? Statement { get; set; } + + [BsonElement("signature")] + public string? Signature { get; set; } + + [BsonElement("keyId")] + public string? 
KeyId { get; set; } + + [BsonElement("error")] + public string? Error { get; set; } + + public static WitnessDocument? FromDomain(AttestorEntry.WitnessDescriptor? witness) + { + if (witness is null) + { + return null; + } + + return new WitnessDocument + { + Aggregator = witness.Aggregator, + Status = witness.Status, + RootHash = witness.RootHash, + RetrievedAt = witness.RetrievedAt.UtcDateTime, + Statement = witness.Statement, + Signature = witness.Signature, + KeyId = witness.KeyId, + Error = witness.Error + }; + } + + public AttestorEntry.WitnessDescriptor ToDomain() + { + return new AttestorEntry.WitnessDescriptor + { + Aggregator = Aggregator ?? string.Empty, + Status = string.IsNullOrWhiteSpace(Status) ? "unknown" : Status, + RootHash = RootHash, + RetrievedAt = new DateTimeOffset(DateTime.SpecifyKind(RetrievedAt, DateTimeKind.Utc)), + Statement = Statement, + Signature = Signature, + KeyId = KeyId, + Error = Error + }; + } + } + + internal sealed class CheckpointDocument + { + [BsonElement("origin")] + public string? Origin { get; set; } + + [BsonElement("size")] + public long Size { get; set; } + + [BsonElement("rootHash")] + public string? RootHash { get; set; } + + [BsonElement("timestamp")] + [BsonDateTimeOptions(Kind = DateTimeKind.Utc)] + public DateTime? Timestamp { get; set; } + + public static CheckpointDocument? FromDomain(AttestorEntry.CheckpointDescriptor? checkpoint) + { + if (checkpoint is null) + { + return null; + } + + return new CheckpointDocument + { + Origin = checkpoint.Origin, + Size = checkpoint.Size, + RootHash = checkpoint.RootHash, + Timestamp = checkpoint.Timestamp?.UtcDateTime + }; + } + + public AttestorEntry.CheckpointDescriptor ToDomain() + { + return new AttestorEntry.CheckpointDescriptor + { + Origin = Origin, + Size = Size, + RootHash = RootHash, + Timestamp = Timestamp is null ? 
null : new DateTimeOffset(DateTime.SpecifyKind(Timestamp.Value, DateTimeKind.Utc)) + }; + } + } + + internal sealed class InclusionDocument + { + [BsonElement("leafHash")] + public string? LeafHash { get; set; } + + [BsonElement("path")] + public IReadOnlyList Path { get; set; } = Array.Empty(); + + public static InclusionDocument? FromDomain(AttestorEntry.InclusionDescriptor? inclusion) + { + if (inclusion is null) + { + return null; + } + + return new InclusionDocument + { + LeafHash = inclusion.LeafHash, + Path = inclusion.Path + }; + } + + public AttestorEntry.InclusionDescriptor ToDomain() + { + return new AttestorEntry.InclusionDescriptor + { + LeafHash = LeafHash, + Path = Path + }; + } + } + + internal sealed class LogDocument + { + [BsonElement("backend")] + public string Backend { get; set; } = "primary"; + + [BsonElement("url")] + public string Url { get; set; } = string.Empty; + + [BsonElement("logId")] + public string? LogId { get; set; } + + public static LogDocument FromDomain(AttestorEntry.LogDescriptor log) + { + ArgumentNullException.ThrowIfNull(log); + + return new LogDocument + { + Backend = log.Backend, + Url = log.Url, + LogId = log.LogId + }; + } + + public AttestorEntry.LogDescriptor ToDomain() + { + return new AttestorEntry.LogDescriptor + { + Backend = Backend, + Url = Url, + LogId = LogId + }; + } + } + + internal sealed class SignerIdentityDocument + { + [BsonElement("mode")] + public string Mode { get; set; } = string.Empty; + + [BsonElement("issuer")] + public string? Issuer { get; set; } + + [BsonElement("san")] + public string? SubjectAlternativeName { get; set; } + + [BsonElement("kid")] + public string? 
KeyId { get; set; } + + public static SignerIdentityDocument FromDomain(AttestorEntry.SignerIdentityDescriptor signer) + { + ArgumentNullException.ThrowIfNull(signer); + + return new SignerIdentityDocument + { + Mode = signer.Mode, + Issuer = signer.Issuer, + SubjectAlternativeName = signer.SubjectAlternativeName, + KeyId = signer.KeyId + }; + } + + public AttestorEntry.SignerIdentityDescriptor ToDomain() + { + return new AttestorEntry.SignerIdentityDescriptor + { + Mode = Mode, + Issuer = Issuer, + SubjectAlternativeName = SubjectAlternativeName, + KeyId = KeyId + }; + } + } + + internal sealed class MirrorDocument + { + [BsonElement("backend")] + public string Backend { get; set; } = string.Empty; + + [BsonElement("url")] + public string Url { get; set; } = string.Empty; + + [BsonElement("uuid")] + public string? Uuid { get; set; } + + [BsonElement("index")] + public long? Index { get; set; } + + [BsonElement("status")] + public string Status { get; set; } = "pending"; + + [BsonElement("proof")] + public ProofDocument? Proof { get; set; } + + [BsonElement("witness")] + public WitnessDocument? Witness { get; set; } + + [BsonElement("logId")] + public string? LogId { get; set; } + + [BsonElement("error")] + public string? Error { get; set; } + + public static MirrorDocument? FromDomain(AttestorEntry.LogReplicaDescriptor? 
mirror) + { + if (mirror is null) + { + return null; + } + + return new MirrorDocument + { + Backend = mirror.Backend, + Url = mirror.Url, + Uuid = mirror.Uuid, + Index = mirror.Index, + Status = mirror.Status, + Proof = ProofDocument.FromDomain(mirror.Proof), + Witness = WitnessDocument.FromDomain(mirror.Witness), + LogId = mirror.LogId, + Error = mirror.Error + }; + } + + public AttestorEntry.LogReplicaDescriptor ToDomain() + { + return new AttestorEntry.LogReplicaDescriptor + { + Backend = Backend, + Url = Url, + Uuid = Uuid, + Index = Index, + Status = Status, + Proof = Proof?.ToDomain(), + Witness = Witness?.ToDomain(), + LogId = LogId, + Error = Error + }; + } + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/NullAttestorArchiveStore.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/NullAttestorArchiveStore.cs index f2643082..3181c5bf 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/NullAttestorArchiveStore.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/NullAttestorArchiveStore.cs @@ -14,9 +14,15 @@ internal sealed class NullAttestorArchiveStore : IAttestorArchiveStore _logger = logger; } - public Task ArchiveBundleAsync(AttestorArchiveBundle bundle, CancellationToken cancellationToken = default) - { - _logger.LogDebug("Archive disabled; skipping bundle {BundleSha}", bundle.BundleSha256); - return Task.CompletedTask; - } -} + public Task ArchiveBundleAsync(AttestorArchiveBundle bundle, CancellationToken cancellationToken = default) + { + _logger.LogDebug("Archive disabled; skipping bundle {BundleSha}", bundle.BundleSha256); + return Task.CompletedTask; + } + + public Task GetBundleAsync(string bundleSha256, string rekorUuid, CancellationToken cancellationToken = default) + { + _logger.LogDebug("Archive disabled; bundle {BundleSha} ({RekorUuid}) cannot be retrieved", bundleSha256, rekorUuid); + return 
Task.FromResult(null); + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/S3AttestorArchiveStore.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/S3AttestorArchiveStore.cs index c63cba8d..a66939f3 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/S3AttestorArchiveStore.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/S3AttestorArchiveStore.cs @@ -1,72 +1,182 @@ -using System; -using System.IO; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Amazon.S3; -using Amazon.S3.Model; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Attestor.Core.Options; -using StellaOps.Attestor.Core.Storage; - -namespace StellaOps.Attestor.Infrastructure.Storage; - -internal sealed class S3AttestorArchiveStore : IAttestorArchiveStore, IDisposable -{ - private readonly IAmazonS3 _s3; - private readonly AttestorOptions.S3Options _options; - private readonly ILogger _logger; - private bool _disposed; - - public S3AttestorArchiveStore(IAmazonS3 s3, IOptions options, ILogger logger) - { - _s3 = s3; - _options = options.Value.S3; - _logger = logger; - } - - public async Task ArchiveBundleAsync(AttestorArchiveBundle bundle, CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(_options.Bucket)) - { - _logger.LogWarning("S3 archive bucket is not configured; skipping archive for bundle {Bundle}", bundle.BundleSha256); - return; - } - - var prefix = _options.Prefix ?? 
"attest/"; - - await PutObjectAsync(prefix + "dsse/" + bundle.BundleSha256 + ".json", bundle.CanonicalBundleJson, cancellationToken).ConfigureAwait(false); - if (bundle.ProofJson.Length > 0) - { - await PutObjectAsync(prefix + "proof/" + bundle.RekorUuid + ".json", bundle.ProofJson, cancellationToken).ConfigureAwait(false); - } - - var metadataObject = JsonSerializer.SerializeToUtf8Bytes(bundle.Metadata); - await PutObjectAsync(prefix + "meta/" + bundle.RekorUuid + ".json", metadataObject, cancellationToken).ConfigureAwait(false); - } - - private Task PutObjectAsync(string key, byte[] content, CancellationToken cancellationToken) - { - using var stream = new MemoryStream(content); - var request = new PutObjectRequest - { - BucketName = _options.Bucket, - Key = key, - InputStream = stream, - AutoCloseStream = false - }; - return _s3.PutObjectAsync(request, cancellationToken); - } - - public void Dispose() - { - if (_disposed) - { - return; - } - - _s3.Dispose(); - _disposed = true; - } -} +using System; +using System.Collections.Generic; +using System.IO; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Amazon.S3; +using Amazon.S3.Model; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.Infrastructure.Storage; + +internal sealed class S3AttestorArchiveStore : IAttestorArchiveStore, IDisposable +{ + private readonly IAmazonS3 _s3; + private readonly AttestorOptions.S3Options _options; + private readonly ILogger _logger; + private bool _disposed; + + public S3AttestorArchiveStore(IAmazonS3 s3, IOptions options, ILogger logger) + { + _s3 = s3; + _options = options.Value.S3; + _logger = logger; + } + + public async Task ArchiveBundleAsync(AttestorArchiveBundle bundle, CancellationToken cancellationToken = default) + { + EnsureNotDisposed(); + + if (string.IsNullOrWhiteSpace(_options.Bucket)) + { + 
_logger.LogWarning("S3 archive bucket is not configured; skipping archive for bundle {Bundle}", bundle.BundleSha256); + return; + } + + var prefix = _options.Prefix ?? "attest/"; + + await PutObjectAsync(prefix + "dsse/" + bundle.BundleSha256 + ".json", bundle.CanonicalBundleJson, cancellationToken).ConfigureAwait(false); + if (bundle.ProofJson.Length > 0) + { + await PutObjectAsync(prefix + "proof/" + bundle.RekorUuid + ".json", bundle.ProofJson, cancellationToken).ConfigureAwait(false); + await PutObjectAsync(prefix + "proof/" + bundle.BundleSha256 + ".json", bundle.ProofJson, cancellationToken).ConfigureAwait(false); + } + + var metadata = bundle.Metadata is { Count: > 0 } + ? new Dictionary(bundle.Metadata) + : new Dictionary(); + + metadata["artifact.sha256"] = bundle.ArtifactSha256; + metadata["bundle.sha256"] = bundle.BundleSha256; + metadata["rekor.uuid"] = bundle.RekorUuid; + + var metadataObject = JsonSerializer.SerializeToUtf8Bytes(metadata); + await PutObjectAsync(prefix + "meta/" + bundle.RekorUuid + ".json", metadataObject, cancellationToken).ConfigureAwait(false); + await PutObjectAsync(prefix + "meta/" + bundle.BundleSha256 + ".json", metadataObject, cancellationToken).ConfigureAwait(false); + } + + public async Task GetBundleAsync(string bundleSha256, string rekorUuid, CancellationToken cancellationToken = default) + { + EnsureNotDisposed(); + + if (string.IsNullOrWhiteSpace(_options.Bucket)) + { + _logger.LogWarning("S3 archive bucket is not configured; cannot retrieve bundle {Bundle}", bundleSha256); + return null; + } + + var prefix = _options.Prefix ?? "attest/"; + var canonical = await TryGetObjectAsync(prefix + "dsse/" + bundleSha256 + ".json", cancellationToken).ConfigureAwait(false); + if (canonical is null || canonical.Length == 0) + { + return null; + } + + var proof = + await TryGetObjectAsync(prefix + "proof/" + bundleSha256 + ".json", cancellationToken).ConfigureAwait(false) + ?? 
await TryGetObjectAsync(prefix + "proof/" + rekorUuid + ".json", cancellationToken).ConfigureAwait(false) + ?? Array.Empty(); + + var metadataBytes = + await TryGetObjectAsync(prefix + "meta/" + bundleSha256 + ".json", cancellationToken).ConfigureAwait(false) + ?? await TryGetObjectAsync(prefix + "meta/" + rekorUuid + ".json", cancellationToken).ConfigureAwait(false); + + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase); + if (metadataBytes is { Length: > 0 }) + { + try + { + var parsed = JsonSerializer.Deserialize>(metadataBytes); + if (parsed is not null) + { + foreach (var pair in parsed) + { + if (!string.IsNullOrWhiteSpace(pair.Key)) + { + metadata[pair.Key] = pair.Value; + } + } + } + } + catch (JsonException ex) + { + _logger.LogWarning(ex, "Failed to deserialize attestor archive metadata for {Bundle}", bundleSha256); + } + } + + metadata["rekor.uuid"] = rekorUuid; + metadata["bundle.sha256"] = bundleSha256; + var artifactSha = metadata.TryGetValue("artifact.sha256", out var artifact) ? 
artifact : string.Empty; + if (!string.IsNullOrWhiteSpace(artifactSha)) + { + metadata["artifact.sha256"] = artifactSha; + } + + return new AttestorArchiveBundle + { + RekorUuid = rekorUuid, + ArtifactSha256 = artifactSha, + BundleSha256 = bundleSha256, + CanonicalBundleJson = canonical, + ProofJson = proof, + Metadata = metadata + }; + } + + private async Task PutObjectAsync(string key, byte[] content, CancellationToken cancellationToken) + { + EnsureNotDisposed(); + + using var stream = new MemoryStream(content, writable: false); + var request = new PutObjectRequest + { + BucketName = _options.Bucket, + Key = key, + InputStream = stream, + AutoCloseStream = false + }; + + await _s3.PutObjectAsync(request, cancellationToken).ConfigureAwait(false); + } + + private async Task TryGetObjectAsync(string key, CancellationToken cancellationToken) + { + EnsureNotDisposed(); + + try + { + using var response = await _s3.GetObjectAsync(_options.Bucket, key, cancellationToken).ConfigureAwait(false); + await using var memory = new MemoryStream(); + await response.ResponseStream.CopyToAsync(memory, cancellationToken).ConfigureAwait(false); + return memory.ToArray(); + } + catch (AmazonS3Exception ex) when (ex.StatusCode == System.Net.HttpStatusCode.NotFound) + { + _logger.LogDebug("S3 archive object {Key} not found", key); + return null; + } + } + private void EnsureNotDisposed() + { + if (_disposed) + { + throw new ObjectDisposedException(nameof(S3AttestorArchiveStore)); + } + } + + + public void Dispose() + { + if (_disposed) + { + return; + } + + _s3.Dispose(); + _disposed = true; + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/AttestorSubmissionService.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/AttestorSubmissionService.cs index 36c5aa5a..aa14688e 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/AttestorSubmissionService.cs +++ 
b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/AttestorSubmissionService.cs @@ -1,624 +1,765 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Attestor.Core.Audit; -using StellaOps.Attestor.Core.Options; -using StellaOps.Attestor.Core.Observability; -using StellaOps.Attestor.Core.Rekor; -using StellaOps.Attestor.Core.Storage; -using StellaOps.Attestor.Core.Submission; - -namespace StellaOps.Attestor.Infrastructure.Submission; - -internal sealed class AttestorSubmissionService : IAttestorSubmissionService -{ - private static readonly TimeSpan DedupeTtl = TimeSpan.FromHours(48); - - private readonly AttestorSubmissionValidator _validator; - private readonly IAttestorEntryRepository _repository; - private readonly IAttestorDedupeStore _dedupeStore; - private readonly IRekorClient _rekorClient; - private readonly IAttestorArchiveStore _archiveStore; - private readonly IAttestorAuditSink _auditSink; - private readonly ILogger _logger; - private readonly TimeProvider _timeProvider; - private readonly AttestorOptions _options; - private readonly AttestorMetrics _metrics; - - public AttestorSubmissionService( - AttestorSubmissionValidator validator, - IAttestorEntryRepository repository, - IAttestorDedupeStore dedupeStore, - IRekorClient rekorClient, - IAttestorArchiveStore archiveStore, - IAttestorAuditSink auditSink, - IOptions options, - ILogger logger, - TimeProvider timeProvider, - AttestorMetrics metrics) - { - _validator = validator; - _repository = repository; - _dedupeStore = dedupeStore; - _rekorClient = rekorClient; - _archiveStore = archiveStore; - _auditSink = auditSink; - _logger = logger; - _timeProvider = timeProvider; - _options = options.Value; - _metrics = metrics; - } - - public async Task SubmitAsync( - 
AttestorSubmissionRequest request, - SubmissionContext context, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - ArgumentNullException.ThrowIfNull(context); - - var validation = await _validator.ValidateAsync(request, cancellationToken).ConfigureAwait(false); - var canonicalBundle = validation.CanonicalBundle; - - var preference = NormalizeLogPreference(request.Meta.LogPreference); - var requiresPrimary = preference is "primary" or "both"; - var requiresMirror = preference is "mirror" or "both"; - - if (!requiresPrimary && !requiresMirror) - { - requiresPrimary = true; - } - - if (requiresMirror && !_options.Rekor.Mirror.Enabled) - { - throw new AttestorValidationException("mirror_disabled", "Mirror log requested but not configured."); - } - - var existing = await TryGetExistingEntryAsync(request.Meta.BundleSha256, cancellationToken).ConfigureAwait(false); - if (existing is not null) - { - _metrics.DedupeHitsTotal.Add(1, new KeyValuePair("result", "hit")); - var updated = await EnsureBackendsAsync(existing, request, context, requiresPrimary, requiresMirror, cancellationToken).ConfigureAwait(false); - return ToResult(updated); - } - - _metrics.DedupeHitsTotal.Add(1, new KeyValuePair("result", "miss")); - - SubmissionOutcome? canonicalOutcome = null; - SubmissionOutcome? 
mirrorOutcome = null; - - if (requiresPrimary) - { - canonicalOutcome = await SubmitToBackendAsync(request, "primary", _options.Rekor.Primary, cancellationToken).ConfigureAwait(false); - } - - if (requiresMirror) - { - try - { - var mirror = await SubmitToBackendAsync(request, "mirror", _options.Rekor.Mirror, cancellationToken).ConfigureAwait(false); - if (canonicalOutcome is null) - { - canonicalOutcome = mirror; - } - else - { - mirrorOutcome = mirror; - } - } - catch (Exception ex) - { - if (canonicalOutcome is null) - { - throw; - } - - _metrics.ErrorTotal.Add(1, new KeyValuePair("type", "submit_mirror")); - _logger.LogWarning(ex, "Mirror submission failed for bundle {BundleSha}", request.Meta.BundleSha256); - mirrorOutcome = SubmissionOutcome.Failure("mirror", _options.Rekor.Mirror.Url, ex, TimeSpan.Zero); - RecordSubmissionMetrics(mirrorOutcome); - } - } - - if (canonicalOutcome is null) - { - throw new InvalidOperationException("No Rekor submission outcome was produced."); - } - - var entry = CreateEntry(request, context, canonicalOutcome, mirrorOutcome); - await _repository.SaveAsync(entry, cancellationToken).ConfigureAwait(false); - await _dedupeStore.SetAsync(request.Meta.BundleSha256, entry.RekorUuid, DedupeTtl, cancellationToken).ConfigureAwait(false); - - if (request.Meta.Archive) - { - await ArchiveAsync(entry, canonicalBundle, canonicalOutcome.Proof, cancellationToken).ConfigureAwait(false); - } - - await WriteAuditAsync(request, context, entry, canonicalOutcome, cancellationToken).ConfigureAwait(false); - if (mirrorOutcome is not null) - { - await WriteAuditAsync(request, context, entry, mirrorOutcome, cancellationToken).ConfigureAwait(false); - } - - return ToResult(entry); - } - - private static AttestorSubmissionResult ToResult(AttestorEntry entry) - { - var result = new AttestorSubmissionResult - { - Uuid = entry.RekorUuid, - Index = entry.Index, - LogUrl = entry.Log.Url, - Status = entry.Status, - Proof = ToResultProof(entry.Proof) - }; - - if 
(entry.Mirror is not null) - { - result.Mirror = new AttestorSubmissionResult.MirrorLog - { - Uuid = entry.Mirror.Uuid, - Index = entry.Mirror.Index, - LogUrl = entry.Mirror.Url, - Status = entry.Mirror.Status, - Proof = ToResultProof(entry.Mirror.Proof), - Error = entry.Mirror.Error - }; - } - - return result; - } - - private AttestorEntry CreateEntry( - AttestorSubmissionRequest request, - SubmissionContext context, - SubmissionOutcome canonicalOutcome, - SubmissionOutcome? mirrorOutcome) - { - if (canonicalOutcome.Submission is null) - { - throw new InvalidOperationException("Canonical submission outcome must include a Rekor response."); - } - - var submission = canonicalOutcome.Submission; - var now = _timeProvider.GetUtcNow(); - - return new AttestorEntry - { - RekorUuid = submission.Uuid, - Artifact = new AttestorEntry.ArtifactDescriptor - { - Sha256 = request.Meta.Artifact.Sha256, - Kind = request.Meta.Artifact.Kind, - ImageDigest = request.Meta.Artifact.ImageDigest, - SubjectUri = request.Meta.Artifact.SubjectUri - }, - BundleSha256 = request.Meta.BundleSha256, - Index = submission.Index, - Proof = ConvertProof(canonicalOutcome.Proof), - Log = new AttestorEntry.LogDescriptor - { - Backend = canonicalOutcome.Backend, - Url = submission.LogUrl ?? canonicalOutcome.Url, - LogId = null - }, - CreatedAt = now, - Status = submission.Status ?? "included", - SignerIdentity = new AttestorEntry.SignerIdentityDescriptor - { - Mode = request.Bundle.Mode, - Issuer = context.CallerAudience, - SubjectAlternativeName = context.CallerSubject, - KeyId = context.CallerClientId - }, - Mirror = mirrorOutcome is null ? null : CreateMirrorDescriptor(mirrorOutcome) - }; - } - - private static string NormalizeLogPreference(string? 
value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return "primary"; - } - - var normalized = value.Trim().ToLowerInvariant(); - return normalized switch - { - "primary" => "primary", - "mirror" => "mirror", - "both" => "both", - _ => "primary" - }; - } - - private async Task TryGetExistingEntryAsync(string bundleSha256, CancellationToken cancellationToken) - { - var dedupeUuid = await _dedupeStore.TryGetExistingAsync(bundleSha256, cancellationToken).ConfigureAwait(false); - if (string.IsNullOrWhiteSpace(dedupeUuid)) - { - return null; - } - - return await _repository.GetByUuidAsync(dedupeUuid, cancellationToken).ConfigureAwait(false) - ?? await _repository.GetByBundleShaAsync(bundleSha256, cancellationToken).ConfigureAwait(false); - } - - private async Task EnsureBackendsAsync( - AttestorEntry existing, - AttestorSubmissionRequest request, - SubmissionContext context, - bool requiresPrimary, - bool requiresMirror, - CancellationToken cancellationToken) - { - var entry = existing; - var updated = false; - - if (requiresPrimary && !IsPrimary(entry)) - { - var outcome = await SubmitToBackendAsync(request, "primary", _options.Rekor.Primary, cancellationToken).ConfigureAwait(false); - entry = PromoteToPrimary(entry, outcome); - await _repository.SaveAsync(entry, cancellationToken).ConfigureAwait(false); - await _dedupeStore.SetAsync(request.Meta.BundleSha256, entry.RekorUuid, DedupeTtl, cancellationToken).ConfigureAwait(false); - await WriteAuditAsync(request, context, entry, outcome, cancellationToken).ConfigureAwait(false); - updated = true; - } - - if (requiresMirror) - { - var mirrorSatisfied = entry.Mirror is not null - && entry.Mirror.Error is null - && string.Equals(entry.Mirror.Status, "included", StringComparison.OrdinalIgnoreCase) - && !string.IsNullOrEmpty(entry.Mirror.Uuid); - - if (!mirrorSatisfied) - { - try - { - var mirrorOutcome = await SubmitToBackendAsync(request, "mirror", _options.Rekor.Mirror, cancellationToken).ConfigureAwait(false); - 
entry = WithMirror(entry, mirrorOutcome); - await _repository.SaveAsync(entry, cancellationToken).ConfigureAwait(false); - await WriteAuditAsync(request, context, entry, mirrorOutcome, cancellationToken).ConfigureAwait(false); - updated = true; - } - catch (Exception ex) - { - _metrics.ErrorTotal.Add(1, new KeyValuePair("type", "submit_mirror")); - _logger.LogWarning(ex, "Mirror submission failed for deduplicated bundle {BundleSha}", request.Meta.BundleSha256); - var failure = SubmissionOutcome.Failure("mirror", _options.Rekor.Mirror.Url, ex, TimeSpan.Zero); - RecordSubmissionMetrics(failure); - entry = WithMirror(entry, failure); - await _repository.SaveAsync(entry, cancellationToken).ConfigureAwait(false); - await WriteAuditAsync(request, context, entry, failure, cancellationToken).ConfigureAwait(false); - updated = true; - } - } - } - - if (!updated) - { - _metrics.SubmitTotal.Add(1, - new KeyValuePair("result", "dedupe"), - new KeyValuePair("backend", "cache")); - } - - return entry; - } - - private static bool IsPrimary(AttestorEntry entry) => - string.Equals(entry.Log.Backend, "primary", StringComparison.OrdinalIgnoreCase); - - private async Task SubmitToBackendAsync( - AttestorSubmissionRequest request, - string backendName, - AttestorOptions.RekorBackendOptions backendOptions, - CancellationToken cancellationToken) - { - var backend = BuildBackend(backendName, backendOptions); - var stopwatch = Stopwatch.StartNew(); - try - { - var submission = await _rekorClient.SubmitAsync(request, backend, cancellationToken).ConfigureAwait(false); - stopwatch.Stop(); - - var proof = submission.Proof; - if (proof is null && string.Equals(submission.Status, "included", StringComparison.OrdinalIgnoreCase)) - { - try - { - proof = await _rekorClient.GetProofAsync(submission.Uuid, backend, cancellationToken).ConfigureAwait(false); - _metrics.ProofFetchTotal.Add(1, - new KeyValuePair("result", proof is null ? 
"missing" : "ok")); - } - catch (Exception ex) - { - _metrics.ErrorTotal.Add(1, new KeyValuePair("type", "proof_fetch")); - _logger.LogWarning(ex, "Proof fetch failed for {Uuid} on backend {Backend}", submission.Uuid, backendName); - } - } - - var outcome = SubmissionOutcome.Success(backendName, backend.Url, submission, proof, stopwatch.Elapsed); - RecordSubmissionMetrics(outcome); - return outcome; - } - catch (Exception ex) - { - stopwatch.Stop(); - _metrics.ErrorTotal.Add(1, new KeyValuePair("type", $"submit_{backendName}")); - _logger.LogError(ex, "Failed to submit bundle {BundleSha} to Rekor backend {Backend}", request.Meta.BundleSha256, backendName); - throw; - } - } - - private void RecordSubmissionMetrics(SubmissionOutcome outcome) - { - var result = outcome.IsSuccess - ? outcome.Submission!.Status ?? "unknown" - : "failed"; - - _metrics.SubmitTotal.Add(1, - new KeyValuePair("result", result), - new KeyValuePair("backend", outcome.Backend)); - - if (outcome.Latency > TimeSpan.Zero) - { - _metrics.SubmitLatency.Record(outcome.Latency.TotalSeconds, - new KeyValuePair("backend", outcome.Backend)); - } - } - - private async Task ArchiveAsync( - AttestorEntry entry, - byte[] canonicalBundle, - RekorProofResponse? proof, - CancellationToken cancellationToken) - { - var metadata = new Dictionary - { - ["logUrl"] = entry.Log.Url, - ["status"] = entry.Status - }; - - if (entry.Mirror is not null) - { - metadata["mirror.backend"] = entry.Mirror.Backend; - metadata["mirror.uuid"] = entry.Mirror.Uuid ?? string.Empty; - metadata["mirror.status"] = entry.Mirror.Status; - } - - var archiveBundle = new AttestorArchiveBundle - { - RekorUuid = entry.RekorUuid, - ArtifactSha256 = entry.Artifact.Sha256, - BundleSha256 = entry.BundleSha256, - CanonicalBundleJson = canonicalBundle, - ProofJson = proof is null ? 
Array.Empty() : JsonSerializer.SerializeToUtf8Bytes(proof, JsonSerializerOptions.Default), - Metadata = metadata - }; - - try - { - await _archiveStore.ArchiveBundleAsync(archiveBundle, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to archive bundle {BundleSha}", entry.BundleSha256); - _metrics.ErrorTotal.Add(1, new KeyValuePair("type", "archive")); - } - } - - private Task WriteAuditAsync( - AttestorSubmissionRequest request, - SubmissionContext context, - AttestorEntry entry, - SubmissionOutcome outcome, - CancellationToken cancellationToken) - { - var metadata = new Dictionary(); - if (!outcome.IsSuccess && outcome.Error is not null) - { - metadata["error"] = outcome.Error.Message; - } - - var record = new AttestorAuditRecord - { - Action = "submit", - Result = outcome.IsSuccess - ? outcome.Submission!.Status ?? "included" - : "failed", - RekorUuid = outcome.IsSuccess - ? outcome.Submission!.Uuid - : string.Equals(outcome.Backend, "primary", StringComparison.OrdinalIgnoreCase) - ? entry.RekorUuid - : entry.Mirror?.Uuid, - Index = outcome.Submission?.Index, - ArtifactSha256 = request.Meta.Artifact.Sha256, - BundleSha256 = request.Meta.BundleSha256, - Backend = outcome.Backend, - LatencyMs = (long)outcome.Latency.TotalMilliseconds, - Timestamp = _timeProvider.GetUtcNow(), - Caller = new AttestorAuditRecord.CallerDescriptor - { - Subject = context.CallerSubject, - Audience = context.CallerAudience, - ClientId = context.CallerClientId, - MtlsThumbprint = context.MtlsThumbprint, - Tenant = context.CallerTenant - }, - Metadata = metadata - }; - - return _auditSink.WriteAsync(record, cancellationToken); - } - - private static AttestorEntry.ProofDescriptor? ConvertProof(RekorProofResponse? proof) - { - if (proof is null) - { - return null; - } - - return new AttestorEntry.ProofDescriptor - { - Checkpoint = proof.Checkpoint is null ? 
null : new AttestorEntry.CheckpointDescriptor - { - Origin = proof.Checkpoint.Origin, - Size = proof.Checkpoint.Size, - RootHash = proof.Checkpoint.RootHash, - Timestamp = proof.Checkpoint.Timestamp - }, - Inclusion = proof.Inclusion is null ? null : new AttestorEntry.InclusionDescriptor - { - LeafHash = proof.Inclusion.LeafHash, - Path = proof.Inclusion.Path - } - }; - } - - private static AttestorSubmissionResult.RekorProof? ToResultProof(AttestorEntry.ProofDescriptor? proof) - { - if (proof is null) - { - return null; - } - - return new AttestorSubmissionResult.RekorProof - { - Checkpoint = proof.Checkpoint is null ? null : new AttestorSubmissionResult.Checkpoint - { - Origin = proof.Checkpoint.Origin, - Size = proof.Checkpoint.Size, - RootHash = proof.Checkpoint.RootHash, - Timestamp = proof.Checkpoint.Timestamp?.ToString("O") - }, - Inclusion = proof.Inclusion is null ? null : new AttestorSubmissionResult.InclusionProof - { - LeafHash = proof.Inclusion.LeafHash, - Path = proof.Inclusion.Path - } - }; - } - - private static AttestorEntry.LogReplicaDescriptor CreateMirrorDescriptor(SubmissionOutcome outcome) - { - return new AttestorEntry.LogReplicaDescriptor - { - Backend = outcome.Backend, - Url = outcome.IsSuccess - ? outcome.Submission!.LogUrl ?? outcome.Url - : outcome.Url, - Uuid = outcome.Submission?.Uuid, - Index = outcome.Submission?.Index, - Status = outcome.IsSuccess - ? outcome.Submission!.Status ?? "included" - : "failed", - Proof = outcome.IsSuccess ? 
ConvertProof(outcome.Proof) : null, - Error = outcome.Error?.Message - }; - } - - private static AttestorEntry WithMirror(AttestorEntry entry, SubmissionOutcome outcome) - { - return new AttestorEntry - { - RekorUuid = entry.RekorUuid, - Artifact = entry.Artifact, - BundleSha256 = entry.BundleSha256, - Index = entry.Index, - Proof = entry.Proof, - Log = entry.Log, - CreatedAt = entry.CreatedAt, - Status = entry.Status, - SignerIdentity = entry.SignerIdentity, - Mirror = CreateMirrorDescriptor(outcome) - }; - } - - private AttestorEntry PromoteToPrimary(AttestorEntry existing, SubmissionOutcome outcome) - { - if (outcome.Submission is null) - { - throw new InvalidOperationException("Cannot promote to primary without a successful submission."); - } - - var mirrorDescriptor = existing.Mirror; - if (mirrorDescriptor is null && !string.Equals(existing.Log.Backend, outcome.Backend, StringComparison.OrdinalIgnoreCase)) - { - mirrorDescriptor = CreateMirrorDescriptorFromEntry(existing); - } - - return new AttestorEntry - { - RekorUuid = outcome.Submission.Uuid, - Artifact = existing.Artifact, - BundleSha256 = existing.BundleSha256, - Index = outcome.Submission.Index, - Proof = ConvertProof(outcome.Proof), - Log = new AttestorEntry.LogDescriptor - { - Backend = outcome.Backend, - Url = outcome.Submission.LogUrl ?? outcome.Url, - LogId = existing.Log.LogId - }, - CreatedAt = existing.CreatedAt, - Status = outcome.Submission.Status ?? "included", - SignerIdentity = existing.SignerIdentity, - Mirror = mirrorDescriptor - }; - } - - private static AttestorEntry.LogReplicaDescriptor CreateMirrorDescriptorFromEntry(AttestorEntry entry) - { - return new AttestorEntry.LogReplicaDescriptor - { - Backend = entry.Log.Backend, - Url = entry.Log.Url, - Uuid = entry.RekorUuid, - Index = entry.Index, - Status = entry.Status, - Proof = entry.Proof, - LogId = entry.Log.LogId - }; - } - - private sealed record SubmissionOutcome( - string Backend, - string Url, - RekorSubmissionResponse? 
Submission, - RekorProofResponse? Proof, - TimeSpan Latency, - Exception? Error) - { - public bool IsSuccess => Submission is not null && Error is null; - - public static SubmissionOutcome Success(string backend, Uri backendUrl, RekorSubmissionResponse submission, RekorProofResponse? proof, TimeSpan latency) => - new SubmissionOutcome(backend, backendUrl.ToString(), submission, proof, latency, null); - - public static SubmissionOutcome Failure(string backend, string? url, Exception error, TimeSpan latency) => - new SubmissionOutcome(backend, url ?? string.Empty, null, null, latency, error); - } - - private static RekorBackend BuildBackend(string name, AttestorOptions.RekorBackendOptions options) - { - if (string.IsNullOrWhiteSpace(options.Url)) - { - throw new InvalidOperationException($"Rekor backend '{name}' is not configured."); - } - - return new RekorBackend - { - Name = name, - Url = new Uri(options.Url, UriKind.Absolute), - ProofTimeout = TimeSpan.FromMilliseconds(options.ProofTimeoutMs), - PollInterval = TimeSpan.FromMilliseconds(options.PollIntervalMs), - MaxAttempts = options.MaxAttempts - }; - } -} +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Audit; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Observability; +using StellaOps.Attestor.Core.Rekor; +using StellaOps.Attestor.Core.Storage; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Core.Transparency; +using StellaOps.Attestor.Core.Verification; + +namespace StellaOps.Attestor.Infrastructure.Submission; + +internal sealed class AttestorSubmissionService : IAttestorSubmissionService +{ + private static readonly TimeSpan DedupeTtl = TimeSpan.FromHours(48); + + private readonly AttestorSubmissionValidator _validator; + private readonly 
IAttestorEntryRepository _repository; + private readonly IAttestorDedupeStore _dedupeStore; + private readonly IRekorClient _rekorClient; + private readonly ITransparencyWitnessClient _witnessClient; + private readonly IAttestorArchiveStore _archiveStore; + private readonly IAttestorAuditSink _auditSink; + private readonly IAttestorVerificationCache _verificationCache; + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + private readonly AttestorOptions _options; + private readonly AttestorMetrics _metrics; + + public AttestorSubmissionService( + AttestorSubmissionValidator validator, + IAttestorEntryRepository repository, + IAttestorDedupeStore dedupeStore, + IRekorClient rekorClient, + ITransparencyWitnessClient witnessClient, + IAttestorArchiveStore archiveStore, + IAttestorAuditSink auditSink, + IAttestorVerificationCache verificationCache, + IOptions options, + ILogger logger, + TimeProvider timeProvider, + AttestorMetrics metrics) + { + _validator = validator; + _repository = repository; + _dedupeStore = dedupeStore; + _rekorClient = rekorClient; + _witnessClient = witnessClient ?? 
throw new ArgumentNullException(nameof(witnessClient)); + _archiveStore = archiveStore; + _auditSink = auditSink; + _verificationCache = verificationCache; + _logger = logger; + _timeProvider = timeProvider; + _options = options.Value; + _metrics = metrics; + } + + public async Task SubmitAsync( + AttestorSubmissionRequest request, + SubmissionContext context, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(context); + + var validation = await _validator.ValidateAsync(request, cancellationToken).ConfigureAwait(false); + var canonicalBundle = validation.CanonicalBundle; + var cacheSubject = ResolveCacheSubject(request); + + var preference = NormalizeLogPreference(request.Meta.LogPreference); + var requiresPrimary = preference is "primary" or "both"; + var requiresMirror = preference is "mirror" or "both"; + + if (!requiresPrimary && !requiresMirror) + { + requiresPrimary = true; + } + + if (requiresMirror && !_options.Rekor.Mirror.Enabled) + { + throw new AttestorValidationException("mirror_disabled", "Mirror log requested but not configured."); + } + + var existing = await TryGetExistingEntryAsync(request.Meta.BundleSha256, cancellationToken).ConfigureAwait(false); + if (existing is not null) + { + _metrics.DedupeHitsTotal.Add(1, new KeyValuePair("result", "hit")); + var updated = await EnsureBackendsAsync(existing, request, context, requiresPrimary, requiresMirror, cacheSubject, cancellationToken).ConfigureAwait(false); + return ToResult(updated); + } + + _metrics.DedupeHitsTotal.Add(1, new KeyValuePair("result", "miss")); + + SubmissionOutcome? canonicalOutcome = null; + SubmissionOutcome? 
mirrorOutcome = null; + + if (requiresPrimary) + { + canonicalOutcome = await SubmitToBackendAsync(request, "primary", _options.Rekor.Primary, cancellationToken).ConfigureAwait(false); + } + + if (requiresMirror) + { + try + { + var mirror = await SubmitToBackendAsync(request, "mirror", _options.Rekor.Mirror, cancellationToken).ConfigureAwait(false); + if (canonicalOutcome is null) + { + canonicalOutcome = mirror; + } + else + { + mirrorOutcome = mirror; + } + } + catch (Exception ex) + { + if (canonicalOutcome is null) + { + throw; + } + + _metrics.ErrorTotal.Add(1, new KeyValuePair("type", "submit_mirror")); + _logger.LogWarning(ex, "Mirror submission failed for bundle {BundleSha}", request.Meta.BundleSha256); + mirrorOutcome = SubmissionOutcome.Failure("mirror", _options.Rekor.Mirror.Url, ex, TimeSpan.Zero); + RecordSubmissionMetrics(mirrorOutcome); + } + } + + if (canonicalOutcome is null) + { + throw new InvalidOperationException("No Rekor submission outcome was produced."); + } + + var entry = CreateEntry(request, context, canonicalOutcome, mirrorOutcome); + await _repository.SaveAsync(entry, cancellationToken).ConfigureAwait(false); + await InvalidateVerificationCacheAsync(cacheSubject, cancellationToken).ConfigureAwait(false); + await _dedupeStore.SetAsync(request.Meta.BundleSha256, entry.RekorUuid, DedupeTtl, cancellationToken).ConfigureAwait(false); + + if (request.Meta.Archive) + { + await ArchiveAsync(entry, canonicalBundle, canonicalOutcome.Proof, cancellationToken).ConfigureAwait(false); + } + + await WriteAuditAsync(request, context, entry, canonicalOutcome, cancellationToken).ConfigureAwait(false); + if (mirrorOutcome is not null) + { + await WriteAuditAsync(request, context, entry, mirrorOutcome, cancellationToken).ConfigureAwait(false); + } + + return ToResult(entry); + } + + private static AttestorSubmissionResult ToResult(AttestorEntry entry) + { + var result = new AttestorSubmissionResult + { + Uuid = entry.RekorUuid, + Index = entry.Index, + 
LogUrl = entry.Log.Url, + Status = entry.Status, + Proof = ToResultProof(entry.Proof), + Witness = ToResultWitness(entry.Witness) + }; + + if (entry.Mirror is not null) + { + result.Mirror = new AttestorSubmissionResult.MirrorLog + { + Uuid = entry.Mirror.Uuid, + Index = entry.Mirror.Index, + LogUrl = entry.Mirror.Url, + Status = entry.Mirror.Status, + Proof = ToResultProof(entry.Mirror.Proof), + Witness = ToResultWitness(entry.Mirror.Witness), + Error = entry.Mirror.Error + }; + } + + return result; + } + + private AttestorEntry CreateEntry( + AttestorSubmissionRequest request, + SubmissionContext context, + SubmissionOutcome canonicalOutcome, + SubmissionOutcome? mirrorOutcome) + { + if (canonicalOutcome.Submission is null) + { + throw new InvalidOperationException("Canonical submission outcome must include a Rekor response."); + } + + var submission = canonicalOutcome.Submission; + var now = _timeProvider.GetUtcNow(); + + return new AttestorEntry + { + RekorUuid = submission.Uuid, + Artifact = new AttestorEntry.ArtifactDescriptor + { + Sha256 = request.Meta.Artifact.Sha256, + Kind = request.Meta.Artifact.Kind, + ImageDigest = request.Meta.Artifact.ImageDigest, + SubjectUri = request.Meta.Artifact.SubjectUri + }, + BundleSha256 = request.Meta.BundleSha256, + Index = submission.Index, + Proof = ConvertProof(canonicalOutcome.Proof), + Log = new AttestorEntry.LogDescriptor + { + Backend = canonicalOutcome.Backend, + Url = submission.LogUrl ?? canonicalOutcome.Url, + LogId = null + }, + CreatedAt = now, + Status = submission.Status ?? "included", + SignerIdentity = new AttestorEntry.SignerIdentityDescriptor + { + Mode = request.Bundle.Mode, + Issuer = context.CallerAudience, + SubjectAlternativeName = context.CallerSubject, + KeyId = context.CallerClientId + }, + Witness = ConvertWitness(canonicalOutcome.Witness), + Mirror = mirrorOutcome is null ? null : CreateMirrorDescriptor(mirrorOutcome) + }; + } + + private static string? 
ResolveCacheSubject(AttestorSubmissionRequest request) + { + if (request.Meta.Artifact.SubjectUri is { Length: > 0 } subjectUri) + { + return subjectUri; + } + + if (request.Meta.Artifact.ImageDigest is { Length: > 0 } imageDigest) + { + return imageDigest; + } + + return string.IsNullOrWhiteSpace(request.Meta.Artifact.Sha256) + ? null + : request.Meta.Artifact.Sha256; + } + + private Task InvalidateVerificationCacheAsync(string? subject, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(subject)) + { + return Task.CompletedTask; + } + + return _verificationCache.InvalidateSubjectAsync(subject, cancellationToken); + } + + private static string NormalizeLogPreference(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return "primary"; + } + + var normalized = value.Trim().ToLowerInvariant(); + return normalized switch + { + "primary" => "primary", + "mirror" => "mirror", + "both" => "both", + _ => "primary" + }; + } + + private async Task TryGetExistingEntryAsync(string bundleSha256, CancellationToken cancellationToken) + { + var dedupeUuid = await _dedupeStore.TryGetExistingAsync(bundleSha256, cancellationToken).ConfigureAwait(false); + if (string.IsNullOrWhiteSpace(dedupeUuid)) + { + return null; + } + + return await _repository.GetByUuidAsync(dedupeUuid, cancellationToken).ConfigureAwait(false) + ?? await _repository.GetByBundleShaAsync(bundleSha256, cancellationToken).ConfigureAwait(false); + } + + private async Task EnsureBackendsAsync( + AttestorEntry existing, + AttestorSubmissionRequest request, + SubmissionContext context, + bool requiresPrimary, + bool requiresMirror, + string? 
cacheSubject, + CancellationToken cancellationToken) + { + var entry = existing; + var updated = false; + + if (requiresPrimary && !IsPrimary(entry)) + { + var outcome = await SubmitToBackendAsync(request, "primary", _options.Rekor.Primary, cancellationToken).ConfigureAwait(false); + entry = PromoteToPrimary(entry, outcome); + await _repository.SaveAsync(entry, cancellationToken).ConfigureAwait(false); + await InvalidateVerificationCacheAsync(cacheSubject, cancellationToken).ConfigureAwait(false); + await _dedupeStore.SetAsync(request.Meta.BundleSha256, entry.RekorUuid, DedupeTtl, cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(request, context, entry, outcome, cancellationToken).ConfigureAwait(false); + updated = true; + } + + if (requiresMirror) + { + var mirrorSatisfied = entry.Mirror is not null + && entry.Mirror.Error is null + && string.Equals(entry.Mirror.Status, "included", StringComparison.OrdinalIgnoreCase) + && !string.IsNullOrEmpty(entry.Mirror.Uuid); + + if (!mirrorSatisfied) + { + try + { + var mirrorOutcome = await SubmitToBackendAsync(request, "mirror", _options.Rekor.Mirror, cancellationToken).ConfigureAwait(false); + entry = WithMirror(entry, mirrorOutcome); + await _repository.SaveAsync(entry, cancellationToken).ConfigureAwait(false); + await InvalidateVerificationCacheAsync(cacheSubject, cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(request, context, entry, mirrorOutcome, cancellationToken).ConfigureAwait(false); + updated = true; + } + catch (Exception ex) + { + _metrics.ErrorTotal.Add(1, new KeyValuePair("type", "submit_mirror")); + _logger.LogWarning(ex, "Mirror submission failed for deduplicated bundle {BundleSha}", request.Meta.BundleSha256); + var failure = SubmissionOutcome.Failure("mirror", _options.Rekor.Mirror.Url, ex, TimeSpan.Zero); + RecordSubmissionMetrics(failure); + entry = WithMirror(entry, failure); + await _repository.SaveAsync(entry, cancellationToken).ConfigureAwait(false); + await 
InvalidateVerificationCacheAsync(cacheSubject, cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(request, context, entry, failure, cancellationToken).ConfigureAwait(false); + updated = true; + } + } + } + + if (!updated) + { + _metrics.SubmitTotal.Add(1, + new KeyValuePair("result", "dedupe"), + new KeyValuePair("backend", "cache")); + } + + return entry; + } + + private static bool IsPrimary(AttestorEntry entry) => + string.Equals(entry.Log.Backend, "primary", StringComparison.OrdinalIgnoreCase); + + private async Task SubmitToBackendAsync( + AttestorSubmissionRequest request, + string backendName, + AttestorOptions.RekorBackendOptions backendOptions, + CancellationToken cancellationToken) + { + var backend = BuildBackend(backendName, backendOptions); + var stopwatch = Stopwatch.StartNew(); + try + { + var submission = await _rekorClient.SubmitAsync(request, backend, cancellationToken).ConfigureAwait(false); + stopwatch.Stop(); + + var proof = submission.Proof; + if (proof is null && string.Equals(submission.Status, "included", StringComparison.OrdinalIgnoreCase)) + { + try + { + proof = await _rekorClient.GetProofAsync(submission.Uuid, backend, cancellationToken).ConfigureAwait(false); + _metrics.ProofFetchTotal.Add(1, + new KeyValuePair("result", proof is null ? 
"missing" : "ok")); + } + catch (Exception ex) + { + _metrics.ErrorTotal.Add(1, new KeyValuePair("type", "proof_fetch")); + _logger.LogWarning(ex, "Proof fetch failed for {Uuid} on backend {Backend}", submission.Uuid, backendName); + } + } + + var witness = await FetchWitnessAsync(backendName, backendOptions, submission, proof, cancellationToken).ConfigureAwait(false); + var outcome = SubmissionOutcome.Success(backendName, backend.Url, submission, proof, witness, stopwatch.Elapsed); + RecordSubmissionMetrics(outcome); + return outcome; + } + catch (Exception ex) + { + stopwatch.Stop(); + _metrics.ErrorTotal.Add(1, new KeyValuePair("type", $"submit_{backendName}")); + _logger.LogError(ex, "Failed to submit bundle {BundleSha} to Rekor backend {Backend}", request.Meta.BundleSha256, backendName); + throw; + } + } + + private async Task FetchWitnessAsync( + string backend, + AttestorOptions.RekorBackendOptions backendOptions, + RekorSubmissionResponse submission, + RekorProofResponse? proof, + CancellationToken cancellationToken) + { + if (!_options.TransparencyWitness.Enabled || string.IsNullOrWhiteSpace(_options.TransparencyWitness.BaseUrl)) + { + return null; + } + + var logUrl = submission.LogUrl; + if (string.IsNullOrWhiteSpace(logUrl)) + { + logUrl = backendOptions.Url; + } + + if (string.IsNullOrWhiteSpace(logUrl)) + { + return null; + } + + var request = new TransparencyWitnessRequest( + submission.Uuid, + backend, + new Uri(logUrl, UriKind.Absolute), + proof?.Checkpoint?.RootHash); + + try + { + return await _witnessClient.GetObservationAsync(request, cancellationToken).ConfigureAwait(false); + } + catch (OperationCanceledException) + { + throw; + } + catch (Exception ex) + { + _logger.LogDebug(ex, "Transparency witness fetch failed for {Uuid} on backend {Backend}", submission.Uuid, backend); + return new TransparencyWitnessObservation + { + Aggregator = _options.TransparencyWitness.AggregatorId ?? 
backend, + Status = "error", + RootHash = proof?.Checkpoint?.RootHash, + RetrievedAt = _timeProvider.GetUtcNow(), + Error = ex.Message + }; + } + } + + private static AttestorEntry.WitnessDescriptor? ConvertWitness(TransparencyWitnessObservation? witness) + { + if (witness is null) + { + return null; + } + + return new AttestorEntry.WitnessDescriptor + { + Aggregator = witness.Aggregator ?? string.Empty, + Status = string.IsNullOrWhiteSpace(witness.Status) ? "unknown" : witness.Status!, + RootHash = witness.RootHash, + RetrievedAt = witness.RetrievedAt, + Statement = witness.Statement, + Signature = witness.Signature, + KeyId = witness.KeyId, + Error = witness.Error + }; + } + + private void RecordSubmissionMetrics(SubmissionOutcome outcome) + { + var result = outcome.IsSuccess + ? outcome.Submission!.Status ?? "unknown" + : "failed"; + + _metrics.SubmitTotal.Add(1, + new KeyValuePair("result", result), + new KeyValuePair("backend", outcome.Backend)); + + if (outcome.Latency > TimeSpan.Zero) + { + _metrics.SubmitLatency.Record(outcome.Latency.TotalSeconds, + new KeyValuePair("backend", outcome.Backend)); + } + } + + private async Task ArchiveAsync( + AttestorEntry entry, + byte[] canonicalBundle, + RekorProofResponse? proof, + CancellationToken cancellationToken) + { + var metadata = new Dictionary + { + ["logUrl"] = entry.Log.Url, + ["status"] = entry.Status + }; + + if (entry.Mirror is not null) + { + metadata["mirror.backend"] = entry.Mirror.Backend; + metadata["mirror.uuid"] = entry.Mirror.Uuid ?? string.Empty; + metadata["mirror.status"] = entry.Mirror.Status; + } + + var archiveBundle = new AttestorArchiveBundle + { + RekorUuid = entry.RekorUuid, + ArtifactSha256 = entry.Artifact.Sha256, + BundleSha256 = entry.BundleSha256, + CanonicalBundleJson = canonicalBundle, + ProofJson = proof is null ? 
Array.Empty() : JsonSerializer.SerializeToUtf8Bytes(proof, JsonSerializerOptions.Default), + Metadata = metadata + }; + + try + { + await _archiveStore.ArchiveBundleAsync(archiveBundle, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to archive bundle {BundleSha}", entry.BundleSha256); + _metrics.ErrorTotal.Add(1, new KeyValuePair("type", "archive")); + } + } + + private Task WriteAuditAsync( + AttestorSubmissionRequest request, + SubmissionContext context, + AttestorEntry entry, + SubmissionOutcome outcome, + CancellationToken cancellationToken) + { + var metadata = new Dictionary(); + if (!outcome.IsSuccess && outcome.Error is not null) + { + metadata["error"] = outcome.Error.Message; + } + + var record = new AttestorAuditRecord + { + Action = "submit", + Result = outcome.IsSuccess + ? outcome.Submission!.Status ?? "included" + : "failed", + RekorUuid = outcome.IsSuccess + ? outcome.Submission!.Uuid + : string.Equals(outcome.Backend, "primary", StringComparison.OrdinalIgnoreCase) + ? entry.RekorUuid + : entry.Mirror?.Uuid, + Index = outcome.Submission?.Index, + ArtifactSha256 = request.Meta.Artifact.Sha256, + BundleSha256 = request.Meta.BundleSha256, + Backend = outcome.Backend, + LatencyMs = (long)outcome.Latency.TotalMilliseconds, + Timestamp = _timeProvider.GetUtcNow(), + Caller = new AttestorAuditRecord.CallerDescriptor + { + Subject = context.CallerSubject, + Audience = context.CallerAudience, + ClientId = context.CallerClientId, + MtlsThumbprint = context.MtlsThumbprint, + Tenant = context.CallerTenant + }, + Metadata = metadata + }; + + return _auditSink.WriteAsync(record, cancellationToken); + } + + private static AttestorEntry.ProofDescriptor? ConvertProof(RekorProofResponse? proof) + { + if (proof is null) + { + return null; + } + + return new AttestorEntry.ProofDescriptor + { + Checkpoint = proof.Checkpoint is null ? 
null : new AttestorEntry.CheckpointDescriptor + { + Origin = proof.Checkpoint.Origin, + Size = proof.Checkpoint.Size, + RootHash = proof.Checkpoint.RootHash, + Timestamp = proof.Checkpoint.Timestamp + }, + Inclusion = proof.Inclusion is null ? null : new AttestorEntry.InclusionDescriptor + { + LeafHash = proof.Inclusion.LeafHash, + Path = proof.Inclusion.Path + } + }; + } + + private static AttestorSubmissionResult.WitnessStatement? ToResultWitness(AttestorEntry.WitnessDescriptor? witness) + { + if (witness is null) + { + return null; + } + + return new AttestorSubmissionResult.WitnessStatement + { + Aggregator = witness.Aggregator, + Status = witness.Status, + RootHash = witness.RootHash, + RetrievedAt = witness.RetrievedAt == default ? null : witness.RetrievedAt.ToString("O"), + Statement = witness.Statement, + Signature = witness.Signature, + KeyId = witness.KeyId, + Error = witness.Error + }; + } + + private static AttestorSubmissionResult.RekorProof? ToResultProof(AttestorEntry.ProofDescriptor? proof) + { + if (proof is null) + { + return null; + } + + return new AttestorSubmissionResult.RekorProof + { + Checkpoint = proof.Checkpoint is null ? null : new AttestorSubmissionResult.Checkpoint + { + Origin = proof.Checkpoint.Origin, + Size = proof.Checkpoint.Size, + RootHash = proof.Checkpoint.RootHash, + Timestamp = proof.Checkpoint.Timestamp?.ToString("O") + }, + Inclusion = proof.Inclusion is null ? null : new AttestorSubmissionResult.InclusionProof + { + LeafHash = proof.Inclusion.LeafHash, + Path = proof.Inclusion.Path + } + }; + } + + private static AttestorEntry.LogReplicaDescriptor CreateMirrorDescriptor(SubmissionOutcome outcome) + { + return new AttestorEntry.LogReplicaDescriptor + { + Backend = outcome.Backend, + Url = outcome.IsSuccess + ? outcome.Submission!.LogUrl ?? outcome.Url + : outcome.Url, + Uuid = outcome.Submission?.Uuid, + Index = outcome.Submission?.Index, + Status = outcome.IsSuccess + ? outcome.Submission!.Status ?? 
"included" + : "failed", + Proof = outcome.IsSuccess ? ConvertProof(outcome.Proof) : null, + Witness = ConvertWitness(outcome.Witness), + Error = outcome.Error?.Message + }; + } + + private static AttestorEntry WithMirror(AttestorEntry entry, SubmissionOutcome outcome) + { + return new AttestorEntry + { + RekorUuid = entry.RekorUuid, + Artifact = entry.Artifact, + BundleSha256 = entry.BundleSha256, + Index = entry.Index, + Proof = entry.Proof, + Log = entry.Log, + CreatedAt = entry.CreatedAt, + Status = entry.Status, + SignerIdentity = entry.SignerIdentity, + Witness = entry.Witness, + Mirror = CreateMirrorDescriptor(outcome) + }; + } + + private AttestorEntry PromoteToPrimary(AttestorEntry existing, SubmissionOutcome outcome) + { + if (outcome.Submission is null) + { + throw new InvalidOperationException("Cannot promote to primary without a successful submission."); + } + + var mirrorDescriptor = existing.Mirror; + if (mirrorDescriptor is null && !string.Equals(existing.Log.Backend, outcome.Backend, StringComparison.OrdinalIgnoreCase)) + { + mirrorDescriptor = CreateMirrorDescriptorFromEntry(existing); + } + + return new AttestorEntry + { + RekorUuid = outcome.Submission.Uuid, + Artifact = existing.Artifact, + BundleSha256 = existing.BundleSha256, + Index = outcome.Submission.Index, + Proof = ConvertProof(outcome.Proof), + Log = new AttestorEntry.LogDescriptor + { + Backend = outcome.Backend, + Url = outcome.Submission.LogUrl ?? outcome.Url, + LogId = existing.Log.LogId + }, + CreatedAt = existing.CreatedAt, + Status = outcome.Submission.Status ?? 
"included", + SignerIdentity = existing.SignerIdentity, + Witness = ConvertWitness(outcome.Witness), + Mirror = mirrorDescriptor + }; + } + + private static AttestorEntry.LogReplicaDescriptor CreateMirrorDescriptorFromEntry(AttestorEntry entry) + { + return new AttestorEntry.LogReplicaDescriptor + { + Backend = entry.Log.Backend, + Url = entry.Log.Url, + Uuid = entry.RekorUuid, + Index = entry.Index, + Status = entry.Status, + Proof = entry.Proof, + Witness = entry.Witness, + LogId = entry.Log.LogId + }; + } + + private sealed record SubmissionOutcome( + string Backend, + string Url, + RekorSubmissionResponse? Submission, + RekorProofResponse? Proof, + TransparencyWitnessObservation? Witness, + TimeSpan Latency, + Exception? Error) + { + public bool IsSuccess => Submission is not null && Error is null; + + public static SubmissionOutcome Success(string backend, Uri backendUrl, RekorSubmissionResponse submission, RekorProofResponse? proof, TransparencyWitnessObservation? witness, TimeSpan latency) => + new SubmissionOutcome(backend, backendUrl.ToString(), submission, proof, witness, latency, null); + + public static SubmissionOutcome Failure(string backend, string? url, Exception error, TimeSpan latency) => + new SubmissionOutcome(backend, url ?? 
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Net.Http;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Attestor.Core.Observability;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Transparency;

namespace StellaOps.Attestor.Infrastructure.Transparency;

/// <summary>
/// Fetches witness observations for Rekor entries from a remote transparency
/// aggregator over HTTP and caches the results in memory for a configurable TTL.
/// Returns <c>null</c> when the witness integration is disabled or no base URL
/// is configured; transport and parse failures are mapped to error-status
/// observations (and cached) rather than thrown, except for caller-initiated
/// cancellation, which is propagated.
/// </summary>
internal sealed class HttpTransparencyWitnessClient : ITransparencyWitnessClient
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web);

    private readonly HttpClient _client;
    private readonly IMemoryCache _cache;
    private readonly AttestorOptions _options;
    private readonly AttestorMetrics _metrics;
    private readonly AttestorActivitySource _activitySource;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<HttpTransparencyWitnessClient> _logger;

    public HttpTransparencyWitnessClient(
        HttpClient client,
        IMemoryCache cache,
        IOptions<AttestorOptions> options,
        AttestorMetrics metrics,
        AttestorActivitySource activitySource,
        TimeProvider timeProvider,
        ILogger<HttpTransparencyWitnessClient> logger)
    {
        _client = client ?? throw new ArgumentNullException(nameof(client));
        _cache = cache ?? throw new ArgumentNullException(nameof(cache));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
        _activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Retrieves the witness observation for <paramref name="request"/>,
    /// serving from the in-memory cache when a fresh entry exists.
    /// </summary>
    /// <returns>
    /// <c>null</c> when witnessing is disabled; otherwise an observation
    /// (possibly an error-status one) that has been written to the cache.
    /// </returns>
    public async Task<TransparencyWitnessObservation?> GetObservationAsync(TransparencyWitnessRequest request, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        var witnessOptions = _options.TransparencyWitness;
        if (!witnessOptions.Enabled || string.IsNullOrWhiteSpace(witnessOptions.BaseUrl))
        {
            return null;
        }

        var cacheKey = BuildCacheKey(request);
        if (_cache.TryGetValue(cacheKey, out TransparencyWitnessObservation? cached) && cached is not null)
        {
            return cached;
        }

        var aggregatorId = witnessOptions.AggregatorId ?? request.Backend;
        using var activity = _activitySource.StartWitnessFetch(aggregatorId);
        var stopwatch = Stopwatch.StartNew();

        try
        {
            var requestUri = BuildRequestUri(request, witnessOptions.BaseUrl);
            using var httpRequest = new HttpRequestMessage(HttpMethod.Get, requestUri);
            if (!string.IsNullOrWhiteSpace(witnessOptions.ApiKey))
            {
                httpRequest.Headers.Add("X-API-Key", witnessOptions.ApiKey);
            }

            // Link the caller's token with an optional per-request timeout so
            // either one cancels the HTTP call.
            using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
            if (witnessOptions.RequestTimeoutMs > 0)
            {
                linkedCts.CancelAfter(TimeSpan.FromMilliseconds(witnessOptions.RequestTimeoutMs));
            }

            // FIX: dispose the response — the original leaked HttpResponseMessage.
            using var response = await _client.SendAsync(httpRequest, HttpCompletionOption.ResponseHeadersRead, linkedCts.Token).ConfigureAwait(false);
            stopwatch.Stop();

            RecordWitnessMetrics(aggregatorId, response.IsSuccessStatusCode ? "ok" : "error", stopwatch.Elapsed.TotalSeconds);

            if (!response.IsSuccessStatusCode)
            {
                activity?.SetStatus(ActivityStatusCode.Error, response.ReasonPhrase);
                return CacheAndReturn(cacheKey, BuildErrorObservation(aggregatorId, request.CheckpointRootHash, "http_" + ((int)response.StatusCode).ToString()));
            }

            await using var stream = await response.Content.ReadAsStreamAsync(linkedCts.Token).ConfigureAwait(false);
            var payload = await JsonSerializer.DeserializeAsync<WitnessResponse>(stream, SerializerOptions, linkedCts.Token).ConfigureAwait(false);
            if (payload is null)
            {
                return CacheAndReturn(cacheKey, BuildErrorObservation(aggregatorId, request.CheckpointRootHash, "response_empty"));
            }

            var observation = MapObservation(payload, aggregatorId, request.CheckpointRootHash);
            return CacheAndReturn(cacheKey, observation);
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Caller cancellation propagates; a timeout from linkedCts falls
            // through to the generic handler below and is recorded as an error.
            throw;
        }
        catch (Exception ex)
        {
            stopwatch.Stop();
            RecordWitnessMetrics(aggregatorId, "error", stopwatch.Elapsed.TotalSeconds);
            activity?.SetStatus(ActivityStatusCode.Error, ex.Message);
            _logger.LogWarning(ex, "Failed to fetch transparency witness data for {Uuid} ({Backend})", request.Uuid, request.Backend);
            return CacheAndReturn(cacheKey, BuildErrorObservation(aggregatorId, request.CheckpointRootHash, ex.GetType().Name, ex.Message));
        }
    }

    // Stores the observation under the cache key for the configured TTL
    // (minimum one second) and returns it unchanged.
    private TransparencyWitnessObservation CacheAndReturn(string key, TransparencyWitnessObservation observation)
    {
        var ttlSeconds = Math.Max(1, _options.TransparencyWitness.CacheTtlSeconds);
        var entryOptions = new MemoryCacheEntryOptions
        {
            AbsoluteExpirationRelativeToNow = TimeSpan.FromSeconds(ttlSeconds)
        };

        _cache.Set(key, observation, entryOptions);
        return observation;
    }

    // Cache key combines backend, entry UUID and checkpoint root hash.
    private static string BuildCacheKey(TransparencyWitnessRequest request)
    {
        var root = string.IsNullOrWhiteSpace(request.CheckpointRootHash) ? string.Empty : request.CheckpointRootHash;
        return "witness::" + request.Backend + "::" + request.Uuid + "::" + root;
    }

    // Builds {base}/v1/witness/{uuid}?backend=...&logUrl=...[&rootHash=...].
    private static Uri BuildRequestUri(TransparencyWitnessRequest request, string baseUrl)
    {
        if (!Uri.TryCreate(baseUrl, UriKind.Absolute, out var baseUri))
        {
            throw new InvalidOperationException("Transparency witness base URL is invalid.");
        }

        var basePath = baseUri.AbsolutePath.TrimEnd('/');
        var escapedUuid = Uri.EscapeDataString(request.Uuid);

        var builder = new UriBuilder(baseUri)
        {
            Path = (basePath.Length == 0 ? string.Empty : basePath) + "/v1/witness/" + escapedUuid
        };

        var query = "backend=" + Uri.EscapeDataString(request.Backend) + "&logUrl=" + Uri.EscapeDataString(request.BackendUrl.ToString());
        if (!string.IsNullOrWhiteSpace(request.CheckpointRootHash))
        {
            query += "&rootHash=" + Uri.EscapeDataString(request.CheckpointRootHash);
        }

        builder.Query = query;
        return builder.Uri;
    }

    // Emits one fetch counter (tagged with aggregator + result) and one
    // latency histogram sample (tagged with aggregator).
    private void RecordWitnessMetrics(string aggregatorId, string result, double latencySeconds)
    {
        _metrics.WitnessFetchTotal.Add(1,
            new KeyValuePair<string, object?>(AttestorTelemetryTags.WitnessAggregator, aggregatorId),
            new KeyValuePair<string, object?>(AttestorTelemetryTags.Result, result));

        _metrics.WitnessFetchLatency.Record(latencySeconds,
            new KeyValuePair<string, object?>(AttestorTelemetryTags.WitnessAggregator, aggregatorId));
    }

    // Maps the wire payload onto the domain observation, falling back to the
    // request's aggregator/root/current time for missing fields.
    private TransparencyWitnessObservation MapObservation(WitnessResponse payload, string aggregatorId, string? requestedRoot)
    {
        var status = string.IsNullOrWhiteSpace(payload.Status) ? "unknown" : payload.Status!;
        var root = string.IsNullOrWhiteSpace(payload.RootHash) ? requestedRoot : payload.RootHash;
        var timestamp = payload.Timestamp ?? _timeProvider.GetUtcNow();

        return new TransparencyWitnessObservation
        {
            Aggregator = string.IsNullOrWhiteSpace(payload.Aggregator) ? aggregatorId : payload.Aggregator!,
            Status = status,
            RootHash = root,
            RetrievedAt = timestamp,
            Statement = payload.Statement,
            Signature = payload.Signature?.Value,
            KeyId = payload.Signature?.KeyId,
            Error = payload.Error
        };
    }

    // Builds an observation whose Status carries the error code (e.g. "http_503").
    private TransparencyWitnessObservation BuildErrorObservation(string aggregatorId, string? requestedRoot, string errorCode, string? details = null)
    {
        return new TransparencyWitnessObservation
        {
            Aggregator = aggregatorId,
            Status = errorCode,
            RootHash = requestedRoot,
            RetrievedAt = _timeProvider.GetUtcNow(),
            Error = details
        };
    }

    // Wire shape of the aggregator's JSON response (web-default naming).
    private sealed class WitnessResponse
    {
        public string? Aggregator { get; set; }

        public string? Status { get; set; }

        public string? RootHash { get; set; }

        public string? Statement { get; set; }

        public WitnessSignature? Signature { get; set; }

        public DateTimeOffset? Timestamp { get; set; }

        public string? Error { get; set; }
    }

    private sealed class WitnessSignature
    {
        public string? KeyId { get; set; }

        public string? Value { get; set; }
    }
}

/// <summary>
/// No-op witness client registered when witnessing is disabled; always yields
/// <c>null</c>. (Originally declared in NullTransparencyWitnessClient.cs.)
/// </summary>
internal sealed class NullTransparencyWitnessClient : ITransparencyWitnessClient
{
    public Task<TransparencyWitnessObservation?> GetObservationAsync(TransparencyWitnessRequest request, CancellationToken cancellationToken = default)
    {
        return Task.FromResult<TransparencyWitnessObservation?>(null);
    }
}
System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Attestor.Core.Options; -using StellaOps.Attestor.Core.Rekor; -using StellaOps.Attestor.Core.Storage; -using StellaOps.Attestor.Core.Submission; -using StellaOps.Attestor.Core.Verification; -using StellaOps.Attestor.Core.Observability; - -namespace StellaOps.Attestor.Infrastructure.Verification; - -internal sealed class AttestorVerificationService : IAttestorVerificationService -{ - private readonly IAttestorEntryRepository _repository; - private readonly IDsseCanonicalizer _canonicalizer; - private readonly IRekorClient _rekorClient; - private readonly ILogger _logger; - private readonly AttestorOptions _options; - private readonly AttestorMetrics _metrics; - - public AttestorVerificationService( - IAttestorEntryRepository repository, - IDsseCanonicalizer canonicalizer, - IRekorClient rekorClient, - IOptions options, - ILogger logger, - AttestorMetrics metrics) - { - _repository = repository; - _canonicalizer = canonicalizer; - _rekorClient = rekorClient; - _logger = logger; - _options = options.Value; - _metrics = metrics; - } - - public async Task VerifyAsync(AttestorVerificationRequest request, CancellationToken cancellationToken = default) - { - if (request is null) - { - throw new ArgumentNullException(nameof(request)); - } - - var entry = await ResolveEntryAsync(request, cancellationToken).ConfigureAwait(false); - if (entry is null) - { - throw new AttestorVerificationException("not_found", "No attestor entry matched the supplied query."); - } - - var issues = new List(); - - if (request.Bundle is not null) - { - var canonicalBundle = await _canonicalizer.CanonicalizeAsync(new AttestorSubmissionRequest - { - Bundle = request.Bundle, - Meta = new AttestorSubmissionRequest.SubmissionMeta - { - Artifact = new AttestorSubmissionRequest.ArtifactInfo - { - Sha256 = entry.Artifact.Sha256, - Kind = 
entry.Artifact.Kind - }, - BundleSha256 = entry.BundleSha256 - } - }, cancellationToken).ConfigureAwait(false); - - var computedHash = Convert.ToHexString(SHA256.HashData(canonicalBundle)).ToLowerInvariant(); - if (!string.Equals(computedHash, entry.BundleSha256, StringComparison.OrdinalIgnoreCase)) - { - issues.Add("bundle_hash_mismatch"); - } - - if (!TryDecodeBase64(request.Bundle.Dsse.PayloadBase64, out var payloadBytes)) - { - issues.Add("bundle_payload_invalid_base64"); - } - else - { - var preAuth = ComputePreAuthEncoding(request.Bundle.Dsse.PayloadType, payloadBytes); - VerifySignatures(entry, request.Bundle, preAuth, issues); - } - } - else - { - _logger.LogDebug("No DSSE bundle supplied for verification of {Uuid}; signature checks skipped.", entry.RekorUuid); - } - - if (request.RefreshProof || entry.Proof is null) - { - var backend = BuildBackend("primary", _options.Rekor.Primary); - try - { - var proof = await _rekorClient.GetProofAsync(entry.RekorUuid, backend, cancellationToken).ConfigureAwait(false); - if (proof is not null) - { - var updated = CloneWithProof(entry, proof.ToProofDescriptor()); - await _repository.SaveAsync(updated, cancellationToken).ConfigureAwait(false); - entry = updated; - } - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to refresh proof for entry {Uuid}", entry.RekorUuid); - issues.Add("Proof refresh failed: " + ex.Message); - } - } - - VerifyMerkleProof(entry, issues); - - var ok = issues.Count == 0 && string.Equals(entry.Status, "included", StringComparison.OrdinalIgnoreCase); - - _metrics.VerifyTotal.Add(1, new KeyValuePair("result", ok ? 
"ok" : "failed")); - - return new AttestorVerificationResult - { - Ok = ok, - Uuid = entry.RekorUuid, - Index = entry.Index, - LogUrl = entry.Log.Url, - Status = entry.Status, - Issues = issues, - CheckedAt = DateTimeOffset.UtcNow - }; - } - - public Task GetEntryAsync(string rekorUuid, bool refreshProof, CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(rekorUuid)) - { - throw new ArgumentException("Value cannot be null or whitespace.", nameof(rekorUuid)); - } - - return ResolveEntryByUuidAsync(rekorUuid, refreshProof, cancellationToken); - } - - private async Task ResolveEntryAsync(AttestorVerificationRequest request, CancellationToken cancellationToken) - { - if (!string.IsNullOrWhiteSpace(request.Uuid)) - { - return await ResolveEntryByUuidAsync(request.Uuid, request.RefreshProof, cancellationToken).ConfigureAwait(false); - } - - if (request.Bundle is not null) - { - var canonical = await _canonicalizer.CanonicalizeAsync(new AttestorSubmissionRequest - { - Bundle = request.Bundle, - Meta = new AttestorSubmissionRequest.SubmissionMeta - { - Artifact = new AttestorSubmissionRequest.ArtifactInfo - { - Sha256 = string.Empty, - Kind = string.Empty - } - } - }, cancellationToken).ConfigureAwait(false); - - var bundleSha = Convert.ToHexString(System.Security.Cryptography.SHA256.HashData(canonical)).ToLowerInvariant(); - return await ResolveEntryByBundleShaAsync(bundleSha, request.RefreshProof, cancellationToken).ConfigureAwait(false); - } - - if (!string.IsNullOrWhiteSpace(request.ArtifactSha256)) - { - return await ResolveEntryByArtifactAsync(request.ArtifactSha256, request.RefreshProof, cancellationToken).ConfigureAwait(false); - } - - throw new AttestorVerificationException("invalid_query", "At least one of uuid, bundle, or artifactSha256 must be provided."); - } - - private async Task ResolveEntryByUuidAsync(string uuid, bool refreshProof, CancellationToken cancellationToken) - { - var entry = await _repository.GetByUuidAsync(uuid, 
cancellationToken).ConfigureAwait(false); - if (entry is null || !refreshProof) - { - return entry; - } - - var backend = BuildBackend("primary", _options.Rekor.Primary); - try - { - var proof = await _rekorClient.GetProofAsync(uuid, backend, cancellationToken).ConfigureAwait(false); - if (proof is not null) - { - var updated = CloneWithProof(entry, proof.ToProofDescriptor()); - await _repository.SaveAsync(updated, cancellationToken).ConfigureAwait(false); - entry = updated; - } - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to refresh proof for entry {Uuid}", uuid); - } - - return entry; - } - - private async Task ResolveEntryByBundleShaAsync(string bundleSha, bool refreshProof, CancellationToken cancellationToken) - { - var entry = await _repository.GetByBundleShaAsync(bundleSha, cancellationToken).ConfigureAwait(false); - if (entry is null || !refreshProof) - { - return entry; - } - - return await ResolveEntryByUuidAsync(entry.RekorUuid, true, cancellationToken).ConfigureAwait(false); - } - - private async Task ResolveEntryByArtifactAsync(string artifactSha256, bool refreshProof, CancellationToken cancellationToken) - { - var entries = await _repository.GetByArtifactShaAsync(artifactSha256, cancellationToken).ConfigureAwait(false); - var entry = entries.OrderByDescending(e => e.CreatedAt).FirstOrDefault(); - if (entry is null) - { - return null; - } - - return refreshProof - ? await ResolveEntryByUuidAsync(entry.RekorUuid, true, cancellationToken).ConfigureAwait(false) - : entry; - } - - private void VerifySignatures(AttestorEntry entry, AttestorSubmissionRequest.SubmissionBundle bundle, byte[] preAuthEncoding, IList issues) - { - var mode = (entry.SignerIdentity.Mode ?? bundle.Mode ?? 
string.Empty).ToLowerInvariant(); - - if (mode == "kms") - { - if (!VerifyKmsSignature(bundle, preAuthEncoding, issues)) - { - issues.Add("signature_invalid_kms"); - } - - return; - } - - if (mode == "keyless") - { - VerifyKeylessSignature(entry, bundle, preAuthEncoding, issues); - return; - } - - issues.Add(string.IsNullOrEmpty(mode) - ? "signer_mode_unknown" - : $"signer_mode_unsupported:{mode}"); - } - - private bool VerifyKmsSignature(AttestorSubmissionRequest.SubmissionBundle bundle, byte[] preAuthEncoding, IList issues) - { - if (_options.Security.SignerIdentity.KmsKeys.Count == 0) - { - issues.Add("kms_key_missing"); - return false; - } - - var signatures = new List(); - foreach (var signature in bundle.Dsse.Signatures) - { - if (!TryDecodeBase64(signature.Signature, out var signatureBytes)) - { - issues.Add("signature_invalid_base64"); - return false; - } - - signatures.Add(signatureBytes); - } - - foreach (var secret in _options.Security.SignerIdentity.KmsKeys) - { - if (!TryDecodeSecret(secret, out var secretBytes)) - { - continue; - } - - using var hmac = new HMACSHA256(secretBytes); - var computed = hmac.ComputeHash(preAuthEncoding); - - foreach (var signatureBytes in signatures) - { - if (CryptographicOperations.FixedTimeEquals(computed, signatureBytes)) - { - return true; - } - } - } - - return false; - } - - private void VerifyKeylessSignature(AttestorEntry entry, AttestorSubmissionRequest.SubmissionBundle bundle, byte[] preAuthEncoding, IList issues) - { - if (bundle.CertificateChain.Count == 0) - { - issues.Add("certificate_chain_missing"); - return; - } - - var certificates = new List(); - try - { - foreach (var pem in bundle.CertificateChain) - { - certificates.Add(X509Certificate2.CreateFromPem(pem)); - } - } - catch (Exception ex) when (ex is CryptographicException or ArgumentException) - { - issues.Add("certificate_chain_invalid"); - _logger.LogWarning(ex, "Failed to parse certificate chain for {Uuid}", entry.RekorUuid); - return; - } - - var 
leafCertificate = certificates[0]; - - if (_options.Security.SignerIdentity.FulcioRoots.Count > 0) - { - using var chain = new X509Chain - { - ChainPolicy = - { - RevocationMode = X509RevocationMode.NoCheck, - VerificationFlags = X509VerificationFlags.NoFlag, - TrustMode = X509ChainTrustMode.CustomRootTrust - } - }; - - foreach (var rootPath in _options.Security.SignerIdentity.FulcioRoots) - { - try - { - if (File.Exists(rootPath)) - { - var rootCertificate = X509CertificateLoader.LoadCertificateFromFile(rootPath); - chain.ChainPolicy.CustomTrustStore.Add(rootCertificate); - } - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to load Fulcio root {Root}", rootPath); - } - } - - if (!chain.Build(leafCertificate)) - { - var status = string.Join(";", chain.ChainStatus.Select(s => s.StatusInformation.Trim())) - .Trim(';'); - issues.Add(string.IsNullOrEmpty(status) ? "certificate_chain_untrusted" : $"certificate_chain_untrusted:{status}"); - } - } - - if (_options.Security.SignerIdentity.AllowedSans.Count > 0) - { - var sans = GetSubjectAlternativeNames(leafCertificate); - if (!sans.Any(san => _options.Security.SignerIdentity.AllowedSans.Contains(san, StringComparer.OrdinalIgnoreCase))) - { - issues.Add("certificate_san_untrusted"); - } - } - - var signatureVerified = false; - foreach (var signature in bundle.Dsse.Signatures) - { - if (!TryDecodeBase64(signature.Signature, out var signatureBytes)) - { - issues.Add("signature_invalid_base64"); - return; - } - - if (TryVerifyWithCertificate(leafCertificate, preAuthEncoding, signatureBytes)) - { - signatureVerified = true; - break; - } - } - - if (!signatureVerified) - { - issues.Add("signature_invalid"); - } - } - - private static bool TryVerifyWithCertificate(X509Certificate2 certificate, byte[] preAuthEncoding, byte[] signature) - { - try - { - var ecdsa = certificate.GetECDsaPublicKey(); - if (ecdsa is not null) - { - using (ecdsa) - { - return ecdsa.VerifyData(preAuthEncoding, signature, 
HashAlgorithmName.SHA256); - } - } - - var rsa = certificate.GetRSAPublicKey(); - if (rsa is not null) - { - using (rsa) - { - return rsa.VerifyData(preAuthEncoding, signature, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - } - } - } - catch (CryptographicException) - { - return false; - } - - return false; - } - - private static IEnumerable GetSubjectAlternativeNames(X509Certificate2 certificate) - { - foreach (var extension in certificate.Extensions) - { - if (!string.Equals(extension.Oid?.Value, "2.5.29.17", StringComparison.Ordinal)) - { - continue; - } - - var formatted = extension.Format(true); - var lines = formatted.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries); - foreach (var line in lines) - { - var parts = line.Split('='); - if (parts.Length == 2) - { - yield return parts[1].Trim(); - } - } - } - } - - private static byte[] ComputePreAuthEncoding(string payloadType, byte[] payload) - { - var headerBytes = Encoding.UTF8.GetBytes(payloadType ?? string.Empty); - var buffer = new byte[6 + 8 + headerBytes.Length + 8 + payload.Length]; - var offset = 0; - - Encoding.ASCII.GetBytes("DSSEv1", 0, 6, buffer, offset); - offset += 6; - - BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, 8), (ulong)headerBytes.Length); - offset += 8; - Buffer.BlockCopy(headerBytes, 0, buffer, offset, headerBytes.Length); - offset += headerBytes.Length; - - BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, 8), (ulong)payload.Length); - offset += 8; - Buffer.BlockCopy(payload, 0, buffer, offset, payload.Length); - - return buffer; - } - - private void VerifyMerkleProof(AttestorEntry entry, IList issues) - { - if (entry.Proof is null) - { - issues.Add("proof_missing"); - return; - } - - if (!TryDecodeHash(entry.BundleSha256, out var bundleHash)) - { - issues.Add("bundle_hash_decode_failed"); - return; - } - - if (entry.Proof.Inclusion is null) - { - issues.Add("proof_inclusion_missing"); - return; - } - - if 
(entry.Proof.Inclusion.LeafHash is not null) - { - if (!TryDecodeHash(entry.Proof.Inclusion.LeafHash, out var proofLeaf)) - { - issues.Add("proof_leafhash_decode_failed"); - return; - } - - if (!CryptographicOperations.FixedTimeEquals(bundleHash, proofLeaf)) - { - issues.Add("proof_leafhash_mismatch"); - } - } - - var current = bundleHash; - - if (entry.Proof.Inclusion.Path.Count > 0) - { - var nodes = new List(); - foreach (var element in entry.Proof.Inclusion.Path) - { - if (!ProofPathNode.TryParse(element, out var node)) - { - issues.Add("proof_path_decode_failed"); - return; - } - - if (!node.HasOrientation) - { - issues.Add("proof_path_orientation_missing"); - return; - } - - nodes.Add(node); - } - - foreach (var node in nodes) - { - current = node.Left - ? HashInternal(node.Hash, current) - : HashInternal(current, node.Hash); - } - } - - if (entry.Proof.Checkpoint is null) - { - issues.Add("checkpoint_missing"); - return; - } - - if (!TryDecodeHash(entry.Proof.Checkpoint.RootHash, out var rootHash)) - { - issues.Add("checkpoint_root_decode_failed"); - return; - } - - if (!CryptographicOperations.FixedTimeEquals(current, rootHash)) - { - issues.Add("proof_root_mismatch"); - } - } - - private static byte[] HashInternal(byte[] left, byte[] right) - { - using var sha = SHA256.Create(); - var buffer = new byte[1 + left.Length + right.Length]; - buffer[0] = 0x01; - Buffer.BlockCopy(left, 0, buffer, 1, left.Length); - Buffer.BlockCopy(right, 0, buffer, 1 + left.Length, right.Length); - return sha.ComputeHash(buffer); - } - - private static bool TryDecodeSecret(string value, out byte[] bytes) - { - if (string.IsNullOrWhiteSpace(value)) - { - bytes = Array.Empty(); - return false; - } - - value = value.Trim(); - - if (value.StartsWith("base64:", StringComparison.OrdinalIgnoreCase)) - { - return TryDecodeBase64(value[7..], out bytes); - } - - if (value.StartsWith("hex:", StringComparison.OrdinalIgnoreCase)) - { - return TryDecodeHex(value[4..], out bytes); - } - - if 
(TryDecodeBase64(value, out bytes)) - { - return true; - } - - if (TryDecodeHex(value, out bytes)) - { - return true; - } - - bytes = Array.Empty(); - return false; - } - - private static bool TryDecodeBase64(string value, out byte[] bytes) - { - try - { - bytes = Convert.FromBase64String(value); - return true; - } - catch (FormatException) - { - bytes = Array.Empty(); - return false; - } - } - - private static bool TryDecodeHex(string value, out byte[] bytes) - { - try - { - bytes = Convert.FromHexString(value); - return true; - } - catch (FormatException) - { - bytes = Array.Empty(); - return false; - } - } - - private static bool TryDecodeHash(string? value, out byte[] bytes) - { - bytes = Array.Empty(); - if (string.IsNullOrWhiteSpace(value)) - { - return false; - } - - var trimmed = value.Trim(); - - if (TryDecodeHex(trimmed, out bytes)) - { - return true; - } - - if (TryDecodeBase64(trimmed, out bytes)) - { - return true; - } - - bytes = Array.Empty(); - return false; - } - - private readonly struct ProofPathNode - { - private ProofPathNode(bool hasOrientation, bool left, byte[] hash) - { - HasOrientation = hasOrientation; - Left = left; - Hash = hash; - } - - public bool HasOrientation { get; } - - public bool Left { get; } - - public byte[] Hash { get; } - - public static bool TryParse(string value, out ProofPathNode node) - { - node = default; - if (string.IsNullOrWhiteSpace(value)) - { - return false; - } - - var trimmed = value.Trim(); - var parts = trimmed.Split(':', 2); - bool hasOrientation = false; - bool left = false; - string hashPart = trimmed; - - if (parts.Length == 2) - { - var prefix = parts[0].Trim().ToLowerInvariant(); - if (prefix is "l" or "left") - { - hasOrientation = true; - left = true; - } - else if (prefix is "r" or "right") - { - hasOrientation = true; - left = false; - } - - hashPart = parts[1].Trim(); - } - - if (!TryDecodeHash(hashPart, out var hash)) - { - return false; - } - - node = new ProofPathNode(hasOrientation, left, 
hash); - return true; - } - } - - private static AttestorEntry CloneWithProof(AttestorEntry entry, AttestorEntry.ProofDescriptor? proof) - { - return new AttestorEntry - { - RekorUuid = entry.RekorUuid, - Artifact = entry.Artifact, - BundleSha256 = entry.BundleSha256, - Index = entry.Index, - Proof = proof, - Log = entry.Log, - CreatedAt = entry.CreatedAt, - Status = entry.Status, - SignerIdentity = entry.SignerIdentity - }; - } - - private static RekorBackend BuildBackend(string name, AttestorOptions.RekorBackendOptions options) - { - if (string.IsNullOrWhiteSpace(options.Url)) - { - throw new InvalidOperationException($"Rekor backend '{name}' is not configured."); - } - - return new RekorBackend - { - Name = name, - Url = new Uri(options.Url, UriKind.Absolute), - ProofTimeout = TimeSpan.FromMilliseconds(options.ProofTimeoutMs), - PollInterval = TimeSpan.FromMilliseconds(options.PollIntervalMs), - MaxAttempts = options.MaxAttempts - }; - } -} - -internal static class RekorProofResponseExtensions -{ - public static AttestorEntry.ProofDescriptor ToProofDescriptor(this RekorProofResponse response) - { - return new AttestorEntry.ProofDescriptor - { - Checkpoint = response.Checkpoint is null ? null : new AttestorEntry.CheckpointDescriptor - { - Origin = response.Checkpoint.Origin, - Size = response.Checkpoint.Size, - RootHash = response.Checkpoint.RootHash, - Timestamp = response.Checkpoint.Timestamp - }, - Inclusion = response.Inclusion is null ? 
null : new AttestorEntry.InclusionDescriptor - { - LeafHash = response.Inclusion.LeafHash, - Path = response.Inclusion.Path - } - }; - } -} +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Security.Cryptography; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Observability; +using StellaOps.Attestor.Core.Rekor; +using StellaOps.Attestor.Core.Storage; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Core.Transparency; +using StellaOps.Attestor.Core.Verification; +using StellaOps.Attestor.Verify; + +namespace StellaOps.Attestor.Infrastructure.Verification; + +internal sealed class AttestorVerificationService : IAttestorVerificationService +{ + private readonly IAttestorEntryRepository _repository; + private readonly IDsseCanonicalizer _canonicalizer; + private readonly IRekorClient _rekorClient; + private readonly ITransparencyWitnessClient _witnessClient; + private readonly IAttestorVerificationEngine _engine; + private readonly ILogger _logger; + private readonly AttestorOptions _options; + private readonly AttestorMetrics _metrics; + private readonly AttestorActivitySource _activitySource; + private readonly TimeProvider _timeProvider; + + public AttestorVerificationService( + IAttestorEntryRepository repository, + IDsseCanonicalizer canonicalizer, + IRekorClient rekorClient, + ITransparencyWitnessClient witnessClient, + IAttestorVerificationEngine engine, + IOptions options, + ILogger logger, + AttestorMetrics metrics, + AttestorActivitySource activitySource, + TimeProvider timeProvider) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _canonicalizer = canonicalizer ?? throw new ArgumentNullException(nameof(canonicalizer)); + _rekorClient = rekorClient ?? 
throw new ArgumentNullException(nameof(rekorClient)); + _witnessClient = witnessClient ?? throw new ArgumentNullException(nameof(witnessClient)); + _engine = engine ?? throw new ArgumentNullException(nameof(engine)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics)); + _activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _options = (options ?? throw new ArgumentNullException(nameof(options))).Value; + } + + public async Task VerifyAsync(AttestorVerificationRequest request, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + var started = Stopwatch.StartNew(); + var entry = await ResolveEntryAsync(request, cancellationToken).ConfigureAwait(false); + if (entry is null) + { + throw new AttestorVerificationException("not_found", "No attestor entry matched the supplied query."); + } + + var subjectTag = NormalizeTag(entry.Artifact.Sha256); + var issuerTag = NormalizeTag(entry.SignerIdentity.Issuer); + var policyId = _options.Verification.PolicyId; + using var activity = _activitySource.StartVerification(subjectTag, issuerTag, policyId); + + var evaluationTime = _timeProvider.GetUtcNow(); + var report = await _engine.EvaluateAsync(entry, request.Bundle, evaluationTime, cancellationToken).ConfigureAwait(false); + + var result = report.Succeeded ? 
"ok" : "failed"; + activity?.SetTag(AttestorTelemetryTags.Result, result); + if (!report.Succeeded) + { + activity?.SetStatus(ActivityStatusCode.Error, string.Join(",", report.Issues)); + } + + _metrics.VerifyTotal.Add( + 1, + new KeyValuePair(AttestorTelemetryTags.Subject, subjectTag), + new KeyValuePair(AttestorTelemetryTags.Issuer, issuerTag), + new KeyValuePair(AttestorTelemetryTags.Policy, policyId), + new KeyValuePair(AttestorTelemetryTags.Result, result)); + + started.Stop(); + _metrics.VerifyLatency.Record( + started.Elapsed.TotalSeconds, + new KeyValuePair(AttestorTelemetryTags.Subject, subjectTag), + new KeyValuePair(AttestorTelemetryTags.Issuer, issuerTag), + new KeyValuePair(AttestorTelemetryTags.Policy, policyId), + new KeyValuePair(AttestorTelemetryTags.Result, result)); + + return new AttestorVerificationResult + { + Ok = report.Succeeded, + Uuid = entry.RekorUuid, + Index = entry.Index, + LogUrl = entry.Log.Url, + Status = entry.Status, + Issues = report.Issues, + CheckedAt = evaluationTime, + Report = report + }; + } + + public Task GetEntryAsync(string rekorUuid, bool refreshProof, CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(rekorUuid)) + { + throw new ArgumentException("Value cannot be null or whitespace.", nameof(rekorUuid)); + } + + return ResolveEntryByUuidAsync(rekorUuid, refreshProof, cancellationToken); + } + + private async Task ResolveEntryAsync(AttestorVerificationRequest request, CancellationToken cancellationToken) + { + var refresh = request.RefreshProof && !request.Offline; + + if (!string.IsNullOrWhiteSpace(request.Uuid)) + { + return await ResolveEntryByUuidAsync(request.Uuid, refresh, cancellationToken).ConfigureAwait(false); + } + + if (request.Bundle is not null) + { + var canonical = await _canonicalizer.CanonicalizeAsync(new AttestorSubmissionRequest + { + Bundle = request.Bundle, + Meta = new AttestorSubmissionRequest.SubmissionMeta + { + Artifact = new 
AttestorSubmissionRequest.ArtifactInfo() + } + }, cancellationToken).ConfigureAwait(false); + + var bundleSha = Convert.ToHexString(SHA256.HashData(canonical)).ToLowerInvariant(); + return await ResolveEntryByBundleShaAsync(bundleSha, refresh, cancellationToken).ConfigureAwait(false); + } + + if (!string.IsNullOrWhiteSpace(request.ArtifactSha256)) + { + return await ResolveEntryByArtifactAsync(request.ArtifactSha256, refresh, cancellationToken).ConfigureAwait(false); + } + + throw new AttestorVerificationException("invalid_query", "At least one of uuid, bundle, or artifactSha256 must be provided."); + } + + private async Task ResolveEntryByUuidAsync(string uuid, bool refreshProof, CancellationToken cancellationToken) + { + var entry = await _repository.GetByUuidAsync(uuid, cancellationToken).ConfigureAwait(false); + if (entry is null || !refreshProof) + { + return entry; + } + + return await RefreshProofAsync(entry, uuid, cancellationToken).ConfigureAwait(false); + } + + private async Task ResolveEntryByBundleShaAsync(string bundleSha, bool refreshProof, CancellationToken cancellationToken) + { + var entry = await _repository.GetByBundleShaAsync(bundleSha, cancellationToken).ConfigureAwait(false); + if (entry is null || !refreshProof) + { + return entry; + } + + return await ResolveEntryByUuidAsync(entry.RekorUuid, true, cancellationToken).ConfigureAwait(false); + } + + private async Task ResolveEntryByArtifactAsync(string artifactSha256, bool refreshProof, CancellationToken cancellationToken) + { + var entries = await _repository.GetByArtifactShaAsync(artifactSha256, cancellationToken).ConfigureAwait(false); + var entry = entries.OrderByDescending(e => e.CreatedAt).FirstOrDefault(); + if (entry is null) + { + return null; + } + + return refreshProof + ? 
await ResolveEntryByUuidAsync(entry.RekorUuid, true, cancellationToken).ConfigureAwait(false) + : entry; + } + + private async Task RefreshProofAsync(AttestorEntry entry, string uuid, CancellationToken cancellationToken) + { + string backendName = entry.Log.Backend; + if (string.IsNullOrWhiteSpace(backendName)) + { + backendName = "primary"; + } + var backendOptions = string.Equals(backendName, "mirror", StringComparison.OrdinalIgnoreCase) + ? _options.Rekor.Mirror + : _options.Rekor.Primary; + var backend = BuildBackend(backendName ?? "primary", backendOptions); + + using var activity = _activitySource.StartProofRefresh(backend.Name, _options.Verification.PolicyId); + + try + { + var proof = await _rekorClient.GetProofAsync(uuid, backend, cancellationToken).ConfigureAwait(false); + if (proof is null) + { + _metrics.ProofFetchTotal.Add(1, new KeyValuePair(AttestorTelemetryTags.Result, "missing")); + return entry; + } + + var witness = await RefreshWitnessAsync(entry, backendName!, backendOptions, proof, cancellationToken).ConfigureAwait(false); + var updated = CloneWithProof(entry, proof.ToProofDescriptor(), ConvertWitness(witness)); + await _repository.SaveAsync(updated, cancellationToken).ConfigureAwait(false); + _metrics.ProofFetchTotal.Add(1, new KeyValuePair(AttestorTelemetryTags.Result, "ok")); + return updated; + } + catch (Exception ex) + { + _metrics.ProofFetchTotal.Add(1, new KeyValuePair(AttestorTelemetryTags.Result, "error")); + activity?.SetStatus(ActivityStatusCode.Error, ex.Message); + _logger.LogWarning(ex, "Failed to refresh proof for entry {Uuid}", uuid); + return entry; + } + } + + private async Task RefreshWitnessAsync( + AttestorEntry entry, + string backend, + AttestorOptions.RekorBackendOptions backendOptions, + RekorProofResponse? 
proof, + CancellationToken cancellationToken) + { + if (!_options.TransparencyWitness.Enabled || string.IsNullOrWhiteSpace(_options.TransparencyWitness.BaseUrl)) + { + return null; + } + + var logUrl = entry.Log.Url; + if (string.IsNullOrWhiteSpace(logUrl)) + { + logUrl = backendOptions.Url; + } + + if (string.IsNullOrWhiteSpace(logUrl)) + { + return null; + } + + var request = new TransparencyWitnessRequest( + entry.RekorUuid, + backend, + new Uri(logUrl, UriKind.Absolute), + proof?.Checkpoint?.RootHash); + + try + { + return await _witnessClient.GetObservationAsync(request, cancellationToken).ConfigureAwait(false); + } + catch (OperationCanceledException) + { + throw; + } + catch (Exception ex) + { + _logger.LogDebug(ex, "Transparency witness refresh failed for {Uuid} on backend {Backend}", entry.RekorUuid, backend); + return new TransparencyWitnessObservation + { + Aggregator = _options.TransparencyWitness.AggregatorId ?? backend, + Status = "error", + RootHash = proof?.Checkpoint?.RootHash, + RetrievedAt = _timeProvider.GetUtcNow(), + Error = ex.Message + }; + } + } + + private static AttestorEntry.WitnessDescriptor? ConvertWitness(TransparencyWitnessObservation? witness) + { + if (witness is null) + { + return null; + } + + return new AttestorEntry.WitnessDescriptor + { + Aggregator = witness.Aggregator ?? string.Empty, + Status = string.IsNullOrWhiteSpace(witness.Status) ? "unknown" : witness.Status!, + RootHash = witness.RootHash, + RetrievedAt = witness.RetrievedAt, + Statement = witness.Statement, + Signature = witness.Signature, + KeyId = witness.KeyId, + Error = witness.Error + }; + } + + private static AttestorEntry CloneWithProof(AttestorEntry entry, AttestorEntry.ProofDescriptor? proof, AttestorEntry.WitnessDescriptor? witness) + { + return new AttestorEntry + { + RekorUuid = entry.RekorUuid, + Artifact = entry.Artifact, + BundleSha256 = entry.BundleSha256, + Index = entry.Index, + Proof = proof, + Witness = witness ?? 
entry.Witness, + Log = entry.Log, + CreatedAt = entry.CreatedAt, + Status = entry.Status, + SignerIdentity = entry.SignerIdentity + }; + } + + private static RekorBackend BuildBackend(string name, AttestorOptions.RekorBackendOptions options) + { + if (string.IsNullOrWhiteSpace(options.Url)) + { + throw new InvalidOperationException($"Rekor backend '{name}' is not configured."); + } + + return new RekorBackend + { + Name = name, + Url = new Uri(options.Url, UriKind.Absolute), + ProofTimeout = TimeSpan.FromMilliseconds(options.ProofTimeoutMs), + PollInterval = TimeSpan.FromMilliseconds(options.PollIntervalMs), + MaxAttempts = options.MaxAttempts + }; + } + + private static string NormalizeTag(string? value) => string.IsNullOrWhiteSpace(value) ? "unknown" : value; +} + +internal static class RekorProofResponseExtensions +{ + public static AttestorEntry.ProofDescriptor ToProofDescriptor(this RekorProofResponse response) + { + return new AttestorEntry.ProofDescriptor + { + Checkpoint = response.Checkpoint is null ? null : new AttestorEntry.CheckpointDescriptor + { + Origin = response.Checkpoint.Origin, + Size = response.Checkpoint.Size, + RootHash = response.Checkpoint.RootHash, + Timestamp = response.Checkpoint.Timestamp + }, + Inclusion = response.Inclusion is null ? 
null : new AttestorEntry.InclusionDescriptor + { + LeafHash = response.Inclusion.LeafHash, + Path = response.Inclusion.Path + } + }; + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/CachedAttestorVerificationService.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/CachedAttestorVerificationService.cs new file mode 100644 index 00000000..3cd36591 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/CachedAttestorVerificationService.cs @@ -0,0 +1,96 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Observability; +using StellaOps.Attestor.Core.Storage; +using StellaOps.Attestor.Core.Verification; + +namespace StellaOps.Attestor.Infrastructure.Verification; + +internal sealed class CachedAttestorVerificationService : IAttestorVerificationService +{ + private readonly IAttestorVerificationService _inner; + private readonly IAttestorVerificationCache _cache; + private readonly AttestorMetrics _metrics; + private readonly ILogger _logger; + private readonly bool _cacheEnabled; + + public CachedAttestorVerificationService( + IAttestorVerificationService inner, + IAttestorVerificationCache cache, + AttestorMetrics metrics, + IOptions options, + ILogger logger) + { + _inner = inner ?? throw new ArgumentNullException(nameof(inner)); + _cache = cache ?? throw new ArgumentNullException(nameof(cache)); + _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + ArgumentNullException.ThrowIfNull(options); + _cacheEnabled = options.Value.Cache.Verification.Enabled; + } + + public async Task VerifyAsync(AttestorVerificationRequest request, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + CacheDescriptor? cacheDescriptor = null; + if (_cacheEnabled && TryBuildDescriptor(request, out var descriptor)) + { + cacheDescriptor = descriptor; + _metrics.VerifyCacheLookupTotal.Add(1, new KeyValuePair("status", "lookup")); + + var cached = await _cache.GetAsync(descriptor.Subject, descriptor.EnvelopeId, descriptor.PolicyVersion, cancellationToken).ConfigureAwait(false); + if (cached is not null) + { + _metrics.VerifyCacheHitTotal.Add(1, new KeyValuePair("status", "hit")); + _logger.LogDebug("Verification cache hit for subject {Subject} envelope {Envelope} policy {Policy}.", descriptor.Subject, descriptor.EnvelopeId, descriptor.PolicyVersion); + return cached; + } + + _logger.LogDebug("Verification cache miss for subject {Subject} envelope {Envelope} policy {Policy}.", descriptor.Subject, descriptor.EnvelopeId, descriptor.PolicyVersion); + } + + var result = await _inner.VerifyAsync(request, cancellationToken).ConfigureAwait(false); + + if (cacheDescriptor is not null) + { + await _cache.SetAsync(cacheDescriptor.Value.Subject, cacheDescriptor.Value.EnvelopeId, cacheDescriptor.Value.PolicyVersion, result, cancellationToken).ConfigureAwait(false); + } + + return result; + } + + public Task GetEntryAsync(string rekorUuid, bool refreshProof, CancellationToken cancellationToken = default) => + _inner.GetEntryAsync(rekorUuid, refreshProof, cancellationToken); + + private static bool TryBuildDescriptor(AttestorVerificationRequest request, out CacheDescriptor descriptor) + { + descriptor = default; + + if (request.RefreshProof) + { + return false; + } + + var subject = Normalize(request.Subject); + var envelopeId = 
Normalize(request.EnvelopeId); + var policyVersion = Normalize(request.PolicyVersion); + + if (string.IsNullOrEmpty(subject) || string.IsNullOrEmpty(envelopeId) || string.IsNullOrEmpty(policyVersion)) + { + return false; + } + + descriptor = new CacheDescriptor(subject, envelopeId, policyVersion); + return true; + } + + private static string Normalize(string? value) => string.IsNullOrWhiteSpace(value) ? string.Empty : value.Trim(); + + private readonly record struct CacheDescriptor(string Subject, string EnvelopeId, string PolicyVersion); +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/InMemoryAttestorVerificationCache.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/InMemoryAttestorVerificationCache.cs new file mode 100644 index 00000000..79ed8a20 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/InMemoryAttestorVerificationCache.cs @@ -0,0 +1,115 @@ +using System; +using System.Collections.Concurrent; +using System.Globalization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Verification; + +namespace StellaOps.Attestor.Infrastructure.Verification; + +internal sealed class InMemoryAttestorVerificationCache : IAttestorVerificationCache +{ + private readonly IMemoryCache _cache; + private readonly ILogger _logger; + private readonly TimeSpan _ttl; + private readonly ConcurrentDictionary> _subjectIndex = new(StringComparer.Ordinal); + + public InMemoryAttestorVerificationCache( + IMemoryCache cache, + IOptions options, + ILogger logger) + { + _cache = cache ?? throw new ArgumentNullException(nameof(cache)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + + ArgumentNullException.ThrowIfNull(options); + var ttlSeconds = Math.Max(1, options.Value.Cache.Verification.TtlSeconds); + _ttl = TimeSpan.FromSeconds(ttlSeconds); + } + + public Task GetAsync(string subject, string envelopeId, string policyVersion, CancellationToken cancellationToken = default) + { + var cacheKey = BuildCacheKey(subject, envelopeId, policyVersion); + if (_cache.TryGetValue(cacheKey, out AttestorVerificationResult? result) && result is not null) + { + return Task.FromResult(result); + } + + return Task.FromResult(null); + } + + public Task SetAsync(string subject, string envelopeId, string policyVersion, AttestorVerificationResult result, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(result); + + var cacheKey = BuildCacheKey(subject, envelopeId, policyVersion); + var subjectKey = Normalize(subject); + + var entryOptions = new MemoryCacheEntryOptions + { + AbsoluteExpirationRelativeToNow = _ttl, + Size = 1 + }; + + entryOptions.RegisterPostEvictionCallback((key, _, _, state) => + { + if (key is string removedKey && state is string removedSubject) + { + RemoveFromIndex(removedSubject, removedKey); + } + }, subjectKey); + + _cache.Set(cacheKey, result, entryOptions); + + var keys = _subjectIndex.GetOrAdd(subjectKey, _ => new ConcurrentDictionary(StringComparer.Ordinal)); + keys[cacheKey] = 0; + + _logger.LogDebug("Cached verification result for subject {Subject} envelope {Envelope} policy {Policy} with TTL {TtlSeconds}s.", + subjectKey, Normalize(envelopeId), Normalize(policyVersion), _ttl.TotalSeconds.ToString(CultureInfo.InvariantCulture)); + + return Task.CompletedTask; + } + + public Task InvalidateSubjectAsync(string subject, CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(subject)) + { + return Task.CompletedTask; + } + + var subjectKey = Normalize(subject); + if (!_subjectIndex.TryRemove(subjectKey, out var keys)) + { 
+ return Task.CompletedTask; + } + + foreach (var entry in keys.Keys) + { + _cache.Remove(entry); + } + + _logger.LogDebug("Invalidated verification cache for subject {Subject}.", subjectKey); + return Task.CompletedTask; + } + + private static string BuildCacheKey(string subject, string envelopeId, string policyVersion) => + string.Concat(Normalize(subject), "|", Normalize(envelopeId), "|", Normalize(policyVersion)); + + private static string Normalize(string value) => (value ?? string.Empty).Trim(); + + private void RemoveFromIndex(string subject, string cacheKey) + { + if (_subjectIndex.TryGetValue(subject, out var keys)) + { + keys.TryRemove(cacheKey, out _); + if (keys.IsEmpty) + { + _subjectIndex.TryRemove(subject, out _); + } + } + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/NoOpAttestorVerificationCache.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/NoOpAttestorVerificationCache.cs new file mode 100644 index 00000000..e51882c6 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/NoOpAttestorVerificationCache.cs @@ -0,0 +1,17 @@ +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Attestor.Core.Verification; + +namespace StellaOps.Attestor.Infrastructure.Verification; + +internal sealed class NoOpAttestorVerificationCache : IAttestorVerificationCache +{ + public Task GetAsync(string subject, string envelopeId, string policyVersion, CancellationToken cancellationToken = default) + => Task.FromResult(null); + + public Task SetAsync(string subject, string envelopeId, string policyVersion, AttestorVerificationResult result, CancellationToken cancellationToken = default) + => Task.CompletedTask; + + public Task InvalidateSubjectAsync(string subject, CancellationToken cancellationToken = default) + => Task.CompletedTask; +} diff --git 
a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestationBundleEndpointsTests.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestationBundleEndpointsTests.cs new file mode 100644 index 00000000..856cb82c --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestationBundleEndpointsTests.cs @@ -0,0 +1,271 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Net.Http.Json; +using System.Security.Claims; +using System.Text; +using System.Text.Encodings.Web; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Authentication; +using Microsoft.AspNetCore.Mvc.Testing; +using Microsoft.AspNetCore.Hosting; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Options; +using Microsoft.AspNetCore.TestHost; +using MongoDB.Driver; +using StackExchange.Redis; +using StellaOps.Attestor.Core.Offline; +using StellaOps.Attestor.Core.Storage; +using StellaOps.Attestor.Infrastructure.Storage; +using StellaOps.Attestor.Infrastructure.Offline; +using StellaOps.Attestor.Core.Signing; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Core.Transparency; +using StellaOps.Attestor.Core.Bulk; +using StellaOps.Attestor.WebService; +using StellaOps.Attestor.Tests.Support; +using Xunit; + +namespace StellaOps.Attestor.Tests; + +public sealed class AttestationBundleEndpointsTests +{ + [Fact] + public async Task ExportEndpoint_RequiresAuthentication() + { + using var factory = new AttestorWebApplicationFactory(); + var client = factory.CreateClient(); + + var response = await client.PostAsync("/api/v1/attestations:export", new 
StringContent("{}", Encoding.UTF8, "application/json")); + + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + } + + [Fact] + public async Task ExportAndImportEndpoints_RoundTripBundles() + { + using var factory = new AttestorWebApplicationFactory(); + var client = factory.CreateClient(); + AttachAuth(client); + + var canonicalBytes = Encoding.UTF8.GetBytes("{\"payloadType\":\"application/vnd.test\"}"); + var bundleHashBytes = System.Security.Cryptography.SHA256.HashData(canonicalBytes); + var bundleSha = Convert.ToHexString(bundleHashBytes).ToLowerInvariant(); + + using (var scope = factory.Services.CreateScope()) + { + var repository = scope.ServiceProvider.GetRequiredService(); + var archiveStore = scope.ServiceProvider.GetRequiredService(); + + var entry = new AttestorEntry + { + RekorUuid = "uuid-export-01", + Artifact = new AttestorEntry.ArtifactDescriptor + { + Sha256 = "feedface", + Kind = "sbom" + }, + BundleSha256 = bundleSha, + CreatedAt = DateTimeOffset.UtcNow, + Status = "included", + Log = new AttestorEntry.LogDescriptor + { + Backend = "primary", + Url = "https://rekor.example/log/entries/uuid-export-01" + }, + SignerIdentity = new AttestorEntry.SignerIdentityDescriptor + { + Issuer = "tenant-a" + } + }; + + await repository.SaveAsync(entry); + await archiveStore.ArchiveBundleAsync(new AttestorArchiveBundle + { + RekorUuid = entry.RekorUuid, + ArtifactSha256 = entry.Artifact.Sha256, + BundleSha256 = entry.BundleSha256, + CanonicalBundleJson = canonicalBytes, + ProofJson = Array.Empty(), + Metadata = new Dictionary + { + ["status"] = entry.Status + } + }); + + var canonicalBytes2 = Encoding.UTF8.GetBytes("{\"payloadType\":\"application/vnd.test\",\"sequence\":2}"); + var bundleSha2 = Convert.ToHexString(System.Security.Cryptography.SHA256.HashData(canonicalBytes2)).ToLowerInvariant(); + var secondEntry = new AttestorEntry + { + RekorUuid = "uuid-export-02", + Artifact = new AttestorEntry.ArtifactDescriptor + { + Sha256 = "deadcafe", + 
Kind = "sbom" + }, + BundleSha256 = bundleSha2, + CreatedAt = DateTimeOffset.UtcNow.AddMinutes(1), + Status = "included", + Log = new AttestorEntry.LogDescriptor + { + Backend = "primary", + Url = "https://rekor.example/log/entries/uuid-export-02" + }, + SignerIdentity = new AttestorEntry.SignerIdentityDescriptor + { + Issuer = "tenant-a" + } + }; + + await repository.SaveAsync(secondEntry); + await archiveStore.ArchiveBundleAsync(new AttestorArchiveBundle + { + RekorUuid = secondEntry.RekorUuid, + ArtifactSha256 = secondEntry.Artifact.Sha256, + BundleSha256 = secondEntry.BundleSha256, + CanonicalBundleJson = canonicalBytes2, + ProofJson = Array.Empty(), + Metadata = new Dictionary + { + ["status"] = secondEntry.Status + } + }); + } + + var exportResponse = await client.PostAsJsonAsync("/api/v1/attestations:export", new + { + scope = "tenant-a", + limit = 1 + }); + + exportResponse.EnsureSuccessStatusCode(); + var exportPayload = await exportResponse.Content.ReadAsStringAsync(); + Assert.False(string.IsNullOrWhiteSpace(exportPayload), "Export response payload was empty."); + var package = JsonSerializer.Deserialize( + exportPayload, + new JsonSerializerOptions(JsonSerializerDefaults.Web) { PropertyNameCaseInsensitive = true }); + Assert.NotNull(package); + Assert.Single(package!.Items); + Assert.NotNull(package.ContinuationToken); + + var importResponse = await client.PostAsJsonAsync("/api/v1/attestations:import", package); + importResponse.EnsureSuccessStatusCode(); + var importPayload = await importResponse.Content.ReadAsStringAsync(); + Assert.False(string.IsNullOrWhiteSpace(importPayload), "Import response payload was empty."); + var importResult = JsonSerializer.Deserialize( + importPayload, + new JsonSerializerOptions(JsonSerializerDefaults.Web) { PropertyNameCaseInsensitive = true }); + Assert.NotNull(importResult); + Assert.Equal(0, importResult!.Imported); + Assert.Equal(1, importResult.Updated); + Assert.Empty(importResult.Issues); + } + + private static 
void AttachAuth(HttpClient client) + { + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", "test-token"); + } +} + +internal sealed class AttestorWebApplicationFactory : WebApplicationFactory +{ + protected override void ConfigureWebHost(IWebHostBuilder builder) + { + builder.UseEnvironment("Testing"); + builder.ConfigureAppConfiguration((_, configuration) => + { + var settings = new Dictionary + { + ["attestor:s3:enabled"] = "true", + ["attestor:s3:bucket"] = "attestor-test", + ["attestor:s3:endpoint"] = "http://localhost", + ["attestor:s3:useTls"] = "false", + ["attestor:redis:url"] = string.Empty, + ["attestor:mongo:uri"] = "mongodb://localhost:27017/attestor-tests", + ["attestor:mongo:database"] = "attestor-tests" + }; + + configuration.AddInMemoryCollection(settings!); + }); + + builder.ConfigureServices((context, services) => + { + services.RemoveAll(); + services.RemoveAll(); + services.RemoveAll(); + services.RemoveAll(); + services.RemoveAll(); + services.RemoveAll(); + services.RemoveAll(); + services.RemoveAll(); + services.RemoveAll(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + var authBuilder = services.AddAuthentication(options => + { + options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName; + options.DefaultChallengeScheme = TestAuthHandler.SchemeName; + }); + + authBuilder.AddScheme( + authenticationScheme: TestAuthHandler.SchemeName, + displayName: null, + configureOptions: options => { options.TimeProvider ??= TimeProvider.System; }); +#pragma warning disable CS0618 + services.TryAddSingleton(); +#pragma warning restore CS0618 + }); + } +} + +internal sealed class TestAuthHandler : AuthenticationHandler +{ + public const string SchemeName = "Test"; + + #pragma warning disable CS0618 + public TestAuthHandler( + IOptionsMonitor 
options, + ILoggerFactory logger, + UrlEncoder encoder, + ISystemClock clock) + : base(options, logger, encoder, clock) + { + } + #pragma warning restore CS0618 + + protected override Task HandleAuthenticateAsync() + { + if (!Request.Headers.TryGetValue("Authorization", out var header) || header.Count == 0) + { + return Task.FromResult(AuthenticateResult.NoResult()); + } + + var claims = new[] + { + new Claim(ClaimTypes.NameIdentifier, "test-user"), + new Claim("scope", "attestor.read attestor.write attestor.verify") + }; + + var schemeName = Scheme?.Name ?? SchemeName; + var identity = new ClaimsIdentity(claims, schemeName); + var principal = new ClaimsPrincipal(identity); + var ticket = new AuthenticationTicket(principal, schemeName); + return Task.FromResult(AuthenticateResult.Success(ticket)); + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestationQueryTests.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestationQueryTests.cs new file mode 100644 index 00000000..e23529ed --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestationQueryTests.cs @@ -0,0 +1,109 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Http.HttpResults; +using StellaOps.Attestor.Core.Storage; +using StellaOps.Attestor.WebService.Contracts; +using Xunit; + +namespace StellaOps.Attestor.Tests; + +public sealed class AttestationQueryTests +{ + [Fact] + public async Task QueryAsync_FiltersAndPagination_Work() + { + var repository = new InMemoryAttestorEntryRepository(); + var origin = DateTimeOffset.UtcNow; + + for (var index = 0; index < 10; index++) + { + var scope = index % 2 == 0 ? "tenant-a" : "tenant-b"; + var type = index % 2 == 0 ? 
"sbom" : "report"; + var issuer = $"issuer-{index % 2}"; + + var entry = new AttestorEntry + { + RekorUuid = $"uuid-{index:D2}", + BundleSha256 = $"bundle-{index:D2}", + Artifact = new AttestorEntry.ArtifactDescriptor + { + Sha256 = $"sha-{index:D2}", + Kind = type, + ImageDigest = $"sha256:{index:D2}", + SubjectUri = $"pkg:example/app@{index}" + }, + CreatedAt = origin.AddMinutes(index), + Status = "included", + Log = new AttestorEntry.LogDescriptor + { + Backend = "primary", + Url = $"https://rekor.example/entries/{index:D2}" + }, + SignerIdentity = new AttestorEntry.SignerIdentityDescriptor + { + Mode = "keyless", + Issuer = scope, + SubjectAlternativeName = issuer, + KeyId = $"kid-{index}" + } + }; + + await repository.SaveAsync(entry); + } + + var query = new AttestorEntryQuery + { + Scope = "tenant-a", + Type = "sbom", + PageSize = 3 + }; + + var firstPage = await repository.QueryAsync(query); + Assert.Equal(3, firstPage.Items.Count); + Assert.NotNull(firstPage.ContinuationToken); + Assert.All(firstPage.Items, item => + { + Assert.Equal("tenant-a", item.SignerIdentity.Issuer); + Assert.Equal("sbom", item.Artifact.Kind); + }); + var firstPageIds = firstPage.Items.Select(item => item.RekorUuid).ToHashSet(StringComparer.Ordinal); + + var secondPage = await repository.QueryAsync(new AttestorEntryQuery + { + Scope = query.Scope, + Type = query.Type, + PageSize = query.PageSize, + ContinuationToken = firstPage.ContinuationToken + }); + + Assert.True(secondPage.Items.Count > 0); + Assert.All(secondPage.Items, item => Assert.DoesNotContain(item.RekorUuid, firstPageIds)); + } + + [Fact] + public void TryBuildQuery_ValidatesInputs() + { + var httpContext = new DefaultHttpContext(); + httpContext.Request.QueryString = new QueryString("?subject=sha-01&type=sbom&issuer=issuer-0&scope=tenant-a&pageSize=25&createdAfter=2025-01-01T00:00:00Z&createdBefore=2025-01-31T00:00:00Z"); + + var success = AttestationListContracts.TryBuildQuery(httpContext.Request, out var query, out 
var error); + Assert.True(success); + Assert.Null(error); + Assert.Equal("sha-01", query.Subject); + Assert.Equal("sbom", query.Type); + Assert.Equal("issuer-0", query.Issuer); + Assert.Equal("tenant-a", query.Scope); + Assert.Equal(25, query.PageSize); + Assert.Equal(DateTimeOffset.Parse("2025-01-01T00:00:00Z"), query.CreatedAfter); + Assert.Equal(DateTimeOffset.Parse("2025-01-31T00:00:00Z"), query.CreatedBefore); + + httpContext.Request.QueryString = new QueryString("?pageSize=-5"); + success = AttestationListContracts.TryBuildQuery(httpContext.Request, out _, out error); + Assert.False(success); + var problem = Assert.IsType(error); + Assert.Equal(StatusCodes.Status400BadRequest, problem.StatusCode); + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorEntryRepositoryTests.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorEntryRepositoryTests.cs new file mode 100644 index 00000000..78ae141d --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorEntryRepositoryTests.cs @@ -0,0 +1,117 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using StellaOps.Attestor.Core.Storage; +using Xunit; + +namespace StellaOps.Attestor.Tests; + +public sealed class AttestorEntryRepositoryTests +{ + [Fact] + public async Task QueryAsync_FiltersAndPagination_Work() + { + var repository = new InMemoryAttestorEntryRepository(); + var origin = DateTimeOffset.UtcNow; + + for (var index = 0; index < 6; index++) + { + var scope = index % 2 == 0 ? "tenant-a" : "tenant-b"; + var kind = index % 2 == 0 ? 
"sbom" : "report"; + var entry = CreateEntry(index, origin.AddMinutes(index), scope, kind); + + await repository.SaveAsync(entry); + } + + var firstPage = await repository.QueryAsync(new AttestorEntryQuery + { + Scope = "tenant-a", + Type = "sbom", + PageSize = 2 + }); + + Assert.Equal(2, firstPage.Items.Count); + Assert.NotNull(firstPage.ContinuationToken); + Assert.All(firstPage.Items, item => + { + Assert.Equal("tenant-a", item.SignerIdentity.Issuer); + Assert.Equal("sbom", item.Artifact.Kind); + }); + + var seen = firstPage.Items.Select(item => item.RekorUuid).ToHashSet(StringComparer.Ordinal); + + var secondPage = await repository.QueryAsync(new AttestorEntryQuery + { + Scope = "tenant-a", + Type = "sbom", + PageSize = 2, + ContinuationToken = firstPage.ContinuationToken + }); + + Assert.True(secondPage.Items.Count > 0); + Assert.All(secondPage.Items, item => Assert.DoesNotContain(item.RekorUuid, seen)); + } + + [Fact] + public async Task SaveAsync_EnforcesUniqueBundleSha() + { + var repository = new InMemoryAttestorEntryRepository(); + var now = DateTimeOffset.UtcNow; + + var entryA = CreateEntry(100, now, "tenant-a", "sbom"); + var entryB = CreateEntry(200, now.AddMinutes(1), "tenant-b", "report", entryA.BundleSha256); + + await repository.SaveAsync(entryA); + + await Assert.ThrowsAsync(() => repository.SaveAsync(entryB)); + } + + private static AttestorEntry CreateEntry(int index, DateTimeOffset createdAt, string scope, string kind, string? bundleShaOverride = null) + { + var uuid = $"uuid-{index:D4}"; + return new AttestorEntry + { + RekorUuid = uuid, + BundleSha256 = bundleShaOverride ?? 
MakeHex(10_000 + index), + Artifact = new AttestorEntry.ArtifactDescriptor + { + Sha256 = MakeHex(20_000 + index), + Kind = kind, + ImageDigest = $"sha256:{index:D4}", + SubjectUri = $"pkg:example/app@{index}" + }, + Index = index, + Proof = null, + Log = new AttestorEntry.LogDescriptor + { + Backend = "primary", + Url = $"https://rekor.example/entries/{index:D4}", + LogId = null + }, + CreatedAt = createdAt, + Status = "included", + SignerIdentity = new AttestorEntry.SignerIdentityDescriptor + { + Mode = "keyless", + Issuer = scope, + SubjectAlternativeName = $"issuer-{index % 3}", + KeyId = $"kid-{index:D4}" + }, + Mirror = new AttestorEntry.LogReplicaDescriptor + { + Backend = "mirror", + Url = $"https://rekor-mirror.example/{index:D4}", + Uuid = $"mirror-{uuid}", + Index = index, + Status = "pending", + Proof = null, + LogId = null, + Error = null + } + }; + } + + private static string MakeHex(int seed) + => Convert.ToHexString(BitConverter.GetBytes(seed)).ToLowerInvariant().PadLeft(64, '0'); +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSigningServiceTests.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSigningServiceTests.cs new file mode 100644 index 00000000..b5afacce --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSigningServiceTests.cs @@ -0,0 +1,239 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Security.Cryptography; +using System.Text; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Observability; +using StellaOps.Attestor.Core.Signing; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Infrastructure.Signing; +using StellaOps.Attestor.Infrastructure.Submission; +using StellaOps.Attestor.Tests; +using StellaOps.Cryptography; +using StellaOps.Cryptography.Kms; +using 
Xunit; + +namespace StellaOps.Attestor.Tests; + +public sealed class AttestorSigningServiceTests : IDisposable +{ + private readonly List _temporaryPaths = new(); + + [Fact] + public async Task SignAsync_Ed25519Key_ReturnsValidSignature() + { + var privateKey = new byte[32]; + for (var i = 0; i < privateKey.Length; i++) + { + privateKey[i] = (byte)i; + } + + var options = Options.Create(new AttestorOptions + { + Signing = new AttestorOptions.SigningOptions + { + Keys = + { + new AttestorOptions.SigningKeyOptions + { + KeyId = "ed25519-1", + Algorithm = StellaOps.Cryptography.SignatureAlgorithms.Ed25519, + Mode = "keyful", + Material = Convert.ToBase64String(privateKey), + MaterialFormat = "base64" + } + } + } + }); + + using var metrics = new AttestorMetrics(); + using var registry = new AttestorSigningKeyRegistry(options, TimeProvider.System, NullLogger.Instance); + var auditSink = new InMemoryAttestorAuditSink(); + var service = new AttestorSigningService( + registry, + new DefaultDsseCanonicalizer(), + auditSink, + metrics, + NullLogger.Instance, + TimeProvider.System); + + var payloadBytes = Encoding.UTF8.GetBytes("{}"); + var request = new AttestationSignRequest + { + KeyId = "ed25519-1", + PayloadType = "application/json", + PayloadBase64 = Convert.ToBase64String(payloadBytes), + Artifact = new AttestorSubmissionRequest.ArtifactInfo + { + Sha256 = new string('a', 64), + Kind = "sbom" + } + }; + + var context = new SubmissionContext + { + CallerSubject = "urn:subject", + CallerAudience = "attestor", + CallerClientId = "client", + CallerTenant = "tenant", + MtlsThumbprint = "thumbprint" + }; + + var result = await service.SignAsync(request, context); + + Assert.NotNull(result); + Assert.Equal("ed25519-1", result.KeyId); + Assert.Equal("keyful", result.Mode); + Assert.Equal("bouncycastle.ed25519", result.Provider); + Assert.False(string.IsNullOrWhiteSpace(result.Meta.BundleSha256)); + Assert.Single(result.Bundle.Dsse.Signatures); + + var signature = 
Convert.FromBase64String(result.Bundle.Dsse.Signatures[0].Signature); + var preAuth = DssePreAuthenticationEncoding.Compute(result.Bundle.Dsse.PayloadType, Convert.FromBase64String(result.Bundle.Dsse.PayloadBase64)); + var verifier = new Org.BouncyCastle.Crypto.Signers.Ed25519Signer(); + var privateParams = new Org.BouncyCastle.Crypto.Parameters.Ed25519PrivateKeyParameters(privateKey, 0); + verifier.Init(false, privateParams.GeneratePublicKey()); + verifier.BlockUpdate(preAuth, 0, preAuth.Length); + Assert.True(verifier.VerifySignature(signature)); + + Assert.Single(auditSink.Records); + Assert.Equal("sign", auditSink.Records[0].Action); + Assert.Equal("signed", auditSink.Records[0].Result); + } + + [Fact] + public async Task SignAsync_KmsKey_ProducesVerifiableSignature() + { + var kmsRoot = CreateTempDirectory(); + const string kmsPassword = "Test-Kms-Password!"; + const string kmsKeyId = "kms-key-1"; + const string kmsVersion = "v1"; + + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var keyParameters = ecdsa.ExportParameters(true); + var publicParameters = new ECParameters + { + Curve = keyParameters.Curve, + Q = keyParameters.Q + }; + + using (var kmsClient = new FileKmsClient(new FileKmsOptions + { + RootPath = kmsRoot, + Password = kmsPassword + })) + { + var material = new KmsKeyMaterial( + kmsKeyId, + kmsVersion, + KmsAlgorithms.Es256, + "P-256", + keyParameters.D!, + keyParameters.Q.X!, + keyParameters.Q.Y!, + DateTimeOffset.UtcNow); + await kmsClient.ImportAsync(kmsKeyId, material); + } + + var options = Options.Create(new AttestorOptions + { + Signing = new AttestorOptions.SigningOptions + { + Kms = new AttestorOptions.SigningKmsOptions + { + Enabled = true, + RootPath = kmsRoot, + Password = kmsPassword + }, + Keys = + { + new AttestorOptions.SigningKeyOptions + { + KeyId = kmsKeyId, + Algorithm = StellaOps.Cryptography.SignatureAlgorithms.Es256, + Mode = "kms", + ProviderKeyId = kmsKeyId, + KmsVersionId = kmsVersion + } + } + } + }); + 
+ using var metrics = new AttestorMetrics(); + using var registry = new AttestorSigningKeyRegistry(options, TimeProvider.System, NullLogger.Instance); + var auditSink = new InMemoryAttestorAuditSink(); + var service = new AttestorSigningService( + registry, + new DefaultDsseCanonicalizer(), + auditSink, + metrics, + NullLogger.Instance, + TimeProvider.System); + + var payload = Encoding.UTF8.GetBytes("{\"value\":1}"); + var request = new AttestationSignRequest + { + KeyId = kmsKeyId, + PayloadType = "application/json", + PayloadBase64 = Convert.ToBase64String(payload), + Artifact = new AttestorSubmissionRequest.ArtifactInfo + { + Sha256 = new string('b', 64), + Kind = "report" + } + }; + + var context = new SubmissionContext + { + CallerSubject = "urn:subject", + CallerAudience = "attestor", + CallerClientId = "signer-service", + CallerTenant = "tenant" + }; + + var result = await service.SignAsync(request, context); + Assert.Equal("kms", result.Mode); + Assert.Equal("kms", result.Provider); + Assert.False(string.IsNullOrWhiteSpace(result.Meta.BundleSha256)); + + var signature = Convert.FromBase64String(result.Bundle.Dsse.Signatures[0].Signature); + var preAuth = DssePreAuthenticationEncoding.Compute(result.Bundle.Dsse.PayloadType, Convert.FromBase64String(result.Bundle.Dsse.PayloadBase64)); + using var verifier = ECDsa.Create(publicParameters); + + Assert.True(verifier.VerifyData(preAuth, signature, HashAlgorithmName.SHA256)); + Assert.Single(auditSink.Records); + Assert.Equal("sign", auditSink.Records[0].Action); + Assert.Equal("signed", auditSink.Records[0].Result); + } + + private string CreateTempDirectory() + { + var path = Path.Combine(Path.GetTempPath(), "attestor-signing-tests", Guid.NewGuid().ToString("N")); + Directory.CreateDirectory(path); + _temporaryPaths.Add(path); + return path; + } + + public void Dispose() + { + foreach (var path in _temporaryPaths) + { + try + { + if (Directory.Exists(path)) + { + Directory.Delete(path, recursive: true); + } + } 
+ catch + { + // ignore cleanup failures in tests + } + } + _temporaryPaths.Clear(); + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorStorageTests.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorStorageTests.cs new file mode 100644 index 00000000..375277f4 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorStorageTests.cs @@ -0,0 +1,105 @@ +using System; +using System.Diagnostics; +using System.Threading.Tasks; +using StellaOps.Attestor.Core.Storage; +using StellaOps.Attestor.Infrastructure.Storage; +using Xunit; + +namespace StellaOps.Attestor.Tests; + +public sealed class AttestorStorageTests +{ + [Fact] + public async Task SaveAsync_PersistsAndFetchesEntry() + { + var repository = new InMemoryAttestorEntryRepository(); + var entry = CreateEntry(); + + await repository.SaveAsync(entry); + + var byUuid = await repository.GetByUuidAsync(entry.RekorUuid); + var byBundle = await repository.GetByBundleShaAsync(entry.BundleSha256); + var byArtifact = await repository.GetByArtifactShaAsync(entry.Artifact.Sha256); + + Assert.NotNull(byUuid); + Assert.NotNull(byBundle); + Assert.Equal(entry.RekorUuid, byBundle!.RekorUuid); + Assert.Single(byArtifact); + } + + [Fact] + public async Task SaveAsync_UpsertsExistingDocument() + { + var repository = new InMemoryAttestorEntryRepository(); + var entry = CreateEntry(status: "included"); + await repository.SaveAsync(entry); + + var updated = CreateEntry( + rekorUuid: entry.RekorUuid, + bundleSha: entry.BundleSha256, + artifactSha: entry.Artifact.Sha256, + status: "pending"); + + await repository.SaveAsync(updated); + + var stored = await repository.GetByUuidAsync(entry.RekorUuid); + Assert.NotNull(stored); + Assert.Equal("pending", stored!.Status); + } + + [Fact] + public async Task InMemoryDedupeStore_RoundTripsAndExpires() + { + var store = new InMemoryAttestorDedupeStore(); + var bundleSha = Guid.NewGuid().ToString("N"); + var uuid = 
Guid.NewGuid().ToString("N"); + + await store.SetAsync(bundleSha, uuid, TimeSpan.FromMilliseconds(50)); + + var first = await store.TryGetExistingAsync(bundleSha); + Assert.Equal(uuid, first); + + // fast-forward past expiry + await Task.Delay(TimeSpan.FromMilliseconds(75)); + var second = await store.TryGetExistingAsync(bundleSha); + Assert.Null(second); + } + + private static AttestorEntry CreateEntry( + string? rekorUuid = null, + string? bundleSha = null, + string? artifactSha = null, + string status = "included") + { + return new AttestorEntry + { + RekorUuid = rekorUuid ?? Guid.NewGuid().ToString("N"), + Artifact = new AttestorEntry.ArtifactDescriptor + { + Sha256 = artifactSha ?? "sha256:" + Guid.NewGuid().ToString("N"), + Kind = "sbom", + ImageDigest = "sha256:" + Guid.NewGuid().ToString("N"), + SubjectUri = "oci://registry.example/app" + }, + BundleSha256 = bundleSha ?? Guid.NewGuid().ToString("N"), + Index = 42, + Proof = null, + Log = new AttestorEntry.LogDescriptor + { + Backend = "primary", + Url = "https://rekor.example/api/v1/log", + LogId = "log-1" + }, + CreatedAt = DateTimeOffset.UtcNow, + Status = status, + SignerIdentity = new AttestorEntry.SignerIdentityDescriptor + { + Mode = "keyless", + Issuer = "tenant-a", + SubjectAlternativeName = "signer@example", + KeyId = "kid" + }, + Mirror = null + }; + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionServiceTests.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionServiceTests.cs index 03c8fb5e..2ae365fb 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionServiceTests.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionServiceTests.cs @@ -1,15 +1,20 @@ -using System; -using System.Security.Cryptography; -using System.Text; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using 
StellaOps.Attestor.Core.Options; -using StellaOps.Attestor.Core.Submission; -using StellaOps.Attestor.Core.Observability; -using StellaOps.Attestor.Infrastructure.Rekor; -using StellaOps.Attestor.Infrastructure.Storage; -using StellaOps.Attestor.Infrastructure.Submission; +using System; +using System.Collections.Generic; +using System.Security.Cryptography; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Core.Observability; +using StellaOps.Attestor.Core.Transparency; +using StellaOps.Attestor.Core.Verification; +using StellaOps.Attestor.Infrastructure.Rekor; +using StellaOps.Attestor.Infrastructure.Storage; +using StellaOps.Attestor.Infrastructure.Submission; +using StellaOps.Attestor.Tests.Support; using Xunit; namespace StellaOps.Attestor.Tests; @@ -41,22 +46,26 @@ public sealed class AttestorSubmissionServiceTests var validator = new AttestorSubmissionValidator(canonicalizer); var repository = new InMemoryAttestorEntryRepository(); var dedupeStore = new InMemoryAttestorDedupeStore(); - var rekorClient = new StubRekorClient(new NullLogger()); - var archiveStore = new NullAttestorArchiveStore(new NullLogger()); - var auditSink = new InMemoryAttestorAuditSink(); - var logger = new NullLogger(); - using var metrics = new AttestorMetrics(); - var service = new AttestorSubmissionService( - validator, - repository, - dedupeStore, - rekorClient, - archiveStore, - auditSink, - options, - logger, - TimeProvider.System, - metrics); + var rekorClient = new StubRekorClient(new NullLogger()); + var archiveStore = new NullAttestorArchiveStore(new NullLogger()); + var auditSink = new InMemoryAttestorAuditSink(); + var witnessClient = new TestTransparencyWitnessClient(); + var logger = new NullLogger(); + using var metrics = new AttestorMetrics(); + var 
verificationCache = new StubVerificationCache(); + var service = new AttestorSubmissionService( + validator, + repository, + dedupeStore, + rekorClient, + witnessClient, + archiveStore, + auditSink, + verificationCache, + options, + logger, + TimeProvider.System, + metrics); var request = CreateValidRequest(canonicalizer); var context = new SubmissionContext @@ -72,12 +81,14 @@ public sealed class AttestorSubmissionServiceTests var first = await service.SubmitAsync(request, context); var second = await service.SubmitAsync(request, context); - Assert.NotNull(first.Uuid); - Assert.Equal(first.Uuid, second.Uuid); - - var stored = await repository.GetByBundleShaAsync(request.Meta.BundleSha256); - Assert.NotNull(stored); - Assert.Equal(first.Uuid, stored!.RekorUuid); + Assert.NotNull(first.Uuid); + Assert.Equal(first.Uuid, second.Uuid); + + var stored = await repository.GetByBundleShaAsync(request.Meta.BundleSha256); + Assert.NotNull(stored); + Assert.Equal(first.Uuid, stored!.RekorUuid); + Assert.Single(verificationCache.InvalidatedSubjects); + Assert.Equal(request.Meta.Artifact.Sha256, verificationCache.InvalidatedSubjects[0]); } [Fact] @@ -115,22 +126,25 @@ public sealed class AttestorSubmissionServiceTests var repository = new InMemoryAttestorEntryRepository(); var dedupeStore = new InMemoryAttestorDedupeStore(); var rekorClient = new StubRekorClient(new NullLogger()); - var archiveStore = new NullAttestorArchiveStore(new NullLogger()); - var auditSink = new InMemoryAttestorAuditSink(); - var logger = new NullLogger(); - using var metrics = new AttestorMetrics(); - - var service = new AttestorSubmissionService( - validator, - repository, - dedupeStore, - rekorClient, - archiveStore, - auditSink, - options, - logger, - TimeProvider.System, - metrics); + var archiveStore = new NullAttestorArchiveStore(new NullLogger()); + var auditSink = new InMemoryAttestorAuditSink(); + var witnessClient = new TestTransparencyWitnessClient(); + var logger = new NullLogger(); + using 
var metrics = new AttestorMetrics(); + + var service = new AttestorSubmissionService( + validator, + repository, + dedupeStore, + rekorClient, + witnessClient, + archiveStore, + auditSink, + new StubVerificationCache(), + options, + logger, + TimeProvider.System, + metrics); var request = CreateValidRequest(canonicalizer); request.Meta.LogPreference = "mirror"; @@ -178,22 +192,25 @@ public sealed class AttestorSubmissionServiceTests var repository = new InMemoryAttestorEntryRepository(); var dedupeStore = new InMemoryAttestorDedupeStore(); var rekorClient = new StubRekorClient(new NullLogger()); - var archiveStore = new NullAttestorArchiveStore(new NullLogger()); - var auditSink = new InMemoryAttestorAuditSink(); - var logger = new NullLogger(); - using var metrics = new AttestorMetrics(); - - var service = new AttestorSubmissionService( - validator, - repository, - dedupeStore, - rekorClient, - archiveStore, - auditSink, - options, - logger, - TimeProvider.System, - metrics); + var archiveStore = new NullAttestorArchiveStore(new NullLogger()); + var auditSink = new InMemoryAttestorAuditSink(); + var witnessClient = new TestTransparencyWitnessClient(); + var logger = new NullLogger(); + using var metrics = new AttestorMetrics(); + + var service = new AttestorSubmissionService( + validator, + repository, + dedupeStore, + rekorClient, + witnessClient, + archiveStore, + auditSink, + new StubVerificationCache(), + options, + logger, + TimeProvider.System, + metrics); var request = CreateValidRequest(canonicalizer); request.Meta.LogPreference = "both"; @@ -244,22 +261,25 @@ public sealed class AttestorSubmissionServiceTests var repository = new InMemoryAttestorEntryRepository(); var dedupeStore = new InMemoryAttestorDedupeStore(); var rekorClient = new StubRekorClient(new NullLogger()); - var archiveStore = new NullAttestorArchiveStore(new NullLogger()); - var auditSink = new InMemoryAttestorAuditSink(); - var logger = new NullLogger(); - using var metrics = new 
AttestorMetrics(); - - var service = new AttestorSubmissionService( - validator, - repository, - dedupeStore, - rekorClient, - archiveStore, - auditSink, - options, - logger, - TimeProvider.System, - metrics); + var archiveStore = new NullAttestorArchiveStore(new NullLogger()); + var auditSink = new InMemoryAttestorAuditSink(); + var witnessClient = new TestTransparencyWitnessClient(); + var logger = new NullLogger(); + using var metrics = new AttestorMetrics(); + + var service = new AttestorSubmissionService( + validator, + repository, + dedupeStore, + rekorClient, + witnessClient, + archiveStore, + auditSink, + new StubVerificationCache(), + options, + logger, + TimeProvider.System, + metrics); var request = CreateValidRequest(canonicalizer); request.Meta.LogPreference = "mirror"; @@ -278,14 +298,31 @@ public sealed class AttestorSubmissionServiceTests var stored = await repository.GetByBundleShaAsync(request.Meta.BundleSha256); Assert.NotNull(stored); Assert.Equal("mirror", stored!.Log.Backend); - Assert.Null(result.Mirror); - } - - private static AttestorSubmissionRequest CreateValidRequest(DefaultDsseCanonicalizer canonicalizer) - { - var request = new AttestorSubmissionRequest - { - Bundle = new AttestorSubmissionRequest.SubmissionBundle + Assert.Null(result.Mirror); + } + + private sealed class StubVerificationCache : IAttestorVerificationCache + { + public List InvalidatedSubjects { get; } = new(); + + public Task GetAsync(string subject, string envelopeId, string policyVersion, CancellationToken cancellationToken = default) + => Task.FromResult(null); + + public Task SetAsync(string subject, string envelopeId, string policyVersion, AttestorVerificationResult result, CancellationToken cancellationToken = default) + => Task.CompletedTask; + + public Task InvalidateSubjectAsync(string subject, CancellationToken cancellationToken = default) + { + InvalidatedSubjects.Add(subject); + return Task.CompletedTask; + } + } + + private static AttestorSubmissionRequest 
CreateValidRequest(DefaultDsseCanonicalizer canonicalizer) + { + var request = new AttestorSubmissionRequest + { + Bundle = new AttestorSubmissionRequest.SubmissionBundle { Mode = "keyless", Dsse = new AttestorSubmissionRequest.DsseEnvelope diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionValidatorHardeningTests.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionValidatorHardeningTests.cs new file mode 100644 index 00000000..963ce3d8 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionValidatorHardeningTests.cs @@ -0,0 +1,169 @@ +using System; +using System.Collections.Generic; +using System.Security.Cryptography; +using System.Text; +using System.Threading.Tasks; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Infrastructure.Submission; +using Xunit; + +namespace StellaOps.Attestor.Tests; + +public sealed class AttestorSubmissionValidatorHardeningTests +{ + private static readonly DefaultDsseCanonicalizer Canonicalizer = new(); + + [Fact] + public async Task ValidateAsync_ThrowsWhenPayloadExceedsLimit() + { + var constraints = new AttestorSubmissionConstraints( + maxPayloadBytes: 16, + maxSignatures: 6, + maxCertificateChainEntries: 6); + var validator = new AttestorSubmissionValidator(Canonicalizer, constraints: constraints); + + var oversized = CreateValidRequest(payloadSize: 32); + + var exception = await Assert.ThrowsAsync(() => validator.ValidateAsync(oversized)); + Assert.Equal("payload_too_large", exception.Code); + } + + [Fact] + public async Task ValidateAsync_ThrowsWhenCertificateChainTooLong() + { + var constraints = new AttestorSubmissionConstraints( + maxPayloadBytes: 2048, + maxSignatures: 6, + maxCertificateChainEntries: 2); + var validator = new AttestorSubmissionValidator(Canonicalizer, constraints: constraints); + + var request = CreateValidRequest(certificateCount: 3); + + var exception = await Assert.ThrowsAsync(() 
=> validator.ValidateAsync(request)); + Assert.Equal("certificate_chain_too_long", exception.Code); + } + + [Fact] + public async Task ValidateAsync_FuzzedInputs_DoNotCrash() + { + var constraints = new AttestorSubmissionConstraints(); + var validator = new AttestorSubmissionValidator(Canonicalizer, constraints: constraints); + var random = new Random(0x715f_c3a1); + + for (var i = 0; i < 200; i++) + { + var mutated = CreateValidRequest(); + Mutate(mutated, random); + + try + { + await validator.ValidateAsync(mutated); + } + catch (AttestorValidationException) + { + // Expected for malformed inputs. + } + } + } + + private static AttestorSubmissionRequest CreateValidRequest(int payloadSize = 16, int signatureCount = 1, int certificateCount = 0) + { + if (payloadSize <= 0) + { + payloadSize = 1; + } + + var payload = new byte[payloadSize]; + for (var i = 0; i < payload.Length; i++) + { + payload[i] = (byte)'A'; + } + + var request = new AttestorSubmissionRequest + { + Bundle = new AttestorSubmissionRequest.SubmissionBundle + { + Mode = "kms", + Dsse = new AttestorSubmissionRequest.DsseEnvelope + { + PayloadType = "application/vnd.in-toto+json", + PayloadBase64 = Convert.ToBase64String(payload) + } + }, + Meta = new AttestorSubmissionRequest.SubmissionMeta + { + Artifact = new AttestorSubmissionRequest.ArtifactInfo + { + Sha256 = new string('a', 64), + Kind = "sbom" + }, + LogPreference = "primary", + Archive = false + } + }; + + for (var i = 0; i < Math.Max(1, signatureCount); i++) + { + var signatureBytes = Encoding.UTF8.GetBytes($"sig-{i}"); + request.Bundle.Dsse.Signatures.Add(new AttestorSubmissionRequest.DsseSignature + { + KeyId = $"sig-{i}", + Signature = Convert.ToBase64String(signatureBytes) + }); + } + + for (var i = 0; i < certificateCount; i++) + { + request.Bundle.CertificateChain.Add($"-----BEGIN CERTIFICATE-----FAKE{i}-----END CERTIFICATE-----"); + } + + var canonical = Canonicalizer.CanonicalizeAsync(request).GetAwaiter().GetResult(); + 
request.Meta.BundleSha256 = Convert.ToHexString(SHA256.HashData(canonical)).ToLowerInvariant(); + return request; + } + + private static void Mutate(AttestorSubmissionRequest request, Random random) + { + switch (random.Next(0, 7)) + { + case 0: + request.Bundle.Dsse.PayloadBase64 = RandomString(random, random.Next(0, 32)); + break; + case 1: + request.Meta.Artifact.Sha256 = RandomString(random, random.Next(0, 70)); + break; + case 2: + request.Bundle.Dsse.Signatures.Clear(); + break; + case 3: + request.Meta.BundleSha256 = RandomString(random, random.Next(10, 40)); + break; + case 4: + request.Meta.LogPreference = "invalid-" + random.Next(1, 9999); + break; + case 5: + request.Bundle.CertificateChain.Add(RandomString(random, random.Next(5, 25))); + break; + default: + request.Bundle.Dsse.PayloadType = RandomString(random, random.Next(0, 20)); + break; + } + } + + private static string RandomString(Random random, int length) + { + const string alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_"; + if (length <= 0) + { + return string.Empty; + } + + Span buffer = stackalloc char[length]; + for (var i = 0; i < buffer.Length; i++) + { + buffer[i] = alphabet[random.Next(alphabet.Length)]; + } + + return buffer.ToString(); + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs index 34bcd96a..6395553f 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs @@ -1,267 +1,610 @@ -using System.Buffers.Binary; -using System.Collections.Generic; -using System.Security.Cryptography; -using System.Text; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using 
StellaOps.Attestor.Core.Options; -using StellaOps.Attestor.Core.Submission; -using StellaOps.Attestor.Core.Verification; -using StellaOps.Attestor.Infrastructure.Storage; -using StellaOps.Attestor.Infrastructure.Submission; -using StellaOps.Attestor.Infrastructure.Verification; -using StellaOps.Attestor.Infrastructure.Rekor; -using StellaOps.Attestor.Core.Observability; -using Xunit; - -namespace StellaOps.Attestor.Tests; - -public sealed class AttestorVerificationServiceTests -{ - private static readonly byte[] HmacSecret = Encoding.UTF8.GetBytes("attestor-hmac-secret"); - private static readonly string HmacSecretBase64 = Convert.ToBase64String(HmacSecret); - - [Fact] - public async Task VerifyAsync_ReturnsOk_ForExistingUuid() - { - var options = Options.Create(new AttestorOptions - { - Redis = new AttestorOptions.RedisOptions - { - Url = string.Empty - }, - Rekor = new AttestorOptions.RekorOptions - { - Primary = new AttestorOptions.RekorBackendOptions - { - Url = "https://rekor.stellaops.test", - ProofTimeoutMs = 1000, - PollIntervalMs = 50, - MaxAttempts = 2 - } - }, - Security = new AttestorOptions.SecurityOptions - { - SignerIdentity = new AttestorOptions.SignerIdentityOptions - { - Mode = { "kms" }, - KmsKeys = { HmacSecretBase64 } - } - } - }); - - using var metrics = new AttestorMetrics(); - var canonicalizer = new DefaultDsseCanonicalizer(); - var repository = new InMemoryAttestorEntryRepository(); - var dedupeStore = new InMemoryAttestorDedupeStore(); - var rekorClient = new StubRekorClient(new NullLogger()); - var archiveStore = new NullAttestorArchiveStore(new NullLogger()); - var auditSink = new InMemoryAttestorAuditSink(); - var submissionService = new AttestorSubmissionService( - new AttestorSubmissionValidator(canonicalizer), - repository, - dedupeStore, - rekorClient, - archiveStore, - auditSink, - options, - new NullLogger(), - TimeProvider.System, - metrics); - - var submission = CreateSubmissionRequest(canonicalizer, HmacSecret); - var context 
= new SubmissionContext - { - CallerSubject = "urn:stellaops:signer", - CallerAudience = "attestor", - CallerClientId = "signer-service", - CallerTenant = "default" - }; - - var response = await submissionService.SubmitAsync(submission, context); - - var verificationService = new AttestorVerificationService( - repository, - canonicalizer, - rekorClient, - options, - new NullLogger(), - metrics); - - var verifyResult = await verificationService.VerifyAsync(new AttestorVerificationRequest - { - Uuid = response.Uuid, - Bundle = submission.Bundle - }); - - Assert.True(verifyResult.Ok); - Assert.Equal(response.Uuid, verifyResult.Uuid); - Assert.Empty(verifyResult.Issues); - } - - [Fact] - public async Task VerifyAsync_FlagsTamperedBundle() - { - var options = Options.Create(new AttestorOptions - { - Redis = new AttestorOptions.RedisOptions { Url = string.Empty }, - Rekor = new AttestorOptions.RekorOptions - { - Primary = new AttestorOptions.RekorBackendOptions - { - Url = "https://rekor.example/", - ProofTimeoutMs = 1000, - PollIntervalMs = 50, - MaxAttempts = 2 - } - }, - Security = new AttestorOptions.SecurityOptions - { - SignerIdentity = new AttestorOptions.SignerIdentityOptions - { - Mode = { "kms" }, - KmsKeys = { HmacSecretBase64 } - } - } - }); - - using var metrics = new AttestorMetrics(); - var canonicalizer = new DefaultDsseCanonicalizer(); - var repository = new InMemoryAttestorEntryRepository(); - var dedupeStore = new InMemoryAttestorDedupeStore(); - var rekorClient = new StubRekorClient(new NullLogger()); - var archiveStore = new NullAttestorArchiveStore(new NullLogger()); - var auditSink = new InMemoryAttestorAuditSink(); - var submissionService = new AttestorSubmissionService( - new AttestorSubmissionValidator(canonicalizer), - repository, - dedupeStore, - rekorClient, - archiveStore, - auditSink, - options, - new NullLogger(), - TimeProvider.System, - metrics); - - var submission = CreateSubmissionRequest(canonicalizer, HmacSecret); - var context = new 
SubmissionContext - { - CallerSubject = "urn:stellaops:signer", - CallerAudience = "attestor", - CallerClientId = "signer-service", - CallerTenant = "default" - }; - - var response = await submissionService.SubmitAsync(submission, context); - - var verificationService = new AttestorVerificationService( - repository, - canonicalizer, - rekorClient, - options, - new NullLogger(), - metrics); - - var tamperedBundle = CloneBundle(submission.Bundle); - tamperedBundle.Dsse.PayloadBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes("{\"tampered\":true}")); - - var result = await verificationService.VerifyAsync(new AttestorVerificationRequest - { - Uuid = response.Uuid, - Bundle = tamperedBundle - }); - - Assert.False(result.Ok); - Assert.Contains(result.Issues, issue => issue.Contains("signature_invalid", StringComparison.OrdinalIgnoreCase)); - } - - private static AttestorSubmissionRequest CreateSubmissionRequest(DefaultDsseCanonicalizer canonicalizer, byte[] hmacSecret) - { - var payload = Encoding.UTF8.GetBytes("{}"); - var request = new AttestorSubmissionRequest - { - Bundle = new AttestorSubmissionRequest.SubmissionBundle - { - Mode = "kms", - Dsse = new AttestorSubmissionRequest.DsseEnvelope - { - PayloadType = "application/vnd.in-toto+json", - PayloadBase64 = Convert.ToBase64String(payload) - } - }, - Meta = new AttestorSubmissionRequest.SubmissionMeta - { - Artifact = new AttestorSubmissionRequest.ArtifactInfo - { - Sha256 = new string('a', 64), - Kind = "sbom" - }, - LogPreference = "primary", - Archive = false - } - }; - - var preAuth = ComputePreAuthEncodingForTests(request.Bundle.Dsse.PayloadType, payload); - using (var hmac = new HMACSHA256(hmacSecret)) - { - var signature = hmac.ComputeHash(preAuth); - request.Bundle.Dsse.Signatures.Add(new AttestorSubmissionRequest.DsseSignature - { - KeyId = "kms-test", - Signature = Convert.ToBase64String(signature) - }); - } - - var canonical = canonicalizer.CanonicalizeAsync(request).GetAwaiter().GetResult(); - 
request.Meta.BundleSha256 = Convert.ToHexString(SHA256.HashData(canonical)).ToLowerInvariant(); - return request; - } - - private static AttestorSubmissionRequest.SubmissionBundle CloneBundle(AttestorSubmissionRequest.SubmissionBundle source) - { - var clone = new AttestorSubmissionRequest.SubmissionBundle - { - Mode = source.Mode, - Dsse = new AttestorSubmissionRequest.DsseEnvelope - { - PayloadType = source.Dsse.PayloadType, - PayloadBase64 = source.Dsse.PayloadBase64 - } - }; - - foreach (var certificate in source.CertificateChain) - { - clone.CertificateChain.Add(certificate); - } - - foreach (var signature in source.Dsse.Signatures) - { - clone.Dsse.Signatures.Add(new AttestorSubmissionRequest.DsseSignature - { - KeyId = signature.KeyId, - Signature = signature.Signature - }); - } - - return clone; - } - - private static byte[] ComputePreAuthEncodingForTests(string payloadType, byte[] payload) - { - var headerBytes = Encoding.UTF8.GetBytes(payloadType ?? string.Empty); - var buffer = new byte[6 + 8 + headerBytes.Length + 8 + payload.Length]; - var offset = 0; - Encoding.ASCII.GetBytes("DSSEv1", 0, 6, buffer, offset); - offset += 6; - BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, 8), (ulong)headerBytes.Length); - offset += 8; - Buffer.BlockCopy(headerBytes, 0, buffer, offset, headerBytes.Length); - offset += headerBytes.Length; - BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, 8), (ulong)payload.Length); - offset += 8; - Buffer.BlockCopy(payload, 0, buffer, offset, payload.Length); - return buffer; - } -} +using System.Buffers.Binary; +using System.Collections.Generic; +using System.Security.Cryptography; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Rekor; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Core.Verification; +using 
namespace StellaOps.Attestor.Tests;

/// <summary>
/// Exercises <c>AttestorVerificationService</c> end to end: happy-path verification,
/// tamper detection, offline proof handling, and transparency-witness endorsement.
/// NOTE(review): generic type arguments in this file were stripped by an extraction
/// pass and have been reconstructed; confirm each against the project sources.
/// </summary>
public sealed class AttestorVerificationServiceTests
{
    private static readonly byte[] HmacSecret = Encoding.UTF8.GetBytes("attestor-hmac-secret");
    private static readonly string HmacSecretBase64 = Convert.ToBase64String(HmacSecret);

    [Fact]
    public async Task VerifyAsync_ReturnsOk_ForExistingUuid()
    {
        var options = Options.Create(new AttestorOptions
        {
            Redis = new AttestorOptions.RedisOptions
            {
                Url = string.Empty
            },
            Rekor = new AttestorOptions.RekorOptions
            {
                Primary = new AttestorOptions.RekorBackendOptions
                {
                    Url = "https://rekor.stellaops.test",
                    ProofTimeoutMs = 1000,
                    PollIntervalMs = 50,
                    MaxAttempts = 2
                }
            },
            Security = new AttestorOptions.SecurityOptions
            {
                SignerIdentity = new AttestorOptions.SignerIdentityOptions
                {
                    Mode = { "kms" },
                    KmsKeys = { HmacSecretBase64 }
                }
            }
        });

        using var metrics = new AttestorMetrics();
        using var activitySource = new AttestorActivitySource();
        var canonicalizer = new DefaultDsseCanonicalizer();
        var engine = new AttestorVerificationEngine(canonicalizer, options, NullLogger<AttestorVerificationEngine>.Instance);
        var repository = new InMemoryAttestorEntryRepository();
        var dedupeStore = new InMemoryAttestorDedupeStore();
        var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
        var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
        var auditSink = new InMemoryAttestorAuditSink();
        var submissionService = new AttestorSubmissionService(
            new AttestorSubmissionValidator(canonicalizer),
            repository,
            dedupeStore,
            rekorClient,
            new NullTransparencyWitnessClient(),
            archiveStore,
            auditSink,
            new NullVerificationCache(),
            options,
            new NullLogger<AttestorSubmissionService>(),
            TimeProvider.System,
            metrics);

        var submission = CreateSubmissionRequest(canonicalizer, HmacSecret);
        var context = new SubmissionContext
        {
            CallerSubject = "urn:stellaops:signer",
            CallerAudience = "attestor",
            CallerClientId = "signer-service",
            CallerTenant = "default"
        };

        var response = await submissionService.SubmitAsync(submission, context);

        var verificationService = new AttestorVerificationService(
            repository,
            canonicalizer,
            rekorClient,
            new NullTransparencyWitnessClient(),
            engine,
            options,
            new NullLogger<AttestorVerificationService>(),
            metrics,
            activitySource,
            TimeProvider.System);

        var verifyResult = await verificationService.VerifyAsync(new AttestorVerificationRequest
        {
            Uuid = response.Uuid,
            Bundle = submission.Bundle
        });

        // No witness client configured, so the report warns but overall verification succeeds.
        Assert.True(verifyResult.Ok);
        Assert.Equal(response.Uuid, verifyResult.Uuid);
        Assert.Contains("witness_missing", verifyResult.Issues);
        Assert.Contains("policy_warn:transparency", verifyResult.Issues);
        Assert.NotNull(verifyResult.Report);
        Assert.Equal(VerificationSectionStatus.Warn, verifyResult.Report!.OverallStatus);
        Assert.False(verifyResult.Report.Transparency.WitnessPresent);
        Assert.Equal("missing", verifyResult.Report.Transparency.WitnessStatus);
    }

    [Fact]
    public async Task VerifyAsync_FlagsTamperedBundle()
    {
        var options = Options.Create(new AttestorOptions
        {
            Redis = new AttestorOptions.RedisOptions { Url = string.Empty },
            Rekor = new AttestorOptions.RekorOptions
            {
                Primary = new AttestorOptions.RekorBackendOptions
                {
                    Url = "https://rekor.example/",
                    ProofTimeoutMs = 1000,
                    PollIntervalMs = 50,
                    MaxAttempts = 2
                }
            },
            Security = new AttestorOptions.SecurityOptions
            {
                SignerIdentity = new AttestorOptions.SignerIdentityOptions
                {
                    Mode = { "kms" },
                    KmsKeys = { HmacSecretBase64 }
                }
            }
        });

        using var metrics = new AttestorMetrics();
        using var activitySource = new AttestorActivitySource();
        var canonicalizer = new DefaultDsseCanonicalizer();
        var engine = new AttestorVerificationEngine(canonicalizer, options, NullLogger<AttestorVerificationEngine>.Instance);
        var repository = new InMemoryAttestorEntryRepository();
        var dedupeStore = new InMemoryAttestorDedupeStore();
        var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
        var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
        var auditSink = new InMemoryAttestorAuditSink();
        var submissionService = new AttestorSubmissionService(
            new AttestorSubmissionValidator(canonicalizer),
            repository,
            dedupeStore,
            rekorClient,
            new NullTransparencyWitnessClient(),
            archiveStore,
            auditSink,
            new NullVerificationCache(),
            options,
            new NullLogger<AttestorSubmissionService>(),
            TimeProvider.System,
            metrics);

        var submission = CreateSubmissionRequest(canonicalizer, HmacSecret);
        var context = new SubmissionContext
        {
            CallerSubject = "urn:stellaops:signer",
            CallerAudience = "attestor",
            CallerClientId = "signer-service",
            CallerTenant = "default"
        };

        var response = await submissionService.SubmitAsync(submission, context);

        var verificationService = new AttestorVerificationService(
            repository,
            canonicalizer,
            rekorClient,
            new NullTransparencyWitnessClient(),
            engine,
            options,
            new NullLogger<AttestorVerificationService>(),
            metrics,
            activitySource,
            TimeProvider.System);

        // Replace the payload after submission so the recorded signature no longer matches.
        var tamperedBundle = CloneBundle(submission.Bundle);
        tamperedBundle.Dsse.PayloadBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes("{\"tampered\":true}"));

        var result = await verificationService.VerifyAsync(new AttestorVerificationRequest
        {
            Uuid = response.Uuid,
            Bundle = tamperedBundle
        });

        Assert.False(result.Ok);
        Assert.Contains(result.Issues, issue => issue.Contains("signature_invalid", StringComparison.OrdinalIgnoreCase));
        Assert.NotNull(result.Report);
        Assert.Equal(VerificationSectionStatus.Fail, result.Report!.Signatures.Status);
        Assert.Contains("signature_invalid", result.Report!.Signatures.Issues);
    }

    /// <summary>Cache stub that never hits and swallows writes/invalidations.</summary>
    private sealed class NullVerificationCache : IAttestorVerificationCache
    {
        public Task<AttestorVerificationResult?> GetAsync(string subject, string envelopeId, string policyVersion, CancellationToken cancellationToken = default)
            => Task.FromResult<AttestorVerificationResult?>(null);

        public Task SetAsync(string subject, string envelopeId, string policyVersion, AttestorVerificationResult result, CancellationToken cancellationToken = default)
            => Task.CompletedTask;

        public Task InvalidateSubjectAsync(string subject, CancellationToken cancellationToken = default)
            => Task.CompletedTask;
    }

    /// <summary>
    /// Builds a minimal KMS-mode submission whose DSSE signature is an HMAC-SHA256
    /// over the DSSE pre-authentication encoding, then stamps the canonical bundle hash.
    /// </summary>
    private static AttestorSubmissionRequest CreateSubmissionRequest(DefaultDsseCanonicalizer canonicalizer, byte[] hmacSecret)
    {
        var payload = Encoding.UTF8.GetBytes("{}");
        var request = new AttestorSubmissionRequest
        {
            Bundle = new AttestorSubmissionRequest.SubmissionBundle
            {
                Mode = "kms",
                Dsse = new AttestorSubmissionRequest.DsseEnvelope
                {
                    PayloadType = "application/vnd.in-toto+json",
                    PayloadBase64 = Convert.ToBase64String(payload)
                }
            },
            Meta = new AttestorSubmissionRequest.SubmissionMeta
            {
                Artifact = new AttestorSubmissionRequest.ArtifactInfo
                {
                    Sha256 = new string('a', 64),
                    Kind = "sbom"
                },
                LogPreference = "primary",
                Archive = false
            }
        };

        var preAuth = ComputePreAuthEncodingForTests(request.Bundle.Dsse.PayloadType, payload);
        using (var hmac = new HMACSHA256(hmacSecret))
        {
            var signature = hmac.ComputeHash(preAuth);
            request.Bundle.Dsse.Signatures.Add(new AttestorSubmissionRequest.DsseSignature
            {
                KeyId = "kms-test",
                Signature = Convert.ToBase64String(signature)
            });
        }

        var canonical = canonicalizer.CanonicalizeAsync(request).GetAwaiter().GetResult();
        request.Meta.BundleSha256 = Convert.ToHexString(SHA256.HashData(canonical)).ToLowerInvariant();
        return request;
    }

    /// <summary>Deep-copies a submission bundle so tests can tamper without aliasing.</summary>
    private static AttestorSubmissionRequest.SubmissionBundle CloneBundle(AttestorSubmissionRequest.SubmissionBundle source)
    {
        var clone = new AttestorSubmissionRequest.SubmissionBundle
        {
            Mode = source.Mode,
            Dsse = new AttestorSubmissionRequest.DsseEnvelope
            {
                PayloadType = source.Dsse.PayloadType,
                PayloadBase64 = source.Dsse.PayloadBase64
            }
        };

        foreach (var certificate in source.CertificateChain)
        {
            clone.CertificateChain.Add(certificate);
        }

        foreach (var signature in source.Dsse.Signatures)
        {
            clone.Dsse.Signatures.Add(new AttestorSubmissionRequest.DsseSignature
            {
                KeyId = signature.KeyId,
                Signature = signature.Signature
            });
        }

        return clone;
    }

    /// <summary>
    /// DSSE PAE: "DSSEv1" || len(type) || type || len(payload) || payload,
    /// with lengths encoded as big-endian unsigned 64-bit integers.
    /// </summary>
    private static byte[] ComputePreAuthEncodingForTests(string payloadType, byte[] payload)
    {
        var headerBytes = Encoding.UTF8.GetBytes(payloadType ?? string.Empty);
        var buffer = new byte[6 + 8 + headerBytes.Length + 8 + payload.Length];
        var offset = 0;
        Encoding.ASCII.GetBytes("DSSEv1", 0, 6, buffer, offset);
        offset += 6;
        BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, 8), (ulong)headerBytes.Length);
        offset += 8;
        Buffer.BlockCopy(headerBytes, 0, buffer, offset, headerBytes.Length);
        offset += headerBytes.Length;
        BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, 8), (ulong)payload.Length);
        offset += 8;
        Buffer.BlockCopy(payload, 0, buffer, offset, payload.Length);
        return buffer;
    }

    [Fact]
    public async Task VerifyAsync_OfflineSkipsProofRefreshWhenMissing()
    {
        var options = Options.Create(new AttestorOptions
        {
            Rekor = new AttestorOptions.RekorOptions
            {
                Primary = new AttestorOptions.RekorBackendOptions
                {
                    Url = "https://rekor.stellaops.test"
                }
            }
        });

        using var metrics = new AttestorMetrics();
        using var activitySource = new AttestorActivitySource();
        var canonicalizer = new DefaultDsseCanonicalizer();
        var engine = new AttestorVerificationEngine(canonicalizer, options, NullLogger<AttestorVerificationEngine>.Instance);
        var repository = new InMemoryAttestorEntryRepository();
        var rekorClient = new RecordingRekorClient();

        var entry = new AttestorEntry
        {
            RekorUuid = "offline-test",
            Artifact = new AttestorEntry.ArtifactDescriptor
            {
                Sha256 = "deadbeef",
                Kind = "sbom"
            },
            BundleSha256 = "abc123",
            CreatedAt = DateTimeOffset.UtcNow,
            Status = "included",
            Log = new AttestorEntry.LogDescriptor
            {
                Backend = "primary",
                Url = "https://rekor.example/log/entries/offline-test"
            }
        };

        await repository.SaveAsync(entry);

        var verificationService = new AttestorVerificationService(
            repository,
            canonicalizer,
            rekorClient,
            new NullTransparencyWitnessClient(),
            engine,
            options,
            new NullLogger<AttestorVerificationService>(),
            metrics,
            activitySource,
            TimeProvider.System);

        var result = await verificationService.VerifyAsync(new AttestorVerificationRequest
        {
            Uuid = entry.RekorUuid,
            Offline = true
        });

        // Offline mode must report the missing proof without ever hitting Rekor.
        Assert.Contains("proof_missing", result.Issues);
        Assert.Equal(0, rekorClient.ProofRequests);
    }

    [Fact]
    public async Task VerifyAsync_OfflineUsesImportedProof()
    {
        var options = Options.Create(new AttestorOptions
        {
            Rekor = new AttestorOptions.RekorOptions
            {
                Primary = new AttestorOptions.RekorBackendOptions
                {
                    Url = "https://rekor.stellaops.test"
                }
            }
        });

        using var metrics = new AttestorMetrics();
        using var activitySource = new AttestorActivitySource();
        var canonicalizer = new DefaultDsseCanonicalizer();
        var engine = new AttestorVerificationEngine(canonicalizer, options, NullLogger<AttestorVerificationEngine>.Instance);
        var repository = new InMemoryAttestorEntryRepository();
        var rekorClient = new RecordingRekorClient();

        // Build a two-leaf Merkle tree whose right leaf is the bundle hash; the stored
        // proof carries the left sibling so inclusion can be replayed offline.
        var canonicalBytes = Encoding.UTF8.GetBytes("{\"payloadType\":\"application/vnd.test\"}");
        var bundleHashBytes = SHA256.HashData(canonicalBytes);
        var bundleSha = Convert.ToHexString(bundleHashBytes).ToLowerInvariant();
        var siblingBytes = SHA256.HashData(Encoding.UTF8.GetBytes("sibling-node"));
        var rootHashBytes = ComputeMerkleNode(siblingBytes, bundleHashBytes);
        var rootHash = Convert.ToHexString(rootHashBytes).ToLowerInvariant();

        var entry = new AttestorEntry
        {
            RekorUuid = "offline-proof-test",
            Artifact = new AttestorEntry.ArtifactDescriptor
            {
                Sha256 = "cafebabe",
                Kind = "sbom"
            },
            BundleSha256 = bundleSha,
            CreatedAt = DateTimeOffset.UtcNow,
            Status = "included",
            Proof = new AttestorEntry.ProofDescriptor
            {
                Checkpoint = new AttestorEntry.CheckpointDescriptor
                {
                    Origin = "rekor.stellaops.test",
                    Size = 2,
                    RootHash = rootHash,
                    Timestamp = DateTimeOffset.UtcNow
                },
                Inclusion = new AttestorEntry.InclusionDescriptor
                {
                    LeafHash = bundleSha,
                    Path = new[] { $"L:{Convert.ToHexString(siblingBytes).ToLowerInvariant()}" }
                }
            },
            Log = new AttestorEntry.LogDescriptor
            {
                Backend = "primary",
                Url = "https://rekor.example/log/entries/offline-proof-test"
            }
        };

        await repository.SaveAsync(entry);

        var verificationService = new AttestorVerificationService(
            repository,
            canonicalizer,
            rekorClient,
            new NullTransparencyWitnessClient(),
            engine,
            options,
            new NullLogger<AttestorVerificationService>(),
            metrics,
            activitySource,
            TimeProvider.System);

        var result = await verificationService.VerifyAsync(new AttestorVerificationRequest
        {
            Uuid = entry.RekorUuid,
            Offline = true
        });

        Assert.True(result.Ok);
        Assert.DoesNotContain("proof_missing", result.Issues);
        Assert.Equal(0, rekorClient.ProofRequests);
    }

    [Fact]
    public async Task VerifyAsync_FailsWhenWitnessRootMismatch()
    {
        var options = Options.Create(new AttestorOptions
        {
            Redis = new AttestorOptions.RedisOptions
            {
                Url = string.Empty
            },
            Rekor = new AttestorOptions.RekorOptions
            {
                Primary = new AttestorOptions.RekorBackendOptions
                {
                    Url = "https://rekor.witness.test",
                    ProofTimeoutMs = 1000,
                    PollIntervalMs = 50,
                    MaxAttempts = 2
                }
            },
            Security = new AttestorOptions.SecurityOptions
            {
                SignerIdentity = new AttestorOptions.SignerIdentityOptions
                {
                    Mode = { "kms" },
                    KmsKeys = { HmacSecretBase64 }
                }
            },
            Verification = new AttestorOptions.VerificationOptions
            {
                RequireWitnessEndorsement = true
            },
            TransparencyWitness = new AttestorOptions.TransparencyWitnessOptions
            {
                Enabled = true,
                BaseUrl = "https://witness.stellaops.test"
            }
        });

        using var metrics = new AttestorMetrics();
        using var activitySource = new AttestorActivitySource();
        var canonicalizer = new DefaultDsseCanonicalizer();
        var engine = new AttestorVerificationEngine(canonicalizer, options, NullLogger<AttestorVerificationEngine>.Instance);
        var repository = new InMemoryAttestorEntryRepository();
        var dedupeStore = new InMemoryAttestorDedupeStore();
        var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
        var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
        var auditSink = new InMemoryAttestorAuditSink();
        var witnessClient = new TestTransparencyWitnessClient
        {
            // Endorsed observation whose root hash deliberately disagrees with the log.
            DefaultObservation = new TransparencyWitnessObservation
            {
                Aggregator = "stub-aggregator",
                Status = "endorsed",
                RootHash = "mismatched",
                RetrievedAt = DateTimeOffset.UtcNow
            }
        };

        var submissionService = new AttestorSubmissionService(
            new AttestorSubmissionValidator(canonicalizer),
            repository,
            dedupeStore,
            rekorClient,
            witnessClient,
            archiveStore,
            auditSink,
            new NullVerificationCache(),
            options,
            new NullLogger<AttestorSubmissionService>(),
            TimeProvider.System,
            metrics);

        var submission = CreateSubmissionRequest(canonicalizer, HmacSecret);
        var context = new SubmissionContext
        {
            CallerSubject = "urn:stellaops:signer",
            CallerAudience = "attestor",
            CallerClientId = "signer-service",
            CallerTenant = "default"
        };

        var response = await submissionService.SubmitAsync(submission, context);

        var verificationService = new AttestorVerificationService(
            repository,
            canonicalizer,
            rekorClient,
            witnessClient,
            engine,
            options,
            new NullLogger<AttestorVerificationService>(),
            metrics,
            activitySource,
            TimeProvider.System);

        var result = await verificationService.VerifyAsync(new AttestorVerificationRequest
        {
            Uuid = response.Uuid,
            Bundle = submission.Bundle
        });

        Assert.False(result.Ok);
        Assert.Contains(result.Issues, issue => issue.StartsWith("witness_root_mismatch", StringComparison.OrdinalIgnoreCase));
        Assert.True(result.Report!.Transparency.WitnessPresent);
        Assert.False(result.Report.Transparency.WitnessMatchesRoot);
        Assert.Equal("stub-aggregator", result.Report.Transparency.WitnessAggregator);
        Assert.Equal("endorsed", result.Report.Transparency.WitnessStatus);
        Assert.NotEmpty(witnessClient.Requests);
    }

    /// <summary>RFC 6962 interior node: SHA-256 over 0x01 || left || right.</summary>
    private static byte[] ComputeMerkleNode(byte[] left, byte[] right)
    {
        using var sha = SHA256.Create();
        var buffer = new byte[1 + left.Length + right.Length];
        buffer[0] = 0x01;
        Buffer.BlockCopy(left, 0, buffer, 1, left.Length);
        Buffer.BlockCopy(right, 0, buffer, 1 + left.Length, right.Length);
        return sha.ComputeHash(buffer);
    }

    /// <summary>Counts proof fetches so offline tests can assert Rekor is never called.</summary>
    private sealed class RecordingRekorClient : IRekorClient
    {
        public int ProofRequests { get; private set; }

        // NOTE(review): return type reconstructed — confirm against IRekorClient.
        public Task<RekorSubmissionResponse> SubmitAsync(AttestorSubmissionRequest request, RekorBackend backend, CancellationToken cancellationToken = default)
        {
            throw new NotSupportedException();
        }

        public Task<RekorProofResponse?> GetProofAsync(string rekorUuid, RekorBackend backend, CancellationToken cancellationToken = default)
        {
            ProofRequests++;
            return Task.FromResult<RekorProofResponse?>(new RekorProofResponse
            {
                Checkpoint = new RekorProofResponse.RekorCheckpoint
                {
                    Origin = backend.Url.Host,
                    Size = 1,
                    RootHash = string.Empty,
                    Timestamp = DateTimeOffset.UtcNow
                },
                Inclusion = new RekorProofResponse.RekorInclusionProof
                {
                    LeafHash = string.Empty,
                    Path = Array.Empty<string>()
                }
            });
        }
    }
}
namespace StellaOps.Attestor.Tests;

/// <summary>
/// Contract-mapping tests for <c>BulkVerificationContracts.TryBuildJob</c>:
/// rejection of missing items and propagation of request-level defaults onto items.
/// </summary>
public sealed class BulkVerificationContractsTests
{
    [Fact]
    public void TryBuildJob_ReturnsError_WhenItemsMissing()
    {
        var options = new AttestorOptions();
        var context = new BulkVerificationJobContext();

        // A null DTO must fail fast with an error and no job.
        var success = BulkVerificationContracts.TryBuildJob(null, options, context, out var job, out var error);

        Assert.False(success);
        Assert.Null(job);
        Assert.NotNull(error);
    }

    [Fact]
    public void TryBuildJob_AppliesDefaults()
    {
        var options = new AttestorOptions
        {
            Quotas = new AttestorOptions.QuotaOptions
            {
                Bulk = new AttestorOptions.BulkVerificationQuotaOptions
                {
                    MaxItemsPerJob = 10
                }
            }
        };

        var dto = new BulkVerificationRequestDto
        {
            PolicyVersion = "policy@1",
            RefreshProof = true,
            Items = new[]
            {
                new BulkVerificationRequestItemDto
                {
                    Subject = "pkg:docker/example",
                    EnvelopeId = "envelope-1"
                },
                new BulkVerificationRequestItemDto
                {
                    Uuid = "rekor-123",
                    RefreshProof = false
                }
            }
        };

        var context = new BulkVerificationJobContext
        {
            Tenant = "tenant-a",
            RequestedBy = "user-1"
        };

        var success = BulkVerificationContracts.TryBuildJob(dto, options, context, out var job, out var error);

        Assert.True(success);
        Assert.Null(error);
        Assert.NotNull(job);
        Assert.Equal(2, job!.Items.Count);
        // Item 0 inherits the request-level policy version and refresh flag;
        // item 1's explicit RefreshProof=false must win over the request default.
        Assert.Equal("policy@1", job.Items[0].Request.PolicyVersion);
        Assert.True(job.Items[0].Request.RefreshProof);
        Assert.False(job.Items[1].Request.RefreshProof);
        Assert.Equal("tenant-a", job.Context.Tenant);
        Assert.Equal(BulkVerificationJobStatus.Queued, job.Status);
    }
}
namespace StellaOps.Attestor.Tests;

/// <summary>
/// Verifies that <c>BulkVerificationWorker</c> drains an acquired job, marking
/// every item succeeded and the job completed.
/// NOTE(review): generic type arguments were reconstructed after extraction
/// stripped them; confirm against project sources.
/// </summary>
public sealed class BulkVerificationWorkerTests
{
    [Fact]
    public async Task ProcessJobAsync_CompletesAllItems()
    {
        var jobStore = new InMemoryBulkVerificationJobStore();
        var verificationService = new StubVerificationService();
        using var metrics = new AttestorMetrics();
        var options = Options.Create(new AttestorOptions
        {
            BulkVerification = new AttestorOptions.BulkVerificationOptions
            {
                WorkerPollSeconds = 1,
                ItemDelayMilliseconds = 0
            },
            Quotas = new AttestorOptions.QuotaOptions
            {
                Bulk = new AttestorOptions.BulkVerificationQuotaOptions
                {
                    MaxItemsPerJob = 10
                }
            }
        });

        var job = new BulkVerificationJob
        {
            Id = "job-1",
            Items = new List<BulkVerificationJobItem>
            {
                new()
                {
                    Index = 0,
                    Request = new BulkVerificationItemRequest
                    {
                        Subject = "pkg:docker/example",
                        EnvelopeId = "env-1",
                        PolicyVersion = "policy"
                    }
                },
                new()
                {
                    Index = 1,
                    Request = new BulkVerificationItemRequest
                    {
                        Uuid = "rekor-1"
                    }
                }
            }
        };

        await jobStore.CreateAsync(job);
        var worker = new BulkVerificationWorker(
            jobStore,
            verificationService,
            metrics,
            options,
            TimeProvider.System,
            NullLogger<BulkVerificationWorker>.Instance);

        var acquired = await jobStore.TryAcquireAsync();
        Assert.NotNull(acquired);

        await worker.ProcessJobAsync(acquired!, CancellationToken.None);

        var stored = await jobStore.GetAsync(job.Id);
        Assert.NotNull(stored);
        Assert.Equal(BulkVerificationJobStatus.Completed, stored!.Status);
        Assert.Equal(2, stored.ProcessedCount);
        Assert.Equal(2, stored.SucceededCount);
        Assert.Equal(0, stored.FailedCount);
        Assert.All(stored.Items, item => Assert.Equal(BulkVerificationItemStatus.Succeeded, item.Status));
    }

    /// <summary>Always-verified stub so the worker's bookkeeping is tested in isolation.</summary>
    private sealed class StubVerificationService : IAttestorVerificationService
    {
        public Task<AttestorVerificationResult> VerifyAsync(AttestorVerificationRequest request, CancellationToken cancellationToken = default)
        {
            return Task.FromResult(new AttestorVerificationResult
            {
                Ok = true,
                Uuid = request.Uuid ?? "uuid-placeholder",
                Status = "verified"
            });
        }

        public Task<AttestorEntry?> GetEntryAsync(string rekorUuid, bool refreshProof, CancellationToken cancellationToken = default)
        {
            return Task.FromResult<AttestorEntry?>(null);
        }
    }
}

/// <summary>
/// In-memory job store with optimistic concurrency (version counter) used by
/// the worker tests. Clones on every read/write so callers never alias stored state.
/// </summary>
internal sealed class InMemoryBulkVerificationJobStore : IBulkVerificationJobStore
{
    private readonly object _sync = new();
    private readonly Dictionary<string, BulkVerificationJob> _jobs = new(StringComparer.Ordinal);

    public Task<BulkVerificationJob> CreateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(job);

        lock (_sync)
        {
            job.Version = 0;
            job.Status = BulkVerificationJobStatus.Queued;
            _jobs[job.Id] = Clone(job);
            return Task.FromResult(Clone(job));
        }
    }

    public Task<BulkVerificationJob?> GetAsync(string jobId, CancellationToken cancellationToken = default)
    {
        lock (_sync)
        {
            return Task.FromResult<BulkVerificationJob?>(_jobs.TryGetValue(jobId, out var stored) ? Clone(stored) : null);
        }
    }

    public Task<BulkVerificationJob?> TryAcquireAsync(CancellationToken cancellationToken = default)
    {
        lock (_sync)
        {
            // Oldest queued job wins; flipping it to Running bumps the version.
            foreach (var entry in _jobs.Values.OrderBy(job => job.CreatedAt))
            {
                if (entry.Status != BulkVerificationJobStatus.Queued)
                {
                    continue;
                }

                entry.Status = BulkVerificationJobStatus.Running;
                entry.StartedAt = DateTimeOffset.UtcNow;
                entry.Version += 1;
                _jobs[entry.Id] = Clone(entry);
                return Task.FromResult<BulkVerificationJob?>(Clone(entry));
            }
        }

        return Task.FromResult<BulkVerificationJob?>(null);
    }

    public Task<bool> TryUpdateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default)
    {
        lock (_sync)
        {
            if (!_jobs.TryGetValue(job.Id, out var current))
            {
                return Task.FromResult(false);
            }

            // Optimistic concurrency: reject stale writers.
            if (current.Version != job.Version)
            {
                return Task.FromResult(false);
            }

            job.Version += 1;
            _jobs[job.Id] = Clone(job);
            return Task.FromResult(true);
        }
    }

    public Task<int> CountQueuedAsync(CancellationToken cancellationToken = default)
    {
        lock (_sync)
        {
            var count = _jobs.Values.Count(job => job.Status == BulkVerificationJobStatus.Queued);
            return Task.FromResult(count);
        }
    }

    private static BulkVerificationJob Clone(BulkVerificationJob job)
    {
        var context = job.Context ?? new BulkVerificationJobContext();
        return new BulkVerificationJob
        {
            Id = job.Id,
            Version = job.Version,
            Status = job.Status,
            CreatedAt = job.CreatedAt,
            StartedAt = job.StartedAt,
            CompletedAt = job.CompletedAt,
            Context = new BulkVerificationJobContext
            {
                Tenant = context.Tenant,
                RequestedBy = context.RequestedBy,
                ClientId = context.ClientId,
                Scopes = context.Scopes?.ToList() ?? new List<string>()
            },
            ProcessedCount = job.ProcessedCount,
            SucceededCount = job.SucceededCount,
            FailedCount = job.FailedCount,
            FailureReason = job.FailureReason,
            Items = (job.Items ?? (IReadOnlyList<BulkVerificationJobItem>)Array.Empty<BulkVerificationJobItem>()).Select(CloneItem).ToList()
        };
    }

    private static BulkVerificationJobItem CloneItem(BulkVerificationJobItem item)
    {
        var request = item.Request ?? new BulkVerificationItemRequest();
        return new BulkVerificationJobItem
        {
            Index = item.Index,
            Request = new BulkVerificationItemRequest
            {
                Uuid = request.Uuid,
                ArtifactSha256 = request.ArtifactSha256,
                Subject = request.Subject,
                EnvelopeId = request.EnvelopeId,
                PolicyVersion = request.PolicyVersion,
                RefreshProof = request.RefreshProof
            },
            Status = item.Status,
            StartedAt = item.StartedAt,
            CompletedAt = item.CompletedAt,
            Result = item.Result is null ? null : new AttestorVerificationResult
            {
                Ok = item.Result.Ok,
                Uuid = item.Result.Uuid,
                Index = item.Result.Index,
                LogUrl = item.Result.LogUrl,
                CheckedAt = item.Result.CheckedAt,
                Status = item.Result.Status,
                Issues = item.Result.Issues.ToArray(),
                Report = item.Result.Report
            },
            Error = item.Error
        };
    }
}
namespace StellaOps.Attestor.Tests;

/// <summary>
/// Tests for <c>CachedAttestorVerificationService</c>: cache hits on repeated
/// identical requests, and cache bypass on refresh or incomplete descriptors.
/// NOTE(review): generic type arguments were reconstructed after extraction
/// stripped them; confirm against project sources.
/// </summary>
public sealed class CachedAttestorVerificationServiceTests
{
    [Fact]
    public async Task VerifyAsync_ReturnsCachedResult_OnRepeatedCalls()
    {
        var options = Options.Create(new AttestorOptions());
        using var memoryCache = new MemoryCache(new MemoryCacheOptions());
        using var metrics = new AttestorMetrics();
        var cache = new InMemoryAttestorVerificationCache(memoryCache, options, new NullLogger<InMemoryAttestorVerificationCache>());
        var inner = new StubVerificationService();
        var service = new CachedAttestorVerificationService(
            inner,
            cache,
            metrics,
            options,
            new NullLogger<CachedAttestorVerificationService>());

        var request = new AttestorVerificationRequest
        {
            Subject = "urn:stellaops:test",
            EnvelopeId = "bundle-123",
            PolicyVersion = "policy-v1"
        };

        var first = await service.VerifyAsync(request);
        var second = await service.VerifyAsync(request);

        // Second call must be served from cache: same instance, inner called once.
        Assert.True(first.Ok);
        Assert.Same(first, second);
        Assert.Equal(1, inner.VerifyCallCount);
    }

    [Fact]
    public async Task VerifyAsync_BypassesCache_WhenRefreshProofRequested()
    {
        var options = Options.Create(new AttestorOptions());
        using var memoryCache = new MemoryCache(new MemoryCacheOptions());
        using var metrics = new AttestorMetrics();
        var cache = new InMemoryAttestorVerificationCache(memoryCache, options, new NullLogger<InMemoryAttestorVerificationCache>());
        var inner = new StubVerificationService();
        var service = new CachedAttestorVerificationService(
            inner,
            cache,
            metrics,
            options,
            new NullLogger<CachedAttestorVerificationService>());

        var request = new AttestorVerificationRequest
        {
            Subject = "urn:stellaops:test",
            EnvelopeId = "bundle-123",
            PolicyVersion = "policy-v1",
            RefreshProof = true
        };

        var first = await service.VerifyAsync(request);
        var second = await service.VerifyAsync(request);

        // RefreshProof forces a fresh verification every time.
        Assert.True(first.Ok);
        Assert.True(second.Ok);
        Assert.Equal(2, inner.VerifyCallCount);
    }

    [Fact]
    public async Task VerifyAsync_BypassesCache_WhenDescriptorIncomplete()
    {
        var options = Options.Create(new AttestorOptions());
        using var memoryCache = new MemoryCache(new MemoryCacheOptions());
        using var metrics = new AttestorMetrics();
        var cache = new InMemoryAttestorVerificationCache(memoryCache, options, new NullLogger<InMemoryAttestorVerificationCache>());
        var inner = new StubVerificationService();
        var service = new CachedAttestorVerificationService(
            inner,
            cache,
            metrics,
            options,
            new NullLogger<CachedAttestorVerificationService>());

        // No PolicyVersion: the cache key is incomplete, so caching is skipped.
        var request = new AttestorVerificationRequest
        {
            Subject = "urn:stellaops:test",
            EnvelopeId = "bundle-123"
        };

        await service.VerifyAsync(request);
        await service.VerifyAsync(request);

        Assert.Equal(2, inner.VerifyCallCount);
    }

    /// <summary>Counts invocations and always returns a fresh Ok result.</summary>
    private sealed class StubVerificationService : IAttestorVerificationService
    {
        public int VerifyCallCount { get; private set; }

        public Task<AttestorVerificationResult> VerifyAsync(AttestorVerificationRequest request, CancellationToken cancellationToken = default)
        {
            VerifyCallCount++;
            return Task.FromResult(new AttestorVerificationResult
            {
                Ok = true,
                Uuid = "uuid"
            });
        }

        public Task<AttestorEntry?> GetEntryAsync(string rekorUuid, bool refreshProof, CancellationToken cancellationToken = default)
            => Task.FromResult<AttestorEntry?>(null);
    }
}
namespace StellaOps.Attestor.Tests;

/// <summary>
/// Tests for <c>HttpTransparencyWitnessClient</c>: successful observations are
/// cached, HTTP failures become status-coded observations, and transport
/// exceptions produce cached error observations.
/// NOTE(review): generic/delegate type arguments were reconstructed after
/// extraction stripped them; confirm against project sources.
/// </summary>
public sealed class HttpTransparencyWitnessClientTests
{
    [Fact]
    public async Task GetObservationAsync_CachesSuccessfulResponses()
    {
        var handler = new StubHttpMessageHandler(_ =>
        {
            var payload = JsonSerializer.Serialize(new
            {
                aggregator = "aggregator.test",
                status = "endorsed",
                rootHash = "abc123",
                statement = "test-statement",
                signature = new { keyId = "sig-key", value = "sig-value" },
                timestamp = "2025-11-02T00:00:00Z"
            });

            return new HttpResponseMessage(HttpStatusCode.OK)
            {
                Content = new StringContent(payload)
            };
        });

        using var client = new HttpClient(handler);
        using var cache = new MemoryCache(new MemoryCacheOptions());
        using var metrics = new AttestorMetrics();
        using var activitySource = new AttestorActivitySource();

        var options = Options.Create(new AttestorOptions
        {
            TransparencyWitness = new AttestorOptions.TransparencyWitnessOptions
            {
                Enabled = true,
                BaseUrl = "https://witness.test",
                CacheTtlSeconds = 60
            }
        });

        var sut = new HttpTransparencyWitnessClient(
            client,
            cache,
            options,
            metrics,
            activitySource,
            TimeProvider.System,
            NullLogger<HttpTransparencyWitnessClient>.Instance);

        var request = new TransparencyWitnessRequest(
            "uuid-1",
            "primary",
            new Uri("https://rekor.example"),
            "abc123");

        var first = await sut.GetObservationAsync(request);
        var second = await sut.GetObservationAsync(request);

        // Second lookup is a cache hit: same instance, a single HTTP round-trip.
        Assert.NotNull(first);
        Assert.Same(first, second);
        Assert.Equal("aggregator.test", first!.Aggregator);
        Assert.Equal("endorsed", first.Status);
        Assert.Equal(1, handler.CallCount);
    }

    [Fact]
    public async Task GetObservationAsync_ReturnsErrorObservation_OnNonSuccess()
    {
        var handler = new StubHttpMessageHandler(_ => new HttpResponseMessage(HttpStatusCode.BadGateway));

        using var client = new HttpClient(handler);
        using var cache = new MemoryCache(new MemoryCacheOptions());
        using var metrics = new AttestorMetrics();
        using var activitySource = new AttestorActivitySource();

        var options = Options.Create(new AttestorOptions
        {
            TransparencyWitness = new AttestorOptions.TransparencyWitnessOptions
            {
                Enabled = true,
                BaseUrl = "https://witness.test"
            }
        });

        var sut = new HttpTransparencyWitnessClient(
            client,
            cache,
            options,
            metrics,
            activitySource,
            TimeProvider.System,
            NullLogger<HttpTransparencyWitnessClient>.Instance);

        var request = new TransparencyWitnessRequest(
            "uuid-2",
            "primary",
            new Uri("https://rekor.example"),
            "root-hash");

        var observation = await sut.GetObservationAsync(request);

        // A 502 is surfaced as an observation with an "http_502" status, not an exception.
        Assert.NotNull(observation);
        Assert.Equal("primary", observation!.Aggregator);
        Assert.Equal("http_502", observation.Status);
        Assert.Equal("root-hash", observation.RootHash);
        Assert.Equal(1, handler.CallCount);
    }

    [Fact]
    public async Task GetObservationAsync_ReturnsCachedErrorObservation_OnException()
    {
        var handler = new StubHttpMessageHandler(_ => throw new HttpRequestException("boom"));

        using var client = new HttpClient(handler);
        using var cache = new MemoryCache(new MemoryCacheOptions());
        using var metrics = new AttestorMetrics();
        using var activitySource = new AttestorActivitySource();

        var options = Options.Create(new AttestorOptions
        {
            TransparencyWitness = new AttestorOptions.TransparencyWitnessOptions
            {
                Enabled = true,
                BaseUrl = "https://witness.test",
                CacheTtlSeconds = 30
            }
        });

        var sut = new HttpTransparencyWitnessClient(
            client,
            cache,
            options,
            metrics,
            activitySource,
            TimeProvider.System,
            NullLogger<HttpTransparencyWitnessClient>.Instance);

        var request = new TransparencyWitnessRequest(
            "uuid-3",
            "mirror",
            new Uri("https://rekor.mirror"),
            null);

        var first = await sut.GetObservationAsync(request);
        var second = await sut.GetObservationAsync(request);

        // The error observation is cached too — no retry storm against the witness.
        Assert.NotNull(first);
        Assert.Same(first, second);
        Assert.Equal("mirror", first!.Aggregator);
        Assert.Equal("HttpRequestException", first.Status);
        Assert.Equal("boom", first.Error);
        Assert.Equal(1, handler.CallCount);
    }

    /// <summary>Synchronous handler stub that records how many requests it served.</summary>
    private sealed class StubHttpMessageHandler : HttpMessageHandler
    {
        private readonly Func<HttpRequestMessage, HttpResponseMessage> _handler;

        public StubHttpMessageHandler(Func<HttpRequestMessage, HttpResponseMessage> handler)
        {
            _handler = handler;
        }

        public int CallCount { get; private set; }

        protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
        {
            CallCount++;
            return Task.FromResult(_handler(request));
        }
    }
}
namespace StellaOps.Attestor.Tests;

/// <summary>
/// Opt-in live TTL tests for the Mongo and Redis dedupe stores. Each test
/// no-ops unless the corresponding ATTESTOR_LIVE_* connection string is set,
/// so the suite stays green in hermetic CI.
/// NOTE(review): Mongo collection document type and driver generics were
/// reconstructed after extraction stripped them; confirm against project sources.
/// </summary>
public sealed class LiveDedupeStoreTests
{
    private const string Category = "LiveTTL";

    [Fact]
    [Trait("Category", Category)]
    public async Task Mongo_dedupe_document_expires_via_ttl_index()
    {
        var mongoUri = Environment.GetEnvironmentVariable("ATTESTOR_LIVE_MONGO_URI");
        if (string.IsNullOrWhiteSpace(mongoUri))
        {
            return; // live infrastructure not configured — skip silently
        }

        var mongoUrl = new MongoUrl(mongoUri);
        var client = new MongoClient(mongoUrl);
        // Unique database name per run so parallel/aborted runs never collide.
        var databaseName = $"{(string.IsNullOrWhiteSpace(mongoUrl.DatabaseName) ? "attestor_live_ttl" : mongoUrl.DatabaseName)}_{Guid.NewGuid():N}";
        var database = client.GetDatabase(databaseName);
        var collection = database.GetCollection<AttestorDedupeDocument>("dedupe");

        try
        {
            var store = new MongoAttestorDedupeStore(collection, TimeProvider.System);

            // The store's constructor must have created the TTL index.
            var indexes = await (await collection.Indexes.ListAsync()).ToListAsync();
            Assert.Contains(indexes, doc => doc.TryGetElement("name", out var element) && element.Value == "dedupe_ttl");

            var bundle = Guid.NewGuid().ToString("N");
            var ttl = TimeSpan.FromSeconds(20);
            await store.SetAsync(bundle, "rekor-live", ttl);

            var filter = Builders<AttestorDedupeDocument>.Filter.Eq(x => x.Key, $"bundle:{bundle}");
            Assert.True(await collection.Find(filter).AnyAsync(), "Seed document was not written.");

            // Mongo's TTL monitor runs roughly once a minute; poll with generous slack.
            var deadline = DateTime.UtcNow + ttl + TimeSpan.FromMinutes(2);
            while (DateTime.UtcNow < deadline)
            {
                if (!await collection.Find(filter).AnyAsync())
                {
                    return; // expired as expected
                }

                await Task.Delay(TimeSpan.FromSeconds(5));
            }

            throw new TimeoutException("TTL document remained in MongoDB after waiting for expiry.");
        }
        finally
        {
            await client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    [Trait("Category", Category)]
    public async Task Redis_dedupe_entry_sets_time_to_live()
    {
        var redisConnection = Environment.GetEnvironmentVariable("ATTESTOR_LIVE_REDIS_URI");
        if (string.IsNullOrWhiteSpace(redisConnection))
        {
            return; // live infrastructure not configured — skip silently
        }

        var options = Options.Create(new AttestorOptions
        {
            Redis = new AttestorOptions.RedisOptions
            {
                Url = redisConnection,
                DedupePrefix = "attestor:ttl:live:"
            }
        });

        var multiplexer = await ConnectionMultiplexer.ConnectAsync(redisConnection);
        try
        {
            var store = new RedisAttestorDedupeStore(multiplexer, options);
            var database = multiplexer.GetDatabase();

            var bundle = Guid.NewGuid().ToString("N");
            var ttl = TimeSpan.FromSeconds(30);

            await store.SetAsync(bundle, "rekor-redis", ttl);
            var value = await store.TryGetExistingAsync(bundle);
            Assert.Equal("rekor-redis", value);

            // The key's remaining TTL must be set and no larger than what we requested.
            var redisKey = (RedisKey)(options.Value.Redis.DedupePrefix + $"bundle:{bundle}");
            var timeToLive = await database.KeyTimeToLiveAsync(redisKey);

            Assert.NotNull(timeToLive);
            Assert.InRange(timeToLive!.Value, TimeSpan.Zero, ttl);
        }
        finally
        {
            await multiplexer.CloseAsync();
            multiplexer.Dispose();
        }
    }
}
rekorUuid, CancellationToken cancellationToken = default) - { - _entries.TryGetValue(rekorUuid, out var entry); - return Task.FromResult(entry); - } - - public Task> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default) - { - var entries = _entries.Values - .Where(e => string.Equals(e.Artifact.Sha256, artifactSha256, StringComparison.OrdinalIgnoreCase)) - .OrderBy(e => e.CreatedAt) - .ToList(); - - return Task.FromResult>(entries); - } - - public Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default) - { - _entries[entry.RekorUuid] = entry; - return Task.CompletedTask; - } -} - -internal sealed class InMemoryAttestorAuditSink : IAttestorAuditSink -{ - public List Records { get; } = new(); - - public Task WriteAsync(AttestorAuditRecord record, CancellationToken cancellationToken = default) - { - Records.Add(record); - return Task.CompletedTask; - } -} +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Attestor.Core.Audit; +using StellaOps.Attestor.Core.Verification; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.Tests; + +internal sealed class InMemoryAttestorEntryRepository : IAttestorEntryRepository +{ + private readonly ConcurrentDictionary _entries = new(); + private readonly Dictionary _bundleIndex = new(StringComparer.OrdinalIgnoreCase); + private readonly object _sync = new(); + + public Task GetByBundleShaAsync(string bundleSha256, CancellationToken cancellationToken = default) + { + string? 
uuid; + lock (_sync) + { + _bundleIndex.TryGetValue(bundleSha256, out uuid); + } + + if (uuid is not null && _entries.TryGetValue(uuid, out var entry)) + { + return Task.FromResult(entry); + } + + return Task.FromResult(null); + } + + public Task GetByUuidAsync(string rekorUuid, CancellationToken cancellationToken = default) + { + _entries.TryGetValue(rekorUuid, out var entry); + return Task.FromResult(entry); + } + + public Task> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default) + { + List snapshot; + lock (_sync) + { + snapshot = _entries.Values.ToList(); + } + + var entries = snapshot + .Where(e => string.Equals(e.Artifact.Sha256, artifactSha256, StringComparison.OrdinalIgnoreCase)) + .OrderBy(e => e.CreatedAt) + .ToList(); + + return Task.FromResult>(entries); + } + + public Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(entry); + + lock (_sync) + { + if (_bundleIndex.TryGetValue(entry.BundleSha256, out var existingUuid) && + !string.Equals(existingUuid, entry.RekorUuid, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"Bundle SHA '{entry.BundleSha256}' already exists."); + } + + if (_entries.TryGetValue(entry.RekorUuid, out var existing) && + !string.Equals(existing.BundleSha256, entry.BundleSha256, StringComparison.OrdinalIgnoreCase)) + { + _bundleIndex.Remove(existing.BundleSha256); + } + + _entries[entry.RekorUuid] = entry; + _bundleIndex[entry.BundleSha256] = entry.RekorUuid; + } + + return Task.CompletedTask; + } + + public Task QueryAsync(AttestorEntryQuery query, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(query); + + var pageSize = query.PageSize <= 0 ? 
50 : Math.Min(query.PageSize, 200); + + List snapshot; + lock (_sync) + { + snapshot = _entries.Values.ToList(); + } + + IEnumerable sequence = snapshot; + + if (!string.IsNullOrWhiteSpace(query.Subject)) + { + var subject = query.Subject; + sequence = sequence.Where(e => + string.Equals(e.Artifact.Sha256, subject, StringComparison.OrdinalIgnoreCase) || + string.Equals(e.Artifact.ImageDigest, subject, StringComparison.OrdinalIgnoreCase) || + string.Equals(e.Artifact.SubjectUri, subject, StringComparison.OrdinalIgnoreCase)); + } + + if (!string.IsNullOrWhiteSpace(query.Type)) + { + sequence = sequence.Where(e => string.Equals(e.Artifact.Kind, query.Type, StringComparison.OrdinalIgnoreCase)); + } + + if (!string.IsNullOrWhiteSpace(query.Issuer)) + { + sequence = sequence.Where(e => string.Equals(e.SignerIdentity.SubjectAlternativeName, query.Issuer, StringComparison.OrdinalIgnoreCase)); + } + + if (!string.IsNullOrWhiteSpace(query.Scope)) + { + sequence = sequence.Where(e => string.Equals(e.SignerIdentity.Issuer, query.Scope, StringComparison.OrdinalIgnoreCase)); + } + + if (query.CreatedAfter is { } createdAfter) + { + sequence = sequence.Where(e => e.CreatedAt >= createdAfter); + } + + if (query.CreatedBefore is { } createdBefore) + { + sequence = sequence.Where(e => e.CreatedAt <= createdBefore); + } + + if (!string.IsNullOrWhiteSpace(query.ContinuationToken)) + { + var continuation = AttestorEntryContinuationToken.Parse(query.ContinuationToken); + sequence = sequence.Where(e => + { + var createdAt = e.CreatedAt; + if (createdAt < continuation.CreatedAt) + { + return true; + } + + if (createdAt > continuation.CreatedAt) + { + return false; + } + + return string.CompareOrdinal(e.RekorUuid, continuation.RekorUuid) >= 0; + }); + } + + var ordered = sequence + .OrderByDescending(e => e.CreatedAt) + .ThenBy(e => e.RekorUuid, StringComparer.Ordinal); + + var page = ordered.Take(pageSize + 1).ToList(); + AttestorEntry? 
next = null; + if (page.Count > pageSize) + { + next = page[^1]; + page.RemoveAt(page.Count - 1); + } + + var result = new AttestorEntryQueryResult + { + Items = page, + ContinuationToken = next is null + ? null + : AttestorEntryContinuationToken.Encode(next.CreatedAt, next.RekorUuid) + }; + + return Task.FromResult(result); + } +} + +internal sealed class InMemoryAttestorAuditSink : IAttestorAuditSink +{ + public List Records { get; } = new(); + + public Task WriteAsync(AttestorAuditRecord record, CancellationToken cancellationToken = default) + { + Records.Add(record); + return Task.CompletedTask; + } +} + +internal sealed class InMemoryAttestorArchiveStore : IAttestorArchiveStore +{ + private readonly ConcurrentDictionary _bundles = new(); + + public Task ArchiveBundleAsync(AttestorArchiveBundle bundle, CancellationToken cancellationToken = default) + { + _bundles[bundle.BundleSha256] = new AttestorArchiveBundle + { + RekorUuid = bundle.RekorUuid, + ArtifactSha256 = bundle.ArtifactSha256, + BundleSha256 = bundle.BundleSha256, + CanonicalBundleJson = bundle.CanonicalBundleJson, + ProofJson = bundle.ProofJson, + Metadata = bundle.Metadata + }; + return Task.CompletedTask; + } + + public Task GetBundleAsync(string bundleSha256, string rekorUuid, CancellationToken cancellationToken = default) + { + if (_bundles.TryGetValue(bundleSha256, out var bundle)) + { + return Task.FromResult(bundle); + } + + return Task.FromResult(null); + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/TestSupport/TestAttestorDoubles.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/TestSupport/TestAttestorDoubles.cs new file mode 100644 index 00000000..c3ad0fcc --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/TestSupport/TestAttestorDoubles.cs @@ -0,0 +1,110 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using 
StellaOps.Attestor.Core.Bulk; +using StellaOps.Attestor.Core.Signing; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Core.Transparency; + +namespace StellaOps.Attestor.Tests.Support; + +internal sealed class TestTransparencyWitnessClient : ITransparencyWitnessClient +{ + public List Requests { get; } = new(); + + public TransparencyWitnessObservation? DefaultObservation { get; set; } + + public Func? OnRequest { get; set; } + + public Task GetObservationAsync(TransparencyWitnessRequest request, CancellationToken cancellationToken = default) + { + Requests.Add(request); + if (OnRequest is not null) + { + return Task.FromResult(OnRequest(request)); + } + + return Task.FromResult(DefaultObservation); + } +} + +internal sealed class TestAttestationSigningService : IAttestationSigningService +{ + public List Requests { get; } = new(); + + public AttestationSignResult Result { get; set; } = new(); + + public Func? OnSign { get; set; } + + public Task SignAsync(AttestationSignRequest request, SubmissionContext context, CancellationToken cancellationToken = default) + { + Requests.Add(request); + if (OnSign is not null) + { + return Task.FromResult(OnSign(request, context)); + } + + return Task.FromResult(Result); + } +} + +internal sealed class TestBulkVerificationJobStore : IBulkVerificationJobStore +{ + private readonly ConcurrentDictionary _jobs = new(StringComparer.Ordinal); + + public Func>? OnTryAcquireAsync { get; set; } + + public Task CreateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default) + { + var id = string.IsNullOrWhiteSpace(job.Id) ? 
Guid.NewGuid().ToString("N") : job.Id; + job.Id = id; + _jobs[id] = job; + return Task.FromResult(job); + } + + public Task GetAsync(string jobId, CancellationToken cancellationToken = default) + { + _jobs.TryGetValue(jobId, out var job); + return Task.FromResult(job); + } + + public Task TryAcquireAsync(CancellationToken cancellationToken = default) + { + if (OnTryAcquireAsync is not null) + { + return OnTryAcquireAsync(); + } + + foreach (var job in _jobs.Values) + { + if (job.Status == BulkVerificationJobStatus.Queued) + { + return Task.FromResult(job); + } + } + + return Task.FromResult(null); + } + + public Task TryUpdateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default) + { + _jobs[job.Id] = job; + return Task.FromResult(true); + } + + public Task CountQueuedAsync(CancellationToken cancellationToken = default) + { + var count = 0; + foreach (var job in _jobs.Values) + { + if (job.Status == BulkVerificationJobStatus.Queued) + { + count++; + } + } + + return Task.FromResult(count); + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/AttestationBundleContracts.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/AttestationBundleContracts.cs new file mode 100644 index 00000000..27446784 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/AttestationBundleContracts.cs @@ -0,0 +1,88 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.AspNetCore.Http; +using StellaOps.Attestor.Core.Offline; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.WebService.Contracts; + +internal sealed class AttestationExportRequestDto +{ + public List? Uuids { get; init; } + + public string? Subject { get; init; } + + public string? Type { get; init; } + + public string? Issuer { get; init; } + + public string? Scope { get; init; } + + public DateTimeOffset? 
CreatedAfter { get; init; } + + public DateTimeOffset? CreatedBefore { get; init; } + + public int? Limit { get; init; } + + public string? ContinuationToken { get; init; } + + public bool TryToDomain(out AttestorBundleExportRequest request, out IResult? error) + { + error = null; + + if (Limit is { } limit && limit <= 0) + { + request = default!; + error = Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "`limit` must be greater than zero."); + return false; + } + + if (CreatedAfter.HasValue && CreatedBefore.HasValue && CreatedAfter > CreatedBefore) + { + request = default!; + error = Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "`createdAfter` must be earlier than `createdBefore`."); + return false; + } + + if (!string.IsNullOrWhiteSpace(ContinuationToken) && !AttestorEntryContinuationToken.TryParse(ContinuationToken, out _)) + { + request = default!; + error = Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "Invalid continuation token."); + return false; + } + + if (!string.IsNullOrWhiteSpace(ContinuationToken) && Uuids is { Count: > 0 }) + { + request = default!; + error = Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "`continuationToken` cannot be combined with explicit `uuids`."); + return false; + } + + int? sanitizedLimit = Limit.HasValue ? 
Math.Clamp(Limit.Value, 1, 200) : null; + + IReadOnlyList uuids = Array.Empty(); + if (Uuids is { Count: > 0 } uuidList) + { + uuids = uuidList + .Where(value => !string.IsNullOrWhiteSpace(value)) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + request = new AttestorBundleExportRequest + { + Uuids = uuids, + Subject = Subject, + Type = Type, + Issuer = Issuer, + Scope = Scope, + CreatedAfter = CreatedAfter, + CreatedBefore = CreatedBefore, + Limit = sanitizedLimit, + ContinuationToken = ContinuationToken + }; + + return true; + } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/AttestationListContracts.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/AttestationListContracts.cs new file mode 100644 index 00000000..7480224e --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/AttestationListContracts.cs @@ -0,0 +1,145 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Primitives; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.WebService.Contracts; + +internal static class AttestationListContracts +{ + private const int DefaultPageSize = 50; + private const int MaxPageSize = 200; + + public static bool TryBuildQuery(HttpRequest request, out AttestorEntryQuery query, out IResult? 
error) + { + error = null; + var collection = request.Query; + + var subject = GetOptional(collection, "subject"); + var type = GetOptional(collection, "type"); + var issuer = GetOptional(collection, "issuer"); + var scope = GetOptional(collection, "scope"); + var continuationToken = GetOptional(collection, "continuationToken"); + + var pageSize = DefaultPageSize; + if (collection.TryGetValue("pageSize", out var pageSizeValues) && !StringValues.IsNullOrEmpty(pageSizeValues)) + { + if (!int.TryParse(pageSizeValues.ToString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out pageSize) || pageSize <= 0) + { + query = default!; + error = Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "Invalid pageSize query parameter."); + return false; + } + + pageSize = pageSize > MaxPageSize ? MaxPageSize : pageSize; + } + + DateTimeOffset? createdAfter = null; + if (collection.TryGetValue("createdAfter", out var createdAfterValues) && !StringValues.IsNullOrEmpty(createdAfterValues)) + { + if (!DateTimeOffset.TryParse(createdAfterValues.ToString(), CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out var parsedAfter)) + { + query = default!; + error = Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "Invalid createdAfter query parameter."); + return false; + } + + createdAfter = parsedAfter; + } + + DateTimeOffset? 
createdBefore = null; + if (collection.TryGetValue("createdBefore", out var createdBeforeValues) && !StringValues.IsNullOrEmpty(createdBeforeValues)) + { + if (!DateTimeOffset.TryParse(createdBeforeValues.ToString(), CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out var parsedBefore)) + { + query = default!; + error = Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "Invalid createdBefore query parameter."); + return false; + } + + createdBefore = parsedBefore; + } + + if (createdAfter.HasValue && createdBefore.HasValue && createdAfter > createdBefore) + { + query = default!; + error = Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "`createdAfter` must be earlier than `createdBefore`."); + return false; + } + + if (continuationToken is not null && !AttestorEntryContinuationToken.TryParse(continuationToken, out _)) + { + query = default!; + error = Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "Invalid continuation token."); + return false; + } + + query = new AttestorEntryQuery + { + Subject = subject, + Type = type, + Issuer = issuer, + Scope = scope, + CreatedAfter = createdAfter, + CreatedBefore = createdBefore, + PageSize = pageSize, + ContinuationToken = continuationToken + }; + + return true; + } + + private static string? GetOptional(IQueryCollection collection, string key) + { + if (!collection.TryGetValue(key, out var values) || StringValues.IsNullOrEmpty(values)) + { + return null; + } + + return values.ToString(); + } +} + +public sealed class AttestationListResponseDto +{ + public required IReadOnlyList Items { get; init; } + + public string? 
ContinuationToken { get; init; } +} + +public sealed class AttestationListItemDto +{ + public required string Uuid { get; init; } + public required string Status { get; init; } + public required string CreatedAt { get; init; } + public required AttestationArtifactDto Artifact { get; init; } + public required AttestationSignerDto Signer { get; init; } + public required AttestationLogDto Log { get; init; } + public AttestationLogDto? Mirror { get; init; } +} + +public sealed class AttestationArtifactDto +{ + public required string Sha256 { get; init; } + public required string Kind { get; init; } + public string? ImageDigest { get; init; } + public string? SubjectUri { get; init; } +} + +public sealed class AttestationSignerDto +{ + public required string Mode { get; init; } + public string? Issuer { get; init; } + public string? Subject { get; init; } + public string? KeyId { get; init; } +} + +public sealed class AttestationLogDto +{ + public required string Backend { get; init; } + public string? Url { get; init; } + public long? Index { get; init; } + public string? Status { get; init; } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/AttestationSignContracts.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/AttestationSignContracts.cs new file mode 100644 index 00000000..ed050cbd --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/AttestationSignContracts.cs @@ -0,0 +1,56 @@ +using System.Collections.Generic; +using StellaOps.Attestor.Core.Submission; + +namespace StellaOps.Attestor.WebService.Contracts; + +public sealed class AttestationSignRequestDto +{ + public string KeyId { get; set; } = string.Empty; + + public string PayloadType { get; set; } = string.Empty; + + public string Payload { get; set; } = string.Empty; + + public string? Mode { get; set; } + + public List? 
CertificateChain { get; set; } + + public AttestationSignArtifactDto Artifact { get; set; } = new(); + + public string? LogPreference { get; set; } + + public bool? Archive { get; set; } +} + +public sealed class AttestationSignArtifactDto +{ + public string Sha256 { get; set; } = string.Empty; + + public string Kind { get; set; } = string.Empty; + + public string? ImageDigest { get; set; } + + public string? SubjectUri { get; set; } +} + +public sealed class AttestationSignResponseDto +{ + public AttestorSubmissionRequest.SubmissionBundle Bundle { get; init; } = new(); + + public AttestorSubmissionRequest.SubmissionMeta Meta { get; init; } = new(); + + public AttestationSignKeyDto Key { get; init; } = new(); +} + +public sealed class AttestationSignKeyDto +{ + public string KeyId { get; init; } = string.Empty; + + public string Algorithm { get; init; } = string.Empty; + + public string Mode { get; init; } = string.Empty; + + public string Provider { get; init; } = string.Empty; + + public string SignedAt { get; init; } = string.Empty; +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/BulkVerificationContracts.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/BulkVerificationContracts.cs new file mode 100644 index 00000000..cd25d67f --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Contracts/BulkVerificationContracts.cs @@ -0,0 +1,216 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.AspNetCore.Http; +using StellaOps.Attestor.Core.Bulk; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Verification; + +namespace StellaOps.Attestor.WebService.Contracts; + +internal static class BulkVerificationContracts +{ + public static bool TryBuildJob( + BulkVerificationRequestDto? requestDto, + AttestorOptions options, + BulkVerificationJobContext context, + out BulkVerificationJob? job, + out IResult? 
error) + { + job = null; + error = null; + + if (requestDto is null || requestDto.Items is null || requestDto.Items.Length == 0) + { + error = Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "At least one verification item is required."); + return false; + } + + var maxItems = Math.Max(1, options.Quotas.Bulk.MaxItemsPerJob); + if (requestDto.Items.Length > maxItems) + { + error = Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: $"Too many items; maximum allowed is {maxItems}."); + return false; + } + + var defaultPolicy = Normalize(requestDto.PolicyVersion); + var defaultRefresh = requestDto.RefreshProof ?? false; + + var requests = new List(requestDto.Items.Length); + + for (var index = 0; index < requestDto.Items.Length; index++) + { + var itemDto = requestDto.Items[index]; + if (itemDto is null) + { + error = Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: $"Item at index {index} is null."); + return false; + } + + var request = new BulkVerificationItemRequest + { + Uuid = Normalize(itemDto.Uuid), + ArtifactSha256 = Normalize(itemDto.ArtifactSha256), + Subject = Normalize(itemDto.Subject), + EnvelopeId = Normalize(itemDto.EnvelopeId), + PolicyVersion = Normalize(itemDto.PolicyVersion) ?? defaultPolicy, + RefreshProof = itemDto.RefreshProof ?? 
defaultRefresh + }; + + if (!IsRequestValid(request)) + { + error = Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: $"Item {index} must define uuid, artifactSha256, or subject+envelopeId."); + return false; + } + + requests.Add(new BulkVerificationJobItem + { + Index = index, + Request = request + }); + } + + job = new BulkVerificationJob + { + Context = context, + Items = requests + }; + return true; + } + + public static object MapJob(BulkVerificationJob job) + { + return new BulkVerificationJobDto + { + Id = job.Id, + Status = job.Status.ToString().ToLowerInvariant(), + CreatedAt = job.CreatedAt.ToString("O"), + StartedAt = job.StartedAt?.ToString("O"), + CompletedAt = job.CompletedAt?.ToString("O"), + Processed = job.ProcessedCount, + Succeeded = job.SucceededCount, + Failed = job.FailedCount, + Total = job.Items.Count, + FailureReason = job.FailureReason, + Items = job.Items + .OrderBy(item => item.Index) + .Select(MapItem) + .ToArray(), + Context = new BulkVerificationJobContextDto + { + Tenant = job.Context.Tenant, + RequestedBy = job.Context.RequestedBy, + ClientId = job.Context.ClientId, + Scopes = job.Context.Scopes?.ToArray() ?? 
Array.Empty() + } + }; + } + + private static BulkVerificationJobItemDto MapItem(BulkVerificationJobItem item) + { + return new BulkVerificationJobItemDto + { + Index = item.Index, + Status = item.Status.ToString().ToLowerInvariant(), + StartedAt = item.StartedAt?.ToString("O"), + CompletedAt = item.CompletedAt?.ToString("O"), + Error = item.Error, + Request = new BulkVerificationItemRequestDto + { + Uuid = item.Request.Uuid, + ArtifactSha256 = item.Request.ArtifactSha256, + Subject = item.Request.Subject, + EnvelopeId = item.Request.EnvelopeId, + PolicyVersion = item.Request.PolicyVersion, + RefreshProof = item.Request.RefreshProof + }, + Result = item.Result + }; + } + + private static bool IsRequestValid(BulkVerificationItemRequest request) + { + if (!string.IsNullOrEmpty(request.Uuid)) + { + return true; + } + + if (!string.IsNullOrEmpty(request.ArtifactSha256)) + { + return true; + } + + return !string.IsNullOrEmpty(request.Subject) && !string.IsNullOrEmpty(request.EnvelopeId); + } + + private static string? Normalize(string? value) => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); + + internal sealed record BulkVerificationJobDto + { + public required string Id { get; init; } + public required string Status { get; init; } + public required string CreatedAt { get; init; } + public string? StartedAt { get; init; } + public string? CompletedAt { get; init; } + public int Total { get; init; } + public int Processed { get; init; } + public int Succeeded { get; init; } + public int Failed { get; init; } + public string? FailureReason { get; init; } + public required BulkVerificationJobItemDto[] Items { get; init; } + public required BulkVerificationJobContextDto Context { get; init; } + } + + internal sealed record BulkVerificationJobContextDto + { + public string? Tenant { get; init; } + public string? RequestedBy { get; init; } + public string? 
ClientId { get; init; } + public string[] Scopes { get; init; } = Array.Empty(); + } + + internal sealed record BulkVerificationJobItemDto + { + public required int Index { get; init; } + public required string Status { get; init; } + public string? StartedAt { get; init; } + public string? CompletedAt { get; init; } + public string? Error { get; init; } + public required BulkVerificationItemRequestDto Request { get; init; } + public AttestorVerificationResult? Result { get; init; } + } + + internal sealed record BulkVerificationItemRequestDto + { + public string? Uuid { get; init; } + public string? ArtifactSha256 { get; init; } + public string? Subject { get; init; } + public string? EnvelopeId { get; init; } + public string? PolicyVersion { get; init; } + public bool RefreshProof { get; init; } + } +} + +internal sealed class BulkVerificationRequestDto +{ + public BulkVerificationRequestItemDto[]? Items { get; init; } + + public string? PolicyVersion { get; init; } + + public bool? RefreshProof { get; init; } +} + +internal sealed class BulkVerificationRequestItemDto +{ + public string? Uuid { get; init; } + + public string? ArtifactSha256 { get; init; } + + public string? Subject { get; init; } + + public string? EnvelopeId { get; init; } + + public string? PolicyVersion { get; init; } + + public bool? 
RefreshProof { get; init; } +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Program.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Program.cs index 339b1d1a..d06b8462 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Program.cs +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Program.cs @@ -1,405 +1,809 @@ -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Security.Authentication; -using System.Security.Cryptography; -using System.Security.Claims; -using System.Security.Cryptography.X509Certificates; -using System.Threading.RateLimiting; -using Serilog; -using Serilog.Events; -using StellaOps.Attestor.Core.Options; -using StellaOps.Attestor.Core.Submission; -using StellaOps.Attestor.Infrastructure; -using StellaOps.Configuration; -using StellaOps.Auth.ServerIntegration; -using Microsoft.Extensions.Diagnostics.HealthChecks; -using OpenTelemetry.Metrics; -using StellaOps.Attestor.Core.Observability; -using StellaOps.Attestor.Core.Verification; -using Microsoft.AspNetCore.Server.Kestrel.Https; -using Serilog.Context; - -const string ConfigurationSection = "attestor"; - -var builder = WebApplication.CreateBuilder(args); - -builder.Configuration.AddStellaOpsDefaults(options => -{ - options.BasePath = builder.Environment.ContentRootPath; - options.EnvironmentPrefix = "ATTESTOR_"; - options.BindingSection = ConfigurationSection; -}); - -builder.Host.UseSerilog((context, services, loggerConfiguration) => -{ - loggerConfiguration - .MinimumLevel.Information() - .MinimumLevel.Override("Microsoft", LogEventLevel.Warning) - .Enrich.FromLogContext() - .WriteTo.Console(); -}); - -var attestorOptions = builder.Configuration.BindOptions(ConfigurationSection); - -var clientCertificateAuthorities = LoadClientCertificateAuthorities(attestorOptions.Security.Mtls.CaBundle); - -builder.Services.AddSingleton(TimeProvider.System); 
-builder.Services.AddSingleton(attestorOptions); - -builder.Services.AddRateLimiter(options => -{ - options.RejectionStatusCode = StatusCodes.Status429TooManyRequests; - options.OnRejected = static (context, _) => - { - context.HttpContext.Response.Headers.TryAdd("Retry-After", "1"); - return ValueTask.CompletedTask; - }; - - options.AddPolicy("attestor-submissions", httpContext => - { - var identity = httpContext.Connection.ClientCertificate?.Thumbprint - ?? httpContext.User.FindFirst("sub")?.Value - ?? httpContext.User.FindFirst("client_id")?.Value - ?? httpContext.Connection.RemoteIpAddress?.ToString() - ?? "anonymous"; - - var quota = attestorOptions.Quotas.PerCaller; - var tokensPerPeriod = Math.Max(1, quota.Qps); - var tokenLimit = Math.Max(tokensPerPeriod, quota.Burst); - var queueLimit = Math.Max(quota.Burst, tokensPerPeriod); - - return RateLimitPartition.GetTokenBucketLimiter(identity, _ => new TokenBucketRateLimiterOptions - { - TokenLimit = tokenLimit, - TokensPerPeriod = tokensPerPeriod, - ReplenishmentPeriod = TimeSpan.FromSeconds(1), - QueueLimit = queueLimit, - QueueProcessingOrder = QueueProcessingOrder.OldestFirst, - AutoReplenishment = true - }); - }); -}); - -builder.Services.AddOptions() - .Bind(builder.Configuration.GetSection(ConfigurationSection)) - .ValidateOnStart(); - -builder.Services.AddProblemDetails(); -builder.Services.AddEndpointsApiExplorer(); -builder.Services.AddAttestorInfrastructure(); -builder.Services.AddHttpContextAccessor(); -builder.Services.AddHealthChecks() - .AddCheck("self", () => HealthCheckResult.Healthy()); - -builder.Services.AddOpenTelemetry() - .WithMetrics(metricsBuilder => - { - metricsBuilder.AddMeter(AttestorMetrics.MeterName); - metricsBuilder.AddAspNetCoreInstrumentation(); - metricsBuilder.AddRuntimeInstrumentation(); - }); - -if (attestorOptions.Security.Authority is { Issuer: not null } authority) -{ - builder.Services.AddStellaOpsResourceServerAuthentication( - builder.Configuration, - 
configurationSection: null, - configure: resourceOptions => - { - resourceOptions.Authority = authority.Issuer!; - resourceOptions.RequireHttpsMetadata = authority.RequireHttpsMetadata; - if (!string.IsNullOrWhiteSpace(authority.JwksUrl)) - { - resourceOptions.MetadataAddress = authority.JwksUrl; - } - - foreach (var audience in authority.Audiences) - { - resourceOptions.Audiences.Add(audience); - } - - foreach (var scope in authority.RequiredScopes) - { - resourceOptions.RequiredScopes.Add(scope); - } - }); - - builder.Services.AddAuthorization(options => - { - options.AddPolicy("attestor:write", policy => - { - policy.RequireAuthenticatedUser(); - policy.RequireClaim("scope", authority.RequiredScopes); - }); - }); -} -else -{ - builder.Services.AddAuthorization(); -} - -builder.WebHost.ConfigureKestrel(kestrel => -{ - kestrel.ConfigureHttpsDefaults(https => - { - if (attestorOptions.Security.Mtls.RequireClientCertificate) - { - https.ClientCertificateMode = ClientCertificateMode.RequireCertificate; - } - - https.SslProtocols = SslProtocols.Tls13 | SslProtocols.Tls12; - - https.ClientCertificateValidation = (certificate, _, _) => - { - if (!attestorOptions.Security.Mtls.RequireClientCertificate) - { - return true; - } - - if (certificate is null) - { - Log.Warning("Client certificate missing"); - return false; - } - - if (clientCertificateAuthorities.Count > 0) - { - using var chain = new X509Chain - { - ChainPolicy = - { - RevocationMode = X509RevocationMode.NoCheck, - TrustMode = X509ChainTrustMode.CustomRootTrust - } - }; - - foreach (var authority in clientCertificateAuthorities) - { - chain.ChainPolicy.CustomTrustStore.Add(authority); - } - - if (!chain.Build(certificate)) - { - Log.Warning("Client certificate chain validation failed for {Subject}", certificate.Subject); - return false; - } - } - - if (attestorOptions.Security.Mtls.AllowedThumbprints.Count > 0 && - !attestorOptions.Security.Mtls.AllowedThumbprints.Contains(certificate.Thumbprint ?? 
string.Empty, StringComparer.OrdinalIgnoreCase)) - { - Log.Warning("Client certificate thumbprint {Thumbprint} rejected", certificate.Thumbprint); - return false; - } - - if (attestorOptions.Security.Mtls.AllowedSubjects.Count > 0 && - !attestorOptions.Security.Mtls.AllowedSubjects.Contains(certificate.Subject, StringComparer.OrdinalIgnoreCase)) - { - Log.Warning("Client certificate subject {Subject} rejected", certificate.Subject); - return false; - } - - return true; - }; - }); -}); - -var app = builder.Build(); - -app.UseSerilogRequestLogging(); - -app.Use(async (context, next) => -{ - var correlationId = context.Request.Headers["X-Correlation-Id"].FirstOrDefault(); - if (string.IsNullOrWhiteSpace(correlationId)) - { - correlationId = Guid.NewGuid().ToString("N"); - } - - context.Response.Headers["X-Correlation-Id"] = correlationId; - - using (LogContext.PushProperty("CorrelationId", correlationId)) - { - await next().ConfigureAwait(false); - } -}); - -app.UseExceptionHandler(static handler => -{ - handler.Run(async context => - { - var result = Results.Problem(statusCode: StatusCodes.Status500InternalServerError); - await result.ExecuteAsync(context); - }); -}); - -app.UseRateLimiter(); - -app.UseAuthentication(); -app.UseAuthorization(); - -app.MapHealthChecks("/health/ready"); -app.MapHealthChecks("/health/live"); - -app.MapPost("/api/v1/rekor/entries", async (AttestorSubmissionRequest request, HttpContext httpContext, IAttestorSubmissionService submissionService, CancellationToken cancellationToken) => -{ - var certificate = httpContext.Connection.ClientCertificate; - if (certificate is null) - { - return Results.Problem(statusCode: StatusCodes.Status403Forbidden, title: "Client certificate required"); - } - - var user = httpContext.User; - if (user?.Identity is not { IsAuthenticated: true }) - { - return Results.Problem(statusCode: StatusCodes.Status401Unauthorized, title: "Authentication required"); - } - - var submissionContext = 
BuildSubmissionContext(user, certificate); - - try - { - var result = await submissionService.SubmitAsync(request, submissionContext, cancellationToken).ConfigureAwait(false); - return Results.Ok(result); - } - catch (AttestorValidationException validationEx) - { - return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: validationEx.Message, extensions: new Dictionary - { - ["code"] = validationEx.Code - }); - } -}) -.RequireAuthorization("attestor:write") -.RequireRateLimiting("attestor-submissions"); - -app.MapGet("/api/v1/rekor/entries/{uuid}", async (string uuid, bool? refresh, IAttestorVerificationService verificationService, CancellationToken cancellationToken) => -{ - var entry = await verificationService.GetEntryAsync(uuid, refresh is true, cancellationToken).ConfigureAwait(false); - if (entry is null) - { - return Results.NotFound(); - } - - return Results.Ok(new - { - uuid = entry.RekorUuid, - index = entry.Index, - backend = entry.Log.Backend, - proof = entry.Proof is null ? null : new - { - checkpoint = entry.Proof.Checkpoint is null ? null : new - { - origin = entry.Proof.Checkpoint.Origin, - size = entry.Proof.Checkpoint.Size, - rootHash = entry.Proof.Checkpoint.RootHash, - timestamp = entry.Proof.Checkpoint.Timestamp?.ToString("O") - }, - inclusion = entry.Proof.Inclusion is null ? null : new - { - leafHash = entry.Proof.Inclusion.LeafHash, - path = entry.Proof.Inclusion.Path - } - }, - logURL = entry.Log.Url, - status = entry.Status, - mirror = entry.Mirror is null ? null : new - { - backend = entry.Mirror.Backend, - uuid = entry.Mirror.Uuid, - index = entry.Mirror.Index, - logURL = entry.Mirror.Url, - status = entry.Mirror.Status, - proof = entry.Mirror.Proof is null ? null : new - { - checkpoint = entry.Mirror.Proof.Checkpoint is null ? 
null : new - { - origin = entry.Mirror.Proof.Checkpoint.Origin, - size = entry.Mirror.Proof.Checkpoint.Size, - rootHash = entry.Mirror.Proof.Checkpoint.RootHash, - timestamp = entry.Mirror.Proof.Checkpoint.Timestamp?.ToString("O") - }, - inclusion = entry.Mirror.Proof.Inclusion is null ? null : new - { - leafHash = entry.Mirror.Proof.Inclusion.LeafHash, - path = entry.Mirror.Proof.Inclusion.Path - } - }, - error = entry.Mirror.Error - }, - artifact = new - { - sha256 = entry.Artifact.Sha256, - kind = entry.Artifact.Kind, - imageDigest = entry.Artifact.ImageDigest, - subjectUri = entry.Artifact.SubjectUri - } - }); -}).RequireAuthorization("attestor:write"); - -app.MapPost("/api/v1/rekor/verify", async (AttestorVerificationRequest request, IAttestorVerificationService verificationService, CancellationToken cancellationToken) => -{ - try - { - var result = await verificationService.VerifyAsync(request, cancellationToken).ConfigureAwait(false); - return Results.Ok(result); - } - catch (AttestorVerificationException ex) - { - return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: ex.Message, extensions: new Dictionary - { - ["code"] = ex.Code - }); - } -}).RequireAuthorization("attestor:write"); - -app.Run(); - -static SubmissionContext BuildSubmissionContext(ClaimsPrincipal user, X509Certificate2 certificate) -{ - var subject = user.FindFirst("sub")?.Value ?? certificate.Subject; - var audience = user.FindFirst("aud")?.Value ?? string.Empty; - var clientId = user.FindFirst("client_id")?.Value; - var tenant = user.FindFirst("tenant")?.Value; - - return new SubmissionContext - { - CallerSubject = subject, - CallerAudience = audience, - CallerClientId = clientId, - CallerTenant = tenant, - ClientCertificate = certificate, - MtlsThumbprint = certificate.Thumbprint - }; -} - -static List LoadClientCertificateAuthorities(string? 
path) -{ - var certificates = new List(); - - if (string.IsNullOrWhiteSpace(path)) - { - return certificates; - } - - try - { - if (!File.Exists(path)) - { - Log.Warning("Client CA bundle '{Path}' not found", path); - return certificates; - } - - var collection = new X509Certificate2Collection(); - collection.ImportFromPemFile(path); - - certificates.AddRange(collection.Cast()); - } - catch (Exception ex) when (ex is IOException or CryptographicException) - { - Log.Warning(ex, "Failed to load client CA bundle from {Path}", path); - } - - return certificates; -} +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Security.Authentication; +using System.Security.Cryptography; +using System.Security.Claims; +using System.Security.Cryptography.X509Certificates; +using System.Threading.RateLimiting; +using Serilog; +using Serilog.Events; +using StellaOps.Attestor.Core.Offline; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Core.Signing; +using StellaOps.Attestor.Infrastructure; +using StellaOps.Configuration; +using StellaOps.Auth.ServerIntegration; +using Microsoft.Extensions.Diagnostics.HealthChecks; +using OpenTelemetry.Metrics; +using OpenTelemetry.Trace; +using StellaOps.Attestor.Core.Observability; +using StellaOps.Attestor.Core.Storage; +using StellaOps.Attestor.Core.Verification; +using StellaOps.Attestor.WebService.Contracts; +using StellaOps.Attestor.Core.Bulk; +using Microsoft.AspNetCore.Server.Kestrel.Https; +using Serilog.Context; + +const string ConfigurationSection = "attestor"; + +var builder = WebApplication.CreateBuilder(args); + +builder.Configuration.AddStellaOpsDefaults(options => +{ + options.BasePath = builder.Environment.ContentRootPath; + options.EnvironmentPrefix = "ATTESTOR_"; + options.BindingSection = ConfigurationSection; +}); + +builder.Host.UseSerilog((context, services, loggerConfiguration) => +{ + loggerConfiguration + 
.MinimumLevel.Information() + .MinimumLevel.Override("Microsoft", LogEventLevel.Warning) + .Enrich.FromLogContext() + .WriteTo.Console(); +}); + +var attestorOptions = builder.Configuration.BindOptions(ConfigurationSection); + +var clientCertificateAuthorities = LoadClientCertificateAuthorities(attestorOptions.Security.Mtls.CaBundle); + +builder.Services.AddSingleton(TimeProvider.System); +builder.Services.AddSingleton(attestorOptions); + +builder.Services.AddRateLimiter(options => +{ + options.RejectionStatusCode = StatusCodes.Status429TooManyRequests; + options.OnRejected = static (context, _) => + { + context.HttpContext.Response.Headers.TryAdd("Retry-After", "1"); + return ValueTask.CompletedTask; + }; + + static string ResolveIdentity(HttpContext httpContext) + { + return httpContext.Connection.ClientCertificate?.Thumbprint + ?? httpContext.User.FindFirst("sub")?.Value + ?? httpContext.User.FindFirst("client_id")?.Value + ?? httpContext.Connection.RemoteIpAddress?.ToString() + ?? "anonymous"; + } + + RateLimitPartition BuildTokenBucket(HttpContext httpContext, AttestorOptions.PerCallerQuotaOptions quota) + { + var identity = ResolveIdentity(httpContext); + var tokensPerPeriod = Math.Max(1, quota.Qps); + var tokenLimit = Math.Max(tokensPerPeriod, quota.Burst); + var queueLimit = Math.Max(quota.Burst, tokensPerPeriod); + + return RateLimitPartition.GetTokenBucketLimiter(identity, _ => new TokenBucketRateLimiterOptions + { + TokenLimit = tokenLimit, + TokensPerPeriod = tokensPerPeriod, + ReplenishmentPeriod = TimeSpan.FromSeconds(1), + QueueLimit = queueLimit, + QueueProcessingOrder = QueueProcessingOrder.OldestFirst, + AutoReplenishment = true + }); + } + + var perCallerQuota = attestorOptions.Quotas.PerCaller; + options.AddPolicy("attestor-submissions", httpContext => BuildTokenBucket(httpContext, perCallerQuota)); + options.AddPolicy("attestor-verifications", httpContext => BuildTokenBucket(httpContext, perCallerQuota)); + options.AddPolicy("attestor-reads", 
httpContext => BuildTokenBucket(httpContext, perCallerQuota)); + + options.AddPolicy("attestor-bulk", httpContext => + { + var identity = ResolveIdentity(httpContext); + var bulkQuota = attestorOptions.Quotas.Bulk; + var permitLimit = Math.Max(1, bulkQuota.RequestsPerMinute); + var queueLimit = Math.Max(0, bulkQuota.RequestsPerMinute / 2); + + return RateLimitPartition.GetFixedWindowLimiter(identity, _ => new FixedWindowRateLimiterOptions + { + PermitLimit = permitLimit, + Window = TimeSpan.FromMinutes(1), + QueueLimit = queueLimit, + QueueProcessingOrder = QueueProcessingOrder.OldestFirst + }); + }); +}); + +builder.Services.AddOptions() + .Bind(builder.Configuration.GetSection(ConfigurationSection)) + .ValidateOnStart(); + +builder.Services.AddProblemDetails(); +builder.Services.AddEndpointsApiExplorer(); +builder.Services.AddAttestorInfrastructure(); +builder.Services.AddHttpContextAccessor(); +builder.Services.AddHealthChecks() + .AddCheck("self", () => HealthCheckResult.Healthy()); + +var openTelemetry = builder.Services.AddOpenTelemetry(); + +openTelemetry.WithMetrics(metricsBuilder => +{ + metricsBuilder.AddMeter(AttestorMetrics.MeterName); + metricsBuilder.AddAspNetCoreInstrumentation(); + metricsBuilder.AddRuntimeInstrumentation(); +}); + +if (attestorOptions.Telemetry.EnableTracing) +{ + openTelemetry.WithTracing(tracingBuilder => + { + tracingBuilder.AddSource(AttestorActivitySource.Name); + tracingBuilder.AddAspNetCoreInstrumentation(); + tracingBuilder.AddHttpClientInstrumentation(); + }); +} + +if (attestorOptions.Security.Authority is { Issuer: not null } authority) +{ + builder.Services.AddStellaOpsResourceServerAuthentication( + builder.Configuration, + configurationSection: null, + configure: resourceOptions => + { + resourceOptions.Authority = authority.Issuer!; + resourceOptions.RequireHttpsMetadata = authority.RequireHttpsMetadata; + if (!string.IsNullOrWhiteSpace(authority.JwksUrl)) + { + resourceOptions.MetadataAddress = authority.JwksUrl; + 
} + + foreach (var audience in authority.Audiences) + { + resourceOptions.Audiences.Add(audience); + } + + if (authority.RequiredScopes.Count == 0) + { + resourceOptions.RequiredScopes.Add("attestor.write"); + resourceOptions.RequiredScopes.Add("attestor.verify"); + resourceOptions.RequiredScopes.Add("attestor.read"); + } + else + { + foreach (var scope in authority.RequiredScopes) + { + resourceOptions.RequiredScopes.Add(scope); + } + } + }); +} + +builder.Services.AddAuthorization(options => +{ + options.AddPolicy("attestor:write", policy => + { + policy.RequireAuthenticatedUser(); + policy.RequireAssertion(context => HasAnyScope(context.User, "attestor.write")); + }); + + options.AddPolicy("attestor:verify", policy => + { + policy.RequireAuthenticatedUser(); + policy.RequireAssertion(context => HasAnyScope(context.User, "attestor.verify", "attestor.write")); + }); + + options.AddPolicy("attestor:read", policy => + { + policy.RequireAuthenticatedUser(); + policy.RequireAssertion(context => HasAnyScope(context.User, "attestor.read", "attestor.verify", "attestor.write")); + }); +}); + +builder.WebHost.ConfigureKestrel(kestrel => +{ + kestrel.ConfigureHttpsDefaults(https => + { + if (attestorOptions.Security.Mtls.RequireClientCertificate) + { + https.ClientCertificateMode = ClientCertificateMode.RequireCertificate; + } + + https.SslProtocols = SslProtocols.Tls13 | SslProtocols.Tls12; + + https.ClientCertificateValidation = (certificate, _, _) => + { + if (!attestorOptions.Security.Mtls.RequireClientCertificate) + { + return true; + } + + if (certificate is null) + { + Log.Warning("Client certificate missing"); + return false; + } + + if (clientCertificateAuthorities.Count > 0) + { + using var chain = new X509Chain + { + ChainPolicy = + { + RevocationMode = X509RevocationMode.NoCheck, + TrustMode = X509ChainTrustMode.CustomRootTrust + } + }; + + foreach (var authority in clientCertificateAuthorities) + { + chain.ChainPolicy.CustomTrustStore.Add(authority); + } + + if 
(!chain.Build(certificate)) + { + Log.Warning("Client certificate chain validation failed for {Subject}", certificate.Subject); + return false; + } + } + + if (attestorOptions.Security.Mtls.AllowedThumbprints.Count > 0 && + !attestorOptions.Security.Mtls.AllowedThumbprints.Contains(certificate.Thumbprint ?? string.Empty, StringComparer.OrdinalIgnoreCase)) + { + Log.Warning("Client certificate thumbprint {Thumbprint} rejected", certificate.Thumbprint); + return false; + } + + if (attestorOptions.Security.Mtls.AllowedSubjects.Count > 0 && + !attestorOptions.Security.Mtls.AllowedSubjects.Contains(certificate.Subject, StringComparer.OrdinalIgnoreCase)) + { + Log.Warning("Client certificate subject {Subject} rejected", certificate.Subject); + return false; + } + + return true; + }; + }); +}); + +var app = builder.Build(); + +app.UseSerilogRequestLogging(); + +app.Use(async (context, next) => +{ + var correlationId = context.Request.Headers["X-Correlation-Id"].FirstOrDefault(); + if (string.IsNullOrWhiteSpace(correlationId)) + { + correlationId = Guid.NewGuid().ToString("N"); + } + + context.Response.Headers["X-Correlation-Id"] = correlationId; + + using (LogContext.PushProperty("CorrelationId", correlationId)) + { + await next().ConfigureAwait(false); + } +}); + +app.UseExceptionHandler(static handler => +{ + handler.Run(async context => + { + var result = Results.Problem(statusCode: StatusCodes.Status500InternalServerError); + await result.ExecuteAsync(context); + }); +}); + +app.UseRateLimiter(); + +app.UseAuthentication(); +app.UseAuthorization(); + +app.MapHealthChecks("/health/ready"); +app.MapHealthChecks("/health/live"); + +app.MapGet("/api/v1/attestations", async (HttpRequest request, IAttestorEntryRepository repository, CancellationToken cancellationToken) => +{ + if (!AttestationListContracts.TryBuildQuery(request, out var query, out var error)) + { + return error!; + } + + var result = await repository.QueryAsync(query, 
cancellationToken).ConfigureAwait(false); + var response = new AttestationListResponseDto + { + Items = result.Items.Select(MapToListItem).ToList(), + ContinuationToken = result.ContinuationToken + }; + + return Results.Ok(response); +}) +.RequireAuthorization("attestor:read") +.RequireRateLimiting("attestor-reads"); + +app.MapPost("/api/v1/attestations:export", async (HttpContext httpContext, AttestationExportRequestDto? requestDto, IAttestorBundleService bundleService, CancellationToken cancellationToken) => +{ + if (httpContext.Request.ContentLength > 0 && !IsJsonContentType(httpContext.Request.ContentType)) + { + return UnsupportedMediaTypeResult(); + } + + AttestorBundleExportRequest request; + if (requestDto is null) + { + request = new AttestorBundleExportRequest(); + } + else if (!requestDto.TryToDomain(out request, out var error)) + { + return error!; + } + + var package = await bundleService.ExportAsync(request, cancellationToken).ConfigureAwait(false); + return Results.Ok(package); +}) +.RequireAuthorization("attestor:read") +.RequireRateLimiting("attestor-reads") +.Produces(StatusCodes.Status200OK); + +app.MapPost("/api/v1/attestations:import", async (HttpContext httpContext, AttestorBundlePackage package, IAttestorBundleService bundleService, CancellationToken cancellationToken) => +{ + if (!IsJsonContentType(httpContext.Request.ContentType)) + { + return UnsupportedMediaTypeResult(); + } + + var result = await bundleService.ImportAsync(package, cancellationToken).ConfigureAwait(false); + return Results.Ok(result); +}) +.RequireAuthorization("attestor:write") +.RequireRateLimiting("attestor-submissions") +.Produces(StatusCodes.Status200OK); + +app.MapPost("/api/v1/attestations:sign", async (AttestationSignRequestDto? 
requestDto, HttpContext httpContext, IAttestationSigningService signingService, CancellationToken cancellationToken) => +{ + if (requestDto is null) + { + return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "Request body is required."); + } + + if (!IsJsonContentType(httpContext.Request.ContentType)) + { + return UnsupportedMediaTypeResult(); + } + + var certificate = httpContext.Connection.ClientCertificate; + if (certificate is null) + { + return Results.Problem(statusCode: StatusCodes.Status403Forbidden, title: "Client certificate required"); + } + + var user = httpContext.User; + if (user?.Identity is not { IsAuthenticated: true }) + { + return Results.Problem(statusCode: StatusCodes.Status401Unauthorized, title: "Authentication required"); + } + + var signingRequest = new AttestationSignRequest + { + KeyId = requestDto.KeyId ?? string.Empty, + PayloadType = requestDto.PayloadType ?? string.Empty, + PayloadBase64 = requestDto.Payload ?? string.Empty, + Mode = requestDto.Mode, + CertificateChain = requestDto.CertificateChain ?? new List(), + Artifact = new AttestorSubmissionRequest.ArtifactInfo + { + Sha256 = requestDto.Artifact?.Sha256 ?? string.Empty, + Kind = requestDto.Artifact?.Kind ?? string.Empty, + ImageDigest = requestDto.Artifact?.ImageDigest, + SubjectUri = requestDto.Artifact?.SubjectUri + }, + LogPreference = requestDto.LogPreference ?? "primary", + Archive = requestDto.Archive ?? 
true + }; + + try + { + var submissionContext = BuildSubmissionContext(user, certificate); + var result = await signingService.SignAsync(signingRequest, submissionContext, cancellationToken).ConfigureAwait(false); + var response = new AttestationSignResponseDto + { + Bundle = result.Bundle, + Meta = result.Meta, + Key = new AttestationSignKeyDto + { + KeyId = result.KeyId, + Algorithm = result.Algorithm, + Mode = result.Mode, + Provider = result.Provider, + SignedAt = result.SignedAt.ToString("O") + } + }; + return Results.Ok(response); + } + catch (AttestorSigningException signingEx) + { + return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: signingEx.Message, extensions: new Dictionary + { + ["code"] = signingEx.Code + }); + } +}).RequireAuthorization("attestor:write") + .RequireRateLimiting("attestor-submissions"); + +app.MapPost("/api/v1/rekor/entries", async (AttestorSubmissionRequest request, HttpContext httpContext, IAttestorSubmissionService submissionService, CancellationToken cancellationToken) => +{ + if (!IsJsonContentType(httpContext.Request.ContentType)) + { + return UnsupportedMediaTypeResult(); + } + + var certificate = httpContext.Connection.ClientCertificate; + if (certificate is null) + { + return Results.Problem(statusCode: StatusCodes.Status403Forbidden, title: "Client certificate required"); + } + + var user = httpContext.User; + if (user?.Identity is not { IsAuthenticated: true }) + { + return Results.Problem(statusCode: StatusCodes.Status401Unauthorized, title: "Authentication required"); + } + + var submissionContext = BuildSubmissionContext(user, certificate); + + try + { + var result = await submissionService.SubmitAsync(request, submissionContext, cancellationToken).ConfigureAwait(false); + return Results.Ok(result); + } + catch (AttestorValidationException validationEx) + { + return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: validationEx.Message, extensions: new Dictionary + { + ["code"] = 
validationEx.Code + }); + } +}) +.RequireAuthorization("attestor:write") +.RequireRateLimiting("attestor-submissions"); + +app.MapGet("/api/v1/rekor/entries/{uuid}", async (string uuid, bool? refresh, IAttestorVerificationService verificationService, CancellationToken cancellationToken) => + await GetAttestationDetailResultAsync(uuid, refresh is true, verificationService, cancellationToken)) + .RequireAuthorization("attestor:read") + .RequireRateLimiting("attestor-reads"); + +app.MapGet("/api/v1/attestations/{uuid}", async (string uuid, bool? refresh, IAttestorVerificationService verificationService, CancellationToken cancellationToken) => + await GetAttestationDetailResultAsync(uuid, refresh is true, verificationService, cancellationToken)) + .RequireAuthorization("attestor:read") + .RequireRateLimiting("attestor-reads"); + +app.MapPost("/api/v1/rekor/verify", async (HttpContext httpContext, AttestorVerificationRequest verifyRequest, IAttestorVerificationService verificationService, CancellationToken cancellationToken) => +{ + if (!IsJsonContentType(httpContext.Request.ContentType)) + { + return UnsupportedMediaTypeResult(); + } + + try + { + var result = await verificationService.VerifyAsync(verifyRequest, cancellationToken).ConfigureAwait(false); + return Results.Ok(result); + } + catch (AttestorVerificationException ex) + { + return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: ex.Message, extensions: new Dictionary + { + ["code"] = ex.Code + }); + } +}) +.RequireAuthorization("attestor:verify") +.RequireRateLimiting("attestor-verifications"); + +app.MapPost("/api/v1/rekor/verify:bulk", async ( + BulkVerificationRequestDto? 
requestDto, + HttpContext httpContext, + IBulkVerificationJobStore jobStore, + CancellationToken cancellationToken) => +{ + var context = BuildBulkJobContext(httpContext.User); + + if (!BulkVerificationContracts.TryBuildJob(requestDto, attestorOptions, context, out var job, out var error)) + { + return error!; + } + + var queued = await jobStore.CountQueuedAsync(cancellationToken).ConfigureAwait(false); + if (queued >= Math.Max(1, attestorOptions.Quotas.Bulk.MaxQueuedJobs)) + { + return Results.Problem(statusCode: StatusCodes.Status429TooManyRequests, title: "Too many bulk verification jobs queued. Try again later."); + } + + job = await jobStore.CreateAsync(job!, cancellationToken).ConfigureAwait(false); + var response = BulkVerificationContracts.MapJob(job); + return Results.Accepted($"/api/v1/rekor/verify:bulk/{job.Id}", response); +}).RequireAuthorization("attestor:write") + .RequireRateLimiting("attestor-bulk"); + +app.MapGet("/api/v1/rekor/verify:bulk/{jobId}", async ( + string jobId, + HttpContext httpContext, + IBulkVerificationJobStore jobStore, + CancellationToken cancellationToken) => +{ + if (string.IsNullOrWhiteSpace(jobId)) + { + return Results.NotFound(); + } + + var job = await jobStore.GetAsync(jobId, cancellationToken).ConfigureAwait(false); + if (job is null || !IsAuthorizedForJob(job, httpContext.User)) + { + return Results.NotFound(); + } + + return Results.Ok(BulkVerificationContracts.MapJob(job)); +}).RequireAuthorization("attestor:write"); + +app.Run(); + +static async Task GetAttestationDetailResultAsync( + string uuid, + bool refresh, + IAttestorVerificationService verificationService, + CancellationToken cancellationToken) +{ + var entry = await verificationService.GetEntryAsync(uuid, refresh, cancellationToken).ConfigureAwait(false); + if (entry is null) + { + return Results.NotFound(); + } + + return Results.Ok(MapAttestationDetail(entry)); +} + +static object MapAttestationDetail(AttestorEntry entry) +{ + return new + { + uuid = 
entry.RekorUuid, + index = entry.Index, + backend = entry.Log.Backend, + proof = entry.Proof is null ? null : new + { + checkpoint = entry.Proof.Checkpoint is null ? null : new + { + origin = entry.Proof.Checkpoint.Origin, + size = entry.Proof.Checkpoint.Size, + rootHash = entry.Proof.Checkpoint.RootHash, + timestamp = entry.Proof.Checkpoint.Timestamp?.ToString("O") + }, + inclusion = entry.Proof.Inclusion is null ? null : new + { + leafHash = entry.Proof.Inclusion.LeafHash, + path = entry.Proof.Inclusion.Path + } + }, + logURL = entry.Log.Url, + status = entry.Status, + mirror = entry.Mirror is null ? null : new + { + backend = entry.Mirror.Backend, + uuid = entry.Mirror.Uuid, + index = entry.Mirror.Index, + logURL = entry.Mirror.Url, + status = entry.Mirror.Status, + proof = entry.Mirror.Proof is null ? null : new + { + checkpoint = entry.Mirror.Proof.Checkpoint is null ? null : new + { + origin = entry.Mirror.Proof.Checkpoint.Origin, + size = entry.Mirror.Proof.Checkpoint.Size, + rootHash = entry.Mirror.Proof.Checkpoint.RootHash, + timestamp = entry.Mirror.Proof.Checkpoint.Timestamp?.ToString("O") + }, + inclusion = entry.Mirror.Proof.Inclusion is null ? 
null : new + { + leafHash = entry.Mirror.Proof.Inclusion.LeafHash, + path = entry.Mirror.Proof.Inclusion.Path + } + }, + error = entry.Mirror.Error + }, + artifact = new + { + sha256 = entry.Artifact.Sha256, + kind = entry.Artifact.Kind, + imageDigest = entry.Artifact.ImageDigest, + subjectUri = entry.Artifact.SubjectUri + } + }; +} + +static AttestationListItemDto MapToListItem(AttestorEntry entry) +{ + return new AttestationListItemDto + { + Uuid = entry.RekorUuid, + Status = entry.Status, + CreatedAt = entry.CreatedAt.ToString("O"), + Artifact = new AttestationArtifactDto + { + Sha256 = entry.Artifact.Sha256, + Kind = entry.Artifact.Kind, + ImageDigest = entry.Artifact.ImageDigest, + SubjectUri = entry.Artifact.SubjectUri + }, + Signer = new AttestationSignerDto + { + Mode = entry.SignerIdentity.Mode, + Issuer = entry.SignerIdentity.Issuer, + Subject = entry.SignerIdentity.SubjectAlternativeName, + KeyId = entry.SignerIdentity.KeyId + }, + Log = new AttestationLogDto + { + Backend = entry.Log.Backend, + Url = entry.Log.Url, + Index = entry.Index, + Status = entry.Status + }, + Mirror = entry.Mirror is null ? null : new AttestationLogDto + { + Backend = entry.Mirror.Backend, + Url = entry.Mirror.Url, + Index = entry.Mirror.Index, + Status = entry.Mirror.Status + } + }; +} + +static SubmissionContext BuildSubmissionContext(ClaimsPrincipal user, X509Certificate2 certificate) +{ + var subject = user.FindFirst("sub")?.Value ?? certificate.Subject; + var audience = user.FindFirst("aud")?.Value ?? 
string.Empty; + var clientId = user.FindFirst("client_id")?.Value; + var tenant = user.FindFirst("tenant")?.Value; + + return new SubmissionContext + { + CallerSubject = subject, + CallerAudience = audience, + CallerClientId = clientId, + CallerTenant = tenant, + ClientCertificate = certificate, + MtlsThumbprint = certificate.Thumbprint + }; +} + +static BulkVerificationJobContext BuildBulkJobContext(ClaimsPrincipal user) +{ + var scopes = user.FindAll("scope") + .Select(claim => claim.Value) + .Where(value => !string.IsNullOrWhiteSpace(value)) + .ToList(); + + return new BulkVerificationJobContext + { + Tenant = user.FindFirst("tenant")?.Value, + RequestedBy = user.FindFirst("sub")?.Value, + ClientId = user.FindFirst("client_id")?.Value, + Scopes = scopes + }; +} + +static bool IsAuthorizedForJob(BulkVerificationJob job, ClaimsPrincipal user) +{ + var tenant = user.FindFirst("tenant")?.Value; + if (!string.IsNullOrEmpty(job.Context.Tenant) && + !string.Equals(job.Context.Tenant, tenant, StringComparison.Ordinal)) + { + return false; + } + + var subject = user.FindFirst("sub")?.Value; + if (!string.IsNullOrEmpty(job.Context.RequestedBy) && + !string.Equals(job.Context.RequestedBy, subject, StringComparison.Ordinal)) + { + return false; + } + + return true; +} + + +static List LoadClientCertificateAuthorities(string? 
path) +{ + var certificates = new List(); + + if (string.IsNullOrWhiteSpace(path)) + { + return certificates; + } + + try + { + if (!File.Exists(path)) + { + Log.Warning("Client CA bundle '{Path}' not found", path); + return certificates; + } + + var collection = new X509Certificate2Collection(); + collection.ImportFromPemFile(path); + + certificates.AddRange(collection.Cast()); + } + catch (Exception ex) when (ex is IOException or CryptographicException) + { + Log.Warning(ex, "Failed to load client CA bundle from {Path}", path); + } + + return certificates; +} + +static bool HasAnyScope(ClaimsPrincipal user, params string[] scopes) +{ + if (user?.Identity is not { IsAuthenticated: true } || scopes.Length == 0) + { + return false; + } + + foreach (var granted in ExtractScopes(user)) + { + foreach (var required in scopes) + { + if (string.Equals(granted, required, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + } + + return false; +} + +static IEnumerable ExtractScopes(ClaimsPrincipal user) +{ + foreach (var claim in user.FindAll("scope")) + { + foreach (var value in claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries)) + { + yield return value; + } + } + + foreach (var claim in user.FindAll("scp")) + { + foreach (var value in claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries)) + { + yield return value; + } + } +} + +static bool IsJsonContentType(string? contentType) +{ + if (string.IsNullOrWhiteSpace(contentType)) + { + return false; + } + + var mediaType = contentType.Split(';', 2)[0].Trim(); + if (mediaType.Length == 0) + { + return false; + } + + return mediaType.EndsWith("/json", StringComparison.OrdinalIgnoreCase) + || mediaType.Contains("+json", StringComparison.OrdinalIgnoreCase); +} + +static IResult UnsupportedMediaTypeResult() +{ + return Results.Problem( + statusCode: StatusCodes.Status415UnsupportedMediaType, + title: "Unsupported content type. 
Submit application/json payloads.", + extensions: new Dictionary + { + ["code"] = "unsupported_media_type" + }); +} diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Properties/AssemblyInfo.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..98cbea92 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Properties/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Attestor.Tests")] diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.sln b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.sln index 8abfec51..ce6da1e8 100644 --- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.sln +++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.sln @@ -9,15 +9,17 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Infrastr EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.WebService", "StellaOps.Attestor.WebService\StellaOps.Attestor.WebService.csproj", "{B238B098-32B1-4875-99A7-393A63AC3CCF}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\StellaOps.Configuration\StellaOps.Configuration.csproj", "{988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{82EFA477-307D-4B47-A4CF-1627F076D60A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{21327A4F-2586-49F8-9D4A-3840DE64C48E}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Tests", "StellaOps.Attestor.Tests\StellaOps.Attestor.Tests.csproj", "{4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}" -EndProject -Global 
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{82EFA477-307D-4B47-A4CF-1627F076D60A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{21327A4F-2586-49F8-9D4A-3840DE64C48E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Tests", "StellaOps.Attestor.Tests\StellaOps.Attestor.Tests.csproj", "{4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Verify", "..\StellaOps.Attestor.Verify\StellaOps.Attestor.Verify.csproj", "{99EC90D8-0D5E-41E4-A895-585A7680916C}" +EndProject +Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU Debug|x64 = Debug|x64 @@ -108,10 +110,22 @@ Global {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|Any CPU.ActiveCfg = Release|Any CPU {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|Any CPU.Build.0 = Release|Any CPU {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|x64.ActiveCfg = Release|Any CPU - {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|x64.Build.0 = Release|Any CPU - {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|x86.ActiveCfg = Release|Any CPU - {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|x86.Build.0 = Release|Any CPU - EndGlobalSection + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|x64.Build.0 = Release|Any CPU + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|x86.ActiveCfg = Release|Any CPU + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|x86.Build.0 = Release|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Debug|Any CPU.ActiveCfg = 
Debug|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Debug|x64.ActiveCfg = Debug|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Debug|x64.Build.0 = Debug|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Debug|x86.ActiveCfg = Debug|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Debug|x86.Build.0 = Debug|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Release|Any CPU.Build.0 = Release|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Release|x64.ActiveCfg = Release|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Release|x64.Build.0 = Release|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Release|x86.ActiveCfg = Release|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection diff --git a/src/Attestor/StellaOps.Attestor/TASKS.md b/src/Attestor/StellaOps.Attestor/TASKS.md index 58dbf5a2..c346c56c 100644 --- a/src/Attestor/StellaOps.Attestor/TASKS.md +++ b/src/Attestor/StellaOps.Attestor/TASKS.md @@ -13,26 +13,29 @@ ### Sprint 72 – Foundations | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| ATTESTOR-72-001 | TODO | Attestor Service Guild | ATTEST-ENVELOPE-72-001 | Scaffold service (REST API skeleton, storage interfaces, KMS integration stubs) and DSSE validation pipeline. | Service builds/tests; signing & verification stubs wired; lint/CI green. | -| ATTESTOR-72-002 | TODO | Attestor Service Guild | ATTESTOR-72-001 | Implement attestation store (DB tables, object storage integration), CRUD, and indexing strategies. | Migrations applied; CRUD API functional; storage integration unit tests pass. 
| +| ATTESTOR-72-001 | DONE | Attestor Service Guild | ATTEST-ENVELOPE-72-001 | Scaffold service (REST API skeleton, storage interfaces, KMS integration stubs) and DSSE validation pipeline. | Service builds/tests; signing & verification stubs wired; lint/CI green. | +| ATTESTOR-72-002 | DONE | Attestor Service Guild | ATTESTOR-72-001 | Implement attestation store (DB tables, object storage integration), CRUD, and indexing strategies. | Migrations applied; CRUD API functional; storage integration unit tests pass. | +| ATTESTOR-72-003 | BLOCKED | Attestor Service Guild, QA Guild | ATTESTOR-72-002 | Validate attestation store TTL against production-like Mongo/Redis stack; capture logs and remediation plan. | Evidence of TTL expiry captured; report archived in docs/modules/attestor/ttl-validation.md. | ### Sprint 73 – Signing & Verification | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| ATTESTOR-73-001 | TODO | Attestor Service Guild, KMS Guild | ATTESTOR-72-002, KMS-72-001 | Implement signing endpoint with Ed25519/ECDSA support, KMS integration, and audit logging. | `POST /v1/attestations:sign` functional; audit entries recorded; tests cover success/failure. | -| ATTESTOR-73-002 | TODO | Attestor Service Guild, Policy Guild | ATTESTOR-72-002, VERPOL-73-001 | Build verification pipeline evaluating DSSE signatures, issuer trust, and verification policies; persist reports. | Verification endpoint returns structured report; results cached; contract tests pass. | -| ATTESTOR-73-003 | TODO | Attestor Service Guild | ATTESTOR-73-002 | Implement listing/fetch APIs with filters (subject, type, issuer, scope, date). | API documented; pagination works; contract tests green. | +| ATTESTOR-73-001 | DONE (2025-11-01) | Attestor Service Guild, KMS Guild | ATTESTOR-72-002, KMS-72-001 | Implement signing endpoint with Ed25519/ECDSA support, KMS integration, and audit logging. 
| `POST /v1/attestations:sign` functional; audit entries recorded; tests cover success/failure. | +| ATTESTOR-73-002 | DONE (2025-11-01) | Attestor Service Guild, Policy Guild | ATTESTOR-72-002, VERPOL-73-001 | Build verification pipeline evaluating DSSE signatures, issuer trust, and verification policies; persist reports. | Verification endpoint returns structured report; results cached; contract tests pass. | +| ATTESTOR-73-003 | DONE | Attestor Service Guild | ATTESTOR-73-002 | Implement listing/fetch APIs with filters (subject, type, issuer, scope, date). | API documented; pagination works; contract tests green. | + +> 2025-11-01: Verification endpoints now return structured reports and persist cached results; telemetry and tests (AttestorVerificationServiceTests, CachedAttestorVerificationServiceTests) cover pass/fail/cached paths. ### Sprint 74 – Transparency & Bulk | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| ATTESTOR-74-001 | TODO | Attestor Service Guild | ATTESTOR-73-002, TRANSP-74-001 | Integrate transparency witness client, inclusion proof verification, and caching. | Witness proofs stored; verification fails on missing/inconsistent proofs; metrics emitted. | -| ATTESTOR-74-002 | TODO | Attestor Service Guild | ATTESTOR-73-002 | Implement bulk verification worker + API with progress tracking, rate limits, and caching. | Bulk job API functional; worker processes batches; telemetry recorded. | +| ATTESTOR-74-001 | DONE (2025-11-02) | Attestor Service Guild | ATTESTOR-73-002, TRANSP-74-001 | Integrate transparency witness client, inclusion proof verification, and caching. | Witness proofs stored; verification fails on missing/inconsistent proofs; metrics emitted. | +| ATTESTOR-74-002 | DONE | Attestor Service Guild | ATTESTOR-73-002 | Implement bulk verification worker + API with progress tracking, rate limits, and caching. 
| Bulk job API functional; worker processes batches; telemetry recorded. | ### Sprint 75 – Air Gap & Hardening | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| ATTESTOR-75-001 | TODO | Attestor Service Guild, Export Guild | ATTESTOR-74-002, EXPORT-ATTEST-74-001 | Add export/import flows for attestation bundles and offline verification mode. | Bundles generated/imported; offline verification path documented; tests cover missing witness data. | -| ATTESTOR-75-002 | TODO | Attestor Service Guild, Security Guild | ATTESTOR-73-002 | Harden APIs with rate limits, auth scopes, threat model mitigations, and fuzz testing. | Rate limiting enforced; fuzz tests run in CI; threat model actions resolved. | +| ATTESTOR-75-001 | DONE | Attestor Service Guild, Export Guild | ATTESTOR-74-002, EXPORT-ATTEST-74-001 | Add export/import flows for attestation bundles and offline verification mode. | Bundles generated/imported; offline verification path documented; tests cover missing witness data. | +| ATTESTOR-75-002 | DONE | Attestor Service Guild, Security Guild | ATTESTOR-73-002 | Harden APIs with rate limits, auth scopes, threat model mitigations, and fuzz testing. | Rate limiting enforced; fuzz tests run in CI; threat model actions resolved. 
| *** End Task Board *** diff --git a/src/Attestor/__Tests/StellaOps.Attestor.Types.Tests/AttestationGoldenSamplesTests.cs b/src/Attestor/__Tests/StellaOps.Attestor.Types.Tests/AttestationGoldenSamplesTests.cs new file mode 100644 index 00000000..6471bb0e --- /dev/null +++ b/src/Attestor/__Tests/StellaOps.Attestor.Types.Tests/AttestationGoldenSamplesTests.cs @@ -0,0 +1,123 @@ +using System.Text.Json; +using System.Text.Json.Nodes; +using FluentAssertions; +using Xunit; + +namespace StellaOps.Attestor.Types.Tests; + +public class AttestationGoldenSamplesTests +{ + private const string ExpectedSubjectDigest = "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"; + + [Fact] + public void EverySampleIsCanonicalAndComplete() + { + var samplesDirectory = Path.Combine(AppContext.BaseDirectory, "samples"); + Directory.Exists(samplesDirectory) + .Should() + .BeTrue($"golden samples should be copied to '{samplesDirectory}'"); + + var sampleFiles = Directory.EnumerateFiles(samplesDirectory, "*.json", SearchOption.TopDirectoryOnly) + .OrderBy(path => path, StringComparer.OrdinalIgnoreCase) + .ToList(); + + sampleFiles.Should().NotBeEmpty("golden attestation samples must exist"); + + foreach (var samplePath in sampleFiles) + { + var json = File.ReadAllText(samplePath); + var node = JsonNode.Parse(json, new JsonNodeOptions { PropertyNameCaseInsensitive = false }) + ?? 
throw new InvalidOperationException($"Failed to parse sample '{samplePath}'."); + + node.Should().BeOfType($"sample '{samplePath}' must be a JSON object"); + AssertObjectKeysSorted(node.AsObject(), Path.GetFileName(samplePath)); + + node["_type"] + .Should() + .NotBeNull($"sample '{samplePath}' must declare in-toto type") + .And + .Subject.As() + .GetValue() + .Should() + .Be("https://in-toto.io/Statement/v1"); + + var predicateType = node["predicateType"]?.GetValue(); + predicateType.Should().NotBeNullOrWhiteSpace($"sample '{samplePath}' must declare predicateType"); + predicateType!.Should().MatchRegex(@"^StellaOps\.[A-Za-z]+@1$", "predicate types follow naming convention"); + + node["predicateVersion"] + ?.GetValue() + .Should() + .Be("1.0.0", $"sample '{samplePath}' must lock predicateVersion"); + + var subjectArray = node["subject"]?.AsArray(); + subjectArray.Should().NotBeNullOrEmpty($"sample '{samplePath}' must describe subject digests"); + + for (var index = 0; index < subjectArray!.Count; index++) + { + var subjectEntry = subjectArray[index] ?? 
throw new InvalidOperationException($"Null subject entry at index {index} in '{samplePath}'."); + + var digest = subjectEntry["digest"]?["sha256"]?.GetValue(); + digest.Should().NotBeNullOrWhiteSpace($"sample '{samplePath}' requires subject.digest.sha256"); + digest!.Should().Be(ExpectedSubjectDigest, "golden samples share a single canonical digest"); + + var name = subjectEntry["name"]?.GetValue(); + name.Should().NotBeNullOrWhiteSpace($"sample '{samplePath}' requires subject name"); + name!.Should().Contain(ExpectedSubjectDigest, "subject name should embed the digest"); + + AssertObjectKeysSorted(subjectEntry.AsObject(), $"{Path.GetFileName(samplePath)}:subject[{index}]"); + AssertCanonicalRecursively(subjectEntry, $"{Path.GetFileName(samplePath)}:subject[{index}]"); + } + + var predicate = node["predicate"]?.AsObject(); + predicate.Should().NotBeNull($"sample '{samplePath}' must include predicate content"); + + AssertObjectKeysSorted(predicate!, $"{Path.GetFileName(samplePath)}:predicate"); + AssertCanonicalRecursively(predicate!, $"{Path.GetFileName(samplePath)}:predicate"); + } + } + + private static void AssertCanonicalRecursively(JsonNode node, string path) + { + switch (node) + { + case JsonObject obj: + AssertObjectKeysSorted(obj, path); + + foreach (var property in obj) + { + property.Value.Should().NotBeNull($"property '{path}.{property.Key}' must not be null"); + AssertCanonicalRecursively(property.Value!, $"{path}.{property.Key}"); + } + + break; + + case JsonArray array: + for (var index = 0; index < array.Count; index++) + { + var element = array[index]; + element.Should().NotBeNull($"array element '{path}[{index}]' must not be null"); + AssertCanonicalRecursively(element!, $"{path}[{index}]"); + } + + break; + } + } + + private static void AssertObjectKeysSorted(JsonObject obj, string path) + { + string? 
previous = null; + + foreach (var property in obj) + { + if (previous is not null) + { + string.CompareOrdinal(previous, property.Key) + .Should() + .BeLessOrEqualTo(0, $"object '{path}' must keep keys in lexicographical order"); + } + + previous = property.Key; + } + } +} diff --git a/src/Attestor/__Tests/StellaOps.Attestor.Types.Tests/StellaOps.Attestor.Types.Tests.csproj b/src/Attestor/__Tests/StellaOps.Attestor.Types.Tests/StellaOps.Attestor.Types.Tests.csproj new file mode 100644 index 00000000..f58077af --- /dev/null +++ b/src/Attestor/__Tests/StellaOps.Attestor.Types.Tests/StellaOps.Attestor.Types.Tests.csproj @@ -0,0 +1,15 @@ + + + net10.0 + enable + enable + false + true + + + + + + + + diff --git a/src/Authority/StellaOps.Api.OpenApi/authority/openapi.yaml b/src/Authority/StellaOps.Api.OpenApi/authority/openapi.yaml index 66a034a0..615785b9 100644 --- a/src/Authority/StellaOps.Api.OpenApi/authority/openapi.yaml +++ b/src/Authority/StellaOps.Api.OpenApi/authority/openapi.yaml @@ -1,689 +1,884 @@ -openapi: 3.1.0 -info: - title: StellaOps Authority Authentication API - summary: Token issuance, introspection, revocation, and key discovery endpoints exposed by the Authority service. - description: | - The Authority service issues OAuth 2.1 access tokens for StellaOps components, enforcing tenant and scope - restrictions configured per client. This specification describes the authentication surface only; domain APIs - are documented by their owning services. - version: 0.1.0 -jsonSchemaDialect: https://json-schema.org/draft/2020-12/schema -servers: - - url: https://authority.stellaops.local - description: Example Authority deployment -tags: - - name: Authentication - description: OAuth 2.1 token exchange, introspection, and revocation flows. - - name: Keys - description: JSON Web Key Set discovery. -components: - securitySchemes: - ClientSecretBasic: - type: http - scheme: basic - description: HTTP Basic authentication with `client_id` and `client_secret`. 
- OAuthPassword: - type: oauth2 - description: Resource owner password exchange for Authority-managed identities. - flows: - password: - tokenUrl: /token - refreshUrl: /token - scopes: - advisory:ingest: Submit advisory ingestion payloads. - advisory:read: Read advisory ingestion data. - aoc:verify: Execute Aggregation-Only Contract verification workflows. - authority.audit.read: Read Authority audit logs. - authority.clients.manage: Manage Authority client registrations. - authority.users.manage: Manage Authority users. - authority:tenants.read: Read the Authority tenant catalog. - concelier.jobs.trigger: Trigger Concelier aggregation jobs. - concelier.merge: Manage Concelier merge operations. - effective:write: Write effective findings (Policy Engine service identity only). - email: Access email claim data. - exceptions:approve: Approve exception workflows. - findings:read: Read effective findings emitted by Policy Engine. - graph:export: Export graph artefacts. - graph:read: Read graph explorer data. - graph:simulate: Run graph what-if simulations. - graph:write: Enqueue or mutate graph build jobs. - offline_access: Request refresh tokens for offline access. - openid: Request OpenID Connect identity tokens. - orch:operate: Execute privileged Orchestrator control actions. - orch:read: Read Orchestrator job state. - policy:author: Author Policy Studio drafts and workspaces. - policy:activate: Activate policy revisions. - policy:approve: Approve or reject policy drafts. - policy:audit: Inspect Policy Studio audit history. - policy:edit: Edit policy definitions. - policy:operate: Operate Policy Studio promotions and runs. - policy:read: Read policy definitions and metadata. - policy:run: Trigger policy executions. - policy:submit: Submit policy drafts for review. - policy:review: Review Policy Studio drafts and leave feedback. - policy:simulate: Execute Policy Studio simulations. - policy:write: Create or update policy drafts. - profile: Access profile claim data. 
- signals:admin: Administer Signals ingestion and routing settings. - signals:read: Read Signals events and state. - signals:write: Publish Signals events or mutate state. - stellaops.bypass: Bypass trust boundary protections (restricted identities only). - ui.read: Read Console UX resources. - vex:ingest: Submit VEX ingestion payloads. - vex:read: Read VEX ingestion data. - vuln:read: Read vulnerability permalinks and overlays. - authorizationCode: - authorizationUrl: /authorize - tokenUrl: /token - refreshUrl: /token - scopes: - advisory:ingest: Submit advisory ingestion payloads. - advisory:read: Read advisory ingestion data. - aoc:verify: Execute Aggregation-Only Contract verification workflows. - authority.audit.read: Read Authority audit logs. - authority.clients.manage: Manage Authority client registrations. - authority.users.manage: Manage Authority users. - authority:tenants.read: Read the Authority tenant catalog. - concelier.jobs.trigger: Trigger Concelier aggregation jobs. - concelier.merge: Manage Concelier merge operations. - effective:write: Write effective findings (Policy Engine service identity only). - email: Access email claim data. - exceptions:approve: Approve exception workflows. - findings:read: Read effective findings emitted by Policy Engine. - graph:export: Export graph artefacts. - graph:read: Read graph explorer data. - graph:simulate: Run graph what-if simulations. - graph:write: Enqueue or mutate graph build jobs. - offline_access: Request refresh tokens for offline access. - openid: Request OpenID Connect identity tokens. - orch:operate: Execute privileged Orchestrator control actions. - orch:read: Read Orchestrator job state. - policy:author: Author Policy Studio drafts and workspaces. - policy:activate: Activate policy revisions. - policy:approve: Approve or reject policy drafts. - policy:audit: Inspect Policy Studio audit history. - policy:edit: Edit policy definitions. - policy:operate: Operate Policy Studio promotions and runs. 
- policy:read: Read policy definitions and metadata. - policy:run: Trigger policy executions. - policy:submit: Submit policy drafts for review. - policy:review: Review Policy Studio drafts and leave feedback. - policy:simulate: Execute Policy Studio simulations. - policy:write: Create or update policy drafts. - profile: Access profile claim data. - signals:admin: Administer Signals ingestion and routing settings. - signals:read: Read Signals events and state. - signals:write: Publish Signals events or mutate state. - stellaops.bypass: Bypass trust boundary protections (restricted identities only). - ui.read: Read Console UX resources. - vex:ingest: Submit VEX ingestion payloads. - vex:read: Read VEX ingestion data. - vuln:read: Read vulnerability permalinks and overlays. - OAuthClientCredentials: - type: oauth2 - description: Client credential exchange for machine-to-machine identities. - flows: - clientCredentials: - tokenUrl: /token - scopes: - advisory:ingest: Submit advisory ingestion payloads. - advisory:read: Read advisory ingestion data. - aoc:verify: Execute Aggregation-Only Contract verification workflows. - authority.audit.read: Read Authority audit logs. - authority.clients.manage: Manage Authority client registrations. - authority.users.manage: Manage Authority users. - authority:tenants.read: Read the Authority tenant catalog. - concelier.jobs.trigger: Trigger Concelier aggregation jobs. - concelier.merge: Manage Concelier merge operations. - effective:write: Write effective findings (Policy Engine service identity only). - email: Access email claim data. - exceptions:approve: Approve exception workflows. - findings:read: Read effective findings emitted by Policy Engine. - graph:export: Export graph artefacts. - graph:read: Read graph explorer data. - graph:simulate: Run graph what-if simulations. - graph:write: Enqueue or mutate graph build jobs. - offline_access: Request refresh tokens for offline access. 
- openid: Request OpenID Connect identity tokens. - orch:operate: Execute privileged Orchestrator control actions. - orch:read: Read Orchestrator job state. - policy:author: Author Policy Studio drafts and workspaces. - policy:activate: Activate policy revisions. - policy:approve: Approve or reject policy drafts. - policy:audit: Inspect Policy Studio audit history. - policy:edit: Edit policy definitions. - policy:operate: Operate Policy Studio promotions and runs. - policy:read: Read policy definitions and metadata. - policy:run: Trigger policy executions. - policy:submit: Submit policy drafts for review. - policy:review: Review Policy Studio drafts and leave feedback. - policy:simulate: Execute Policy Studio simulations. - policy:write: Create or update policy drafts. - profile: Access profile claim data. - signals:admin: Administer Signals ingestion and routing settings. - signals:read: Read Signals events and state. - signals:write: Publish Signals events or mutate state. - stellaops.bypass: Bypass trust boundary protections (restricted identities only). - ui.read: Read Console UX resources. - vex:ingest: Submit VEX ingestion payloads. - vex:read: Read VEX ingestion data. - vuln:read: Read vulnerability permalinks and overlays. - schemas: - TokenResponse: - type: object - description: OAuth 2.1 bearer token response. - properties: - access_token: - type: string - description: Access token encoded as JWT. - token_type: - type: string - description: Token type indicator. Always `Bearer`. - expires_in: - type: integer - description: Lifetime of the access token, in seconds. - minimum: 1 - refresh_token: - type: string - description: Refresh token issued when the grant allows offline access. - scope: - type: string - description: Space-delimited scopes granted in the response. - id_token: - type: string - description: ID token issued for authorization-code flows. 
- required: - - access_token - - token_type - - expires_in - OAuthErrorResponse: - type: object - description: RFC 6749 compliant error envelope. - properties: - error: - type: string - description: Machine-readable error code. - error_description: - type: string - description: Human-readable error description. - error_uri: - type: string - format: uri - description: Link to documentation about the error. - required: - - error - PasswordGrantRequest: - type: object - required: - - grant_type - - client_id - - username - - password - properties: - grant_type: - type: string - const: password - client_id: - type: string - description: Registered client identifier. May also be supplied via HTTP Basic auth. - client_secret: - type: string - description: Client secret. Required for confidential clients when not using HTTP Basic auth. - scope: - type: string - description: Space-delimited scopes being requested. - username: - type: string - description: Resource owner username. - password: - type: string - description: Resource owner password. - authority_provider: - type: string - description: Optional identity provider hint. Required when multiple password-capable providers are registered. - description: Form-encoded payload for password grant exchange. - ClientCredentialsGrantRequest: - type: object - required: - - grant_type - - client_id - properties: - grant_type: - type: string - const: client_credentials - client_id: - type: string - description: Registered client identifier. May also be supplied via HTTP Basic auth. - client_secret: - type: string - description: Client secret. Required for confidential clients when not using HTTP Basic auth. - scope: - type: string - description: Space-delimited scopes being requested. - authority_provider: - type: string - description: Optional identity provider hint for plugin-backed clients. - operator_reason: - type: string - description: Required when requesting `orch:operate`; explains the operator action. 
- maxLength: 256 - operator_ticket: - type: string - description: Required when requesting `orch:operate`; tracks the external change ticket or incident. - maxLength: 128 - description: Form-encoded payload for client credentials exchange. - RefreshTokenGrantRequest: - type: object - required: - - grant_type - - refresh_token - properties: - grant_type: - type: string - const: refresh_token - client_id: - type: string - description: Registered client identifier. May also be supplied via HTTP Basic auth. - client_secret: - type: string - description: Client secret. Required for confidential clients when not using HTTP Basic auth. - refresh_token: - type: string - description: Previously issued refresh token. - scope: - type: string - description: Optional scope list to narrow the requested access. - description: Form-encoded payload for refresh token exchange. - RevocationRequest: - type: object - required: - - token - properties: - token: - type: string - description: Token value or token identifier to revoke. - token_type_hint: - type: string - description: Optional token type hint (`access_token` or `refresh_token`). - description: Form-encoded payload for token revocation. - IntrospectionRequest: - type: object - required: - - token - properties: - token: - type: string - description: Token value whose state should be introspected. - token_type_hint: - type: string - description: Optional token type hint (`access_token` or `refresh_token`). - description: Form-encoded payload for token introspection. - IntrospectionResponse: - type: object - description: Active token descriptor compliant with RFC 7662. - properties: - active: - type: boolean - description: Indicates whether the token is currently active. - scope: - type: string - description: Space-delimited list of scopes granted to the token. - client_id: - type: string - description: Client identifier associated with the token. 
- sub: - type: string - description: Subject identifier when the token represents an end-user. - username: - type: string - description: Preferred username associated with the subject. - token_type: - type: string - description: Type of the token (e.g., `Bearer`). - exp: - type: integer - description: Expiration timestamp (seconds since UNIX epoch). - iat: - type: integer - description: Issued-at timestamp (seconds since UNIX epoch). - nbf: - type: integer - description: Not-before timestamp (seconds since UNIX epoch). - aud: - type: array - description: Audience values associated with the token. - items: - type: string - iss: - type: string - description: Issuer identifier. - jti: - type: string - description: JWT identifier corresponding to the token. - tenant: - type: string - description: Tenant associated with the token, when assigned. - confirmation: - type: object - description: Sender-constrained confirmation data (e.g., mTLS thumbprint, DPoP JWK thumbprint). - required: - - active - JwksDocument: - type: object - description: JSON Web Key Set published by the Authority. - properties: - keys: - type: array - items: - $ref: '#/components/schemas/Jwk' - required: - - keys - Jwk: - type: object - description: Public key material for token signature validation. - properties: - kid: - type: string - description: Key identifier. - kty: - type: string - description: Key type (e.g., `EC`, `RSA`). - use: - type: string - description: Intended key use (`sig`). - alg: - type: string - description: Signing algorithm (e.g., `ES384`). - crv: - type: string - description: Elliptic curve identifier when applicable. - x: - type: string - description: X coordinate for EC keys. - y: - type: string - description: Y coordinate for EC keys. - status: - type: string - description: Operational status metadata for the key (e.g., `active`, `retiring`). 
-paths: - /token: - post: - tags: - - Authentication - summary: Exchange credentials for tokens - description: | - Issues OAuth 2.1 bearer tokens for StellaOps clients. Supports password, client credentials, - authorization-code, device, and refresh token grants. Confidential clients must authenticate using - HTTP Basic auth or `client_secret` form fields. - security: - - ClientSecretBasic: [] - - {} - requestBody: - required: true - content: - application/x-www-form-urlencoded: - schema: - oneOf: - - $ref: '#/components/schemas/PasswordGrantRequest' - - $ref: '#/components/schemas/ClientCredentialsGrantRequest' - - $ref: '#/components/schemas/RefreshTokenGrantRequest' - encoding: - authority_provider: - style: form - explode: false - examples: - passwordGrant: - summary: Password grant for tenant-scoped ingestion bot - value: - grant_type: password - client_id: ingest-cli - client_secret: s3cr3t - username: ingest-bot - password: pa55w0rd! - scope: advisory:ingest vex:ingest - authority_provider: primary-directory - authorizationCode: - summary: Authorization code exchange for Console UI session - value: - grant_type: authorization_code - client_id: console-ui - code: 2Lba1WtwPLfZ2b0Z9uPrsQ - redirect_uri: https://console.stellaops.local/auth/callback - code_verifier: g3ZnL91QJ6i4zO_86oI4CDnZ7gS0bSeK - clientCredentials: - summary: Client credentials exchange for Policy Engine - value: - grant_type: client_credentials - client_id: policy-engine - client_secret: 9c39f602-2f2b-4f29 - scope: effective:write findings:read - operator_reason: Deploying policy change 1234 - operator_ticket: CHG-004211 - refreshToken: - summary: Refresh token rotation for console session - value: - grant_type: refresh_token - client_id: console-ui - refresh_token: 0.rg9pVlsGzXE8Q - responses: - '200': - description: Token exchange succeeded. 
- content: - application/json: - schema: - $ref: '#/components/schemas/TokenResponse' - examples: - passwordGrant: - summary: Password grant success response - value: - access_token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... - token_type: Bearer - expires_in: 3600 - refresh_token: OxGdVtZJ-mk49cFd38uRUw - scope: advisory:ingest vex:ingest - clientCredentials: - summary: Client credentials success response - value: - access_token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... - token_type: Bearer - expires_in: 900 - scope: effective:write findings:read - authorizationCode: - summary: Authorization code success response - value: - access_token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... - token_type: Bearer - expires_in: 900 - refresh_token: VxKpc9Vj9QjYV6gLrhQHTw - scope: ui.read authority:tenants.read - id_token: eyJhbGciOiJFUzM4NCIsImtpZCI6ImNvbnNvbGUifQ... - '400': - description: Malformed request, unsupported grant type, or invalid credentials. - content: - application/json: - schema: - $ref: '#/components/schemas/OAuthErrorResponse' - examples: - invalidProvider: - summary: Unknown identity provider hint - value: - error: invalid_request - error_description: "Unknown identity provider 'legacy-directory'." - invalidScope: - summary: Scope not permitted for client - value: - error: invalid_scope - error_description: Scope 'effective:write' is not permitted for this client. - '401': - description: Client authentication failed. - content: - application/json: - schema: - $ref: '#/components/schemas/OAuthErrorResponse' - examples: - badClientSecret: - summary: Invalid client secret - value: - error: invalid_client - error_description: Client authentication failed. 
- /revoke: - post: - tags: - - Authentication - summary: Revoke an access or refresh token - security: - - ClientSecretBasic: [] - requestBody: - required: true - content: - application/x-www-form-urlencoded: - schema: - $ref: '#/components/schemas/RevocationRequest' - examples: - revokeRefreshToken: - summary: Revoke refresh token after logout - value: - token: 0.rg9pVlsGzXE8Q - token_type_hint: refresh_token - responses: - '200': - description: Token revoked or already invalid. The response body is intentionally blank. - '400': - description: Malformed request. - content: - application/json: - schema: - $ref: '#/components/schemas/OAuthErrorResponse' - examples: - missingToken: - summary: Token parameter omitted - value: - error: invalid_request - error_description: The revocation request is missing the token parameter. - '401': - description: Client authentication failed. - content: - application/json: - schema: - $ref: '#/components/schemas/OAuthErrorResponse' - examples: - badClientSecret: - summary: Invalid client credentials - value: - error: invalid_client - error_description: Client authentication failed. - /introspect: - post: - tags: - - Authentication - summary: Introspect token state - description: Returns the active status and claims for a given token. Requires a privileged client. - security: - - ClientSecretBasic: [] - requestBody: - required: true - content: - application/x-www-form-urlencoded: - schema: - $ref: '#/components/schemas/IntrospectionRequest' - examples: - introspectToken: - summary: Validate an access token issued to Orchestrator - value: - token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... - token_type_hint: access_token - responses: - '200': - description: Token state evaluated. 
- content: - application/json: - schema: - $ref: '#/components/schemas/IntrospectionResponse' - examples: - activeToken: - summary: Active token response - value: - active: true - scope: orch:operate orch:read - client_id: orch-control - sub: operator-7f12 - username: ops.engineer@tenant.example - token_type: Bearer - exp: 1761628800 - iat: 1761625200 - nbf: 1761625200 - iss: https://authority.stellaops.local - aud: - - https://orch.stellaops.local - jti: 01J8KYRAMG7FWBPRRV5XG20T7S - tenant: tenant-alpha - confirmation: - mtls_thumbprint: 079871b8c9a0f2e6 - inactiveToken: - summary: Revoked token response - value: - active: false - '400': - description: Malformed request. - content: - application/json: - schema: - $ref: '#/components/schemas/OAuthErrorResponse' - examples: - missingToken: - summary: Token missing - value: - error: invalid_request - error_description: token parameter is required. - '401': - description: Client authentication failed or client lacks introspection permission. - content: - application/json: - schema: - $ref: '#/components/schemas/OAuthErrorResponse' - examples: - unauthorizedClient: - summary: Client not allowed to introspect tokens - value: - error: invalid_client - error_description: Client authentication failed. - /jwks: - get: - tags: - - Keys - summary: Retrieve signing keys - description: Returns the JSON Web Key Set used to validate Authority-issued tokens. - responses: - '200': - description: JWKS document. - headers: - Cache-Control: - schema: - type: string - description: Standard caching headers apply; keys rotate infrequently. 
- content: - application/json: - schema: - $ref: '#/components/schemas/JwksDocument' - examples: - ecKeySet: - summary: EC signing keys - value: - keys: - - kid: auth-tokens-es384-202510 - kty: EC - use: sig - alg: ES384 - crv: P-384 - x: 7UchU5R77LtChrJx6uWg9mYjFvV6RIpSgZPDIj7d1q0 - y: v98nHe8a7mGZ9Fn1t4Jp9PTJv1ma35QPmhUrE4pH7H0 - status: active - - kid: auth-tokens-es384-202409 - kty: EC - use: sig - alg: ES384 - crv: P-384 - x: hjdKc0r8jvVHJ7S9mP0y0mU9bqN7v5PxS21SwclTzfc - y: yk6J3pz4TUpymN4mG-6th3dYvJ5N1lQvDK0PLuFv3Pg - status: retiring \ No newline at end of file +openapi: 3.1.0 +info: + title: StellaOps Authority Authentication API + summary: Token issuance, introspection, revocation, and key discovery endpoints exposed by the Authority service. + description: | + The Authority service issues OAuth 2.1 access tokens for StellaOps components, enforcing tenant and scope + restrictions configured per client. This specification describes the authentication surface only; domain APIs + are documented by their owning services. + version: 0.1.0 +jsonSchemaDialect: https://json-schema.org/draft/2020-12/schema +servers: + - url: https://authority.stellaops.local + description: Example Authority deployment +tags: + - name: Authentication + description: OAuth 2.1 token exchange, introspection, and revocation flows. + - name: Keys + description: JSON Web Key Set discovery. +components: + securitySchemes: + ClientSecretBasic: + type: http + scheme: basic + description: HTTP Basic authentication with `client_id` and `client_secret`. + OAuthPassword: + type: oauth2 + description: Resource owner password exchange for Authority-managed identities. + flows: + password: + tokenUrl: /token + refreshUrl: /token + scopes: + advisory:ingest: Submit advisory ingestion payloads. + advisory:read: Read advisory ingestion data. + aoc:verify: Execute Aggregation-Only Contract verification workflows. + authority.audit.read: Read Authority audit logs. 
+ authority.clients.manage: Manage Authority client registrations. + authority.users.manage: Manage Authority users. + authority:tenants.read: Read the Authority tenant catalog. + concelier.jobs.trigger: Trigger Concelier aggregation jobs. + concelier.merge: Manage Concelier merge operations. + effective:write: Write effective findings (Policy Engine service identity only). + email: Access email claim data. + exceptions:approve: Approve exception workflows. + findings:read: Read effective findings emitted by Policy Engine. + graph:export: Export graph artefacts. + graph:read: Read graph explorer data. + graph:simulate: Run graph what-if simulations. + graph:write: Enqueue or mutate graph build jobs. + offline_access: Request refresh tokens for offline access. + openid: Request OpenID Connect identity tokens. + orch:operate: Execute privileged Orchestrator control actions. + orch:read: Read Orchestrator job state. + packs.read: Discover Task Packs and download manifests. + packs.write: Publish or update Task Packs in the registry. + packs.run: Execute Task Packs via CLI or Task Runner. + packs.approve: Approve Task Pack gates and resume runs. + policy:author: Author Policy Studio drafts and workspaces. + policy:activate: Activate policy revisions. + policy:approve: Approve or reject policy drafts. + policy:audit: Inspect Policy Studio audit history. + policy:edit: Edit policy definitions. + policy:operate: Operate Policy Studio promotions and runs. + policy:read: Read policy definitions and metadata. + policy:run: Trigger policy executions. + policy:submit: Submit policy drafts for review. + policy:review: Review Policy Studio drafts and leave feedback. + policy:simulate: Execute Policy Studio simulations. + policy:write: Create or update policy drafts. + profile: Access profile claim data. + signals:admin: Administer Signals ingestion and routing settings. + signals:read: Read Signals events and state. + signals:write: Publish Signals events or mutate state. 
+ stellaops.bypass: Bypass trust boundary protections (restricted identities only). + ui.read: Read Console UX resources. + vex:ingest: Submit VEX ingestion payloads. + vex:read: Read VEX ingestion data. + vuln:read: Read vulnerability permalinks and overlays. + authorizationCode: + authorizationUrl: /authorize + tokenUrl: /token + refreshUrl: /token + scopes: + advisory:ingest: Submit advisory ingestion payloads. + advisory:read: Read advisory ingestion data. + aoc:verify: Execute Aggregation-Only Contract verification workflows. + authority.audit.read: Read Authority audit logs. + authority.clients.manage: Manage Authority client registrations. + authority.users.manage: Manage Authority users. + authority:tenants.read: Read the Authority tenant catalog. + concelier.jobs.trigger: Trigger Concelier aggregation jobs. + concelier.merge: Manage Concelier merge operations. + effective:write: Write effective findings (Policy Engine service identity only). + email: Access email claim data. + exceptions:approve: Approve exception workflows. + findings:read: Read effective findings emitted by Policy Engine. + graph:export: Export graph artefacts. + graph:read: Read graph explorer data. + graph:simulate: Run graph what-if simulations. + graph:write: Enqueue or mutate graph build jobs. + offline_access: Request refresh tokens for offline access. + openid: Request OpenID Connect identity tokens. + orch:operate: Execute privileged Orchestrator control actions. + orch:read: Read Orchestrator job state. + packs.read: Discover Task Packs and download manifests. + packs.write: Publish or update Task Packs in the registry. + packs.run: Execute Task Packs via CLI or Task Runner. + packs.approve: Approve Task Pack gates and resume runs. + policy:author: Author Policy Studio drafts and workspaces. + policy:activate: Activate policy revisions. + policy:approve: Approve or reject policy drafts. + policy:audit: Inspect Policy Studio audit history. + policy:edit: Edit policy definitions. 
+ policy:operate: Operate Policy Studio promotions and runs. + policy:read: Read policy definitions and metadata. + policy:run: Trigger policy executions. + policy:submit: Submit policy drafts for review. + policy:review: Review Policy Studio drafts and leave feedback. + policy:simulate: Execute Policy Studio simulations. + policy:write: Create or update policy drafts. + profile: Access profile claim data. + signals:admin: Administer Signals ingestion and routing settings. + signals:read: Read Signals events and state. + signals:write: Publish Signals events or mutate state. + stellaops.bypass: Bypass trust boundary protections (restricted identities only). + ui.read: Read Console UX resources. + vex:ingest: Submit VEX ingestion payloads. + vex:read: Read VEX ingestion data. + vuln:read: Read vulnerability permalinks and overlays. + OAuthClientCredentials: + type: oauth2 + description: Client credential exchange for machine-to-machine identities. + flows: + clientCredentials: + tokenUrl: /token + scopes: + advisory:ingest: Submit advisory ingestion payloads. + advisory:read: Read advisory ingestion data. + aoc:verify: Execute Aggregation-Only Contract verification workflows. + authority.audit.read: Read Authority audit logs. + authority.clients.manage: Manage Authority client registrations. + authority.users.manage: Manage Authority users. + authority:tenants.read: Read the Authority tenant catalog. + concelier.jobs.trigger: Trigger Concelier aggregation jobs. + concelier.merge: Manage Concelier merge operations. + effective:write: Write effective findings (Policy Engine service identity only). + email: Access email claim data. + exceptions:approve: Approve exception workflows. + findings:read: Read effective findings emitted by Policy Engine. + graph:export: Export graph artefacts. + graph:read: Read graph explorer data. + graph:simulate: Run graph what-if simulations. + graph:write: Enqueue or mutate graph build jobs. 
+ offline_access: Request refresh tokens for offline access. + openid: Request OpenID Connect identity tokens. + orch:operate: Execute privileged Orchestrator control actions. + orch:read: Read Orchestrator job state. + packs.read: Discover Task Packs and download manifests. + packs.write: Publish or update Task Packs in the registry. + packs.run: Execute Task Packs via CLI or Task Runner. + packs.approve: Approve Task Pack gates and resume runs. + policy:author: Author Policy Studio drafts and workspaces. + policy:activate: Activate policy revisions. + policy:approve: Approve or reject policy drafts. + policy:audit: Inspect Policy Studio audit history. + policy:edit: Edit policy definitions. + policy:operate: Operate Policy Studio promotions and runs. + policy:read: Read policy definitions and metadata. + policy:run: Trigger policy executions. + policy:submit: Submit policy drafts for review. + policy:review: Review Policy Studio drafts and leave feedback. + policy:simulate: Execute Policy Studio simulations. + policy:write: Create or update policy drafts. + profile: Access profile claim data. + signals:admin: Administer Signals ingestion and routing settings. + signals:read: Read Signals events and state. + signals:write: Publish Signals events or mutate state. + stellaops.bypass: Bypass trust boundary protections (restricted identities only). + ui.read: Read Console UX resources. + vex:ingest: Submit VEX ingestion payloads. + vex:read: Read VEX ingestion data. + vuln:read: Read vulnerability permalinks and overlays. + schemas: + TokenResponse: + type: object + description: OAuth 2.1 bearer token response. + properties: + access_token: + type: string + description: Access token encoded as JWT. + token_type: + type: string + description: Token type indicator. Always `Bearer`. + expires_in: + type: integer + description: Lifetime of the access token, in seconds. 
+ minimum: 1 + refresh_token: + type: string + description: Refresh token issued when the grant allows offline access. + scope: + type: string + description: Space-delimited scopes granted in the response. + id_token: + type: string + description: ID token issued for authorization-code flows. + required: + - access_token + - token_type + - expires_in + OAuthErrorResponse: + type: object + description: RFC 6749 compliant error envelope. + properties: + error: + type: string + description: Machine-readable error code. + error_description: + type: string + description: Human-readable error description. + error_uri: + type: string + format: uri + description: Link to documentation about the error. + required: + - error + PasswordGrantRequest: + type: object + required: + - grant_type + - client_id + - username + - password + properties: + grant_type: + type: string + const: password + client_id: + type: string + description: Registered client identifier. May also be supplied via HTTP Basic auth. + client_secret: + type: string + description: Client secret. Required for confidential clients when not using HTTP Basic auth. + scope: + type: string + description: Space-delimited scopes being requested. + username: + type: string + description: Resource owner username. + password: + type: string + description: Resource owner password. + authority_provider: + type: string + description: Optional identity provider hint. Required when multiple password-capable providers are registered. + description: Form-encoded payload for password grant exchange. + ClientCredentialsGrantRequest: + type: object + required: + - grant_type + - client_id + properties: + grant_type: + type: string + const: client_credentials + client_id: + type: string + description: Registered client identifier. May also be supplied via HTTP Basic auth. + client_secret: + type: string + description: Client secret. Required for confidential clients when not using HTTP Basic auth. 
+ scope: + type: string + description: Space-delimited scopes being requested. + authority_provider: + type: string + description: Optional identity provider hint for plugin-backed clients. + operator_reason: + type: string + description: Required when requesting `orch:operate`; explains the operator action. + maxLength: 256 + operator_ticket: + type: string + description: Required when requesting `orch:operate`; tracks the external change ticket or incident. + maxLength: 128 + description: Form-encoded payload for client credentials exchange. + RefreshTokenGrantRequest: + type: object + required: + - grant_type + - refresh_token + properties: + grant_type: + type: string + const: refresh_token + client_id: + type: string + description: Registered client identifier. May also be supplied via HTTP Basic auth. + client_secret: + type: string + description: Client secret. Required for confidential clients when not using HTTP Basic auth. + refresh_token: + type: string + description: Previously issued refresh token. + scope: + type: string + description: Optional scope list to narrow the requested access. + description: Form-encoded payload for refresh token exchange. + RevocationRequest: + type: object + required: + - token + properties: + token: + type: string + description: Token value or token identifier to revoke. + token_type_hint: + type: string + description: Optional token type hint (`access_token` or `refresh_token`). + description: Form-encoded payload for token revocation. + IntrospectionRequest: + type: object + required: + - token + properties: + token: + type: string + description: Token value whose state should be introspected. + token_type_hint: + type: string + description: Optional token type hint (`access_token` or `refresh_token`). + description: Form-encoded payload for token introspection. + IntrospectionResponse: + type: object + description: Active token descriptor compliant with RFC 7662. 
+ properties: + active: + type: boolean + description: Indicates whether the token is currently active. + scope: + type: string + description: Space-delimited list of scopes granted to the token. + client_id: + type: string + description: Client identifier associated with the token. + sub: + type: string + description: Subject identifier when the token represents an end-user. + username: + type: string + description: Preferred username associated with the subject. + token_type: + type: string + description: Type of the token (e.g., `Bearer`). + exp: + type: integer + description: Expiration timestamp (seconds since UNIX epoch). + iat: + type: integer + description: Issued-at timestamp (seconds since UNIX epoch). + nbf: + type: integer + description: Not-before timestamp (seconds since UNIX epoch). + aud: + type: array + description: Audience values associated with the token. + items: + type: string + iss: + type: string + description: Issuer identifier. + jti: + type: string + description: JWT identifier corresponding to the token. + tenant: + type: string + description: Tenant associated with the token, when assigned. + confirmation: + type: object + description: Sender-constrained confirmation data (e.g., mTLS thumbprint, DPoP JWK thumbprint). + required: + - active + JwksDocument: + type: object + description: JSON Web Key Set published by the Authority. + properties: + keys: + type: array + items: + $ref: '#/components/schemas/Jwk' + required: + - keys + Jwk: + type: object + description: Public key material for token signature validation. + properties: + kid: + type: string + description: Key identifier. + kty: + type: string + description: Key type (e.g., `EC`, `RSA`). + use: + type: string + description: Intended key use (`sig`). + alg: + type: string + description: Signing algorithm (e.g., `ES384`). + crv: + type: string + description: Elliptic curve identifier when applicable. + x: + type: string + description: X coordinate for EC keys. 
+ y: + type: string + description: Y coordinate for EC keys. + status: + type: string + description: Operational status metadata for the key (e.g., `active`, `retiring`). +paths: + /token: + post: + tags: + - Authentication + summary: Exchange credentials for tokens + description: | + Issues OAuth 2.1 bearer tokens for StellaOps clients. Supports password, client credentials, + authorization-code, device, and refresh token grants. Confidential clients must authenticate using + HTTP Basic auth or `client_secret` form fields. + security: + - ClientSecretBasic: [] + - {} + requestBody: + required: true + content: + application/x-www-form-urlencoded: + schema: + oneOf: + - $ref: '#/components/schemas/PasswordGrantRequest' + - $ref: '#/components/schemas/ClientCredentialsGrantRequest' + - $ref: '#/components/schemas/RefreshTokenGrantRequest' + encoding: + authority_provider: + style: form + explode: false + examples: + passwordGrant: + summary: Password grant for tenant-scoped ingestion bot + value: + grant_type: password + client_id: ingest-cli + client_secret: s3cr3t + username: ingest-bot + password: pa55w0rd! 
+ scope: advisory:ingest vex:ingest + authority_provider: primary-directory + authorizationCode: + summary: Authorization code exchange for Console UI session + value: + grant_type: authorization_code + client_id: console-ui + code: 2Lba1WtwPLfZ2b0Z9uPrsQ + redirect_uri: https://console.stellaops.local/auth/callback + code_verifier: g3ZnL91QJ6i4zO_86oI4CDnZ7gS0bSeK + clientCredentials: + summary: Client credentials exchange for Policy Engine + value: + grant_type: client_credentials + client_id: policy-engine + client_secret: 9c39f602-2f2b-4f29 + scope: effective:write findings:read + operator_reason: Deploying policy change 1234 + operator_ticket: CHG-004211 + refreshToken: + summary: Refresh token rotation for console session + value: + grant_type: refresh_token + client_id: console-ui + refresh_token: 0.rg9pVlsGzXE8Q + responses: + '200': + description: Token exchange succeeded. + content: + application/json: + schema: + $ref: '#/components/schemas/TokenResponse' + examples: + passwordGrant: + summary: Password grant success response + value: + access_token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... + token_type: Bearer + expires_in: 3600 + refresh_token: OxGdVtZJ-mk49cFd38uRUw + scope: advisory:ingest vex:ingest + clientCredentials: + summary: Client credentials success response + value: + access_token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... + token_type: Bearer + expires_in: 900 + scope: effective:write findings:read + authorizationCode: + summary: Authorization code success response + value: + access_token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... + token_type: Bearer + expires_in: 900 + refresh_token: VxKpc9Vj9QjYV6gLrhQHTw + scope: ui.read authority:tenants.read + id_token: eyJhbGciOiJFUzM4NCIsImtpZCI6ImNvbnNvbGUifQ... + '400': + description: Malformed request, unsupported grant type, or invalid credentials. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + examples: + invalidProvider: + summary: Unknown identity provider hint + value: + error: invalid_request + error_description: "Unknown identity provider 'legacy-directory'." + invalidScope: + summary: Scope not permitted for client + value: + error: invalid_scope + error_description: Scope 'effective:write' is not permitted for this client. + '401': + description: Client authentication failed. + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + examples: + badClientSecret: + summary: Invalid client secret + value: + error: invalid_client + error_description: Client authentication failed. + /revoke: + post: + tags: + - Authentication + summary: Revoke an access or refresh token + security: + - ClientSecretBasic: [] + requestBody: + required: true + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/RevocationRequest' + examples: + revokeRefreshToken: + summary: Revoke refresh token after logout + value: + token: 0.rg9pVlsGzXE8Q + token_type_hint: refresh_token + responses: + '200': + description: Token revoked or already invalid. The response body is intentionally blank. + '400': + description: Malformed request. + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + examples: + missingToken: + summary: Token parameter omitted + value: + error: invalid_request + error_description: The revocation request is missing the token parameter. + '401': + description: Client authentication failed. + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + examples: + badClientSecret: + summary: Invalid client credentials + value: + error: invalid_client + error_description: Client authentication failed. 
+ /introspect: + post: + tags: + - Authentication + summary: Introspect token state + description: Returns the active status and claims for a given token. Requires a privileged client. + security: + - ClientSecretBasic: [] + requestBody: + required: true + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/IntrospectionRequest' + examples: + introspectToken: + summary: Validate an access token issued to Orchestrator + value: + token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... + token_type_hint: access_token + responses: + '200': + description: Token state evaluated. + content: + application/json: + schema: + $ref: '#/components/schemas/IntrospectionResponse' + examples: + activeToken: + summary: Active token response + value: + active: true + scope: orch:operate orch:read + client_id: orch-control + sub: operator-7f12 + username: ops.engineer@tenant.example + token_type: Bearer + exp: 1761628800 + iat: 1761625200 + nbf: 1761625200 + iss: https://authority.stellaops.local + aud: + - https://orch.stellaops.local + jti: 01J8KYRAMG7FWBPRRV5XG20T7S + tenant: tenant-alpha + confirmation: + mtls_thumbprint: 079871b8c9a0f2e6 + inactiveToken: + summary: Revoked token response + value: + active: false + '400': + description: Malformed request. + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + examples: + missingToken: + summary: Token missing + value: + error: invalid_request + error_description: token parameter is required. + '401': + description: Client authentication failed or client lacks introspection permission. + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + examples: + unauthorizedClient: + summary: Client not allowed to introspect tokens + value: + error: invalid_client + error_description: Client authentication failed. 
+ + /oauth/token: + post: + tags: + - Authentication + summary: "[Deprecated] Exchange credentials for tokens" + description: | + Legacy alias for `/token`. Responses include `Deprecation`, `Sunset`, `Warning`, and `Link` + headers to advertise the removal timeline. Migrate clients to `/token` before the + announced sunset date (2026-05-01). + deprecated: true + security: + - ClientSecretBasic: [] + - {} + requestBody: + $ref: '#/paths/~1token/post/requestBody' + responses: + '200': + description: Token exchange succeeded (legacy alias of `/token`). + headers: + Deprecation: + description: RFC 7231 HTTP-date signaling when the endpoint was deprecated. + schema: + type: string + Sunset: + description: RFC 7231 HTTP-date signaling the planned removal of this endpoint. + schema: + type: string + Link: + description: Sunset documentation link (`rel="sunset"`). + schema: + type: string + Warning: + description: RFC 7234 Warning header describing the deprecation notice. + schema: + type: string + content: + application/json: + schema: + $ref: '#/components/schemas/TokenResponse' + '400': + description: Malformed request, unsupported grant type, or invalid credentials. + headers: + Deprecation: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Deprecation' + Sunset: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Sunset' + Link: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Link' + Warning: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Warning' + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + '401': + description: Client authentication failed. 
+ headers: + Deprecation: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Deprecation' + Sunset: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Sunset' + Link: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Link' + Warning: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Warning' + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + /oauth/revoke: + post: + tags: + - Authentication + summary: "[Deprecated] Revoke an access or refresh token" + description: | + Legacy alias for `/revoke`. Deprecated; clients should call `/revoke` directly. Deprecation headers + mirror those emitted by the runtime middleware. + deprecated: true + security: + - ClientSecretBasic: [] + requestBody: + $ref: '#/paths/~1revoke/post/requestBody' + responses: + '200': + description: Token revoked or already invalid (legacy alias of `/revoke`). + headers: + Deprecation: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Deprecation' + Sunset: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Sunset' + Link: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Link' + Warning: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Warning' + '400': + description: Malformed request. + headers: + Deprecation: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Deprecation' + Sunset: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Sunset' + Link: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Link' + Warning: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Warning' + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + '401': + description: Client authentication failed. 
+ headers: + Deprecation: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Deprecation' + Sunset: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Sunset' + Link: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Link' + Warning: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Warning' + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + /oauth/introspect: + post: + tags: + - Authentication + summary: "[Deprecated] Introspect token state" + description: | + Legacy alias for `/introspect`. Deprecated; clients must migrate to `/introspect`. Deprecation headers + highlight the removal schedule. + deprecated: true + security: + - ClientSecretBasic: [] + requestBody: + $ref: '#/paths/~1introspect/post/requestBody' + responses: + '200': + description: Token state evaluated (legacy alias of `/introspect`). + headers: + Deprecation: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Deprecation' + Sunset: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Sunset' + Link: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Link' + Warning: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Warning' + content: + application/json: + schema: + $ref: '#/components/schemas/IntrospectionResponse' + '400': + description: Malformed request. + headers: + Deprecation: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Deprecation' + Sunset: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Sunset' + Link: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Link' + Warning: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Warning' + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + '401': + description: Client authentication failed or client lacks introspection permission. 
+ headers: + Deprecation: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Deprecation' + Sunset: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Sunset' + Link: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Link' + Warning: + $ref: '#/paths/~1oauth~1token/post/responses/200/headers/Warning' + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' /jwks: + get: + tags: + - Keys + summary: Retrieve signing keys + description: Returns the JSON Web Key Set used to validate Authority-issued tokens. + responses: + '200': + description: JWKS document. + headers: + Cache-Control: + schema: + type: string + description: Standard caching headers apply; keys rotate infrequently. + content: + application/json: + schema: + $ref: '#/components/schemas/JwksDocument' + examples: + ecKeySet: + summary: EC signing keys + value: + keys: + - kid: auth-tokens-es384-202510 + kty: EC + use: sig + alg: ES384 + crv: P-384 + x: 7UchU5R77LtChrJx6uWg9mYjFvV6RIpSgZPDIj7d1q0 + y: v98nHe8a7mGZ9Fn1t4Jp9PTJv1ma35QPmhUrE4pH7H0 + status: active + - kid: auth-tokens-es384-202409 + kty: EC + use: sig + alg: ES384 + crv: P-384 + x: hjdKc0r8jvVHJ7S9mP0y0mU9bqN7v5PxS21SwclTzfc + y: yk6J3pz4TUpymN4mG-6th3dYvJ5N1lQvDK0PLuFv3Pg + status: retiring diff --git a/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsScopesTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsScopesTests.cs index fd261ff2..a25daa83 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsScopesTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsScopesTests.cs @@ -7,16 +7,22 @@ public class StellaOpsScopesTests { [Theory] [InlineData(StellaOpsScopes.AdvisoryRead)] - [InlineData(StellaOpsScopes.AdvisoryIngest)] + [InlineData(StellaOpsScopes.AdvisoryIngest)] + [InlineData(StellaOpsScopes.AdvisoryAiView)] + 
[InlineData(StellaOpsScopes.AdvisoryAiOperate)] + [InlineData(StellaOpsScopes.AdvisoryAiAdmin)] [InlineData(StellaOpsScopes.VexRead)] [InlineData(StellaOpsScopes.VexIngest)] [InlineData(StellaOpsScopes.AocVerify)] - [InlineData(StellaOpsScopes.SignalsRead)] - [InlineData(StellaOpsScopes.SignalsWrite)] - [InlineData(StellaOpsScopes.SignalsAdmin)] - [InlineData(StellaOpsScopes.PolicyWrite)] - [InlineData(StellaOpsScopes.PolicyAuthor)] - [InlineData(StellaOpsScopes.PolicySubmit)] + [InlineData(StellaOpsScopes.SignalsRead)] + [InlineData(StellaOpsScopes.SignalsWrite)] + [InlineData(StellaOpsScopes.SignalsAdmin)] + [InlineData(StellaOpsScopes.AirgapSeal)] + [InlineData(StellaOpsScopes.AirgapImport)] + [InlineData(StellaOpsScopes.AirgapStatusRead)] + [InlineData(StellaOpsScopes.PolicyWrite)] + [InlineData(StellaOpsScopes.PolicyAuthor)] + [InlineData(StellaOpsScopes.PolicySubmit)] [InlineData(StellaOpsScopes.PolicyApprove)] [InlineData(StellaOpsScopes.PolicyReview)] [InlineData(StellaOpsScopes.PolicyOperate)] @@ -28,27 +34,52 @@ public class StellaOpsScopesTests [InlineData(StellaOpsScopes.GraphRead)] [InlineData(StellaOpsScopes.VulnRead)] [InlineData(StellaOpsScopes.GraphWrite)] - [InlineData(StellaOpsScopes.GraphExport)] - [InlineData(StellaOpsScopes.GraphSimulate)] - [InlineData(StellaOpsScopes.OrchRead)] - [InlineData(StellaOpsScopes.OrchOperate)] - [InlineData(StellaOpsScopes.ExportViewer)] - [InlineData(StellaOpsScopes.ExportOperator)] - [InlineData(StellaOpsScopes.ExportAdmin)] - public void All_IncludesNewScopes(string scope) - { - Assert.Contains(scope, StellaOpsScopes.All); - } - + [InlineData(StellaOpsScopes.GraphExport)] + [InlineData(StellaOpsScopes.GraphSimulate)] + [InlineData(StellaOpsScopes.OrchRead)] + [InlineData(StellaOpsScopes.OrchOperate)] + [InlineData(StellaOpsScopes.OrchBackfill)] + [InlineData(StellaOpsScopes.OrchQuota)] + [InlineData(StellaOpsScopes.ExportViewer)] + [InlineData(StellaOpsScopes.ExportOperator)] + 
[InlineData(StellaOpsScopes.ExportAdmin)] + [InlineData(StellaOpsScopes.NotifyViewer)] + [InlineData(StellaOpsScopes.NotifyOperator)] + [InlineData(StellaOpsScopes.NotifyAdmin)] + [InlineData(StellaOpsScopes.NotifyEscalate)] + [InlineData(StellaOpsScopes.PacksRead)] + [InlineData(StellaOpsScopes.PacksWrite)] + [InlineData(StellaOpsScopes.PacksRun)] + [InlineData(StellaOpsScopes.PacksApprove)] + [InlineData(StellaOpsScopes.ObservabilityRead)] + [InlineData(StellaOpsScopes.TimelineRead)] + [InlineData(StellaOpsScopes.TimelineWrite)] + [InlineData(StellaOpsScopes.EvidenceCreate)] + [InlineData(StellaOpsScopes.EvidenceRead)] + [InlineData(StellaOpsScopes.EvidenceHold)] + [InlineData(StellaOpsScopes.AttestRead)] + [InlineData(StellaOpsScopes.ObservabilityIncident)] + [InlineData(StellaOpsScopes.AuthorityTenantsRead)] + public void All_IncludesNewScopes(string scope) + { + Assert.Contains(scope, StellaOpsScopes.All); + } + [Theory] [InlineData("Advisory:Read", StellaOpsScopes.AdvisoryRead)] - [InlineData(" VEX:Ingest ", StellaOpsScopes.VexIngest)] - [InlineData("AOC:VERIFY", StellaOpsScopes.AocVerify)] - [InlineData(" Signals:Write ", StellaOpsScopes.SignalsWrite)] - [InlineData("Policy:Author", StellaOpsScopes.PolicyAuthor)] - [InlineData("Export.Admin", StellaOpsScopes.ExportAdmin)] - public void Normalize_NormalizesToLowerCase(string input, string expected) - { - Assert.Equal(expected, StellaOpsScopes.Normalize(input)); - } -} + [InlineData(" VEX:Ingest ", StellaOpsScopes.VexIngest)] + [InlineData("AOC:VERIFY", StellaOpsScopes.AocVerify)] + [InlineData(" Signals:Write ", StellaOpsScopes.SignalsWrite)] + [InlineData("AIRGAP:SEAL", StellaOpsScopes.AirgapSeal)] + [InlineData("Policy:Author", StellaOpsScopes.PolicyAuthor)] + [InlineData("Export.Admin", StellaOpsScopes.ExportAdmin)] + [InlineData("Advisory-AI:Operate", StellaOpsScopes.AdvisoryAiOperate)] + [InlineData("Notify.Admin", StellaOpsScopes.NotifyAdmin)] + [InlineData("Packs.Run", StellaOpsScopes.PacksRun)] + 
[InlineData("Packs.Approve", StellaOpsScopes.PacksApprove)] + [InlineData("Notify.Escalate", StellaOpsScopes.NotifyEscalate)] + public void Normalize_NormalizesToLowerCase(string input, string expected) + { + Assert.Equal(expected, StellaOpsScopes.Normalize(input)); + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsClaimTypes.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsClaimTypes.cs index c3098b33..0efcc83b 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsClaimTypes.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsClaimTypes.cs @@ -65,6 +65,31 @@ public static class StellaOpsClaimTypes /// public const string OperatorTicket = "stellaops:operator_ticket"; + /// + /// Quota change reason supplied when issuing Orchestrator quota tokens. + /// + public const string QuotaReason = "stellaops:quota_reason"; + + /// + /// Quota change ticket/incident reference supplied when issuing Orchestrator quota tokens. + /// + public const string QuotaTicket = "stellaops:quota_ticket"; + + /// + /// Backfill activation reason supplied when issuing orchestrator backfill tokens. + /// + public const string BackfillReason = "stellaops:backfill_reason"; + + /// + /// Backfill ticket/incident reference supplied when issuing orchestrator backfill tokens. + /// + public const string BackfillTicket = "stellaops:backfill_ticket"; + + /// + /// Incident activation reason recorded when issuing observability incident tokens. + /// + public const string IncidentReason = "stellaops:incident_reason"; + /// /// Session identifier claim (sid). 
/// diff --git a/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsHttpHeaderNames.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsHttpHeaderNames.cs new file mode 100644 index 00000000..7bfbba7a --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsHttpHeaderNames.cs @@ -0,0 +1,12 @@ +namespace StellaOps.Auth.Abstractions; + +/// +/// Shared HTTP header names used across StellaOps clients and services. +/// +public static class StellaOpsHttpHeaderNames +{ + /// + /// Header used to convey the tenant override when issuing requests to StellaOps APIs. + /// + public const string Tenant = "X-StellaOps-Tenant"; +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsScopes.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsScopes.cs index 0201d293..908ca5a2 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsScopes.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsScopes.cs @@ -49,14 +49,29 @@ public static class StellaOpsScopes public const string ExceptionsApprove = "exceptions:approve"; /// - /// Scope granting read-only access to raw advisory ingestion data. - /// - public const string AdvisoryRead = "advisory:read"; - - /// - /// Scope granting write access for raw advisory ingestion. - /// - public const string AdvisoryIngest = "advisory:ingest"; + /// Scope granting read-only access to raw advisory ingestion data. + /// + public const string AdvisoryRead = "advisory:read"; + + /// + /// Scope granting write access for raw advisory ingestion. + /// + public const string AdvisoryIngest = "advisory:ingest"; + + /// + /// Scope granting read-only access to Advisory AI artefacts (summaries, remediation exports). + /// + public const string AdvisoryAiView = "advisory-ai:view"; + + /// + /// Scope permitting Advisory AI inference requests and workflow execution. 
+ /// + public const string AdvisoryAiOperate = "advisory-ai:operate"; + + /// + /// Scope granting administrative control over Advisory AI configuration and profiles. + /// + public const string AdvisoryAiAdmin = "advisory-ai:admin"; /// /// Scope granting read-only access to raw VEX ingestion data. @@ -85,13 +100,28 @@ public static class StellaOpsScopes /// /// Scope granting administrative access to reachability signal ingestion. - /// - public const string SignalsAdmin = "signals:admin"; - - /// - /// Scope granting permission to create or edit policy drafts. - /// - public const string PolicyWrite = "policy:write"; + /// + public const string SignalsAdmin = "signals:admin"; + + /// + /// Scope granting permission to seal or unseal an installation in air-gapped mode. + /// + public const string AirgapSeal = "airgap:seal"; + + /// + /// Scope granting permission to import offline bundles while in air-gapped mode. + /// + public const string AirgapImport = "airgap:import"; + + /// + /// Scope granting read-only access to air-gap status and sealing state endpoints. + /// + public const string AirgapStatusRead = "airgap:status:read"; + + /// + /// Scope granting permission to create or edit policy drafts. + /// + public const string PolicyWrite = "policy:write"; /// /// Scope granting permission to author Policy Studio workspaces. @@ -163,11 +193,51 @@ public static class StellaOpsScopes /// public const string GraphRead = "graph:read"; - /// - /// Scope granting read-only access to Vuln Explorer resources and permalinks. - /// - public const string VulnRead = "vuln:read"; - + /// + /// Scope granting read-only access to Vuln Explorer resources and permalinks. + /// + public const string VulnRead = "vuln:read"; + + /// + /// Scope granting read-only access to observability dashboards and overlays. + /// + public const string ObservabilityRead = "obs:read"; + + /// + /// Scope granting read-only access to incident timelines and chronology data. 
+ /// + public const string TimelineRead = "timeline:read"; + + /// + /// Scope granting permission to append events to incident timelines. + /// + public const string TimelineWrite = "timeline:write"; + + /// + /// Scope granting permission to create evidence packets in the evidence locker. + /// + public const string EvidenceCreate = "evidence:create"; + + /// + /// Scope granting read-only access to stored evidence packets. + /// + public const string EvidenceRead = "evidence:read"; + + /// + /// Scope granting permission to place or release legal holds on evidence packets. + /// + public const string EvidenceHold = "evidence:hold"; + + /// + /// Scope granting read-only access to attestation records and observer feeds. + /// + public const string AttestRead = "attest:read"; + + /// + /// Scope granting permission to activate or resolve observability incident mode controls. + /// + public const string ObservabilityIncident = "obs:incident"; + /// /// Scope granting read-only access to export center runs and bundles. /// @@ -176,13 +246,68 @@ public static class StellaOpsScopes /// /// Scope granting permission to operate export center scheduling and run execution. /// - public const string ExportOperator = "export.operator"; - - /// - /// Scope granting administrative control over export center retention, encryption keys, and scheduling policies. - /// - public const string ExportAdmin = "export.admin"; - + public const string ExportOperator = "export.operator"; + + /// + /// Scope granting administrative control over export center retention, encryption keys, and scheduling policies. + /// + public const string ExportAdmin = "export.admin"; + + /// + /// Scope granting read-only access to notifier channels, rules, and delivery history. + /// + public const string NotifyViewer = "notify.viewer"; + + /// + /// Scope permitting notifier rule management, delivery actions, and channel operations. 
+ /// + public const string NotifyOperator = "notify.operator"; + + /// + /// Scope granting administrative control over notifier secrets, escalations, and platform-wide settings. + /// + public const string NotifyAdmin = "notify.admin"; + + /// + /// Scope granting read-only access to issuer directory catalogues. + /// + public const string IssuerDirectoryRead = "issuer-directory:read"; + + /// + /// Scope permitting creation and modification of issuer directory entries. + /// + public const string IssuerDirectoryWrite = "issuer-directory:write"; + + /// + /// Scope granting administrative control over issuer directory resources (delete, audit bypass). + /// + public const string IssuerDirectoryAdmin = "issuer-directory:admin"; + + /// + /// Scope required to issue or honour escalation actions for notifications. + /// + public const string NotifyEscalate = "notify.escalate"; + + /// + /// Scope granting read-only access to Task Packs catalogues and manifests. + /// + public const string PacksRead = "packs.read"; + + /// + /// Scope permitting publication or updates to Task Packs in the registry. + /// + public const string PacksWrite = "packs.write"; + + /// + /// Scope granting permission to execute Task Packs via CLI or Task Runner. + /// + public const string PacksRun = "packs.run"; + + /// + /// Scope granting permission to fulfil Task Pack approval gates. + /// + public const string PacksApprove = "packs.approve"; + /// /// Scope granting permission to enqueue or mutate graph build jobs. /// @@ -204,10 +329,20 @@ public static class StellaOpsScopes public const string OrchRead = "orch:read"; /// - /// Scope granting permission to execute Orchestrator control actions. - /// - public const string OrchOperate = "orch:operate"; - + /// Scope granting permission to execute Orchestrator control actions. + /// + public const string OrchOperate = "orch:operate"; + + /// + /// Scope granting permission to manage Orchestrator quotas and elevated backfill tooling. 
+ /// + public const string OrchQuota = "orch:quota"; + + /// + /// Scope granting permission to initiate orchestrator-controlled backfill runs. + /// + public const string OrchBackfill = "orch:backfill"; + /// /// Scope granting read-only access to Authority tenant catalog APIs. /// @@ -223,17 +358,23 @@ public static class StellaOpsScopes Bypass, UiRead, ExceptionsApprove, - AdvisoryRead, - AdvisoryIngest, - VexRead, - VexIngest, - AocVerify, - SignalsRead, - SignalsWrite, - SignalsAdmin, - PolicyWrite, - PolicyAuthor, - PolicyEdit, + AdvisoryRead, + AdvisoryIngest, + AdvisoryAiView, + AdvisoryAiOperate, + AdvisoryAiAdmin, + VexRead, + VexIngest, + AocVerify, + SignalsRead, + SignalsWrite, + SignalsAdmin, + AirgapSeal, + AirgapImport, + AirgapStatusRead, + PolicyWrite, + PolicyAuthor, + PolicyEdit, PolicyRead, PolicyReview, PolicySubmit, @@ -245,18 +386,39 @@ public static class StellaOpsScopes PolicySimulate, FindingsRead, EffectiveWrite, - GraphRead, - VulnRead, - ExportViewer, - ExportOperator, - ExportAdmin, - GraphWrite, - GraphExport, - GraphSimulate, - OrchRead, - OrchOperate, - AuthorityTenantsRead - }; + GraphRead, + VulnRead, + ObservabilityRead, + TimelineRead, + TimelineWrite, + EvidenceCreate, + EvidenceRead, + EvidenceHold, + AttestRead, + ObservabilityIncident, + ExportViewer, + ExportOperator, + ExportAdmin, + NotifyViewer, + NotifyOperator, + NotifyAdmin, + IssuerDirectoryRead, + IssuerDirectoryWrite, + IssuerDirectoryAdmin, + NotifyEscalate, + PacksRead, + PacksWrite, + PacksRun, + PacksApprove, + GraphWrite, + GraphExport, + GraphSimulate, + OrchRead, + OrchOperate, + OrchBackfill, + OrchQuota, + AuthorityTenantsRead + }; /// /// Normalises a scope string (trim/convert to lower case). 
diff --git a/src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/ServiceCollectionExtensionsTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/ServiceCollectionExtensionsTests.cs index 521f4598..c94f29bc 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/ServiceCollectionExtensionsTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/ServiceCollectionExtensionsTests.cs @@ -8,6 +8,9 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Http; +using Microsoft.Extensions.Options; +using Microsoft.Extensions.Time.Testing; +using Microsoft.IdentityModel.Tokens; using StellaOps.Auth.Client; using Xunit; @@ -92,4 +95,206 @@ public class ServiceCollectionExtensionsTests protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) => responder(request, cancellationToken); } + + [Fact] + public async Task AddStellaOpsApiAuthentication_AttachesPatAndTenantHeader() + { + var services = new ServiceCollection(); + services.AddLogging(); + + services.AddStellaOpsAuthClient(options => + { + options.Authority = "https://authority.test"; + options.DiscoveryCacheLifetime = TimeSpan.FromMinutes(1); + options.JwksCacheLifetime = TimeSpan.FromMinutes(1); + options.AllowOfflineCacheFallback = false; + }); + + var tokenClient = new ThrowingTokenClient(); + services.AddSingleton(tokenClient); + + var handler = new RecordingHttpMessageHandler(); + + services.AddHttpClient("notify") + .ConfigurePrimaryHttpMessageHandler(() => handler) + .AddStellaOpsApiAuthentication(options => + { + options.Mode = StellaOpsApiAuthMode.PersonalAccessToken; + options.PersonalAccessToken = "pat-token"; + options.Tenant = "tenant-123"; + options.TenantHeader = "X-Custom-Tenant"; + }); + + using var provider = services.BuildServiceProvider(); + var client = provider.GetRequiredService().CreateClient("notify"); + + var 
response = await client.GetAsync("https://notify.example/api"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + Assert.Single(handler.AuthorizationHistory); + var authorization = handler.AuthorizationHistory[0]; + Assert.NotNull(authorization); + Assert.Equal("Bearer", authorization!.Scheme); + Assert.Equal("pat-token", authorization.Parameter); + + Assert.Single(handler.TenantHeaders); + Assert.Equal("tenant-123", handler.TenantHeaders[0]); + Assert.Equal(0, tokenClient.RequestCount); + } + + [Fact] + public async Task AddStellaOpsApiAuthentication_UsesClientCredentialsWithCaching() + { + var services = new ServiceCollection(); + services.AddLogging(); + + services.AddStellaOpsAuthClient(options => + { + options.Authority = "https://authority.test"; + options.DiscoveryCacheLifetime = TimeSpan.FromMinutes(1); + options.JwksCacheLifetime = TimeSpan.FromMinutes(1); + options.AllowOfflineCacheFallback = false; + options.ExpirationSkew = TimeSpan.FromSeconds(10); + }); + + var fakeTime = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T00:00:00Z")); + services.AddSingleton(fakeTime); + + var recordingTokenClient = new RecordingTokenClient(fakeTime); + services.AddSingleton(recordingTokenClient); + + var handler = new RecordingHttpMessageHandler(); + + services.AddHttpClient("notify") + .ConfigurePrimaryHttpMessageHandler(() => handler) + .AddStellaOpsApiAuthentication(options => + { + options.Mode = StellaOpsApiAuthMode.ClientCredentials; + options.Scope = "notify.read"; + options.Tenant = "tenant-oauth"; + }); + + using var provider = services.BuildServiceProvider(); + var client = provider.GetRequiredService().CreateClient("notify"); + + await client.GetAsync("https://notify.example/api"); + await client.GetAsync("https://notify.example/api"); + + Assert.Equal(2, handler.AuthorizationHistory.Count); + Assert.Equal(1, recordingTokenClient.ClientCredentialsCallCount); + Assert.All(handler.AuthorizationHistory, header => + { + Assert.NotNull(header); + 
Assert.Equal("Bearer", header!.Scheme); + Assert.Equal("token-1", header.Parameter); + }); + Assert.All(handler.TenantHeaders, value => Assert.Equal("tenant-oauth", value)); + + // Advance beyond expiry buffer to force refresh. + fakeTime.Advance(TimeSpan.FromMinutes(2)); + await client.GetAsync("https://notify.example/api"); + + Assert.Equal(3, handler.AuthorizationHistory.Count); + Assert.Equal("token-2", handler.AuthorizationHistory[^1]!.Parameter); + Assert.Equal(2, recordingTokenClient.ClientCredentialsCallCount); + } + + private sealed class RecordingHttpMessageHandler : HttpMessageHandler + { + public List AuthorizationHistory { get; } = new(); + public List TenantHeaders { get; } = new(); + + protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + AuthorizationHistory.Add(request.Headers.Authorization); + if (request.Headers.TryGetValues("X-Custom-Tenant", out var customTenant)) + { + TenantHeaders.Add(customTenant.Single()); + } + else if (request.Headers.TryGetValues("X-StellaOps-Tenant", out var defaultTenant)) + { + TenantHeaders.Add(defaultTenant.Single()); + } + else + { + TenantHeaders.Add(null); + } + + return Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK)); + } + } + + private sealed class ThrowingTokenClient : IStellaOpsTokenClient + { + public int RequestCount { get; private set; } + + public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + + public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + + public Task GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) + => Task.FromResult(new JsonWebKeySet()); + + public ValueTask GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => ValueTask.FromResult(null); + + public Task RequestClientCredentialsTokenAsync(string? 
scope = null, IReadOnlyDictionary? additionalParameters = null, CancellationToken cancellationToken = default) + { + RequestCount++; + throw new InvalidOperationException("Client credentials flow should not be invoked for PAT mode."); + } + + public Task RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary? additionalParameters = null, CancellationToken cancellationToken = default) + { + RequestCount++; + throw new InvalidOperationException("Password flow should not be invoked for PAT mode."); + } + } + + private sealed class RecordingTokenClient : IStellaOpsTokenClient + { + private readonly FakeTimeProvider timeProvider; + private int tokenCounter; + + public RecordingTokenClient(FakeTimeProvider timeProvider) + { + this.timeProvider = timeProvider; + } + + public int ClientCredentialsCallCount { get; private set; } + + public Task RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary? additionalParameters = null, CancellationToken cancellationToken = default) + { + ClientCredentialsCallCount++; + var tokenId = Interlocked.Increment(ref tokenCounter); + var result = new StellaOpsTokenResult( + $"token-{tokenId}", + "Bearer", + timeProvider.GetUtcNow().AddMinutes(1), + scope is null ? Array.Empty() : new[] { scope }, + null, + null, + "{}"); + + return Task.FromResult(result); + } + + public Task RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary? 
additionalParameters = null, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) + => Task.FromResult(new JsonWebKeySet()); + + public ValueTask GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => ValueTask.FromResult(null); + + public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + + public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + } + } diff --git a/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/README.NuGet.md b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/README.NuGet.md index 3369a62e..28ba0075 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/README.NuGet.md +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/README.NuGet.md @@ -5,5 +5,7 @@ Typed OpenID Connect client used by StellaOps services, agents, and tooling to t - Discovery + JWKS caching with deterministic refresh windows. - Password and client-credential flows with token cache abstractions. - Configurable HTTP retry/backoff policies (Polly) and offline fallback support for air-gapped deployments. +- `HttpClient` authentication helpers that attach OAuth2 (password/client-credentials) or personal access tokens, + including automatic `X-StellaOps-Tenant` header injection for multi-tenant APIs. See `docs/dev/32_AUTH_CLIENT_GUIDE.md` in the repository for integration guidance, option descriptions, and rollout checklists. 
diff --git a/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/ServiceCollectionExtensions.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/ServiceCollectionExtensions.cs index b3b43c8e..a136d1b5 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/ServiceCollectionExtensions.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/ServiceCollectionExtensions.cs @@ -68,6 +68,29 @@ public static class ServiceCollectionExtensions return services; } + /// + /// Adds authentication and tenancy header handling for an registered via . + /// + public static IHttpClientBuilder AddStellaOpsApiAuthentication(this IHttpClientBuilder builder, Action configure) + { + ArgumentNullException.ThrowIfNull(builder); + ArgumentNullException.ThrowIfNull(configure); + + builder.Services.AddOptions(builder.Name) + .Configure(configure) + .PostConfigure(static options => options.Validate()); + + builder.AddHttpMessageHandler(provider => new StellaOpsBearerTokenHandler( + builder.Name, + provider.GetRequiredService>(), + provider.GetRequiredService>(), + provider.GetRequiredService(), + provider.GetService(), + provider.GetService>())); + + return builder; + } + private static IAsyncPolicy CreateRetryPolicy(IServiceProvider provider) { var options = provider.GetRequiredService>().CurrentValue; diff --git a/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsApiAuthMode.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsApiAuthMode.cs new file mode 100644 index 00000000..ae8cd564 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsApiAuthMode.cs @@ -0,0 +1,22 @@ +namespace StellaOps.Auth.Client; + +/// +/// Authentication strategies supported by the StellaOps API client helpers. +/// +public enum StellaOpsApiAuthMode +{ + /// + /// Use the OAuth 2.0 client credentials grant to request access tokens. 
+ /// + ClientCredentials, + + /// + /// Use the resource owner password credentials grant to request access tokens. + /// + Password, + + /// + /// Use a pre-issued personal access token (PAT) as the bearer credential. + /// + PersonalAccessToken +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsApiAuthenticationOptions.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsApiAuthenticationOptions.cs new file mode 100644 index 00000000..b4e8c089 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsApiAuthenticationOptions.cs @@ -0,0 +1,97 @@ +using System; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Auth.Client; + +/// +/// Options controlling how instances obtain authentication and tenancy headers. +/// +public sealed class StellaOpsApiAuthenticationOptions +{ + private string tenantHeader = StellaOpsHttpHeaderNames.Tenant; + + /// + /// Gets or sets the authentication mode used to authorise outbound requests. + /// + public StellaOpsApiAuthMode Mode { get; set; } = StellaOpsApiAuthMode.ClientCredentials; + + /// + /// Optional scope override supplied when requesting OAuth access tokens. + /// + public string? Scope { get; set; } + + /// + /// Username used when is . + /// + public string? Username { get; set; } + + /// + /// Password used when is . + /// + public string? Password { get; set; } + + /// + /// Pre-issued personal access token used when is . + /// + public string? PersonalAccessToken { get; set; } + + /// + /// Optional tenant identifier injected via . If null, the header is omitted. + /// + public string? Tenant { get; set; } + + /// + /// Header name used to convey the tenant override (defaults to X-StellaOps-Tenant). + /// + public string TenantHeader + { + get => tenantHeader; + set => tenantHeader = string.IsNullOrWhiteSpace(value) ? 
StellaOpsHttpHeaderNames.Tenant : value.Trim(); + } + + /// + /// Buffer window applied before token expiration that triggers proactive refresh (defaults to 30 seconds). + /// + public TimeSpan RefreshBuffer { get; set; } = TimeSpan.FromSeconds(30); + + internal void Validate() + { + if (RefreshBuffer < TimeSpan.Zero || RefreshBuffer > TimeSpan.FromMinutes(5)) + { + throw new InvalidOperationException("RefreshBuffer must be between 0 seconds and 5 minutes."); + } + + Tenant = string.IsNullOrWhiteSpace(Tenant) ? null : Tenant.Trim(); + + Scope = string.IsNullOrWhiteSpace(Scope) ? null : Scope.Trim(); + + switch (Mode) + { + case StellaOpsApiAuthMode.ClientCredentials: + break; + case StellaOpsApiAuthMode.Password: + if (string.IsNullOrWhiteSpace(Username)) + { + throw new InvalidOperationException("Username is required for password authentication."); + } + + if (string.IsNullOrEmpty(Password)) + { + throw new InvalidOperationException("Password is required for password authentication."); + } + + Username = Username.Trim(); + break; + case StellaOpsApiAuthMode.PersonalAccessToken: + if (string.IsNullOrWhiteSpace(PersonalAccessToken)) + { + throw new InvalidOperationException("PersonalAccessToken is required when using personal access token mode."); + } + + PersonalAccessToken = PersonalAccessToken.Trim(); + break; + default: + throw new InvalidOperationException($"Unsupported authentication mode '{Mode}'."); + } + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsBearerTokenHandler.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsBearerTokenHandler.cs new file mode 100644 index 00000000..ef76bebf --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsBearerTokenHandler.cs @@ -0,0 +1,123 @@ +using System; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using 
Microsoft.Extensions.Options; + +namespace StellaOps.Auth.Client; + +/// +/// Delegating handler that attaches bearer credentials and tenant headers to outbound requests. +/// +internal sealed class StellaOpsBearerTokenHandler : DelegatingHandler +{ + private readonly string clientName; + private readonly IOptionsMonitor apiAuthOptions; + private readonly IOptionsMonitor authClientOptions; + private readonly IStellaOpsTokenClient tokenClient; + private readonly TimeProvider timeProvider; + private readonly ILogger? logger; + private readonly SemaphoreSlim refreshLock = new(1, 1); + + private StellaOpsTokenResult? cachedToken; + + public StellaOpsBearerTokenHandler( + string clientName, + IOptionsMonitor apiAuthOptions, + IOptionsMonitor authClientOptions, + IStellaOpsTokenClient tokenClient, + TimeProvider? timeProvider, + ILogger? logger) + { + this.clientName = clientName ?? throw new ArgumentNullException(nameof(clientName)); + this.apiAuthOptions = apiAuthOptions ?? throw new ArgumentNullException(nameof(apiAuthOptions)); + this.authClientOptions = authClientOptions ?? throw new ArgumentNullException(nameof(authClientOptions)); + this.tokenClient = tokenClient ?? throw new ArgumentNullException(nameof(tokenClient)); + this.timeProvider = timeProvider ?? 
TimeProvider.System; + this.logger = logger; + } + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + var options = apiAuthOptions.Get(clientName); + + if (!string.IsNullOrWhiteSpace(options.Tenant)) + { + request.Headers.Remove(options.TenantHeader); + request.Headers.TryAddWithoutValidation(options.TenantHeader, options.Tenant); + } + + var token = await ResolveTokenAsync(options, cancellationToken).ConfigureAwait(false); + if (!string.IsNullOrEmpty(token)) + { + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token); + } + + return await base.SendAsync(request, cancellationToken).ConfigureAwait(false); + } + + private async Task ResolveTokenAsync(StellaOpsApiAuthenticationOptions options, CancellationToken cancellationToken) + { + if (options.Mode == StellaOpsApiAuthMode.PersonalAccessToken) + { + return options.PersonalAccessToken; + } + + var buffer = GetRefreshBuffer(options); + var now = timeProvider.GetUtcNow(); + var token = cachedToken; + + if (token is not null && token.ExpiresAt - buffer > now) + { + return token.AccessToken; + } + + await refreshLock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + token = cachedToken; + now = timeProvider.GetUtcNow(); + if (token is not null && token.ExpiresAt - buffer > now) + { + return token.AccessToken; + } + + StellaOpsTokenResult result = options.Mode switch + { + StellaOpsApiAuthMode.ClientCredentials => await tokenClient.RequestClientCredentialsTokenAsync( + options.Scope, + null, + cancellationToken).ConfigureAwait(false), + StellaOpsApiAuthMode.Password => await tokenClient.RequestPasswordTokenAsync( + options.Username!, + options.Password!, + options.Scope, + null, + cancellationToken).ConfigureAwait(false), + _ => throw new InvalidOperationException($"Unsupported authentication mode '{options.Mode}'.") + }; + + cachedToken = result; + logger?.LogDebug("Issued access token for client {ClientName}; expires at 
{ExpiresAt}.", clientName, result.ExpiresAt); + return result.AccessToken; + } + finally + { + refreshLock.Release(); + } + } + + private TimeSpan GetRefreshBuffer(StellaOpsApiAuthenticationOptions options) + { + var authOptions = authClientOptions.CurrentValue; + var buffer = options.RefreshBuffer; + if (buffer <= TimeSpan.Zero) + { + return authOptions.ExpirationSkew; + } + + return buffer > authOptions.ExpirationSkew ? buffer : authOptions.ExpirationSkew; + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenResult.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenResult.cs index f820adc2..800b56f6 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenResult.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenResult.cs @@ -15,6 +15,11 @@ public sealed record StellaOpsTokenResult( string? IdToken = null, string? RawResponse = null) { + /// + /// Temporary shim for callers expecting the legacy ExpiresAt member. + /// + public DateTimeOffset ExpiresAt => ExpiresAtUtc; + /// /// Converts the result to a cache entry. 
/// diff --git a/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsResourceServerPoliciesTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsResourceServerPoliciesTests.cs new file mode 100644 index 00000000..12040c5c --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsResourceServerPoliciesTests.cs @@ -0,0 +1,39 @@ +using System.Linq; +using Microsoft.AspNetCore.Authorization; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.ServerIntegration; +using Xunit; + +namespace StellaOps.Auth.ServerIntegration.Tests; + +public class StellaOpsResourceServerPoliciesTests +{ + [Fact] + public void AddObservabilityResourcePolicies_RegistersExpectedPolicies() + { + var options = new AuthorizationOptions(); + + options.AddObservabilityResourcePolicies(); + + AssertPolicy(options, StellaOpsResourceServerPolicies.ObservabilityRead, StellaOpsScopes.ObservabilityRead); + AssertPolicy(options, StellaOpsResourceServerPolicies.ObservabilityIncident, StellaOpsScopes.ObservabilityIncident); + AssertPolicy(options, StellaOpsResourceServerPolicies.TimelineRead, StellaOpsScopes.TimelineRead); + AssertPolicy(options, StellaOpsResourceServerPolicies.TimelineWrite, StellaOpsScopes.TimelineWrite); + AssertPolicy(options, StellaOpsResourceServerPolicies.EvidenceCreate, StellaOpsScopes.EvidenceCreate); + AssertPolicy(options, StellaOpsResourceServerPolicies.EvidenceRead, StellaOpsScopes.EvidenceRead); + AssertPolicy(options, StellaOpsResourceServerPolicies.EvidenceHold, StellaOpsScopes.EvidenceHold); + AssertPolicy(options, StellaOpsResourceServerPolicies.AttestRead, StellaOpsScopes.AttestRead); + AssertPolicy(options, StellaOpsResourceServerPolicies.ExportViewer, StellaOpsScopes.ExportViewer); + AssertPolicy(options, StellaOpsResourceServerPolicies.ExportOperator, StellaOpsScopes.ExportOperator); + AssertPolicy(options, StellaOpsResourceServerPolicies.ExportAdmin, 
StellaOpsScopes.ExportAdmin); + } + + private static void AssertPolicy(AuthorizationOptions options, string policyName, string expectedScope) + { + var policy = options.GetPolicy(policyName); + Assert.NotNull(policy); + + var requirement = Assert.Single(policy!.Requirements.OfType()); + Assert.Equal(new[] { expectedScope }, requirement.RequiredScopes); + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsScopeAuthorizationHandlerTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsScopeAuthorizationHandlerTests.cs index bcefb986..9125a29f 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsScopeAuthorizationHandlerTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsScopeAuthorizationHandlerTests.cs @@ -1,14 +1,20 @@ -using System; -using System.Net; -using System.Security.Claims; -using System.Threading.Tasks; -using Microsoft.AspNetCore.Authorization; -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.ServerIntegration; -using Xunit; +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Net; +using System.Security.Claims; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Microsoft.Extensions.Time.Testing; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.ServerIntegration; +using StellaOps.Cryptography.Audit; +using OpenIddict.Abstractions; +using Xunit; namespace StellaOps.Auth.ServerIntegration.Tests; @@ -24,158 +30,322 @@ public class StellaOpsScopeAuthorizationHandlerTests options.Validate(); }); - var (handler, accessor) = 
CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("10.0.0.1")); - var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); - var principal = new StellaOpsPrincipalBuilder() - .WithSubject("user-1") - .WithTenant("tenant-alpha") - .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger }) - .Build(); - - var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); - - await handler.HandleAsync(context); - - Assert.True(context.HasSucceeded); - } - - [Fact] - [Fact] - public async Task HandleRequirement_Fails_WhenTenantMismatch() - { - var optionsMonitor = CreateOptionsMonitor(options => - { - options.Authority = "https://authority.example"; - options.RequiredTenants.Add("tenant-alpha"); - options.Validate(); - }); - - var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("10.0.0.1")); - var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); - var principal = new StellaOpsPrincipalBuilder() - .WithSubject("user-1") - .WithTenant("tenant-beta") - .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger }) - .Build(); - - var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); - - await handler.HandleAsync(context); - - Assert.False(context.HasSucceeded); - } - - public async Task HandleRequirement_Succeeds_WhenBypassNetworkMatches() - { - var optionsMonitor = CreateOptionsMonitor(options => - { - options.Authority = "https://authority.example"; - options.BypassNetworks.Add("127.0.0.1/32"); - options.Validate(); - }); - - var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("127.0.0.1")); - var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); - var principal = new ClaimsPrincipal(new ClaimsIdentity()); - var context = new AuthorizationHandlerContext(new[] { requirement }, 
principal, accessor.HttpContext); - - await handler.HandleAsync(context); - - Assert.True(context.HasSucceeded); - } - - [Fact] + var (handler, accessor, sink) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("10.0.0.1")); + var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); + var principal = new StellaOpsPrincipalBuilder() + .WithSubject("user-1") + .WithTenant("tenant-alpha") + .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger }) + .Build(); + + var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.True(context.HasSucceeded); + var record = Assert.Single(sink.Records); + Assert.Equal(AuthEventOutcome.Success, record.Outcome); + Assert.Equal(StellaOpsScopes.ConcelierJobsTrigger, Assert.Single(record.Scopes)); + Assert.Equal("tenant-alpha", record.Tenant.Value); + Assert.Equal("true", GetPropertyValue(record, "principal.authenticated")); + Assert.Null(GetPropertyValue(record, "resource.authorization.bypass")); + Assert.False(string.IsNullOrWhiteSpace(record.CorrelationId)); + } + + [Fact] + public async Task HandleRequirement_Fails_WhenTenantMismatch() + { + var optionsMonitor = CreateOptionsMonitor(options => + { + options.Authority = "https://authority.example"; + options.RequiredTenants.Add("tenant-alpha"); + options.Validate(); + }); + + var (handler, accessor, sink) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("10.0.0.1")); + var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); + var principal = new StellaOpsPrincipalBuilder() + .WithSubject("user-1") + .WithTenant("tenant-beta") + .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger }) + .Build(); + + var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.False(context.HasSucceeded); 
+ var record = Assert.Single(sink.Records); + Assert.Equal(AuthEventOutcome.Failure, record.Outcome); + Assert.Equal("tenant-beta", record.Tenant.Value); + Assert.Equal("Tenant requirement not satisfied.", record.Reason); + Assert.Equal("true", GetPropertyValue(record, "principal.authenticated")); + Assert.Equal("true", GetPropertyValue(record, "resource.tenant.mismatch")); + } + + [Fact] + public async Task HandleRequirement_Succeeds_WhenBypassNetworkMatches() + { + var optionsMonitor = CreateOptionsMonitor(options => + { + options.Authority = "https://authority.example"; + options.BypassNetworks.Add("127.0.0.1/32"); + options.Validate(); + }); + + var (handler, accessor, sink) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("127.0.0.1")); + var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); + var principal = new ClaimsPrincipal(new ClaimsIdentity()); + var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.True(context.HasSucceeded); + var record = Assert.Single(sink.Records); + Assert.Equal(AuthEventOutcome.Success, record.Outcome); + Assert.Equal("Matched trusted bypass network.", record.Reason); + Assert.Equal("true", GetPropertyValue(record, "resource.authorization.bypass")); + } + + [Fact] public async Task HandleRequirement_Fails_WhenScopeMissingAndNoBypass() { var optionsMonitor = CreateOptionsMonitor(options => - { - options.Authority = "https://authority.example"; - options.Validate(); - }); - - var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("203.0.113.10")); - var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); - var principal = new ClaimsPrincipal(new ClaimsIdentity()); - var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); - - await handler.HandleAsync(context); 
- - Assert.False(context.HasSucceeded); - } - - [Fact] - public async Task HandleRequirement_Fails_WhenDefaultScopeMissing() - { + { + options.Authority = "https://authority.example"; + options.Validate(); + }); + + var (handler, accessor, sink) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("203.0.113.10")); + var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); + var principal = new ClaimsPrincipal(new ClaimsIdentity()); + var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.False(context.HasSucceeded); + var record = Assert.Single(sink.Records); + Assert.Equal(AuthEventOutcome.Failure, record.Outcome); + Assert.Equal("Principal not authenticated.", record.Reason); + Assert.Equal("false", GetPropertyValue(record, "principal.authenticated")); + } + + [Fact] + public async Task HandleRequirement_Fails_WhenDefaultScopeMissing() + { var optionsMonitor = CreateOptionsMonitor(options => { options.Authority = "https://authority.example"; - options.RequiredScopes.Add(StellaOpsScopes.PolicyRun); - options.Validate(); - }); - - var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("198.51.100.5")); - var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); - var principal = new StellaOpsPrincipalBuilder() - .WithSubject("user-tenant") - .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger }) - .Build(); - - var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); - - await handler.HandleAsync(context); - - Assert.False(context.HasSucceeded); - } - - [Fact] - public async Task HandleRequirement_Succeeds_WhenDefaultScopePresent() - { + options.RequiredScopes.Add(StellaOpsScopes.PolicyRun); + options.Validate(); + }); + + var (handler, accessor, sink) = CreateHandler(optionsMonitor, remoteAddress: 
IPAddress.Parse("198.51.100.5")); + var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); + var principal = new StellaOpsPrincipalBuilder() + .WithSubject("user-tenant") + .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger }) + .Build(); + + var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.False(context.HasSucceeded); + var record = Assert.Single(sink.Records); + Assert.Equal(AuthEventOutcome.Failure, record.Outcome); + Assert.Equal("Required scopes not granted.", record.Reason); + Assert.Equal("true", GetPropertyValue(record, "principal.authenticated")); + Assert.Equal(StellaOpsScopes.PolicyRun, GetPropertyValue(record, "resource.scopes.missing")); + } + + [Fact] + public async Task HandleRequirement_Succeeds_WhenDefaultScopePresent() + { var optionsMonitor = CreateOptionsMonitor(options => { options.Authority = "https://authority.example"; - options.RequiredScopes.Add(StellaOpsScopes.PolicyRun); - options.Validate(); - }); - - var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("198.51.100.5")); - var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); - var principal = new StellaOpsPrincipalBuilder() - .WithSubject("user-tenant") - .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger, StellaOpsScopes.PolicyRun }) - .Build(); - - var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); - - await handler.HandleAsync(context); - - Assert.True(context.HasSucceeded); - } - - private static (StellaOpsScopeAuthorizationHandler Handler, IHttpContextAccessor Accessor) CreateHandler(IOptionsMonitor optionsMonitor, IPAddress remoteAddress) - { - var accessor = new HttpContextAccessor(); - var httpContext = new DefaultHttpContext(); - httpContext.Connection.RemoteIpAddress = remoteAddress; - 
accessor.HttpContext = httpContext; - - var bypassEvaluator = new StellaOpsBypassEvaluator(optionsMonitor, NullLogger.Instance); - - var handler = new StellaOpsScopeAuthorizationHandler( - accessor, - bypassEvaluator, - optionsMonitor, - NullLogger.Instance); - return (handler, accessor); - } - - private static IOptionsMonitor CreateOptionsMonitor(Action configure) - => new TestOptionsMonitor(configure); - - private sealed class TestOptionsMonitor : IOptionsMonitor - where TOptions : class, new() - { - private readonly TOptions value; + options.RequiredScopes.Add(StellaOpsScopes.PolicyRun); + options.Validate(); + }); + + var (handler, accessor, sink) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("198.51.100.5")); + var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); + var principal = new StellaOpsPrincipalBuilder() + .WithSubject("user-tenant") + .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger, StellaOpsScopes.PolicyRun }) + .Build(); + + var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.True(context.HasSucceeded); + var record = Assert.Single(sink.Records); + Assert.Equal(AuthEventOutcome.Success, record.Outcome); + Assert.Null(record.Reason); + Assert.Equal("true", GetPropertyValue(record, "principal.authenticated")); + } + + [Fact] + public async Task HandleRequirement_Fails_WhenIncidentAuthTimeMissing() + { + var optionsMonitor = CreateOptionsMonitor(options => + { + options.Authority = "https://authority.example"; + options.RequiredTenants.Add("tenant-alpha"); + options.Validate(); + }); + + var fakeTime = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T12:00:00Z", CultureInfo.InvariantCulture)); + var (handler, accessor, sink) = CreateHandler(optionsMonitor, IPAddress.Parse("10.0.0.50"), fakeTime); + var requirement = new StellaOpsScopeRequirement(new[] { 
StellaOpsScopes.ObservabilityIncident }); + var principal = new StellaOpsPrincipalBuilder() + .WithSubject("user-incident") + .WithClientId("incident-client") + .WithTenant("tenant-alpha") + .WithScopes(new[] { StellaOpsScopes.ObservabilityIncident }) + .AddClaim(StellaOpsClaimTypes.IncidentReason, "Sev1 drill") + .Build(); + + var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.False(context.HasSucceeded); + var record = Assert.Single(sink.Records); + Assert.Equal(AuthEventOutcome.Failure, record.Outcome); + Assert.Equal("obs:incident tokens require authentication_time claim.", record.Reason); + Assert.Equal("false", GetPropertyValue(record, "incident.fresh_auth_satisfied")); + Assert.Equal("Sev1 drill", GetPropertyValue(record, "incident.reason")); + } + + [Fact] + public async Task HandleRequirement_Fails_WhenIncidentAuthTimeStale() + { + var optionsMonitor = CreateOptionsMonitor(options => + { + options.Authority = "https://authority.example"; + options.RequiredTenants.Add("tenant-alpha"); + options.Validate(); + }); + + var fakeTime = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T12:00:00Z", CultureInfo.InvariantCulture)); + var (handler, accessor, sink) = CreateHandler(optionsMonitor, IPAddress.Parse("10.0.0.51"), fakeTime); + var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ObservabilityIncident }); + var staleAuthTime = fakeTime.GetUtcNow().AddMinutes(-10); + var principal = new StellaOpsPrincipalBuilder() + .WithSubject("user-incident") + .WithClientId("incident-client") + .WithTenant("tenant-alpha") + .WithScopes(new[] { StellaOpsScopes.ObservabilityIncident }) + .AddClaim(StellaOpsClaimTypes.IncidentReason, "Sev1 drill") + .AddClaim(OpenIddictConstants.Claims.AuthenticationTime, staleAuthTime.ToUnixTimeSeconds().ToString(CultureInfo.InvariantCulture)) + .Build(); + + var context = new AuthorizationHandlerContext(new[] { 
requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.False(context.HasSucceeded); + var record = Assert.Single(sink.Records); + Assert.Equal(AuthEventOutcome.Failure, record.Outcome); + Assert.Equal("obs:incident tokens require fresh authentication.", record.Reason); + Assert.Equal("false", GetPropertyValue(record, "incident.fresh_auth_satisfied")); + Assert.Equal(staleAuthTime.ToString("o", CultureInfo.InvariantCulture), GetPropertyValue(record, "incident.auth_time")); + Assert.Equal("Sev1 drill", GetPropertyValue(record, "incident.reason")); + } + + [Fact] + public async Task HandleRequirement_Succeeds_WhenIncidentFreshAuthValid() + { + var optionsMonitor = CreateOptionsMonitor(options => + { + options.Authority = "https://authority.example"; + options.RequiredTenants.Add("tenant-alpha"); + options.Validate(); + }); + + var fakeTime = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T12:00:00Z", CultureInfo.InvariantCulture)); + var (handler, accessor, sink) = CreateHandler(optionsMonitor, IPAddress.Parse("10.0.0.52"), fakeTime); + var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ObservabilityIncident }); + var freshAuthTime = fakeTime.GetUtcNow().AddMinutes(-2); + var principal = new StellaOpsPrincipalBuilder() + .WithSubject("user-incident") + .WithClientId("incident-client") + .WithTenant("tenant-alpha") + .WithScopes(new[] { StellaOpsScopes.ObservabilityIncident }) + .AddClaim(StellaOpsClaimTypes.IncidentReason, "Sev1 drill") + .AddClaim(OpenIddictConstants.Claims.AuthenticationTime, freshAuthTime.ToUnixTimeSeconds().ToString(CultureInfo.InvariantCulture)) + .Build(); + + var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.True(context.HasSucceeded); + var record = Assert.Single(sink.Records); + Assert.Equal(AuthEventOutcome.Success, record.Outcome); + Assert.Equal("true", 
GetPropertyValue(record, "incident.fresh_auth_satisfied")); + Assert.Equal(freshAuthTime.ToString("o", CultureInfo.InvariantCulture), GetPropertyValue(record, "incident.auth_time")); + Assert.Equal("Sev1 drill", GetPropertyValue(record, "incident.reason")); + } + private static (StellaOpsScopeAuthorizationHandler Handler, IHttpContextAccessor Accessor, RecordingAuthEventSink Sink) CreateHandler(IOptionsMonitor optionsMonitor, IPAddress remoteAddress, TimeProvider? timeProvider = null) + { + var accessor = new HttpContextAccessor(); + var httpContext = new DefaultHttpContext(); + httpContext.Connection.RemoteIpAddress = remoteAddress; + httpContext.TraceIdentifier = $"trace-{remoteAddress}"; + accessor.HttpContext = httpContext; + + var bypassEvaluator = new StellaOpsBypassEvaluator(optionsMonitor, NullLogger.Instance); + var sink = new RecordingAuthEventSink(); + + var handler = new StellaOpsScopeAuthorizationHandler( + accessor, + bypassEvaluator, + optionsMonitor, + new[] { sink }, + timeProvider ?? TimeProvider.System, + NullLogger.Instance); + return (handler, accessor, sink); + } + + private static IOptionsMonitor CreateOptionsMonitor(Action configure) + => new TestOptionsMonitor(configure); + + private static string? 
GetPropertyValue(AuthEventRecord record, string propertyName) + { + foreach (var property in record.Properties) + { + if (string.Equals(property.Name, propertyName, StringComparison.Ordinal)) + { + return property.Value.Value; + } + } + + return null; + } + + private sealed class RecordingAuthEventSink : IAuthEventSink + { + private readonly List records = new(); + + public IReadOnlyList Records => records; + + public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) + { + records.Add(record); + return ValueTask.CompletedTask; + } + } + + private sealed class TestOptionsMonitor : IOptionsMonitor + where TOptions : class, new() + { + private readonly TOptions value; + public TestOptionsMonitor(Action configure) { value = new TOptions(); diff --git a/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/README.NuGet.md b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/README.NuGet.md index 3577521c..fe4e1016 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/README.NuGet.md +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/README.NuGet.md @@ -2,8 +2,10 @@ ASP.NET Core helpers that enable resource servers to authenticate with **StellaOps Authority**: -- `AddStellaOpsResourceServerAuthentication` extension for JWT bearer + scope policies. -- Network bypass mask evaluation for on-host automation. -- Consistent `ProblemDetails` responses and policy helpers shared with Concelier/Backend services. +- `AddStellaOpsResourceServerAuthentication` extension for JWT bearer + scope policies. +- `AddObservabilityResourcePolicies` helper to register timeline, evidence, export, and observability scope policies. +- Network bypass mask evaluation for on-host automation. +- Consistent `ProblemDetails` responses and policy helpers shared with Concelier/Backend services. 
using System;
using Microsoft.AspNetCore.Authorization;
using StellaOps.Auth.Abstractions;

namespace StellaOps.Auth.ServerIntegration;

/// <summary>
/// Named authorization policies for StellaOps observability and evidence resource servers.
/// Each policy name is intentionally identical to the scope string it enforces, so the
/// constants below simply re-export the corresponding <see cref="StellaOpsScopes"/> values.
/// </summary>
public static class StellaOpsResourceServerPolicies
{
    /// <summary>Observability dashboards/read-only access policy name.</summary>
    public const string ObservabilityRead = StellaOpsScopes.ObservabilityRead;

    /// <summary>Observability incident activation policy name.</summary>
    public const string ObservabilityIncident = StellaOpsScopes.ObservabilityIncident;

    /// <summary>Timeline read policy name.</summary>
    public const string TimelineRead = StellaOpsScopes.TimelineRead;

    /// <summary>Timeline write policy name.</summary>
    public const string TimelineWrite = StellaOpsScopes.TimelineWrite;

    /// <summary>Evidence create policy name.</summary>
    public const string EvidenceCreate = StellaOpsScopes.EvidenceCreate;

    /// <summary>Evidence read policy name.</summary>
    public const string EvidenceRead = StellaOpsScopes.EvidenceRead;

    /// <summary>Evidence hold policy name.</summary>
    public const string EvidenceHold = StellaOpsScopes.EvidenceHold;

    /// <summary>Attestation read policy name.</summary>
    public const string AttestRead = StellaOpsScopes.AttestRead;

    /// <summary>Export viewer policy name.</summary>
    public const string ExportViewer = StellaOpsScopes.ExportViewer;

    /// <summary>Export operator policy name.</summary>
    public const string ExportOperator = StellaOpsScopes.ExportOperator;

    /// <summary>Export admin policy name.</summary>
    public const string ExportAdmin = StellaOpsScopes.ExportAdmin;

    /// <summary>
    /// Registers all observability, timeline, evidence, attestation, and export
    /// authorization policies on <paramref name="options"/>.
    /// </summary>
    /// <param name="options">The authorization options to register the policies on.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="options"/> is <c>null</c>.</exception>
    public static void AddObservabilityResourcePolicies(this AuthorizationOptions options)
    {
        ArgumentNullException.ThrowIfNull(options);

        // Policy name == required scope for every entry, so a single list drives
        // the registrations instead of eleven hand-written calls.
        var scopePolicies = new[]
        {
            ObservabilityRead,
            ObservabilityIncident,
            TimelineRead,
            TimelineWrite,
            EvidenceCreate,
            EvidenceRead,
            EvidenceHold,
            AttestRead,
            ExportViewer,
            ExportOperator,
            ExportAdmin
        };

        foreach (var scope in scopePolicies)
        {
            options.AddStellaOpsScopePolicy(scope, scope);
        }
    }
}
-/// -internal sealed class StellaOpsScopeAuthorizationHandler : AuthorizationHandler -{ - private readonly IHttpContextAccessor httpContextAccessor; - private readonly StellaOpsBypassEvaluator bypassEvaluator; - private readonly IOptionsMonitor optionsMonitor; - private readonly ILogger logger; - - public StellaOpsScopeAuthorizationHandler( - IHttpContextAccessor httpContextAccessor, - StellaOpsBypassEvaluator bypassEvaluator, - IOptionsMonitor optionsMonitor, - ILogger logger) - { - this.httpContextAccessor = httpContextAccessor; - this.bypassEvaluator = bypassEvaluator; - this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); - this.logger = logger; - } - - protected override Task HandleRequirementAsync( - AuthorizationHandlerContext context, - StellaOpsScopeRequirement requirement) - { - var resourceOptions = optionsMonitor.CurrentValue; - var httpContext = httpContextAccessor.HttpContext; - var combinedScopes = CombineRequiredScopes(resourceOptions.NormalizedScopes, requirement.RequiredScopes); - HashSet? userScopes = null; - - if (context.User?.Identity?.IsAuthenticated == true) - { - userScopes = ExtractScopes(context.User); - - foreach (var scope in combinedScopes) - { - if (!userScopes.Contains(scope)) - { - continue; - } - - if (TenantAllowed(context.User, resourceOptions, out var normalizedTenant)) - { - context.Succeed(requirement); - return Task.CompletedTask; - } - - if (logger.IsEnabled(LogLevel.Debug)) - { - var allowedTenants = resourceOptions.NormalizedTenants.Count == 0 - ? "(none)" - : string.Join(", ", resourceOptions.NormalizedTenants); - - logger.LogDebug( - "Tenant requirement not satisfied. RequiredTenants={RequiredTenants}; PrincipalTenant={PrincipalTenant}; Remote={Remote}", - allowedTenants, - normalizedTenant ?? 
"(none)", - httpContext?.Connection.RemoteIpAddress); - } - - // tenant mismatch cannot be resolved by checking additional scopes for this principal - break; - } - } - - if (httpContext is not null && bypassEvaluator.ShouldBypass(httpContext, combinedScopes)) - { - context.Succeed(requirement); - return Task.CompletedTask; - } - - if (logger.IsEnabled(LogLevel.Debug)) - { - var required = string.Join(", ", combinedScopes); - var principalScopes = userScopes is null || userScopes.Count == 0 - ? "(none)" - : string.Join(", ", userScopes); - var tenantValue = context.User?.FindFirstValue(StellaOpsClaimTypes.Tenant) ?? "(none)"; - - logger.LogDebug( - "Scope requirement not satisfied. Required={RequiredScopes}; PrincipalScopes={PrincipalScopes}; Tenant={Tenant}; Remote={Remote}", - required, - principalScopes, - tenantValue, - httpContext?.Connection.RemoteIpAddress); - } - - return Task.CompletedTask; - } - - private static bool TenantAllowed(ClaimsPrincipal principal, StellaOpsResourceServerOptions options, out string? 
normalizedTenant) - { - normalizedTenant = null; - - if (options.NormalizedTenants.Count == 0) - { - return true; - } - - var rawTenant = principal.FindFirstValue(StellaOpsClaimTypes.Tenant); - if (string.IsNullOrWhiteSpace(rawTenant)) - { - return false; - } - - normalizedTenant = rawTenant.Trim().ToLowerInvariant(); - - foreach (var allowed in options.NormalizedTenants) - { - if (string.Equals(allowed, normalizedTenant, StringComparison.Ordinal)) - { - return true; - } - } - - return false; - } - - private static HashSet ExtractScopes(ClaimsPrincipal principal) - { - var scopes = new HashSet(StringComparer.Ordinal); - - foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem)) - { - if (string.IsNullOrWhiteSpace(claim.Value)) - { - continue; - } - - scopes.Add(claim.Value); - } - - foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope)) - { - if (string.IsNullOrWhiteSpace(claim.Value)) - { - continue; - } - - var parts = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - - foreach (var part in parts) - { - var normalized = StellaOpsScopes.Normalize(part); - if (normalized is not null) - { - scopes.Add(normalized); - } - } - } - - return scopes; - } - - private static IReadOnlyList CombineRequiredScopes( - IReadOnlyList defaultScopes, - IReadOnlyCollection requirementScopes) - { - if ((defaultScopes is null || defaultScopes.Count == 0) && (requirementScopes is null || requirementScopes.Count == 0)) - { - return Array.Empty(); - } - - if (defaultScopes is null || defaultScopes.Count == 0) - { - return requirementScopes is string[] requirementArray - ? 
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Security.Claims;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Auth.Abstractions;
using StellaOps.Cryptography.Audit;
using OpenIddict.Abstractions;

namespace StellaOps.Auth.ServerIntegration;

/// <summary>
/// Handles <see cref="StellaOpsScopeRequirement"/> evaluation: scope matching, tenant
/// enforcement, the obs:incident fresh-authentication window, trusted-network bypass,
/// and structured audit emission (<c>authority.resource.authorize</c>) for every decision.
/// </summary>
internal sealed class StellaOpsScopeAuthorizationHandler : AuthorizationHandler<StellaOpsScopeRequirement>
{
    private const string ResourceEventType = "authority.resource.authorize";

    // obs:incident tokens must carry an auth_time no older than this window.
    private static readonly TimeSpan ObservabilityIncidentFreshAuthWindow = TimeSpan.FromMinutes(5);

    private readonly IHttpContextAccessor httpContextAccessor;
    private readonly StellaOpsBypassEvaluator bypassEvaluator;
    private readonly IOptionsMonitor<StellaOpsResourceServerOptions> optionsMonitor;
    private readonly IEnumerable<IAuthEventSink> auditSinks;
    private readonly TimeProvider timeProvider;
    private readonly ILogger<StellaOpsScopeAuthorizationHandler> logger;

    /// <summary>
    /// Creates the handler.
    /// </summary>
    /// <param name="httpContextAccessor">Accessor for the current HTTP context (bypass evaluation, remote address logging).</param>
    /// <param name="bypassEvaluator">Evaluates whether the request originates from a trusted bypass network.</param>
    /// <param name="optionsMonitor">Resource-server options (default scopes, allowed tenants, audiences).</param>
    /// <param name="auditSinks">Audit sinks to receive the authorization event; a missing registration degrades to no auditing.</param>
    /// <param name="timeProvider">Clock used for fresh-auth validation and audit timestamps.</param>
    /// <param name="logger">Diagnostic logger.</param>
    /// <exception cref="ArgumentNullException">Thrown when any required dependency is <c>null</c>.</exception>
    public StellaOpsScopeAuthorizationHandler(
        IHttpContextAccessor httpContextAccessor,
        StellaOpsBypassEvaluator bypassEvaluator,
        IOptionsMonitor<StellaOpsResourceServerOptions> optionsMonitor,
        IEnumerable<IAuthEventSink> auditSinks,
        TimeProvider timeProvider,
        ILogger<StellaOpsScopeAuthorizationHandler> logger)
    {
        // Guard all required dependencies consistently (the original guarded only two of them).
        this.httpContextAccessor = httpContextAccessor ?? throw new ArgumentNullException(nameof(httpContextAccessor));
        this.bypassEvaluator = bypassEvaluator ?? throw new ArgumentNullException(nameof(bypassEvaluator));
        this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor));
        this.auditSinks = auditSinks ?? Array.Empty<IAuthEventSink>();
        this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    protected override async Task HandleRequirementAsync(
        AuthorizationHandlerContext context,
        StellaOpsScopeRequirement requirement)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(requirement);

        var resourceOptions = optionsMonitor.CurrentValue;
        var httpContext = httpContextAccessor.HttpContext;
        var combinedScopes = CombineRequiredScopes(resourceOptions.NormalizedScopes, requirement.RequiredScopes);
        var principal = context.User;
        var principalAuthenticated = principal?.Identity?.IsAuthenticated == true;
        var principalScopes = principalAuthenticated
            ? ExtractScopes(principal!)
            : new HashSet<string>(StringComparer.Ordinal);

        var anyScopeMatched = false;
        var missingScopes = new List<string>();

        if (principalAuthenticated)
        {
            foreach (var scope in combinedScopes)
            {
                if (principalScopes.Contains(scope))
                {
                    anyScopeMatched = true;
                }
                else
                {
                    missingScopes.Add(scope);
                }
            }
        }
        else if (combinedScopes.Count > 0)
        {
            missingScopes.AddRange(combinedScopes);
        }

        // Zero configured scopes means nothing can be satisfied (fail closed).
        var allScopesSatisfied = combinedScopes.Count > 0 && missingScopes.Count == 0;

        var tenantAllowed = false;
        var tenantMismatch = false;
        string? normalizedTenant = null;

        var incidentFreshAuthRequired = combinedScopes.Contains(StellaOpsScopes.ObservabilityIncident);
        var incidentFreshAuthSatisfied = true;
        string? incidentReasonClaim = null;
        DateTimeOffset? incidentAuthTime = null;
        string? incidentFailureReason = null;

        if (principalAuthenticated)
        {
            incidentReasonClaim = principal!.FindFirstValue(StellaOpsClaimTypes.IncidentReason);
        }

        // Tenant is only evaluated once all scopes are granted; a mismatch cannot be
        // resolved by checking additional scopes for the same principal.
        if (principalAuthenticated && allScopesSatisfied)
        {
            tenantAllowed = TenantAllowed(principal!, resourceOptions, out normalizedTenant);
            tenantMismatch = !tenantAllowed;
        }

        if (principalAuthenticated && tenantAllowed && allScopesSatisfied && incidentFreshAuthRequired)
        {
            incidentFreshAuthSatisfied = ValidateObservabilityIncidentFreshAuthentication(
                principal!,
                out incidentReasonClaim,
                out incidentAuthTime,
                out incidentFailureReason);
        }

        var bypassed = false;

        // Trusted bypass networks override any remaining failure mode.
        if ((!principalAuthenticated || !allScopesSatisfied || !tenantAllowed || !incidentFreshAuthSatisfied) &&
            httpContext is not null &&
            bypassEvaluator.ShouldBypass(httpContext, combinedScopes))
        {
            tenantAllowed = true;
            tenantMismatch = false;
            allScopesSatisfied = true;
            anyScopeMatched = true;
            missingScopes.Clear();
            incidentFreshAuthSatisfied = true;
            incidentFailureReason = null;
            incidentAuthTime = null;
            bypassed = true;
        }

        if (tenantAllowed && allScopesSatisfied && incidentFreshAuthSatisfied)
        {
            context.Succeed(requirement);
        }
        else if (logger.IsEnabled(LogLevel.Debug))
        {
            if (tenantMismatch)
            {
                var allowedTenants = resourceOptions.NormalizedTenants.Count == 0
                    ? "(none)"
                    : string.Join(", ", resourceOptions.NormalizedTenants);

                logger.LogDebug(
                    "Tenant requirement not satisfied. RequiredTenants={RequiredTenants}; PrincipalTenant={PrincipalTenant}; Remote={Remote}",
                    allowedTenants,
                    normalizedTenant ?? "(none)",
                    httpContext?.Connection.RemoteIpAddress);
            }

            var required = combinedScopes.Count == 0 ? "(none)" : string.Join(", ", combinedScopes);
            var principalScopeList = principalScopes.Count == 0
                ? "(none)"
                : string.Join(", ", principalScopes);
            var tenantValue = normalizedTenant ?? principal?.FindFirstValue(StellaOpsClaimTypes.Tenant) ?? "(none)";
            var missing = missingScopes.Count == 0
                ? "(none)"
                : string.Join(", ", missingScopes);

            logger.LogDebug(
                "Scope requirement not satisfied. Required={RequiredScopes}; PrincipalScopes={PrincipalScopes}; Missing={MissingScopes}; Tenant={Tenant}; Remote={Remote}",
                required,
                principalScopeList,
                missing,
                tenantValue,
                httpContext?.Connection.RemoteIpAddress);

            if (incidentFreshAuthRequired && !incidentFreshAuthSatisfied)
            {
                var authTimeText = incidentAuthTime?.ToString("o", CultureInfo.InvariantCulture) ?? "(unknown)";
                logger.LogDebug(
                    "Incident scope fresh-auth requirement not satisfied. AuthTime={AuthTime}; Window={Window}; Remote={Remote}",
                    authTimeText,
                    ObservabilityIncidentFreshAuthWindow,
                    httpContext?.Connection.RemoteIpAddress);
            }
        }

        var reason = incidentFailureReason ?? DetermineFailureReason(
            principalAuthenticated,
            allScopesSatisfied,
            tenantMismatch,
            combinedScopes.Count);
        if (bypassed)
        {
            reason = "Matched trusted bypass network.";
        }

        await EmitAuditEventAsync(
            httpContext,
            principal,
            combinedScopes,
            principalScopes,
            resourceOptions,
            normalizedTenant,
            missingScopes,
            tenantAllowed && allScopesSatisfied && incidentFreshAuthSatisfied,
            bypassed,
            reason,
            principalAuthenticated,
            allScopesSatisfied,
            anyScopeMatched,
            tenantMismatch,
            incidentFreshAuthRequired,
            incidentFreshAuthSatisfied,
            incidentReasonClaim,
            incidentAuthTime).ConfigureAwait(false);
    }

    /// <summary>
    /// Maps the failure state to a human-readable audit reason, or <c>null</c> on success.
    /// </summary>
    private static string? DetermineFailureReason(
        bool principalAuthenticated,
        bool allScopesSatisfied,
        bool tenantMismatch,
        int requiredScopeCount)
    {
        if (!principalAuthenticated)
        {
            return "Principal not authenticated.";
        }

        if (!allScopesSatisfied)
        {
            if (requiredScopeCount == 0)
            {
                return "No scopes configured for resource server.";
            }

            // Fix: the original returned the same string from both branches of an
            // `anyScopeMatched` ternary; the dead conditional (and its parameter)
            // are removed — partial and zero matches report identically.
            return "Required scopes not granted.";
        }

        if (tenantMismatch)
        {
            return "Tenant requirement not satisfied.";
        }

        return null;
    }

    /// <summary>
    /// Returns <c>true</c> when the principal's tenant claim matches one of the
    /// configured tenants (case-insensitive via lower-casing), or when no tenants
    /// are configured at all.
    /// </summary>
    private static bool TenantAllowed(ClaimsPrincipal principal, StellaOpsResourceServerOptions options, out string? normalizedTenant)
    {
        normalizedTenant = null;

        if (options.NormalizedTenants.Count == 0)
        {
            return true;
        }

        var rawTenant = principal.FindFirstValue(StellaOpsClaimTypes.Tenant);
        if (string.IsNullOrWhiteSpace(rawTenant))
        {
            return false;
        }

        normalizedTenant = rawTenant.Trim().ToLowerInvariant();

        foreach (var allowed in options.NormalizedTenants)
        {
            if (string.Equals(allowed, normalizedTenant, StringComparison.Ordinal))
            {
                return true;
            }
        }

        return false;
    }

    /// <summary>
    /// Writes the authorization decision to every registered audit sink.
    /// Audit failures are logged but never allowed to affect the authorization outcome.
    /// </summary>
    private async Task EmitAuditEventAsync(
        HttpContext? httpContext,
        ClaimsPrincipal? principal,
        IReadOnlyList<string> requiredScopes,
        IReadOnlyCollection<string> principalScopes,
        StellaOpsResourceServerOptions resourceOptions,
        string? normalizedTenant,
        IReadOnlyCollection<string> missingScopes,
        bool succeeded,
        bool bypassed,
        string? reason,
        bool principalAuthenticated,
        bool allScopesSatisfied,
        bool anyScopeMatched,
        bool tenantMismatch,
        bool incidentFreshAuthRequired,
        bool incidentFreshAuthSatisfied,
        string? incidentReason,
        DateTimeOffset? incidentAuthTime)
    {
        if (!auditSinks.Any())
        {
            return;
        }

        try
        {
            var record = BuildAuditRecord(
                httpContext,
                principal,
                requiredScopes,
                principalScopes,
                resourceOptions,
                normalizedTenant,
                missingScopes,
                succeeded,
                bypassed,
                reason,
                principalAuthenticated,
                allScopesSatisfied,
                anyScopeMatched,
                tenantMismatch,
                incidentFreshAuthRequired,
                incidentFreshAuthSatisfied,
                incidentReason,
                incidentAuthTime);

            var cancellationToken = httpContext?.RequestAborted ?? CancellationToken.None;

            foreach (var sink in auditSinks)
            {
                await sink.WriteAsync(record, cancellationToken).ConfigureAwait(false);
            }
        }
        catch (Exception ex)
        {
            // Deliberate catch-all: auditing is best-effort and must not fail the request.
            logger.LogError(ex, "Failed to emit resource server authorization audit event.");
        }
    }

    /// <summary>
    /// Assembles the <see cref="AuthEventRecord"/> for the current decision, including
    /// subject/client/network envelopes and classified property values.
    /// </summary>
    private AuthEventRecord BuildAuditRecord(
        HttpContext? httpContext,
        ClaimsPrincipal? principal,
        IReadOnlyList<string> requiredScopes,
        IReadOnlyCollection<string> principalScopes,
        StellaOpsResourceServerOptions resourceOptions,
        string? normalizedTenant,
        IReadOnlyCollection<string> missingScopes,
        bool succeeded,
        bool bypassed,
        string? reason,
        bool principalAuthenticated,
        bool allScopesSatisfied,
        bool anyScopeMatched,
        bool tenantMismatch,
        bool incidentFreshAuthRequired,
        bool incidentFreshAuthSatisfied,
        string? incidentReason,
        DateTimeOffset? incidentAuthTime)
    {
        var correlationId = ResolveCorrelationId(httpContext);
        var subject = BuildSubject(principal);
        var client = BuildClient(principal);
        var network = BuildNetwork(httpContext);
        var tenantClaim = principal?.FindFirstValue(StellaOpsClaimTypes.Tenant);
        var tenantValue = ClassifiedString.Public(normalizedTenant ?? tenantClaim?.Trim().ToLowerInvariant());
        var properties = BuildAuthProperties(
            resourceOptions,
            principalScopes,
            missingScopes,
            bypassed,
            principalAuthenticated,
            allScopesSatisfied,
            anyScopeMatched,
            tenantMismatch,
            incidentFreshAuthRequired,
            incidentFreshAuthSatisfied,
            incidentReason,
            incidentAuthTime);

        return new AuthEventRecord
        {
            EventType = ResourceEventType,
            OccurredAt = timeProvider.GetUtcNow(),
            CorrelationId = correlationId,
            Outcome = succeeded ? AuthEventOutcome.Success : AuthEventOutcome.Failure,
            Reason = reason,
            Subject = subject,
            Client = client,
            Tenant = tenantValue,
            Scopes = requiredScopes,
            Network = network,
            Properties = properties
        };
    }

    /// <summary>
    /// Builds the classified key/value properties attached to the audit record.
    /// The incident reason is marked sensitive; everything else is public metadata.
    /// </summary>
    private static IReadOnlyList<AuthEventProperty> BuildAuthProperties(
        StellaOpsResourceServerOptions resourceOptions,
        IReadOnlyCollection<string> principalScopes,
        IReadOnlyCollection<string> missingScopes,
        bool bypassed,
        bool principalAuthenticated,
        bool allScopesSatisfied,
        bool anyScopeMatched,
        bool tenantMismatch,
        bool incidentFreshAuthRequired,
        bool incidentFreshAuthSatisfied,
        string? incidentReason,
        DateTimeOffset? incidentAuthTime)
    {
        var properties = new List<AuthEventProperty>();

        if (resourceOptions.Audiences.Count > 0)
        {
            properties.Add(new AuthEventProperty
            {
                Name = "resource.audience",
                Value = ClassifiedString.Public(string.Join(",", resourceOptions.Audiences))
            });
        }

        if (resourceOptions.NormalizedTenants.Count > 0)
        {
            properties.Add(new AuthEventProperty
            {
                Name = "resource.tenants.allowed",
                Value = ClassifiedString.Public(string.Join(",", resourceOptions.NormalizedTenants))
            });
        }

        if (principalScopes.Count > 0)
        {
            // Sorted for deterministic audit output.
            var joined = string.Join(" ", principalScopes.OrderBy(static scope => scope, StringComparer.Ordinal));
            properties.Add(new AuthEventProperty
            {
                Name = "principal.scopes",
                Value = ClassifiedString.Public(joined)
            });
        }

        if (missingScopes.Count > 0)
        {
            properties.Add(new AuthEventProperty
            {
                Name = "resource.scopes.missing",
                Value = ClassifiedString.Public(string.Join(" ", missingScopes))
            });
        }

        properties.Add(new AuthEventProperty
        {
            Name = "principal.authenticated",
            Value = ClassifiedString.Public(principalAuthenticated ? "true" : "false")
        });

        properties.Add(new AuthEventProperty
        {
            Name = "resource.scopes.all_satisfied",
            Value = ClassifiedString.Public(allScopesSatisfied ? "true" : "false")
        });

        properties.Add(new AuthEventProperty
        {
            Name = "resource.scopes.any_matched",
            Value = ClassifiedString.Public(anyScopeMatched ? "true" : "false")
        });

        if (tenantMismatch)
        {
            properties.Add(new AuthEventProperty
            {
                Name = "resource.tenant.mismatch",
                Value = ClassifiedString.Public("true")
            });
        }

        if (bypassed)
        {
            properties.Add(new AuthEventProperty
            {
                Name = "resource.authorization.bypass",
                Value = ClassifiedString.Public("true")
            });
        }

        if (incidentFreshAuthRequired)
        {
            properties.Add(new AuthEventProperty
            {
                Name = "incident.fresh_auth_satisfied",
                Value = ClassifiedString.Public(incidentFreshAuthSatisfied ? "true" : "false")
            });

            if (incidentAuthTime.HasValue)
            {
                properties.Add(new AuthEventProperty
                {
                    Name = "incident.auth_time",
                    Value = ClassifiedString.Public(incidentAuthTime.Value.ToString("o", CultureInfo.InvariantCulture))
                });
            }

            if (!string.IsNullOrWhiteSpace(incidentReason))
            {
                properties.Add(new AuthEventProperty
                {
                    Name = "incident.reason",
                    Value = ClassifiedString.Sensitive(incidentReason!)
                });
            }
        }

        return properties;
    }

    /// <summary>
    /// Validates the obs:incident token contract: an incident_reason claim plus an
    /// auth_time claim no older than <see cref="ObservabilityIncidentFreshAuthWindow"/>.
    /// </summary>
    /// <returns><c>true</c> when the token satisfies the fresh-auth contract.</returns>
    private bool ValidateObservabilityIncidentFreshAuthentication(
        ClaimsPrincipal principal,
        out string? incidentReason,
        out DateTimeOffset? authenticationTime,
        out string? failureReason)
    {
        incidentReason = principal.FindFirstValue(StellaOpsClaimTypes.IncidentReason)?.Trim();
        authenticationTime = null;

        if (string.IsNullOrWhiteSpace(incidentReason))
        {
            failureReason = "obs:incident tokens require incident_reason claim.";
            LogIncidentValidationFailure(principal, failureReason);
            return false;
        }

        var authTimeClaim = principal.FindFirstValue(OpenIddictConstants.Claims.AuthenticationTime);
        if (string.IsNullOrWhiteSpace(authTimeClaim) ||
            !long.TryParse(authTimeClaim, NumberStyles.Integer, CultureInfo.InvariantCulture, out var authTimeSeconds))
        {
            failureReason = "obs:incident tokens require authentication_time claim.";
            LogIncidentValidationFailure(principal, failureReason);
            return false;
        }

        try
        {
            authenticationTime = DateTimeOffset.FromUnixTimeSeconds(authTimeSeconds);
        }
        catch (ArgumentOutOfRangeException)
        {
            // auth_time outside the representable Unix-seconds range.
            failureReason = "obs:incident tokens contain an invalid authentication_time value.";
            LogIncidentValidationFailure(principal, failureReason);
            return false;
        }

        var now = timeProvider.GetUtcNow();
        if (now - authenticationTime > ObservabilityIncidentFreshAuthWindow)
        {
            failureReason = "obs:incident tokens require fresh authentication.";
            LogIncidentValidationFailure(principal, failureReason, authenticationTime);
            return false;
        }

        failureReason = null;
        return true;
    }

    /// <summary>
    /// Logs an incident fresh-auth validation failure at warning level, including the
    /// observed auth_time when one was parsed.
    /// </summary>
    private void LogIncidentValidationFailure(
        ClaimsPrincipal principal,
        string message,
        DateTimeOffset? authenticationTime = null)
    {
        var clientId = principal.FindFirstValue(StellaOpsClaimTypes.ClientId) ?? "";
        var subject = principal.FindFirstValue(StellaOpsClaimTypes.Subject) ?? "";

        if (authenticationTime.HasValue)
        {
            logger.LogWarning(
                "{Message} ClientId={ClientId}; Subject={Subject}; AuthTime={AuthTime:o}; Window={Window}",
                message,
                clientId,
                subject,
                authenticationTime.Value,
                ObservabilityIncidentFreshAuthWindow);
        }
        else
        {
            logger.LogWarning(
                "{Message} ClientId={ClientId}; Subject={Subject}",
                message,
                clientId,
                subject);
        }
    }

    /// <summary>
    /// Chooses a correlation id: the current activity trace id, then the ASP.NET Core
    /// trace identifier, then a fresh GUID as a last resort.
    /// </summary>
    private static string ResolveCorrelationId(HttpContext? httpContext)
    {
        if (Activity.Current is { TraceId: var traceId } && traceId != default)
        {
            return traceId.ToString();
        }

        if (!string.IsNullOrWhiteSpace(httpContext?.TraceIdentifier))
        {
            return httpContext.TraceIdentifier!;
        }

        return Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture);
    }

    /// <summary>
    /// Builds the audit subject envelope, or <c>null</c> when neither subject id nor
    /// username is available.
    /// </summary>
    private static AuthEventSubject? BuildSubject(ClaimsPrincipal? principal)
    {
        if (principal is null)
        {
            return null;
        }

        var subjectId = ClassifiedString.Personal(principal.FindFirstValue(StellaOpsClaimTypes.Subject));
        var username = ClassifiedString.Personal(principal.Identity?.Name);

        if (!subjectId.HasValue && !username.HasValue)
        {
            return null;
        }

        return new AuthEventSubject
        {
            SubjectId = subjectId,
            Username = username
        };
    }

    /// <summary>
    /// Builds the audit client envelope from the client_id claim, or <c>null</c> when absent.
    /// </summary>
    private static AuthEventClient? BuildClient(ClaimsPrincipal? principal)
    {
        if (principal is null)
        {
            return null;
        }

        var clientId = principal.FindFirstValue(StellaOpsClaimTypes.ClientId);
        if (string.IsNullOrWhiteSpace(clientId))
        {
            return null;
        }

        return new AuthEventClient
        {
            ClientId = ClassifiedString.Personal(clientId),
            Name = ClassifiedString.Empty,
            Provider = ClassifiedString.Empty
        };
    }

    /// <summary>
    /// Builds the audit network envelope (remote address, forwarded-for, user agent),
    /// or <c>null</c> when none of the three is present.
    /// </summary>
    private static AuthEventNetwork? BuildNetwork(HttpContext? httpContext)
    {
        if (httpContext is null)
        {
            return null;
        }

        var remote = httpContext.Connection.RemoteIpAddress?.ToString();
        var forwarded = GetHeaderValue(httpContext, "X-Forwarded-For");
        if (string.IsNullOrWhiteSpace(forwarded))
        {
            // Fall back to the RFC 7239 header when the de-facto one is absent.
            forwarded = GetHeaderValue(httpContext, "Forwarded");
        }

        var userAgent = GetHeaderValue(httpContext, "User-Agent");

        if (string.IsNullOrWhiteSpace(remote) &&
            string.IsNullOrWhiteSpace(forwarded) &&
            string.IsNullOrWhiteSpace(userAgent))
        {
            return null;
        }

        return new AuthEventNetwork
        {
            RemoteAddress = ClassifiedString.Personal(remote),
            ForwardedFor = ClassifiedString.Personal(forwarded),
            UserAgent = ClassifiedString.Personal(userAgent)
        };
    }

    /// <summary>Returns the first value of a request header, or <c>null</c> when missing.</summary>
    private static string? GetHeaderValue(HttpContext httpContext, string name)
    {
        if (httpContext.Request.Headers.TryGetValue(name, out var values) && values.Count > 0)
        {
            return values[0];
        }

        return null;
    }

    /// <summary>
    /// Collects the principal's scopes from individual scope-item claims and from
    /// space-delimited scope claims (normalized via <see cref="StellaOpsScopes.Normalize"/>).
    /// </summary>
    private static HashSet<string> ExtractScopes(ClaimsPrincipal principal)
    {
        var scopes = new HashSet<string>(StringComparer.Ordinal);

        foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem))
        {
            if (string.IsNullOrWhiteSpace(claim.Value))
            {
                continue;
            }

            scopes.Add(claim.Value);
        }

        foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope))
        {
            if (string.IsNullOrWhiteSpace(claim.Value))
            {
                continue;
            }

            var parts = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);

            foreach (var part in parts)
            {
                var normalized = StellaOpsScopes.Normalize(part);
                if (normalized is not null)
                {
                    scopes.Add(normalized);
                }
            }
        }

        return scopes;
    }

    /// <summary>
    /// Merges the resource server's default scopes with the requirement's scopes into a
    /// deduplicated, ordinally sorted list; returns the default list unchanged when the
    /// requirement adds nothing.
    /// </summary>
    private static IReadOnlyList<string> CombineRequiredScopes(
        IReadOnlyList<string> defaultScopes,
        IReadOnlyCollection<string> requirementScopes)
    {
        if ((defaultScopes is null || defaultScopes.Count == 0) && (requirementScopes is null || requirementScopes.Count == 0))
        {
            return Array.Empty<string>();
        }

        if (defaultScopes is null || defaultScopes.Count == 0)
        {
            return requirementScopes is string[] requirementArray
                ? requirementArray
                : requirementScopes.ToArray();
        }

        var combined = new HashSet<string>(defaultScopes, StringComparer.Ordinal);

        if (requirementScopes is not null)
        {
            foreach (var scope in requirementScopes)
            {
                if (!string.IsNullOrWhiteSpace(scope))
                {
                    combined.Add(scope);
                }
            }
        }

        return combined.Count == defaultScopes.Count && requirementScopes is null
            ? defaultScopes
            : combined.OrderBy(static scope => scope, StringComparer.Ordinal).ToArray();
    }
}
+/// +public static class AuthorityClientMetadataKeys +{ + public const string AllowedGrantTypes = "allowedGrantTypes"; public const string AllowedScopes = "allowedScopes"; public const string Audiences = "audiences"; public const string RedirectUris = "redirectUris"; @@ -14,4 +14,5 @@ public static class AuthorityClientMetadataKeys public const string Tenant = "tenant"; public const string Project = "project"; public const string ServiceIdentity = "serviceIdentity"; + public const string RequiresAirGapSealConfirmation = "requiresAirgapSealConfirmation"; } diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/AuthorityMongoDefaults.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/AuthorityMongoDefaults.cs index 311e52c2..632896c6 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/AuthorityMongoDefaults.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/AuthorityMongoDefaults.cs @@ -23,5 +23,6 @@ public static class AuthorityMongoDefaults public const string Revocations = "authority_revocations"; public const string RevocationState = "authority_revocation_state"; public const string Invites = "authority_bootstrap_invites"; + public const string AirgapAudit = "authority_airgap_audit"; } } diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityAirgapAuditDocument.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityAirgapAuditDocument.cs new file mode 100644 index 00000000..f1aeb211 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityAirgapAuditDocument.cs @@ -0,0 +1,70 @@ +using System.Collections.Generic; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Authority.Storage.Mongo.Documents; + +/// +/// Represents an audit record for an air-gapped bundle import operation. 
using System;
using System.Collections.Generic;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;

namespace StellaOps.Authority.Storage.Mongo.Documents;

/// <summary>
/// Represents an audit record for an air-gapped bundle import operation.
/// </summary>
[BsonIgnoreExtraElements]
public sealed class AuthorityAirgapAuditDocument
{
    /// <summary>Document identifier (Mongo ObjectId, stored as string).</summary>
    [BsonId]
    [BsonRepresentation(BsonType.ObjectId)]
    public string Id { get; set; } = ObjectId.GenerateNewId().ToString();

    /// <summary>Tenant the import was performed for.</summary>
    [BsonElement("tenant")]
    public string Tenant { get; set; } = string.Empty;

    /// <summary>Subject identifier of the acting principal, when available.</summary>
    [BsonElement("subjectId")]
    [BsonIgnoreIfNull]
    public string? SubjectId { get; set; }

    /// <summary>Username of the acting principal, when available.</summary>
    [BsonElement("username")]
    [BsonIgnoreIfNull]
    public string? Username { get; set; }

    /// <summary>Display name of the acting principal, when available.</summary>
    [BsonElement("displayName")]
    [BsonIgnoreIfNull]
    public string? DisplayName { get; set; }

    /// <summary>Client identifier that initiated the import, when available.</summary>
    [BsonElement("clientId")]
    [BsonIgnoreIfNull]
    public string? ClientId { get; set; }

    /// <summary>Identifier of the imported bundle.</summary>
    [BsonElement("bundleId")]
    public string BundleId { get; set; } = string.Empty;

    /// <summary>Outcome of the import operation; defaults to "unknown".</summary>
    [BsonElement("status")]
    public string Status { get; set; } = "unknown";

    /// <summary>Optional reason accompanying the status.</summary>
    [BsonElement("reason")]
    [BsonIgnoreIfNull]
    public string? Reason { get; set; }

    /// <summary>Trace identifier correlating the import with request logs, when available.</summary>
    [BsonElement("traceId")]
    [BsonIgnoreIfNull]
    public string? TraceId { get; set; }

    /// <summary>UTC timestamp of the import; defaults to document-creation time.</summary>
    [BsonElement("occurredAt")]
    public DateTimeOffset OccurredAt { get; set; } = DateTimeOffset.UtcNow;

    /// <summary>Optional additional metadata entries captured with the record.</summary>
    [BsonElement("properties")]
    [BsonIgnoreIfNull]
    public List<AuthorityAirgapAuditPropertyDocument>? Properties { get; set; }
}

/// <summary>
/// Represents an additional metadata entry captured for an air-gapped import audit record.
/// </summary>
[BsonIgnoreExtraElements]
public sealed class AuthorityAirgapAuditPropertyDocument
{
    /// <summary>Property name.</summary>
    [BsonElement("name")]
    public string Name { get; set; } = string.Empty;

    /// <summary>Property value; omitted from the document when <c>null</c>.</summary>
    [BsonElement("value")]
    [BsonIgnoreIfNull]
    public string? Value { get; set; }
}
Tenant { get; set; } [BsonElement("project")] [BsonIgnoreIfNull] diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs index afd00d45..d423dd04 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs @@ -101,28 +101,36 @@ public static class ServiceCollectionExtensions return database.GetCollection(AuthorityMongoDefaults.Collections.RevocationState); }); - services.AddSingleton(static sp => - { - var database = sp.GetRequiredService(); - return database.GetCollection(AuthorityMongoDefaults.Collections.Invites); - }); + services.AddSingleton(static sp => + { + var database = sp.GetRequiredService(); + return database.GetCollection(AuthorityMongoDefaults.Collections.Invites); + }); + + services.AddSingleton(static sp => + { + var database = sp.GetRequiredService(); + return database.GetCollection(AuthorityMongoDefaults.Collections.AirgapAudit); + }); services.TryAddSingleton(); services.TryAddSingleton(); - services.TryAddSingleton(); - services.TryAddSingleton(); - services.TryAddSingleton(); - services.TryAddSingleton(); - services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); services.TryAddSingleton(); services.TryAddSingleton(); services.TryAddSingleton(); services.TryAddSingleton(); - services.TryAddSingleton(); - services.TryAddSingleton(); - services.TryAddSingleton(); - services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); return 
/// <summary>
/// Ensures the indexes backing air-gap audit queries exist on the audit collection.
/// </summary>
internal sealed class AuthorityAirgapAuditCollectionInitializer : IAuthorityCollectionInitializer
{
    /// <summary>
    /// Creates four indexes: tenant timeline, per-tenant bundle lookup, status
    /// filtering, and a sparse trace-id lookup. All timeline indexes sort by
    /// "_id" descending because ObjectIds are time-ordered.
    /// </summary>
    public async ValueTask EnsureIndexesAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(database);

        var collection = database.GetCollection<AuthorityAirgapAuditDocument>(AuthorityMongoDefaults.Collections.AirgapAudit);
        var keys = Builders<AuthorityAirgapAuditDocument>.IndexKeys;

        var indexModels = new[]
        {
            // Newest-first listing per tenant (matches the store's default query).
            new CreateIndexModel<AuthorityAirgapAuditDocument>(
                keys.Combine(
                    keys.Ascending(audit => audit.Tenant),
                    keys.Descending("_id")),
                new CreateIndexOptions { Name = "airgap_audit_tenant_time" }),
            // Per-tenant bundle history.
            new CreateIndexModel<AuthorityAirgapAuditDocument>(
                keys.Combine(
                    keys.Ascending(audit => audit.Tenant),
                    keys.Ascending(audit => audit.BundleId),
                    keys.Descending("_id")),
                new CreateIndexOptions { Name = "airgap_audit_bundle" }),
            // Status-based filtering across tenants.
            new CreateIndexModel<AuthorityAirgapAuditDocument>(
                keys.Combine(
                    keys.Ascending(audit => audit.Status),
                    keys.Descending("_id")),
                new CreateIndexOptions { Name = "airgap_audit_status" }),
            // Sparse: most records have no trace id, so skip them in the index.
            new CreateIndexModel<AuthorityAirgapAuditDocument>(
                keys.Ascending(audit => audit.TraceId),
                new CreateIndexOptions { Name = "airgap_audit_trace", Sparse = true })
        };

        await collection.Indexes.CreateManyAsync(indexModels, cancellationToken).ConfigureAwait(false);
    }
}
/// <summary>
/// Mongo-backed implementation of <see cref="IAuthorityAirgapAuditStore"/>:
/// appends audit records and serves tenant-scoped, keyset-paginated queries.
/// </summary>
internal sealed class AuthorityAirgapAuditStore : IAuthorityAirgapAuditStore
{
    private const int DefaultLimit = 50;
    private const int MaxLimit = 200;

    private readonly IMongoCollection<AuthorityAirgapAuditDocument> collection;
    private readonly ILogger<AuthorityAirgapAuditStore> logger;

    public AuthorityAirgapAuditStore(
        IMongoCollection<AuthorityAirgapAuditDocument> collection,
        ILogger<AuthorityAirgapAuditStore> logger)
    {
        this.collection = collection ?? throw new ArgumentNullException(nameof(collection));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Persists a single audit record, optionally inside an existing Mongo
    /// session so callers can enlist the write in a transaction.
    /// </summary>
    public async ValueTask InsertAsync(
        AuthorityAirgapAuditDocument document,
        CancellationToken cancellationToken,
        IClientSessionHandle? session = null)
    {
        ArgumentNullException.ThrowIfNull(document);

        if (session is not null)
        {
            await collection.InsertOneAsync(session, document, cancellationToken: cancellationToken).ConfigureAwait(false);
        }
        else
        {
            await collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
        }

        logger.LogDebug(
            "Recorded airgap audit entry for bundle {BundleId} under tenant {Tenant}.",
            document.BundleId,
            document.Tenant);
    }

    /// <summary>
    /// Returns audit records for a tenant, newest first, with optional
    /// bundle/status/trace filters and "_id"-based keyset pagination.
    /// An empty tenant yields an empty result rather than throwing.
    /// </summary>
    public async ValueTask<AuthorityAirgapAuditQueryResult> QueryAsync(
        AuthorityAirgapAuditQuery query,
        CancellationToken cancellationToken,
        IClientSessionHandle? session = null)
    {
        ArgumentNullException.ThrowIfNull(query);

        if (string.IsNullOrWhiteSpace(query.Tenant))
        {
            return new AuthorityAirgapAuditQueryResult(Array.Empty<AuthorityAirgapAuditDocument>(), null);
        }

        var filterBuilder = Builders<AuthorityAirgapAuditDocument>.Filter;
        var filter = filterBuilder.Eq(audit => audit.Tenant, query.Tenant.Trim());

        if (!string.IsNullOrWhiteSpace(query.BundleId))
        {
            filter &= filterBuilder.Eq(audit => audit.BundleId, query.BundleId.Trim());
        }

        if (!string.IsNullOrWhiteSpace(query.Status))
        {
            // Statuses are stored lower-cased; normalise the filter value the same way.
            filter &= filterBuilder.Eq(audit => audit.Status, query.Status.Trim().ToLowerInvariant());
        }

        if (!string.IsNullOrWhiteSpace(query.TraceId))
        {
            filter &= filterBuilder.Eq(audit => audit.TraceId, query.TraceId.Trim());
        }

        // Keyset pagination: the cursor is the "_id" of the last document on the
        // previous page; unparseable cursors are ignored rather than rejected.
        if (!string.IsNullOrWhiteSpace(query.AfterId) && ObjectId.TryParse(query.AfterId, out var afterObjectId))
        {
            filter &= filterBuilder.Lt("_id", afterObjectId);
        }

        var limit = query.Limit <= 0 ? DefaultLimit : Math.Min(query.Limit, MaxLimit);
        var options = new FindOptions<AuthorityAirgapAuditDocument>
        {
            Sort = Builders<AuthorityAirgapAuditDocument>.Sort.Descending("_id"),
            Limit = limit
        };

        IAsyncCursor<AuthorityAirgapAuditDocument> cursor;
        if (session is not null)
        {
            cursor = await collection.FindAsync(session, filter, options, cancellationToken).ConfigureAwait(false);
        }
        else
        {
            cursor = await collection.FindAsync(filter, options, cancellationToken).ConfigureAwait(false);
        }

        var documents = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false);

        // A full page may mean more data exists; expose the last id as the next cursor.
        var nextCursor = documents.Count == limit ? documents[^1].Id : null;

        return new AuthorityAirgapAuditQueryResult(documents, nextCursor);
    }
}
DefaultLimit : Math.Min(query.Limit, MaxLimit); + var options = new FindOptions + { + Sort = Builders.Sort.Descending("_id"), + Limit = limit + }; + + IAsyncCursor cursor; + if (session is { }) + { + cursor = await collection.FindAsync(session, filter, options, cancellationToken).ConfigureAwait(false); + } + else + { + cursor = await collection.FindAsync(filter, options, cancellationToken).ConfigureAwait(false); + } + + var documents = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false); + var nextCursor = documents.Count == limit ? documents[^1].Id : null; + + return new AuthorityAirgapAuditQueryResult(documents, nextCursor); + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs index 11830324..61729503 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs @@ -251,4 +251,58 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore return documents; } + + public async ValueTask> ListByScopeAsync( + string scope, + string tenant, + DateTimeOffset? issuedAfter, + int limit, + CancellationToken cancellationToken, + IClientSessionHandle? session = null) + { + if (string.IsNullOrWhiteSpace(scope)) + { + throw new ArgumentException("Scope cannot be empty.", nameof(scope)); + } + + if (string.IsNullOrWhiteSpace(tenant)) + { + throw new ArgumentException("Tenant cannot be empty.", nameof(tenant)); + } + + var normalizedScope = scope.Trim(); + var normalizedTenant = tenant.Trim().ToLowerInvariant(); + var effectiveLimit = limit <= 0 ? 
50 : Math.Min(limit, 500); + + var filters = new List> + { + Builders.Filter.AnyEq(t => t.Scope, normalizedScope), + Builders.Filter.Eq(t => t.Tenant, normalizedTenant) + }; + + if (issuedAfter is DateTimeOffset issuedThreshold) + { + filters.Add(Builders.Filter.Gte(t => t.CreatedAt, issuedThreshold)); + } + + var filter = Builders.Filter.And(filters); + var options = new FindOptions + { + Sort = Builders.Sort.Descending(t => t.CreatedAt).Descending(t => t.TokenId), + Limit = effectiveLimit + }; + + IAsyncCursor cursor; + if (session is { }) + { + cursor = await collection.FindAsync(session, filter, options, cancellationToken).ConfigureAwait(false); + } + else + { + cursor = await collection.FindAsync(filter, options, cancellationToken).ConfigureAwait(false); + } + + var documents = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false); + return documents; + } } diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityAirgapAuditStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityAirgapAuditStore.cs new file mode 100644 index 00000000..83bc7b71 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityAirgapAuditStore.cs @@ -0,0 +1,51 @@ +using MongoDB.Driver; +using StellaOps.Authority.Storage.Mongo.Documents; + +namespace StellaOps.Authority.Storage.Mongo.Stores; + +/// +/// Abstraction for persisting and querying air-gapped import audit records. +/// +public interface IAuthorityAirgapAuditStore +{ + ValueTask InsertAsync( + AuthorityAirgapAuditDocument document, + CancellationToken cancellationToken, + IClientSessionHandle? session = null); + + ValueTask QueryAsync( + AuthorityAirgapAuditQuery query, + CancellationToken cancellationToken, + IClientSessionHandle? session = null); +} + +/// +/// Query options for locating air-gapped import audit records. 
/// <summary>
/// Query options for locating air-gapped import audit records.
/// All string filters are optional except <see cref="Tenant"/>, which the
/// store requires (an empty tenant produces an empty result set).
/// </summary>
public sealed record AuthorityAirgapAuditQuery
{
    /// <summary>Tenant whose audit records are queried; required.</summary>
    public string Tenant { get; init; } = string.Empty;

    /// <summary>Optional exact-match filter on the bundle identifier.</summary>
    public string? BundleId { get; init; }

    /// <summary>Optional status filter; compared lower-cased by the store.</summary>
    public string? Status { get; init; }

    /// <summary>Optional exact-match filter on the trace identifier.</summary>
    public string? TraceId { get; init; }

    /// <summary>
    /// Continuation cursor (exclusive) using the Mongo document identifier.
    /// </summary>
    public string? AfterId { get; init; }

    /// <summary>
    /// Maximum number of documents to return. Defaults to 50 and capped at 200.
    /// </summary>
    public int Limit { get; init; } = 50;
}

/// <summary>
/// Result payload for air-gapped import audit queries; <see cref="NextCursor"/>
/// is non-null when a further page may exist.
/// </summary>
public sealed record AuthorityAirgapAuditQueryResult(
    IReadOnlyList<AuthorityAirgapAuditDocument> Items,
    string? NextCursor);
/// <summary>
/// Integration tests for the advisory-AI remote-inference logging endpoint:
/// feature flag, tenant consent enforcement, profile allow-listing, and
/// hash-only prompt persistence (raw prompts must never be stored).
/// </summary>
public sealed class AdvisoryAiRemoteInferenceEndpointTests : IClassFixture<AuthorityWebApplicationFactory>
{
    private readonly AuthorityWebApplicationFactory factory;

    public AdvisoryAiRemoteInferenceEndpointTests(AuthorityWebApplicationFactory factory)
    {
        this.factory = factory;
    }

    [Fact]
    public async Task RemoteInference_ReturnsForbidden_WhenDisabled()
    {
        using var client = CreateClient(
            configureOptions: options =>
            {
                options.AdvisoryAi.RemoteInference.Enabled = false;
                options.AdvisoryAi.RemoteInference.AllowedProfiles.Clear();
                options.AdvisoryAi.RemoteInference.AllowedProfiles.Add("cloud-openai");
            });

        var response = await client.PostAsJsonAsync(
            "/advisory-ai/remote-inference/logs",
            CreatePayload(profile: "cloud-openai"));

        Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode);
        var body = await response.Content.ReadFromJsonAsync<Dictionary<string, string>>();
        Assert.NotNull(body);
        Assert.Equal("remote_inference_disabled", body!["error"]);
    }

    [Fact]
    public async Task RemoteInference_ReturnsForbidden_WhenConsentMissing()
    {
        using var client = CreateClient(
            configureOptions: options =>
            {
                SeedRemoteInferenceEnabled(options);
                SeedTenantConsent(options);
                // Revoke consent after seeding so only the consent check fails.
                options.Tenants[0].AdvisoryAi.RemoteInference.ConsentGranted = false;
                options.Tenants[0].AdvisoryAi.RemoteInference.ConsentVersion = null;
                options.Tenants[0].AdvisoryAi.RemoteInference.ConsentedAt = null;
                options.Tenants[0].AdvisoryAi.RemoteInference.ConsentedBy = null;
            });

        client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");

        var response = await client.PostAsJsonAsync(
            "/advisory-ai/remote-inference/logs",
            CreatePayload(profile: "cloud-openai"));

        Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode);
        var body = await response.Content.ReadFromJsonAsync<Dictionary<string, string>>();
        Assert.NotNull(body);
        Assert.Equal("remote_inference_consent_required", body!["error"]);
    }

    [Fact]
    public async Task RemoteInference_ReturnsBadRequest_WhenProfileNotAllowed()
    {
        using var client = CreateClient(
            configureOptions: options =>
            {
                SeedRemoteInferenceEnabled(options);
                SeedTenantConsent(options);
            });

        client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");

        var response = await client.PostAsJsonAsync(
            "/advisory-ai/remote-inference/logs",
            CreatePayload(profile: "other-profile"));

        Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
        var body = await response.Content.ReadFromJsonAsync<Dictionary<string, string>>();
        Assert.NotNull(body);
        Assert.Equal("profile_not_allowed", body!["error"]);
    }

    [Fact]
    public async Task RemoteInference_LogsPrompt_WhenConsentGranted()
    {
        using var client = CreateClient(
            configureOptions: options =>
            {
                SeedRemoteInferenceEnabled(options);
                SeedTenantConsent(options);
            });

        client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");

        var database = new MongoClient(factory.ConnectionString).GetDatabase("authority-tests");
        var collection = database.GetCollection<BsonDocument>("authority_login_attempts");
        await collection.DeleteManyAsync(FilterDefinition<BsonDocument>.Empty);

        var payload = CreatePayload(profile: "cloud-openai", prompt: "Generate remediation plan.");
        var response = await client.PostAsJsonAsync("/advisory-ai/remote-inference/logs", payload);

        Assert.Equal(HttpStatusCode.OK, response.StatusCode);
        var body = await response.Content.ReadFromJsonAsync<Dictionary<string, string>>();
        Assert.NotNull(body);
        Assert.Equal("logged", body!["status"]);

        var expectedHash = ComputeSha256(payload.Prompt);
        Assert.Equal(expectedHash, body["prompt_hash"]);

        var doc = await collection.Find(Builders<BsonDocument>.Filter.Eq("eventType", "authority.advisory_ai.remote_inference")).SingleAsync();
        Assert.Equal("authority.advisory_ai.remote_inference", doc["eventType"].AsString);

        var properties = ExtractProperties(doc);
        Assert.Equal(expectedHash, properties["advisory_ai.prompt.hash"]);
        Assert.Equal("sha256", properties["advisory_ai.prompt.algorithm"]);
        Assert.Equal(payload.Profile, properties["advisory_ai.profile"]);
        // The raw prompt must never be persisted — only its hash.
        Assert.False(properties.ContainsKey("advisory_ai.prompt.raw"));
    }

    /// <summary>
    /// Builds a client against a test host with a stub bearer scheme and the
    /// remote-inference feature enabled plus a consented default tenant;
    /// <paramref name="configureOptions"/> runs last so tests can override.
    /// </summary>
    private HttpClient CreateClient(Action<StellaOpsAuthorityOptions>? configureOptions = null)
    {
        const string schemeName = "StellaOpsBearer";

        var builder = factory.WithWebHostBuilder(hostBuilder =>
        {
            hostBuilder.ConfigureTestServices(services =>
            {
                // NOTE(review): handler type args were lost in transit; reconstructed
                // from the test-infrastructure namespace — confirm the handler name.
                services.AddAuthentication(options =>
                {
                    options.DefaultAuthenticateScheme = schemeName;
                    options.DefaultChallengeScheme = schemeName;
                })
                .AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(schemeName, _ => { });

                services.PostConfigure<StellaOpsAuthorityOptions>(opts =>
                {
                    opts.Issuer ??= new Uri("https://authority.test");
                    if (string.IsNullOrWhiteSpace(opts.Storage.ConnectionString))
                    {
                        opts.Storage.ConnectionString = factory.ConnectionString;
                    }

                    if (string.IsNullOrWhiteSpace(opts.Storage.DatabaseName))
                    {
                        opts.Storage.DatabaseName = "authority-tests";
                    }

                    opts.AdvisoryAi.RemoteInference.Enabled = true;
                    opts.AdvisoryAi.RemoteInference.RequireTenantConsent = true;
                    opts.AdvisoryAi.RemoteInference.AllowedProfiles.Clear();
                    opts.AdvisoryAi.RemoteInference.AllowedProfiles.Add("cloud-openai");

                    opts.Tenants.Clear();
                    opts.Tenants.Add(new AuthorityTenantOptions
                    {
                        Id = "tenant-default",
                        DisplayName = "Tenant Default",
                        AdvisoryAi =
                        {
                            RemoteInference =
                            {
                                ConsentGranted = true,
                                ConsentVersion = "2025-10",
                                ConsentedAt = DateTimeOffset.Parse("2025-10-31T12:34:56Z"),
                                ConsentedBy = "legal@example.com"
                            }
                        }
                    });

                    configureOptions?.Invoke(opts);
                });
            });
        });

        var client = builder.CreateClient();
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(schemeName);
        return client;
    }

    private static void SeedRemoteInferenceEnabled(StellaOpsAuthorityOptions options)
    {
        options.AdvisoryAi.RemoteInference.Enabled = true;
        options.AdvisoryAi.RemoteInference.RequireTenantConsent = true;
        options.AdvisoryAi.RemoteInference.AllowedProfiles.Clear();
        options.AdvisoryAi.RemoteInference.AllowedProfiles.Add("cloud-openai");
    }

    private static void SeedTenantConsent(StellaOpsAuthorityOptions options)
    {
        if (options.Tenants.Count == 0)
        {
            options.Tenants.Add(new AuthorityTenantOptions { Id = "tenant-default", DisplayName = "Tenant Default" });
        }

        var tenant = options.Tenants[0];
        tenant.Id = "tenant-default";
        tenant.DisplayName = "Tenant Default";
        tenant.AdvisoryAi.RemoteInference.ConsentGranted = true;
        tenant.AdvisoryAi.RemoteInference.ConsentVersion = "2025-10";
        tenant.AdvisoryAi.RemoteInference.ConsentedAt = DateTimeOffset.Parse("2025-10-31T12:34:56Z");
        tenant.AdvisoryAi.RemoteInference.ConsentedBy = "legal@example.com";
    }

    /// <summary>Lower-case hex SHA-256, matching the endpoint's prompt-hash format.</summary>
    private static string ComputeSha256(string value)
    {
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(value));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    /// <summary>Flattens the audit document's name/value "properties" array into a dictionary.</summary>
    private static Dictionary<string, string> ExtractProperties(BsonDocument document)
    {
        var result = new Dictionary<string, string>(StringComparer.Ordinal);
        if (!document.TryGetValue("properties", out var propertiesValue))
        {
            return result;
        }

        foreach (var item in propertiesValue.AsBsonArray)
        {
            if (item is not BsonDocument property)
            {
                continue;
            }

            var name = property.TryGetValue("name", out var nameValue) ? nameValue.AsString : null;
            var value = property.TryGetValue("value", out var valueNode) ? valueNode.AsString : null;

            if (!string.IsNullOrWhiteSpace(name))
            {
                result[name] = value ?? string.Empty;
            }
        }

        return result;
    }

    private static RemoteInferencePayload CreatePayload(string profile, string prompt = "Summarize remediations.")
    {
        return new RemoteInferencePayload(
            TaskType: "summary",
            Profile: profile,
            ModelId: "gpt-4o-mini",
            Prompt: prompt,
            ContextDigest: "sha256:context",
            OutputHash: "sha256:output",
            TaskId: "task-123",
            Metadata: new Dictionary<string, string>
            {
                ["channel"] = "cli",
                ["env"] = "test"
            });
    }

    private sealed record RemoteInferencePayload(
        [property: JsonPropertyName("taskType")] string TaskType,
        [property: JsonPropertyName("profile")] string Profile,
        [property: JsonPropertyName("modelId")] string ModelId,
        [property: JsonPropertyName("prompt")] string Prompt,
        [property: JsonPropertyName("contextDigest")] string ContextDigest,
        [property: JsonPropertyName("outputHash")] string OutputHash,
        [property: JsonPropertyName("taskId")] string TaskId,
        [property: JsonPropertyName("metadata")] IDictionary<string, string> Metadata);
}
string.Empty; + } + } + + return result; + } + + private static RemoteInferencePayload CreatePayload(string profile, string prompt = "Summarize remedations.") + { + return new RemoteInferencePayload( + TaskType: "summary", + Profile: profile, + ModelId: "gpt-4o-mini", + Prompt: prompt, + ContextDigest: "sha256:context", + OutputHash: "sha256:output", + TaskId: "task-123", + Metadata: new Dictionary + { + ["channel"] = "cli", + ["env"] = "test" + }); + } + + private sealed record RemoteInferencePayload( + [property: JsonPropertyName("taskType")] string TaskType, + [property: JsonPropertyName("profile")] string Profile, + [property: JsonPropertyName("modelId")] string ModelId, + [property: JsonPropertyName("prompt")] string Prompt, + [property: JsonPropertyName("contextDigest")] string ContextDigest, + [property: JsonPropertyName("outputHash")] string OutputHash, + [property: JsonPropertyName("taskId")] string TaskId, + [property: JsonPropertyName("metadata")] IDictionary Metadata); +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/AdvisoryAi/AuthorityAdvisoryAiConsentEvaluatorTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/AdvisoryAi/AuthorityAdvisoryAiConsentEvaluatorTests.cs new file mode 100644 index 00000000..b3305649 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/AdvisoryAi/AuthorityAdvisoryAiConsentEvaluatorTests.cs @@ -0,0 +1,143 @@ +using System; +using Microsoft.Extensions.Options; +using StellaOps.Authority.AdvisoryAi; +using StellaOps.Configuration; +using Xunit; + +namespace StellaOps.Authority.Tests.AdvisoryAi; + +public sealed class AuthorityAdvisoryAiConsentEvaluatorTests +{ + [Fact] + public void EvaluateTenant_ReturnsDisabled_WhenRemoteInferenceDisabled() + { + var options = CreateOptions(); + options.AdvisoryAi.RemoteInference.Enabled = false; + + var evaluator = CreateEvaluator(options); + var result = evaluator.EvaluateTenant("tenant-default"); + + 
Assert.False(result.Allowed); + Assert.Equal("remote_inference_disabled", result.ErrorCode); + } + + [Fact] + public void EvaluateTenant_RequiresConsent_WhenConfigured() + { + var options = CreateOptions(); + options.AdvisoryAi.RemoteInference.Enabled = true; + options.AdvisoryAi.RemoteInference.RequireTenantConsent = true; + options.AdvisoryAi.RemoteInference.AllowedProfiles.Add("cloud-openai"); + options.Tenants.Add(new AuthorityTenantOptions + { + Id = "tenant-default", + DisplayName = "Tenant Default", + AdvisoryAi = + { + RemoteInference = + { + ConsentGranted = false + } + } + }); + + var evaluator = CreateEvaluator(options); + var result = evaluator.EvaluateTenant("tenant-default"); + + Assert.False(result.Allowed); + Assert.Equal("remote_inference_consent_required", result.ErrorCode); + } + + [Fact] + public void EvaluateTenant_Allows_WhenConsentGranted() + { + var options = CreateOptions(); + options.AdvisoryAi.RemoteInference.Enabled = true; + options.AdvisoryAi.RemoteInference.RequireTenantConsent = true; + options.AdvisoryAi.RemoteInference.AllowedProfiles.Add("cloud-openai"); + options.Tenants.Add(new AuthorityTenantOptions + { + Id = "tenant-default", + DisplayName = "Tenant Default", + AdvisoryAi = + { + RemoteInference = + { + ConsentGranted = true, + ConsentVersion = "2025-10", + ConsentedAt = new DateTimeOffset(2025, 10, 31, 12, 34, 56, TimeSpan.Zero), + ConsentedBy = "legal@example.com" + } + } + }); + + var evaluator = CreateEvaluator(options); + var result = evaluator.EvaluateTenant("tenant-default"); + + Assert.True(result.Allowed); + Assert.Equal("2025-10", result.ConsentVersion); + Assert.Equal(new DateTimeOffset(2025, 10, 31, 12, 34, 56, TimeSpan.Zero), result.ConsentedAt); + Assert.Equal("legal@example.com", result.ConsentedBy); + } + + [Fact] + public void TryNormalizeProfile_ComparesCaseInsensitively() + { + var options = CreateOptions(); + options.AdvisoryAi.RemoteInference.Enabled = true; + 
options.AdvisoryAi.RemoteInference.AllowedProfiles.Add("cloud-openai"); + + var evaluator = CreateEvaluator(options); + var snapshot = evaluator.GetSnapshot(); + Assert.True(snapshot.Enabled); + + var normalized = evaluator.TryNormalizeProfile("CLOUD-OPENAI", out var profile); + Assert.True(normalized); + Assert.Equal("cloud-openai", profile); + } + + private static AuthorityAdvisoryAiConsentEvaluator CreateEvaluator(StellaOpsAuthorityOptions options) + { + var monitor = new TestOptionsMonitor(options); + return new AuthorityAdvisoryAiConsentEvaluator(monitor); + } + + private static StellaOpsAuthorityOptions CreateOptions() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + + options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + + return options; + } + + private sealed class TestOptionsMonitor : IOptionsMonitor + { + private StellaOpsAuthorityOptions current; + + public TestOptionsMonitor(StellaOpsAuthorityOptions value) + { + current = value ?? throw new ArgumentNullException(nameof(value)); + } + + public StellaOpsAuthorityOptions CurrentValue => current; + + public StellaOpsAuthorityOptions Get(string? 
name) => current; + + public IDisposable OnChange(Action listener) => NullDisposable.Instance; + + private sealed class NullDisposable : IDisposable + { + public static readonly NullDisposable Instance = new(); + + public void Dispose() + { + } + } + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Airgap/AirgapAuditEndpointsTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Airgap/AirgapAuditEndpointsTests.cs new file mode 100644 index 00000000..19645ac2 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Airgap/AirgapAuditEndpointsTests.cs @@ -0,0 +1,256 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Net.Http.Json; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Authentication; +using Microsoft.AspNetCore.TestHost; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Time.Testing; +using MongoDB.Driver; +using StellaOps.Auth.Abstractions; +using StellaOps.Authority.Airgap; +using StellaOps.Authority.Storage.Mongo; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Tests.Infrastructure; +using Xunit; + +namespace StellaOps.Authority.Tests.Airgap; + +public sealed class AirgapAuditEndpointsTests : IClassFixture +{ + private const string TenantId = "tenant-default"; + private readonly AuthorityWebApplicationFactory factory; + + public AirgapAuditEndpointsTests(AuthorityWebApplicationFactory factory) + { + this.factory = factory; + } + + [Fact] + public async Task PostAudit_ReturnsCreatedAndPersists() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-01T12:30:00Z")); + using var client = CreateClient(timeProvider, scopes: $"{StellaOpsScopes.AirgapImport} {StellaOpsScopes.AirgapStatusRead}"); + var collection = GetAuditCollection(); 
+ await collection.DeleteManyAsync(FilterDefinition.Empty); + + var request = new AirgapAuditRecordRequestDto + { + BundleId = "mirror-bundle-001", + Status = "COMPLETED", + Reason = "Initial air-gap import", + Metadata = new Dictionary + { + ["digest"] = "sha256:abc123", + ["operator"] = "ops-user" + } + }; + + var response = await client.PostAsJsonAsync("/authority/audit/airgap", request); + Assert.Equal(HttpStatusCode.Created, response.StatusCode); + + var body = await response.Content.ReadFromJsonAsync(); + Assert.NotNull(body); + Assert.Equal("mirror-bundle-001", body!.BundleId); + Assert.Equal("completed", body.Status); + Assert.Equal(TenantId, body.Tenant); + Assert.Equal("test-client", body.ClientId); + Assert.Equal(timeProvider.GetUtcNow(), body.OccurredAt); + Assert.Equal("sha256:abc123", body.Metadata["digest"]); + + var stored = await collection.Find(Builders.Filter.Eq(d => d.BundleId, "mirror-bundle-001")).SingleAsync(); + Assert.Equal("completed", stored.Status); + Assert.Equal(TenantId, stored.Tenant); + Assert.Equal("test-client", stored.ClientId); + Assert.Contains(stored.Properties!, property => property.Name == "digest" && property.Value == "sha256:abc123"); + + var listResponse = await client.GetFromJsonAsync("/authority/audit/airgap"); + Assert.NotNull(listResponse); + var item = Assert.Single(listResponse!.Items); + Assert.Equal(body.Id, item.Id); + Assert.Equal("mirror-bundle-001", item.BundleId); + } + + [Fact] + public async Task GetAudit_RequiresStatusScope() + { + using var client = CreateClient(new FakeTimeProvider(DateTimeOffset.Parse("2025-11-01T12:30:00Z")), scopes: StellaOpsScopes.AirgapImport); + + var response = await client.GetAsync("/authority/audit/airgap"); + Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode); + } + + [Fact] + public async Task PostAudit_RequiresImportScope() + { + using var client = CreateClient(new FakeTimeProvider(DateTimeOffset.Parse("2025-11-01T12:30:00Z")), scopes: 
StellaOpsScopes.AirgapStatusRead); + + var request = new AirgapAuditRecordRequestDto + { + BundleId = "mirror-bundle-002", + Status = "completed" + }; + + var response = await client.PostAsJsonAsync("/authority/audit/airgap", request); + Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode); + } + + [Fact] + public async Task GetAudit_ReturnsBadRequest_WhenTenantHeaderMissing() + { + using var client = CreateClient(new FakeTimeProvider(DateTimeOffset.Parse("2025-11-01T12:30:00Z")), + scopes: StellaOpsScopes.AirgapStatusRead, + includeAuthorityTenantHeader: false); + + var response = await client.GetAsync("/authority/audit/airgap"); + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + } + + [Fact] + public async Task GetAudit_PaginatesAndFilters() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-01T12:30:00Z")); + using var client = CreateClient(timeProvider, scopes: $"{StellaOpsScopes.AirgapImport} {StellaOpsScopes.AirgapStatusRead}"); + var collection = GetAuditCollection(); + await collection.DeleteManyAsync(FilterDefinition.Empty); + + await PostAsync(client, "bundle-A", "completed", timeProvider); + timeProvider.Advance(TimeSpan.FromMinutes(1)); + await PostAsync(client, "bundle-B", "completed", timeProvider); + timeProvider.Advance(TimeSpan.FromMinutes(1)); + await PostAsync(client, "bundle-C", "failed", timeProvider, reason: "validation error"); + + var firstPage = await client.GetFromJsonAsync("/authority/audit/airgap?pageSize=2"); + Assert.NotNull(firstPage); + Assert.Equal(2, firstPage!.Items.Count); + Assert.Equal("bundle-C", firstPage.Items[0].BundleId); + Assert.Equal("bundle-B", firstPage.Items[1].BundleId); + Assert.False(string.IsNullOrWhiteSpace(firstPage.NextCursor)); + + var cursor = firstPage.NextCursor; + var secondPage = await client.GetFromJsonAsync($"/authority/audit/airgap?pageSize=2&cursor={cursor}"); + Assert.NotNull(secondPage); + Assert.Single(secondPage!.Items); + Assert.Equal("bundle-A", 
secondPage.Items[0].BundleId); + Assert.Null(secondPage.NextCursor); + + var filtered = await client.GetFromJsonAsync("/authority/audit/airgap?bundleId=bundle-B"); + Assert.NotNull(filtered); + var entry = Assert.Single(filtered!.Items); + Assert.Equal("bundle-B", entry.BundleId); + Assert.Equal("completed", entry.Status); + } + + private static async Task PostAsync(HttpClient client, string bundleId, string status, FakeTimeProvider timeProvider, string? reason = null) + { + var request = new AirgapAuditRecordRequestDto + { + BundleId = bundleId, + Status = status, + Reason = reason, + Metadata = new Dictionary { ["sequence"] = timeProvider.GetUtcNow().ToUnixTimeSeconds().ToString() } + }; + + var response = await client.PostAsJsonAsync("/authority/audit/airgap", request); + Assert.Equal(HttpStatusCode.Created, response.StatusCode); + } + + private HttpClient CreateClient( + FakeTimeProvider timeProvider, + string scopes, + bool includeAuthorityTenantHeader = true, + bool includeTestTenantHeader = true) + { + const string schemeName = "StellaOpsBearer"; + + var builder = factory.WithWebHostBuilder(hostBuilder => + { + hostBuilder.ConfigureTestServices(services => + { + services.Replace(ServiceDescriptor.Singleton(timeProvider)); + services.AddAuthentication(options => + { + options.DefaultAuthenticateScheme = schemeName; + options.DefaultChallengeScheme = schemeName; + }) + .AddScheme(schemeName, _ => { }); + }); + }); + + var client = builder.CreateClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(schemeName); + if (!string.IsNullOrWhiteSpace(scopes)) + { + client.DefaultRequestHeaders.Add("X-Test-Scopes", scopes); + } + + if (includeTestTenantHeader) + { + client.DefaultRequestHeaders.Add("X-Test-Tenant", TenantId); + } + + if (includeAuthorityTenantHeader) + { + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, TenantId); + } + + return client; + } + + private IMongoCollection GetAuditCollection() + { + var 
database = new MongoClient(factory.ConnectionString).GetDatabase("authority-tests"); + return database.GetCollection(AuthorityMongoDefaults.Collections.AirgapAudit); + } + + private sealed record AirgapAuditRecordRequestDto + { + public string BundleId { get; init; } = string.Empty; + public string Status { get; init; } = string.Empty; + public string? Reason { get; init; } + public Dictionary? Metadata { get; init; } + } + + private sealed record AirgapAuditRecordResponseDto + { + public string Id { get; init; } = string.Empty; + public string Tenant { get; init; } = string.Empty; + public string BundleId { get; init; } = string.Empty; + public string Status { get; init; } = string.Empty; + public string? Reason { get; init; } + public string? TraceId { get; init; } + public string? SubjectId { get; init; } + public string? Username { get; init; } + public string? DisplayName { get; init; } + public string? ClientId { get; init; } + public DateTimeOffset OccurredAt { get; init; } + public Dictionary Metadata { get; init; } = new(StringComparer.Ordinal); + } + + private sealed record AirgapAuditListResponseDto + { + public List Items { get; init; } = new(); + public string? NextCursor { get; init; } + } + + private sealed record AirgapAuditListItemDto + { + public string Id { get; init; } = string.Empty; + public string Tenant { get; init; } = string.Empty; + public string BundleId { get; init; } = string.Empty; + public string Status { get; init; } = string.Empty; + public string? Reason { get; init; } + public string? TraceId { get; init; } + public string? SubjectId { get; init; } + public string? Username { get; init; } + public string? DisplayName { get; init; } + public string? 
ClientId { get; init; } + public DateTimeOffset OccurredAt { get; init; } + public Dictionary Metadata { get; init; } = new(StringComparer.Ordinal); + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs index a3edbc31..0d498dbf 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs @@ -3,6 +3,7 @@ using System.Net.Http.Headers; using System.Security.Claims; using System.Text.Encodings.Web; using System.Text.Json; +using System.Linq; using Microsoft.AspNetCore.Authentication; using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Hosting; @@ -51,10 +52,14 @@ public sealed class ConsoleEndpointsTests Assert.Equal(1, tenants.GetArrayLength()); Assert.Equal("tenant-default", tenants[0].GetProperty("id").GetString()); - var audit = Assert.Single(sink.Events); - Assert.Equal("authority.console.tenants.read", audit.EventType); - Assert.Equal(AuthEventOutcome.Success, audit.Outcome); - Assert.Contains("tenant.resolved", audit.Properties.Select(property => property.Name)); + var events = sink.Events; + var authorizeEvent = Assert.Single(events.Where(evt => evt.EventType == "authority.resource.authorize")); + Assert.Equal(AuthEventOutcome.Success, authorizeEvent.Outcome); + + var consoleEvent = Assert.Single(events.Where(evt => evt.EventType == "authority.console.tenants.read")); + Assert.Equal(AuthEventOutcome.Success, consoleEvent.Outcome); + Assert.Contains("tenant.resolved", consoleEvent.Properties.Select(property => property.Name)); + Assert.Equal(2, events.Count); } [Fact] @@ -75,7 +80,10 @@ public sealed class ConsoleEndpointsTests var response = await client.GetAsync("/console/tenants"); Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); - 
Assert.Empty(sink.Events); + var authEvent = Assert.Single(sink.Events); + Assert.Equal("authority.resource.authorize", authEvent.EventType); + Assert.Equal(AuthEventOutcome.Success, authEvent.Outcome); + Assert.DoesNotContain(sink.Events, evt => evt.EventType.StartsWith("authority.console.", System.StringComparison.Ordinal)); } [Fact] @@ -97,7 +105,11 @@ public sealed class ConsoleEndpointsTests var response = await client.GetAsync("/console/tenants"); Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode); - Assert.Empty(sink.Events); + var authEvent = Assert.Single(sink.Events); + Assert.Equal("authority.resource.authorize", authEvent.EventType); + Assert.Equal(AuthEventOutcome.Success, authEvent.Outcome); + Assert.Null(authEvent.Reason); + Assert.DoesNotContain(sink.Events, evt => evt.EventType.StartsWith("authority.console.", System.StringComparison.Ordinal)); } [Fact] @@ -133,9 +145,13 @@ public sealed class ConsoleEndpointsTests Assert.Equal("console-user", json.RootElement.GetProperty("username").GetString()); Assert.Equal("tenant-default", json.RootElement.GetProperty("tenant").GetString()); - var audit = Assert.Single(sink.Events); - Assert.Equal("authority.console.profile.read", audit.EventType); - Assert.Equal(AuthEventOutcome.Success, audit.Outcome); + var events = sink.Events; + var authorizeEvent = Assert.Single(events.Where(evt => evt.EventType == "authority.resource.authorize")); + Assert.Equal(AuthEventOutcome.Success, authorizeEvent.Outcome); + + var consoleEvent = Assert.Single(events.Where(evt => evt.EventType == "authority.console.profile.read")); + Assert.Equal(AuthEventOutcome.Success, consoleEvent.Outcome); + Assert.Equal(2, events.Count); } [Fact] @@ -167,9 +183,13 @@ public sealed class ConsoleEndpointsTests Assert.False(json.RootElement.GetProperty("active").GetBoolean()); Assert.Equal("token-abc", json.RootElement.GetProperty("tokenId").GetString()); - var audit = Assert.Single(sink.Events); - 
Assert.Equal("authority.console.token.introspect", audit.EventType); - Assert.Equal(AuthEventOutcome.Success, audit.Outcome); + var events = sink.Events; + var authorizeEvent = Assert.Single(events.Where(evt => evt.EventType == "authority.resource.authorize")); + Assert.Equal(AuthEventOutcome.Success, authorizeEvent.Outcome); + + var consoleEvent = Assert.Single(events.Where(evt => evt.EventType == "authority.console.token.introspect")); + Assert.Equal(AuthEventOutcome.Success, consoleEvent.Outcome); + Assert.Equal(2, events.Count); } private static ClaimsPrincipal CreatePrincipal( @@ -336,4 +356,4 @@ internal static class HostTestClientExtensions internal static class TestAuthenticationDefaults { public const string AuthenticationScheme = "AuthorityConsoleTests"; -} +} \ No newline at end of file diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/AuthorityWebApplicationFactory.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/AuthorityWebApplicationFactory.cs index 6a12c92d..8b1906b9 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/AuthorityWebApplicationFactory.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/AuthorityWebApplicationFactory.cs @@ -1,10 +1,11 @@ using System; using System.Collections.Generic; +using System.IO; using System.Threading.Tasks; -using Microsoft.AspNetCore.Mvc.Testing; -using Microsoft.Extensions.Hosting; using Microsoft.AspNetCore.Hosting; +using Microsoft.AspNetCore.Mvc.Testing; using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Hosting; using Mongo2Go; using Xunit; @@ -13,10 +14,35 @@ namespace StellaOps.Authority.Tests.Infrastructure; public sealed class AuthorityWebApplicationFactory : WebApplicationFactory, IAsyncLifetime { private readonly MongoDbRunner mongoRunner; + private readonly string tempContentRoot; + + private const string IssuerKey = 
"STELLAOPS_AUTHORITY_AUTHORITY__ISSUER"; + private const string SchemaVersionKey = "STELLAOPS_AUTHORITY_AUTHORITY__SCHEMAVERSION"; + private const string StorageConnectionKey = "STELLAOPS_AUTHORITY_AUTHORITY__STORAGE__CONNECTIONSTRING"; + private const string StorageDatabaseKey = "STELLAOPS_AUTHORITY_AUTHORITY__STORAGE__DATABASENAME"; + private const string SigningEnabledKey = "STELLAOPS_AUTHORITY_AUTHORITY__SIGNING__ENABLED"; + private const string AckTokensEnabledKey = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__ACKTOKENS__ENABLED"; + private const string WebhooksEnabledKey = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__WEBHOOKS__ENABLED"; public AuthorityWebApplicationFactory() { mongoRunner = MongoDbRunner.Start(singleNodeReplSet: true); + tempContentRoot = System.IO.Path.Combine(System.IO.Path.GetTempPath(), "stellaops-authority-tests", Guid.NewGuid().ToString("N")); + System.IO.Directory.CreateDirectory(tempContentRoot); + + var repositoryRoot = LocateRepositoryRoot(); + var openApiSource = Path.Combine(repositoryRoot, "src", "Api", "StellaOps.Api.OpenApi", "authority", "openapi.yaml"); + var openApiDestination = Path.Combine(tempContentRoot, "OpenApi", "authority.yaml"); + Directory.CreateDirectory(Path.GetDirectoryName(openApiDestination)!); + File.Copy(openApiSource, openApiDestination, overwrite: true); + + Environment.SetEnvironmentVariable(IssuerKey, "https://authority.test"); + Environment.SetEnvironmentVariable(SchemaVersionKey, "1"); + Environment.SetEnvironmentVariable(StorageConnectionKey, mongoRunner.ConnectionString); + Environment.SetEnvironmentVariable(StorageDatabaseKey, "authority-tests"); + Environment.SetEnvironmentVariable(SigningEnabledKey, "false"); + Environment.SetEnvironmentVariable(AckTokensEnabledKey, "false"); + Environment.SetEnvironmentVariable(WebhooksEnabledKey, "false"); } public string ConnectionString => mongoRunner.ConnectionString; @@ -24,25 +50,85 @@ public sealed class AuthorityWebApplicationFactory : 
WebApplicationFactory { - var settings = new Dictionary + configuration.AddInMemoryCollection(new Dictionary { ["Authority:Issuer"] = "https://authority.test", + ["Authority:SchemaVersion"] = "1", ["Authority:Storage:ConnectionString"] = mongoRunner.ConnectionString, ["Authority:Storage:DatabaseName"] = "authority-tests", - ["Authority:Signing:Enabled"] = "false" - }; - - configuration.AddInMemoryCollection(settings); + ["Authority:Signing:Enabled"] = "false", + ["Authority:Notifications:AckTokens:Enabled"] = "false", + ["Authority:Notifications:Webhooks:Enabled"] = "false" + }); }); + + } + + protected override IHost CreateHost(IHostBuilder builder) + { + builder.ConfigureHostConfiguration(configuration => + { + configuration.AddInMemoryCollection(new Dictionary + { + ["Authority:Issuer"] = "https://authority.test", + ["Authority:SchemaVersion"] = "1", + ["Authority:Storage:ConnectionString"] = mongoRunner.ConnectionString, + ["Authority:Storage:DatabaseName"] = "authority-tests", + ["Authority:Signing:Enabled"] = "false", + ["Authority:Notifications:AckTokens:Enabled"] = "false", + ["Authority:Notifications:Webhooks:Enabled"] = "false" + }); + }); + + return base.CreateHost(builder); } public Task InitializeAsync() => Task.CompletedTask; + private static string LocateRepositoryRoot() + { + var directory = new DirectoryInfo(AppContext.BaseDirectory); + while (directory is not null) + { + var candidate = directory.FullName; + if (File.Exists(Path.Combine(candidate, "README.md")) && Directory.Exists(Path.Combine(candidate, "src"))) + { + return candidate; + } + + directory = directory.Parent; + } + + throw new InvalidOperationException("Failed to locate repository root for Authority tests."); + } + public Task DisposeAsync() { mongoRunner.Dispose(); + + Environment.SetEnvironmentVariable(IssuerKey, null); + Environment.SetEnvironmentVariable(SchemaVersionKey, null); + Environment.SetEnvironmentVariable(StorageConnectionKey, null); + 
Environment.SetEnvironmentVariable(StorageDatabaseKey, null); + Environment.SetEnvironmentVariable(SigningEnabledKey, null); + Environment.SetEnvironmentVariable(AckTokensEnabledKey, null); + Environment.SetEnvironmentVariable(WebhooksEnabledKey, null); + + try + { + if (System.IO.Directory.Exists(tempContentRoot)) + { + System.IO.Directory.Delete(tempContentRoot, recursive: true); + } + } + catch + { + // ignore cleanup failures + } + return Task.CompletedTask; } } diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/TestAuthHandler.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/TestAuthHandler.cs new file mode 100644 index 00000000..93b691e8 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/TestAuthHandler.cs @@ -0,0 +1,58 @@ +using System; +using System.Collections.Generic; +using System.Security.Claims; +using System.Text.Encodings.Web; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Authentication; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Authority.Tests.Infrastructure; + +internal sealed class TestAuthHandler : AuthenticationHandler +{ + public const string SchemeName = "TestAuth"; + + public TestAuthHandler( + IOptionsMonitor options, + ILoggerFactory logger, + UrlEncoder encoder, + ISystemClock clock) + : base(options, logger, encoder, clock) + { + } + + protected override Task HandleAuthenticateAsync() + { + var tenantHeader = Request.Headers.TryGetValue("X-Test-Tenant", out var tenantValues) + ? tenantValues.ToString() + : "tenant-default"; + + var scopesHeader = Request.Headers.TryGetValue("X-Test-Scopes", out var scopeValues) + ? 
scopeValues.ToString() + : StellaOpsScopes.AdvisoryAiOperate; + + var claims = new List + { + new Claim(StellaOpsClaimTypes.ClientId, "test-client") + }; + + if (!string.IsNullOrWhiteSpace(tenantHeader) && + !string.Equals(tenantHeader, "none", StringComparison.OrdinalIgnoreCase)) + { + claims.Add(new Claim(StellaOpsClaimTypes.Tenant, tenantHeader.Trim())); + } + + var scopes = scopesHeader.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + foreach (var scope in scopes) + { + claims.Add(new Claim(StellaOpsClaimTypes.ScopeItem, scope)); + } + + var identity = new ClaimsIdentity(claims, Scheme.Name); + var principal = new ClaimsPrincipal(identity); + var ticket = new AuthenticationTicket(principal, Scheme.Name); + return Task.FromResult(AuthenticateResult.Success(ticket)); + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/AuthorityAckTokenIssuerTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/AuthorityAckTokenIssuerTests.cs new file mode 100644 index 00000000..9d9e8c59 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/AuthorityAckTokenIssuerTests.cs @@ -0,0 +1,195 @@ +using System; +using System.IO; +using System.Linq; +using System.Security.Cryptography; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.FileProviders; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Authority.Notifications; +using StellaOps.Authority.Notifications.Ack; +using StellaOps.Authority.Signing; +using StellaOps.Configuration; +using StellaOps.Cryptography; +using StellaOps.Cryptography.DependencyInjection; +using Xunit; + +namespace StellaOps.Authority.Tests.Notifications; + +public sealed class AuthorityAckTokenIssuerTests +{ + [Fact] + 
public async Task IssueAsync_ProducesDeterministicEnvelope() + { + var tempDir = Directory.CreateTempSubdirectory("ack-token-issuer").FullName; + var keyRelative = "ack.pem"; + + try + { + CreateEcPrivateKey(Path.Combine(tempDir, keyRelative)); + var options = BuildOptions(tempDir, keyRelative); + + using var provider = BuildProvider(tempDir, options); + var issuer = provider.GetRequiredService(); + var verifier = provider.GetRequiredService(); + + var request = new AckTokenIssueRequest + { + Tenant = "tenant-dev", + NotificationId = "notif-123", + DeliveryId = "delivery-456", + Channel = "slack", + WebhookUrl = "https://hooks.slack.com/services/T000/B000/XXXX", + Actions = new[] { "ack", "resolve" }, + Metadata = new() { ["priority"] = "high" } + }; + + var result = await issuer.IssueAsync(request, requesterHasEscalateScope: false, cancellationToken: default); + Assert.Equal("application/vnd.stellaops.notify-ack-token+json", result.Envelope.PayloadType); + Assert.NotNull(result.Envelope.Payload); + Assert.Single(result.Envelope.Signatures); + + var verification = await verifier.VerifyAsync(result.Envelope, "ack", request.Tenant, default); + Assert.Equal(request.Tenant, verification.Payload.Tenant); + Assert.Equal(request.NotificationId, verification.Payload.NotificationId); + Assert.Contains("ack", verification.Payload.Actions); + Assert.Contains("resolve", verification.Payload.Actions); + Assert.False(verification.Payload.EscalationAllowed); + Assert.True(verification.SignatureValid); + } + finally + { + try + { + Directory.Delete(tempDir, true); + } + catch + { + // ignore + } + } + } + + [Fact] + public async Task VerifyAsync_Throws_WhenActionNotPermitted() + { + var tempDir = Directory.CreateTempSubdirectory("ack-token-verify").FullName; + var keyRelative = "ack.pem"; + + try + { + CreateEcPrivateKey(Path.Combine(tempDir, keyRelative)); + var options = BuildOptions(tempDir, keyRelative); + + using var provider = BuildProvider(tempDir, options); + var issuer = 
provider.GetRequiredService(); + var verifier = provider.GetRequiredService(); + + var request = new AckTokenIssueRequest + { + Tenant = "tenant-dev", + NotificationId = "notif-789", + DeliveryId = "delivery-abc", + Channel = "email", + WebhookUrl = "https://hooks.slack.com/services/T000/B000/YYYY", + Actions = new[] { "ack" } + }; + + var result = await issuer.IssueAsync(request, requesterHasEscalateScope: false, cancellationToken: default); + await Assert.ThrowsAsync(() => verifier.VerifyAsync(result.Envelope, "escalate", request.Tenant, default)); + } + finally + { + try + { + Directory.Delete(tempDir, true); + } + catch + { + // ignore + } + } + } + + private static StellaOpsAuthorityOptions BuildOptions(string basePath, string activeKey) + { + return new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test"), + Storage = { ConnectionString = "mongodb://localhost/test" }, + Notifications = + { + AckTokens = + { + Enabled = true, + ActiveKeyId = "ack-key", + KeyPath = activeKey, + Algorithm = SignatureAlgorithms.Es256, + KeySource = "file", + KeyUse = "notify-ack", + DefaultLifetime = TimeSpan.FromMinutes(10), + MaxLifetime = TimeSpan.FromMinutes(30) + }, + Webhooks = + { + Enabled = true, + AllowedHosts = { "hooks.slack.com" } + }, + Escalation = + { + Scope = "notify.escalate", + RequireAdminScope = true + } + } + }; + } + + private static ServiceProvider BuildProvider(string basePath, StellaOpsAuthorityOptions options) + { + var services = new ServiceCollection(); + services.AddLogging(builder => builder.SetMinimumLevel(LogLevel.Debug)); + services.AddSingleton(new TestHostEnvironment(basePath)); + services.AddSingleton(options); + services.AddSingleton>(Options.Create(options)); + services.AddSingleton(TimeProvider.System); + services.AddMemoryCache(); + services.AddStellaOpsCrypto(); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + 
services.AddSingleton(); + services.AddSingleton(); + + return services.BuildServiceProvider(); + } + + private static void CreateEcPrivateKey(string path) + { + Directory.CreateDirectory(Path.GetDirectoryName(path)!); + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + File.WriteAllText(path, ecdsa.ExportECPrivateKeyPem()); + } + + private sealed class TestHostEnvironment : IHostEnvironment + { + public TestHostEnvironment(string contentRoot) + { + ContentRootPath = contentRoot; + ContentRootFileProvider = new PhysicalFileProvider(contentRoot); + EnvironmentName = Environments.Development; + ApplicationName = "StellaOps.Authority.Tests"; + } + + public string EnvironmentName { get; set; } + + public string ApplicationName { get; set; } + + public string ContentRootPath { get; set; } + + public IFileProvider ContentRootFileProvider { get; set; } + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/AuthorityAckTokenKeyManagerTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/AuthorityAckTokenKeyManagerTests.cs new file mode 100644 index 00000000..c51789e0 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/AuthorityAckTokenKeyManagerTests.cs @@ -0,0 +1,149 @@ +using System; +using System.IO; +using System.Linq; +using System.Security.Cryptography; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.FileProviders; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Authority.Notifications.Ack; +using StellaOps.Authority.Signing; +using StellaOps.Configuration; +using StellaOps.Cryptography; +using StellaOps.Cryptography.DependencyInjection; +using Xunit; + +namespace StellaOps.Authority.Tests.Notifications; + +public sealed class AuthorityAckTokenKeyManagerTests +{ + [Fact] + 
public void Rotate_ReplacesActiveKeyAndRetiresPrevious() + { + var tempDir = Directory.CreateTempSubdirectory("ack-key-tests").FullName; + var key1Relative = "ack-key-1.pem"; + var key2Relative = "ack-key-2.pem"; + + try + { + CreateEcPrivateKey(Path.Combine(tempDir, key1Relative)); + + var options = BuildOptions(tempDir, key1Relative); + using var provider = BuildProvider(tempDir, options); + var manager = provider.GetRequiredService(); + var jwksService = provider.GetRequiredService(); + + var initialKeys = jwksService.Get(); + var initialKey = Assert.Single(initialKeys.Response.Keys); + Assert.Equal("notify-ack", initialKey.Use); + Assert.Equal("ack-key-1", initialKey.Kid); + Assert.Equal("active", initialKey.Status); + + CreateEcPrivateKey(Path.Combine(tempDir, key2Relative)); + var rotation = manager.Rotate(new SigningRotationRequest + { + KeyId = "ack-key-2", + Location = key2Relative + }); + + Assert.Equal("ack-key-2", rotation.ActiveKeyId); + Assert.Equal("ack-key-1", rotation.PreviousKeyId); + Assert.Contains("ack-key-1", rotation.RetiredKeyIds); + + Assert.Equal("ack-key-2", options.Notifications.AckTokens.ActiveKeyId); + var retiredEntry = Assert.Single(options.Notifications.AckTokens.AdditionalKeys); + Assert.Equal("ack-key-1", retiredEntry.KeyId); + Assert.Equal(key1Relative, retiredEntry.Path); + + var postRotation = jwksService.Get(); + Assert.Equal(2, postRotation.Response.Keys.Count); + Assert.Contains(postRotation.Response.Keys, key => key.Kid == "ack-key-2" && key.Status == "active"); + Assert.Contains(postRotation.Response.Keys, key => key.Kid == "ack-key-1" && key.Status == "retired"); + } + finally + { + try + { + Directory.Delete(tempDir, true); + } + catch + { + // ignore cleanup failure + } + } + } + + private static StellaOpsAuthorityOptions BuildOptions(string basePath, string activeKey) + { + return new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test"), + Storage = { ConnectionString = "mongodb://localhost/test" 
}, + Notifications = + { + AckTokens = + { + Enabled = true, + ActiveKeyId = "ack-key-1", + KeyPath = activeKey, + Algorithm = SignatureAlgorithms.Es256, + KeySource = "file", + KeyUse = "notify-ack", + DefaultLifetime = TimeSpan.FromMinutes(15), + MaxLifetime = TimeSpan.FromMinutes(30) + }, + Webhooks = + { + Enabled = true, + AllowedHosts = { "hooks.slack.com" } + } + } + }; + } + + private static ServiceProvider BuildProvider(string basePath, StellaOpsAuthorityOptions options) + { + var services = new ServiceCollection(); + services.AddLogging(builder => builder.SetMinimumLevel(LogLevel.Debug)); + services.AddSingleton(new TestHostEnvironment(basePath)); + services.AddSingleton(options); + services.AddSingleton>(Options.Create(options)); + services.AddSingleton(TimeProvider.System); + services.AddMemoryCache(); + services.AddStellaOpsCrypto(); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + services.AddSingleton(); + services.AddSingleton(); + + return services.BuildServiceProvider(); + } + + private static void CreateEcPrivateKey(string path) + { + Directory.CreateDirectory(Path.GetDirectoryName(path)!); + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var pem = ecdsa.ExportECPrivateKeyPem(); + File.WriteAllText(path, pem); + } + + private sealed class TestHostEnvironment : IHostEnvironment + { + public TestHostEnvironment(string contentRoot) + { + ContentRootPath = contentRoot; + ContentRootFileProvider = new PhysicalFileProvider(contentRoot); + EnvironmentName = Environments.Development; + ApplicationName = "StellaOps.Authority.Tests"; + } + + public string EnvironmentName { get; set; } + + public string ApplicationName { get; set; } + + public string ContentRootPath { get; set; } + + public IFileProvider ContentRootFileProvider { get; set; } + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/AuthorityWebhookAllowlistEvaluatorTests.cs 
b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/AuthorityWebhookAllowlistEvaluatorTests.cs new file mode 100644 index 00000000..aacac148 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/AuthorityWebhookAllowlistEvaluatorTests.cs @@ -0,0 +1,58 @@ +using System; +using Microsoft.Extensions.Options; +using StellaOps.Authority.Notifications; +using StellaOps.Configuration; +using Xunit; + +namespace StellaOps.Authority.Tests.Notifications; + +public sealed class AuthorityWebhookAllowlistEvaluatorTests +{ + [Fact] + public void EnsureAllowed_AllowsExactHost() + { + var options = CreateOptions(); + var evaluator = new AuthorityWebhookAllowlistEvaluator(Options.Create(options)); + + var uri = new Uri("https://hooks.slack.com/services/T000/B000/ZZZ"); + evaluator.EnsureAllowed(uri); + } + + [Fact] + public void EnsureAllowed_AllowsWildcardSuffix() + { + var options = CreateOptions(); + options.Notifications.Webhooks.AllowedHosts.Add("*.pagerduty.com"); + var evaluator = new AuthorityWebhookAllowlistEvaluator(Options.Create(options)); + + var uri = new Uri("https://events.pagerduty.com/integration"); + evaluator.EnsureAllowed(uri); + } + + [Fact] + public void EnsureAllowed_ThrowsForDisallowedHost() + { + var options = CreateOptions(); + var evaluator = new AuthorityWebhookAllowlistEvaluator(Options.Create(options)); + + var uri = new Uri("https://example.com/webhook"); + Assert.Throws(() => evaluator.EnsureAllowed(uri)); + } + + private static StellaOpsAuthorityOptions CreateOptions() + { + return new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test"), + Storage = { ConnectionString = "mongodb://localhost/test" }, + Notifications = + { + Webhooks = + { + Enabled = true, + AllowedHosts = { "hooks.slack.com" } + } + } + }; + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/NotifyAckTokenRotationEndpointTests.cs 
b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/NotifyAckTokenRotationEndpointTests.cs new file mode 100644 index 00000000..a1306a25 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Notifications/NotifyAckTokenRotationEndpointTests.cs @@ -0,0 +1,220 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http.Headers; +using System.Net.Http.Json; +using System.Security.Cryptography; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Authentication; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Time.Testing; +using StellaOps.Auth.Abstractions; +using StellaOps.Authority; +using StellaOps.Authority.Tests.Infrastructure; +using StellaOps.Cryptography; +using StellaOps.Cryptography.Audit; +using Xunit; + +namespace StellaOps.Authority.Tests.Notifications; + +public sealed class NotifyAckTokenRotationEndpointTests : IClassFixture +{ + private readonly AuthorityWebApplicationFactory factory; + + public NotifyAckTokenRotationEndpointTests(AuthorityWebApplicationFactory factory) + { + this.factory = factory ?? 
throw new ArgumentNullException(nameof(factory)); + } + + [Fact] + public async Task Rotate_ReturnsOk_AndEmitsAuditEvent() + { + var tempDir = Directory.CreateTempSubdirectory("ack-rotation-success"); + try + { + var key1Path = Path.Combine(tempDir.FullName, "ack-key-1.pem"); + var key2Path = Path.Combine(tempDir.FullName, "ack-key-2.pem"); + CreateEcPrivateKey(key1Path); + CreateEcPrivateKey(key2Path); + + var sink = new RecordingAuthEventSink(); + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T12:00:00Z")); + + using var app = factory.WithWebHostBuilder(host => + { + host.ConfigureAppConfiguration((_, configuration) => + { + configuration.AddInMemoryCollection(new Dictionary + { + ["Authority:Notifications:AckTokens:Enabled"] = "true", + ["Authority:Notifications:AckTokens:ActiveKeyId"] = "ack-key-1", + ["Authority:Notifications:AckTokens:KeyPath"] = key1Path, + ["Authority:Notifications:AckTokens:KeySource"] = "file", + ["Authority:Notifications:AckTokens:Algorithm"] = SignatureAlgorithms.Es256, + ["Authority:Notifications:Webhooks:Enabled"] = "true", + ["Authority:Notifications:Webhooks:AllowedHosts:0"] = "hooks.slack.com", + ["Authority:Notifications:Escalation:Scope"] = "notify.escalate", + ["Authority:Notifications:Escalation:RequireAdminScope"] = "true" + }); + }); + + host.ConfigureTestServices(services => + { + services.RemoveAll(); + services.AddSingleton(sink); + services.Replace(ServiceDescriptor.Singleton(timeProvider)); + services.AddAuthentication(options => + { + options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName; + options.DefaultChallengeScheme = TestAuthHandler.SchemeName; + }) + .AddScheme(TestAuthHandler.SchemeName, _ => { }); + }); + }); + + using var client = app.CreateClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName); + client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.NotifyAdmin); + 
client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default"); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var response = await client.PostAsJsonAsync("/notify/ack-tokens/rotate", new + { + keyId = "ack-key-2", + location = key2Path + }); + + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + var payload = await response.Content.ReadFromJsonAsync(); + Assert.NotNull(payload); + Assert.Equal("ack-key-2", payload!.ActiveKeyId); + Assert.Equal("ack-key-1", payload.PreviousKeyId); + + var rotationEvent = Assert.Single(sink.Events.Where(evt => evt.EventType == "notify.ack.key_rotated")); + Assert.Equal(AuthEventOutcome.Success, rotationEvent.Outcome); + Assert.Contains(rotationEvent.Properties, property => + string.Equals(property.Name, "notify.ack.key_id", StringComparison.Ordinal) && + string.Equals(property.Value.Value, "ack-key-2", StringComparison.Ordinal)); + } + finally + { + TryDeleteDirectory(tempDir.FullName); + } + } + + [Fact] + public async Task Rotate_ReturnsBadRequest_WhenKeyIdMissing_AndAuditsFailure() + { + var tempDir = Directory.CreateTempSubdirectory("ack-rotation-failure"); + try + { + var key1Path = Path.Combine(tempDir.FullName, "ack-key-1.pem"); + var key2Path = Path.Combine(tempDir.FullName, "ack-key-2.pem"); + CreateEcPrivateKey(key1Path); + CreateEcPrivateKey(key2Path); + + var sink = new RecordingAuthEventSink(); + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T13:00:00Z")); + + using var app = factory.WithWebHostBuilder(host => + { + host.ConfigureAppConfiguration((_, configuration) => + { + configuration.AddInMemoryCollection(new Dictionary + { + ["Authority:Notifications:AckTokens:Enabled"] = "true", + ["Authority:Notifications:AckTokens:ActiveKeyId"] = "ack-key-1", + ["Authority:Notifications:AckTokens:KeyPath"] = key1Path, + ["Authority:Notifications:AckTokens:KeySource"] = "file", + ["Authority:Notifications:AckTokens:Algorithm"] = 
SignatureAlgorithms.Es256, + ["Authority:Notifications:Webhooks:Enabled"] = "true", + ["Authority:Notifications:Webhooks:AllowedHosts:0"] = "hooks.slack.com" + }); + }); + + host.ConfigureTestServices(services => + { + services.RemoveAll(); + services.AddSingleton(sink); + services.Replace(ServiceDescriptor.Singleton(timeProvider)); + services.AddAuthentication(options => + { + options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName; + options.DefaultChallengeScheme = TestAuthHandler.SchemeName; + }) + .AddScheme(TestAuthHandler.SchemeName, _ => { }); + }); + }); + + using var client = app.CreateClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName); + client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.NotifyAdmin); + client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default"); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var response = await client.PostAsJsonAsync("/notify/ack-tokens/rotate", new + { + location = key2Path + }); + + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + + var failureEvent = Assert.Single(sink.Events.Where(evt => evt.EventType == "notify.ack.key_rotation_failed")); + Assert.Equal(AuthEventOutcome.Failure, failureEvent.Outcome); + Assert.Contains("keyId", failureEvent.Reason, StringComparison.OrdinalIgnoreCase); + } + finally + { + TryDeleteDirectory(tempDir.FullName); + } + } + + private static void CreateEcPrivateKey(string path) + { + Directory.CreateDirectory(Path.GetDirectoryName(path)!); + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + File.WriteAllText(path, ecdsa.ExportECPrivateKeyPem()); + } + + private static void TryDeleteDirectory(string path) + { + try + { + if (Directory.Exists(path)) + { + Directory.Delete(path, recursive: true); + } + } + catch + { + // Ignore cleanup failures in tests. 
+ } + } + + private sealed record AckRotateResponse( + string ActiveKeyId, + string? Provider, + string? Source, + string? Location, + string? PreviousKeyId, + IReadOnlyCollection RetiredKeyIds); + + private sealed class RecordingAuthEventSink : IAuthEventSink + { + private readonly ConcurrentQueue events = new(); + + public IReadOnlyCollection Events => events.ToArray(); + + public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) + { + events.Enqueue(record); + return ValueTask.CompletedTask; + } + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenApi/OpenApiDiscoveryEndpointTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenApi/OpenApiDiscoveryEndpointTests.cs index 09fe1716..0d31764d 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenApi/OpenApiDiscoveryEndpointTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenApi/OpenApiDiscoveryEndpointTests.cs @@ -48,8 +48,10 @@ public sealed class OpenApiDiscoveryEndpointTests : IClassFixture.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:write"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); - Assert.Equal("Scope 'jobs:write' is not allowed for this client.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_Allows_WhenConfigurationMatches() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "jobs:read jobs:trigger"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new 
ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - Assert.False(context.Transaction.Properties.ContainsKey(AuthorityOpenIddictConstants.ClientTenantProperty)); - Assert.Same(clientDocument, context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty]); - - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "jobs:read" }, grantedScopes); - Assert.Equal(clientDocument.Plugin, context.Transaction.Properties[AuthorityOpenIddictConstants.ClientProviderTransactionProperty]); - } - - [Fact] - public async Task ValidateClientCredentials_Allows_NewIngestionScopes() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "advisory:ingest advisory:read", - tenant: "tenant-alpha"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, 
"s3cr3t!", scope: "advisory:ingest"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "advisory:ingest" }, grantedScopes); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsAdvisoryReadWithoutAocVerify() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "advisory:read aoc:verify", - tenant: "tenant-alpha"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); - Assert.Equal("Scope 'aoc:verify' is required when requesting advisory/vex read scopes.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsSignalsScopeWithoutAocVerify() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: 
"client_credentials", - allowedScopes: "signals:read signals:write signals:admin aoc:verify", - tenant: "tenant-alpha"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "signals:write"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); - Assert.Equal("Scope 'aoc:verify' is required when requesting signals scopes.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsPolicyAuthorWithoutTenant() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "policy:author"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: 
"policy:author"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("Policy Studio scopes require a tenant assignment.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.PolicyAuthor, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - } - - [Fact] - public async Task ValidateClientCredentials_AllowsPolicyAuthorWithTenant() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "policy:author", - tenant: "tenant-alpha"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "policy:author"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "policy:author" }, grantedScopes); - } - - [Fact] - public async Task ValidateClientCredentials_AllowsAdvisoryReadWithAocVerify() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "advisory:read aoc:verify", - tenant: 
"tenant-alpha"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:read aoc:verify"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "advisory:read", "aoc:verify" }, grantedScopes); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsAocVerifyWithoutTenant() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "aoc:verify"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "aoc:verify"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - 
Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("Scope 'aoc:verify' requires a tenant assignment.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsEffectiveWrite_WhenServiceIdentityMissing() - { - var clientDocument = CreateClient( - clientId: "policy-engine", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "effective:write findings:read policy:run", - tenant: "tenant-default"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - Assert.True(clientDocument.Properties.ContainsKey(AuthorityClientMetadataKeys.Tenant)); - Assert.Equal("tenant-default", clientDocument.Properties[AuthorityClientMetadataKeys.Tenant]); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "effective:write"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.UnauthorizedClient, context.Error); - Assert.Equal("Scope 'effective:write' is reserved for the Policy Engine service identity.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsEffectiveWrite_WhenTenantMissing() - { - var clientDocument = CreateClient( - clientId: "policy-engine", - secret: 
"s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "effective:write findings:read policy:run"); - clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.PolicyEngine; - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "effective:write"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("Policy Engine service identity requires a tenant assignment.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_AllowsEffectiveWrite_ForPolicyEngineServiceIdentity() - { - var clientDocument = CreateClient( - clientId: "policy-engine", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "effective:write findings:read policy:run", - tenant: "tenant-default"); - clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.PolicyEngine; - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - 
TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "effective:write"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "effective:write" }, grantedScopes); - - var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); - Assert.Equal("tenant-default", tenant); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsOrchOperate_WhenTenantMissing() - { - var clientDocument = CreateClient( - clientId: "orch-operator", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "orch:read orch:operate"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate"); - transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorReasonParameterName, "resume source after maintenance"); - transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorTicketParameterName, "INC-2045"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await 
handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("Orchestrator scopes require a tenant assignment.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsOrchOperate_WhenReasonMissing() - { - var clientDocument = CreateClient( - clientId: "orch-operator", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "orch:read orch:operate", - tenant: "tenant-default"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate"); - transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorTicketParameterName, "INC-2045"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); - Assert.Equal("Operator actions require 'operator_reason'.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsOrchOperate_WhenTicketMissing() - { - var clientDocument = CreateClient( - clientId: "orch-operator", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "orch:read orch:operate", - tenant: "tenant-default"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var 
options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate"); - transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorReasonParameterName, "resume source after maintenance"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); - Assert.Equal("Operator actions require 'operator_ticket'.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_AllowsOrchOperate_WithReasonAndTicket() - { - var clientDocument = CreateClient( - clientId: "orch-operator", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "orch:read orch:operate", - tenant: "tenant-default"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate"); - transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorReasonParameterName, "resume source after maintenance"); - 
transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorTicketParameterName, "INC-2045"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "orch:operate" }, grantedScopes); - var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); - Assert.Equal("tenant-default", tenant); +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.IdentityModel.Tokens.Jwt; +using System.Security.Claims; +using System.Security.Cryptography; +using System.Security.Cryptography.X509Certificates; +using System.Text.Json; +using System.Linq; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Http.Extensions; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Options; +using Microsoft.Extensions.Primitives; +using Microsoft.IdentityModel.Tokens; +using StellaOps.Configuration; +using StellaOps.Authority.Security; +using StellaOps.Auth.Security.Dpop; +using OpenIddict.Abstractions; +using OpenIddict.Extensions; +using OpenIddict.Server; +using OpenIddict.Server.AspNetCore; +using StellaOps.Auth.Abstractions; +using StellaOps.Authority.OpenIddict; +using StellaOps.Authority.OpenIddict.Handlers; +using StellaOps.Authority.Plugins.Abstractions; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Sessions; +using StellaOps.Authority.Storage.Mongo.Stores; +using StellaOps.Authority.RateLimiting; +using StellaOps.Cryptography.Audit; +using Xunit; +using MongoDB.Bson; +using 
MongoDB.Driver; +using static StellaOps.Authority.Tests.OpenIddict.TestHelpers; + +namespace StellaOps.Authority.Tests.OpenIddict; + +public class ClientCredentialsHandlersTests +{ + private static readonly ActivitySource TestActivitySource = new("StellaOps.Authority.Tests"); + + [Fact] + public async Task ValidateClientCredentials_Rejects_WhenScopeNotAllowed() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); + Assert.Equal("Scope 'jobs:write' is not allowed for this client.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsObsIncidentScope() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "obs:incident"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new 
TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "obs:incident"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); + Assert.Contains("obs:incident", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_Allows_WhenConfigurationMatches() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read jobs:trigger"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + Assert.False(context.Transaction.Properties.ContainsKey(AuthorityOpenIddictConstants.ClientTenantProperty)); + Assert.Same(clientDocument, context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty]); + + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { 
"jobs:read" }, grantedScopes); + Assert.Equal(clientDocument.Plugin, context.Transaction.Properties[AuthorityOpenIddictConstants.ClientProviderTransactionProperty]); + } + + [Fact] + public async Task ValidateClientCredentials_Allows_NewIngestionScopes() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "advisory:ingest advisory:read", + tenant: "tenant-alpha"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:ingest"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "advisory:ingest" }, grantedScopes); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsAdvisoryReadWithoutAocVerify() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "advisory:read aoc:verify", + tenant: "tenant-alpha"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + 
TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); + Assert.Equal("Scope 'aoc:verify' is required when requesting advisory/vex read scopes.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsSignalsScopeWithoutAocVerify() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "signals:read signals:write signals:admin aoc:verify", + tenant: "tenant-alpha"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "signals:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); + Assert.Equal("Scope 'aoc:verify' is required when requesting signals 
scopes.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + } + + [Theory] + [InlineData(StellaOpsScopes.AirgapSeal)] + [InlineData(StellaOpsScopes.AirgapImport)] + [InlineData(StellaOpsScopes.AirgapStatusRead)] + public async Task ValidateClientCredentials_RejectsAirgapScopesWithoutTenant(string scope) + { + var clientId = $"airgap-{scope.Replace(':', '-')}-client"; + var clientDocument = CreateClient( + clientId: clientId, + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: scope); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: scope); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Air-gap scopes require a tenant assignment.", context.ErrorDescription); + Assert.Equal(scope, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + } + + [Theory] + [InlineData(StellaOpsScopes.AirgapSeal)] + [InlineData(StellaOpsScopes.AirgapImport)] + [InlineData(StellaOpsScopes.AirgapStatusRead)] + public async Task ValidateClientCredentials_AllowsAirgapScopesWithTenant(string scope) + { + var clientId = $"airgap-{scope.Replace(':', '-')}-client"; + var clientDocument = CreateClient( + 
clientId: clientId, + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: scope, + tenant: "tenant-alpha"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: scope); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { scope }, grantedScopes); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsPolicyAuthorWithoutTenant() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "policy:author"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "policy:author"); + var context = new 
OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Policy Studio scopes require a tenant assignment.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.PolicyAuthor, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsPolicyAuthorWithTenant() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "policy:author", + tenant: "tenant-alpha"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "policy:author"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "policy:author" }, grantedScopes); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsAdvisoryReadWithAocVerify() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "advisory:read aoc:verify", + tenant: "tenant-alpha"); + + var registry = 
CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:read aoc:verify"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "advisory:read", "aoc:verify" }, grantedScopes); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsAocVerifyWithoutTenant() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "aoc:verify"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "aoc:verify"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + 
Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Scope 'aoc:verify' requires a tenant assignment.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsEffectiveWrite_WhenServiceIdentityMissing() + { + var clientDocument = CreateClient( + clientId: "policy-engine", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "effective:write findings:read policy:run", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + Assert.True(clientDocument.Properties.ContainsKey(AuthorityClientMetadataKeys.Tenant)); + Assert.Equal("tenant-default", clientDocument.Properties[AuthorityClientMetadataKeys.Tenant]); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "effective:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.UnauthorizedClient, context.Error); + Assert.Equal("Scope 'effective:write' is reserved for the Policy Engine service identity.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsEffectiveWrite_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "policy-engine", + secret: "s3cr3t!", + allowedGrantTypes: 
"client_credentials", + allowedScopes: "effective:write findings:read policy:run"); + clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.PolicyEngine; + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "effective:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Policy Engine service identity requires a tenant assignment.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsEffectiveWrite_ForPolicyEngineServiceIdentity() + { + var clientDocument = CreateClient( + clientId: "policy-engine", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "effective:write findings:read policy:run", + tenant: "tenant-default"); + clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.PolicyEngine; + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new 
NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "effective:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "effective:write" }, grantedScopes); + + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsOrchOperate_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "orch-operator", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:read orch:operate"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorReasonParameterName, "resume source after maintenance"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorTicketParameterName, "INC-2045"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + 
+ Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Orchestrator scopes require a tenant assignment.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsOrchOperate_WhenReasonMissing() + { + var clientDocument = CreateClient( + clientId: "orch-operator", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:read orch:operate", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorTicketParameterName, "INC-2045"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); + Assert.Equal("Operator actions require 'operator_reason'.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsOrchOperate_WhenTicketMissing() + { + var clientDocument = CreateClient( + clientId: "orch-operator", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:read orch:operate", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = 
TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorReasonParameterName, "resume source after maintenance"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); + Assert.Equal("Operator actions require 'operator_ticket'.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsOrchOperate_WithReasonAndTicket() + { + var clientDocument = CreateClient( + clientId: "orch-operator", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:read orch:operate", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorReasonParameterName, "resume source after maintenance"); + 
transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorTicketParameterName, "INC-2045"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "orch:operate" }, grantedScopes); + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); var reason = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.OperatorReasonProperty]); Assert.Equal("resume source after maintenance", reason); var ticket = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.OperatorTicketProperty]); Assert.Equal("INC-2045", ticket); - var principal = Assert.NotNull(context.Principal); - Assert.Equal("resume source after maintenance", principal.FindFirstValue(StellaOpsClaimTypes.OperatorReason)); - Assert.Equal("INC-2045", principal.FindFirstValue(StellaOpsClaimTypes.OperatorTicket)); - var scopeClaim = principal.FindFirstValue(StellaOpsClaimTypes.Scope); - Assert.Contains("orch:operate", scopeClaim.Split(' ', StringSplitOptions.RemoveEmptyEntries)); - var scopeItems = principal.FindAll(StellaOpsClaimTypes.ScopeItem).Select(claim => claim.Value).ToArray(); - Assert.Contains("orch:operate", scopeItems); + Assert.Equal("resume source after maintenance", context.Transaction.Properties[AuthorityOpenIddictConstants.OperatorReasonProperty]); + Assert.Equal("INC-2045", context.Transaction.Properties[AuthorityOpenIddictConstants.OperatorTicketProperty]); } - - [Fact] - public async Task ValidateClientCredentials_RejectsExportViewer_WhenTenantMissing() - { - var clientDocument = CreateClient( - clientId: "export-viewer", - secret: 
"s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "export.viewer"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - TestHelpers.CreateAuthorityOptions(), - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.viewer"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("Export scopes require a tenant assignment.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.ExportViewer, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - } - - [Fact] - public async Task ValidateClientCredentials_AllowsExportViewer_WithTenant() - { - var clientDocument = CreateClient( - clientId: "export-viewer", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "export.viewer", - tenant: "tenant-default"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - TestHelpers.CreateAuthorityOptions(), - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.viewer"); - var context = new 
OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "export.viewer" }, grantedScopes); - var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); - Assert.Equal("tenant-default", tenant); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsExportAdmin_WhenReasonMissing() - { - var clientDocument = CreateClient( - clientId: "export-admin", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "export.admin", - tenant: "tenant-default"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - TestHelpers.CreateAuthorityOptions(), - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.admin"); - transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminTicketParameterName, "INC-9001"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); - Assert.Equal("Export admin actions require 'export_reason'.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsExportAdmin_WhenTicketMissing() - { - var clientDocument = CreateClient( - clientId: 
"export-admin", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "export.admin", - tenant: "tenant-default"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - TestHelpers.CreateAuthorityOptions(), - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.admin"); - transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminReasonParameterName, "Rotate encryption keys after incident postmortem"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); - Assert.Equal("Export admin actions require 'export_ticket'.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_AllowsExportAdmin_WithReasonAndTicket() - { - var clientDocument = CreateClient( - clientId: "export-admin", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "export.admin", - tenant: "tenant-default"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - TestHelpers.CreateAuthorityOptions(), - NullLogger.Instance); - - var transaction = 
CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.admin"); - transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminReasonParameterName, "Rotate encryption keys after incident postmortem"); - transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminTicketParameterName, "INC-9001"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "export.admin" }, grantedScopes); - var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); - Assert.Equal("tenant-default", tenant); - var reason = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ExportAdminReasonProperty]); - Assert.Equal("Rotate encryption keys after incident postmortem", reason); - var ticket = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ExportAdminTicketProperty]); - Assert.Equal("INC-9001", ticket); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsGraphWrite_WhenServiceIdentityMissing() - { - var clientDocument = CreateClient( - clientId: "cartographer-service", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "graph:write graph:read", - tenant: "tenant-default"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - 
new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:write"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.UnauthorizedClient, context.Error); - Assert.Equal("Scope 'graph:write' is reserved for the Cartographer service identity.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsGraphWrite_WhenServiceIdentityMismatch() - { - var clientDocument = CreateClient( - clientId: "cartographer-service", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "graph:write graph:read", - tenant: "tenant-default"); - clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.PolicyEngine; - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:write"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.UnauthorizedClient, context.Error); - Assert.Equal("Scope 'graph:write' is reserved for the Cartographer service identity.", context.ErrorDescription); - } - - [Fact] - public async Task 
ValidateClientCredentials_RejectsGraphScopes_WhenTenantMissing() - { - var clientDocument = CreateClient( - clientId: "graph-api", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "graph:read graph:export"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("Graph scopes require a tenant assignment.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_AllowsGraphRead_WithTenant() - { - var clientDocument = CreateClient( - clientId: "graph-api", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "graph:read graph:export", - tenant: "tenant-default"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = 
CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "graph:read" }, grantedScopes); - var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); - Assert.Equal("tenant-default", tenant); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsOrchRead_WhenTenantMissing() - { - var clientDocument = CreateClient( - clientId: "orch-dashboard", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "orch:read"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("Orchestrator scopes require a tenant assignment.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_AllowsOrchRead_WithTenant() - { - var clientDocument = CreateClient( - clientId: "orch-dashboard", - secret: 
"s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "orch:read", - tenant: "tenant-default"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "orch:read" }, grantedScopes); - var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); - Assert.Equal("tenant-default", tenant); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsAdvisoryScopes_WhenTenantMissing() - { - var clientDocument = CreateClient( - clientId: "concelier-ingestor", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "advisory:ingest advisory:read"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new 
HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:ingest"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("Advisory scopes require a tenant assignment.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsVexScopes_WhenTenantMissing() - { - var clientDocument = CreateClient( - clientId: "excitor-ingestor", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "vex:ingest vex:read"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "vex:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("VEX scopes require a tenant assignment.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_AllowsAdvisoryScopes_WithTenant() - { - var clientDocument = CreateClient( - clientId: "concelier-ingestor", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "advisory:ingest advisory:read aoc:verify", - tenant: "tenant-default"); 
- - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:read aoc:verify"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "advisory:read", "aoc:verify" }, grantedScopes); - } - - [Fact] - public async Task ValidateClientCredentials_AllowsGraphWrite_ForCartographerServiceIdentity() - { - var clientDocument = CreateClient( - clientId: "cartographer-service", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "graph:write graph:read", - tenant: "tenant-default"); - clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.Cartographer; - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = 
CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:write"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "graph:write" }, grantedScopes); - var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); - Assert.Equal("tenant-default", tenant); - } - - [Fact] - public async Task ValidateClientCredentials_EmitsTamperAuditEvent_WhenUnexpectedParametersPresent() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "jobs:read"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var sink = new TestAuthEventSink(); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - sink, - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); - transaction.Request?.SetParameter("unexpected_param", "value"); - - await handler.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); - - var tamperEvent = Assert.Single(sink.Events, record => record.EventType == "authority.token.tamper"); - Assert.Contains(tamperEvent.Properties, property => - string.Equals(property.Name, "request.unexpected_parameter", StringComparison.OrdinalIgnoreCase) && - string.Equals(property.Value.Value, "unexpected_param", 
StringComparison.OrdinalIgnoreCase)); - } - - [Fact] - public async Task ValidateDpopProof_AllowsSenderConstrainedClient() - { - var options = TestHelpers.CreateAuthorityOptions(opts => - { - opts.Security.SenderConstraints.Dpop.Enabled = true; - opts.Security.SenderConstraints.Dpop.Nonce.Enabled = false; - }); - - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "jobs:read"); - clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Dpop; - clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Dpop; - - using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); - var securityKey = new ECDsaSecurityKey(ecdsa) - { - KeyId = Guid.NewGuid().ToString("N") - }; - var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(securityKey); - var expectedThumbprint = ConvertThumbprintToString(jwk.ComputeJwkThumbprint()); - - var clientStore = new TestClientStore(clientDocument); - var auditSink = new TestAuthEventSink(); - var rateMetadata = new TestRateLimiterMetadataAccessor(); - - var dpopValidator = new DpopProofValidator( - Options.Create(new DpopValidationOptions()), - new InMemoryDpopReplayCache(TimeProvider.System), - TimeProvider.System, - NullLogger.Instance); - - var nonceStore = new InMemoryDpopNonceStore(TimeProvider.System, NullLogger.Instance); - - var dpopHandler = new ValidateDpopProofHandler( - options, - clientStore, - dpopValidator, - nonceStore, - rateMetadata, - auditSink, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); - transaction.Options = new OpenIddictServerOptions(); - - var httpContext = new DefaultHttpContext(); - httpContext.Request.Method = "POST"; - httpContext.Request.Scheme = "https"; - httpContext.Request.Host = new HostString("authority.test"); - httpContext.Request.Path = "/token"; - - var 
now = TimeProvider.System.GetUtcNow(); - var proof = TestHelpers.CreateDpopProof(securityKey, httpContext.Request.Method, httpContext.Request.GetDisplayUrl(), now.ToUnixTimeSeconds()); - httpContext.Request.Headers["DPoP"] = proof; - - transaction.Properties[typeof(HttpContext).FullName!] = httpContext; - - var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - await dpopHandler.HandleAsync(validateContext); - - Assert.False(validateContext.IsRejected); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var validateHandler = new ValidateClientCredentialsHandler( - clientStore, - registry, - TestActivitySource, - auditSink, - rateMetadata, - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - await validateHandler.HandleAsync(validateContext); - Assert.False(validateContext.IsRejected); - - var tokenStore = new TestTokenStore(); - var sessionAccessor = new NullMongoSessionAccessor(); - var handleHandler = new HandleClientCredentialsHandler( - registry, - tokenStore, - sessionAccessor, - rateMetadata, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var handleContext = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); - await handleHandler.HandleAsync(handleContext); - Assert.True(handleContext.IsRequestHandled); - - var persistHandler = new PersistTokensHandler( - tokenStore, - sessionAccessor, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var signInContext = new OpenIddictServerEvents.ProcessSignInContext(transaction) - { - Principal = handleContext.Principal, - AccessTokenPrincipal = handleContext.Principal - }; - - await persistHandler.HandleAsync(signInContext); - - var confirmationClaim = handleContext.Principal?.GetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType); - 
Assert.False(string.IsNullOrWhiteSpace(confirmationClaim)); - - using (var confirmationJson = JsonDocument.Parse(confirmationClaim!)) - { - Assert.Equal(expectedThumbprint, confirmationJson.RootElement.GetProperty("jkt").GetString()); - } - - Assert.NotNull(tokenStore.Inserted); - Assert.Equal(AuthoritySenderConstraintKinds.Dpop, tokenStore.Inserted!.SenderConstraint); - Assert.Equal(expectedThumbprint, tokenStore.Inserted!.SenderKeyThumbprint); - } - - [Fact] - public async Task ValidateDpopProof_IssuesNonceChallenge_WhenNonceMissing() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - options.Security.SenderConstraints.Dpop.Enabled = true; - options.Security.SenderConstraints.Dpop.Nonce.Enabled = true; - options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences.Clear(); - options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences.Add("signer"); - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost/test"; - Assert.Contains("signer", options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences); - - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "jobs:read", - allowedAudiences: "signer"); - clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Dpop; - clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Dpop; - - using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); - var securityKey = new ECDsaSecurityKey(ecdsa) - { - KeyId = Guid.NewGuid().ToString("N") - }; - - var clientStore = new TestClientStore(clientDocument); - var auditSink = new TestAuthEventSink(); - var rateMetadata = new TestRateLimiterMetadataAccessor(); - - var dpopValidator = new DpopProofValidator( - Options.Create(new DpopValidationOptions()), - new InMemoryDpopReplayCache(TimeProvider.System), - 
TimeProvider.System, - NullLogger.Instance); - - var nonceStore = new InMemoryDpopNonceStore(TimeProvider.System, NullLogger.Instance); - - var dpopHandler = new ValidateDpopProofHandler( - options, - clientStore, - dpopValidator, - nonceStore, - rateMetadata, - auditSink, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); - transaction.Options = new OpenIddictServerOptions(); - - var httpContext = new DefaultHttpContext(); - httpContext.Request.Method = "POST"; - httpContext.Request.Scheme = "https"; - httpContext.Request.Host = new HostString("authority.test"); - httpContext.Request.Path = "/token"; - - var now = TimeProvider.System.GetUtcNow(); - var proof = TestHelpers.CreateDpopProof(securityKey, httpContext.Request.Method, httpContext.Request.GetDisplayUrl(), now.ToUnixTimeSeconds()); - httpContext.Request.Headers["DPoP"] = proof; - - transaction.Properties[typeof(HttpContext).FullName!] 
= httpContext; - - var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - await dpopHandler.HandleAsync(validateContext); - - Assert.True(validateContext.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, validateContext.Error); - var authenticateHeader = Assert.Single(httpContext.Response.Headers.Select(header => header) - .Where(header => string.Equals(header.Key, "WWW-Authenticate", StringComparison.OrdinalIgnoreCase))).Value; - Assert.Contains("use_dpop_nonce", authenticateHeader.ToString()); - Assert.True(httpContext.Response.Headers.TryGetValue("DPoP-Nonce", out var nonceValues)); - Assert.False(StringValues.IsNullOrEmpty(nonceValues)); - Assert.Contains(auditSink.Events, record => record.EventType == "authority.dpop.proof.challenge"); - } - - [Fact] - public async Task ValidateClientCredentials_AllowsMtlsClient_WithValidCertificate() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - options.Security.SenderConstraints.Mtls.Enabled = true; - options.Security.SenderConstraints.Mtls.RequireChainValidation = false; - options.Security.SenderConstraints.Mtls.AllowedSanTypes.Clear(); - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost/test"; - - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "jobs:read"); - clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; - clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Mtls; - - using var rsa = RSA.Create(2048); - var certificateRequest = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - using var certificate = certificateRequest.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddHours(1)); - var 
hexThumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)); - clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding - { - Thumbprint = hexThumbprint - }); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var auditSink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() }; - httpContextAccessor.HttpContext!.Connection.ClientCertificate = certificate; - - var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); - - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - auditSink, - metadataAccessor, - TimeProvider.System, - validator, - httpContextAccessor, - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, context.ErrorDescription ?? 
context.Error); - Assert.Equal(AuthoritySenderConstraintKinds.Mtls, context.Transaction.Properties[AuthorityOpenIddictConstants.SenderConstraintProperty]); - - var expectedBase64 = Base64UrlEncoder.Encode(certificate.GetCertHash(HashAlgorithmName.SHA256)); - Assert.Equal(expectedBase64, context.Transaction.Properties[AuthorityOpenIddictConstants.MtlsCertificateThumbprintProperty]); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsMtlsClient_WhenCertificateMissing() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - options.Security.SenderConstraints.Mtls.Enabled = true; - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost/test"; - - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "jobs:read"); - clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; - clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Mtls; - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() }; - var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); - - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - validator, - httpContextAccessor, - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - 
Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - } - - [Fact] - public async Task ValidateClientCredentials_Rejects_WhenAudienceRequiresMtlsButClientConfiguredForDpop() - { - var options = TestHelpers.CreateAuthorityOptions(opts => - { - opts.Security.SenderConstraints.Mtls.Enabled = true; - opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Clear(); - opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Add("signer"); - }); - - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "jobs:read", - allowedAudiences: "signer"); - clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Dpop; - clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Dpop; - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("Requested audiences require mutual TLS sender constraint.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_RequiresMtlsWhenAudienceMatchesEnforcement() - { - var options = TestHelpers.CreateAuthorityOptions(opts => - { - opts.Security.SenderConstraints.Mtls.Enabled = true; - 
opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Clear(); - opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Add("signer"); - }); - - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "jobs:read", - allowedAudiences: "signer"); - clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding - { - Thumbprint = "DEADBEEF" - }); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var certificateValidator = new RecordingCertificateValidator(); - var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() }; - - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - certificateValidator, - httpContextAccessor, - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("client_certificate_required", context.ErrorDescription); - Assert.True(certificateValidator.Invoked); - } - - [Fact] - public async Task HandleClientCredentials_PersistsTokenAndEnrichesClaims() - { - var clientDocument = CreateClient( - secret: null, - clientType: "public", - allowedGrantTypes: "client_credentials", - allowedScopes: "jobs:trigger", - allowedAudiences: "signer", - tenant: "Tenant-Alpha"); - - var descriptor = CreateDescriptor(clientDocument); - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: descriptor); - var tokenStore = new TestTokenStore(); - var sessionAccessor = new 
NullMongoSessionAccessor(); - var authSink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var options = TestHelpers.CreateAuthorityOptions(); - var validateHandler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - authSink, - metadataAccessor, - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, secret: null, scope: "jobs:trigger"); - transaction.Options.AccessTokenLifetime = TimeSpan.FromMinutes(30); - - var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - await validateHandler.HandleAsync(validateContext); - Assert.False(validateContext.IsRejected); - - var handler = new HandleClientCredentialsHandler( - registry, - tokenStore, - sessionAccessor, - metadataAccessor, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - var persistHandler = new PersistTokensHandler(tokenStore, sessionAccessor, TimeProvider.System, TestActivitySource, NullLogger.Instance); - - var context = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRequestHandled); - Assert.NotNull(context.Principal); - Assert.Contains("signer", context.Principal!.GetAudiences()); - - Assert.Contains(authSink.Events, record => record.EventType == "authority.client_credentials.grant" && record.Outcome == AuthEventOutcome.Success); - - var identityProviderClaim = context.Principal?.GetClaim(StellaOpsClaimTypes.IdentityProvider); - Assert.Equal(clientDocument.Plugin, identityProviderClaim); - - var principal = context.Principal ?? 
throw new InvalidOperationException("Principal missing"); - Assert.Equal("tenant-alpha", principal.FindFirstValue(StellaOpsClaimTypes.Tenant)); - var tokenId = principal.GetClaim(OpenIddictConstants.Claims.JwtId); - Assert.False(string.IsNullOrWhiteSpace(tokenId)); - - var signInContext = new OpenIddictServerEvents.ProcessSignInContext(transaction) - { - Principal = principal, - AccessTokenPrincipal = principal - }; - - await persistHandler.HandleAsync(signInContext); - - var persisted = Assert.IsType(tokenStore.Inserted); - Assert.Equal(tokenId, persisted.TokenId); - Assert.Equal(clientDocument.ClientId, persisted.ClientId); - Assert.Equal("valid", persisted.Status); - Assert.Equal("tenant-alpha", persisted.Tenant); - Assert.Equal(new[] { "jobs:trigger" }, persisted.Scope); - } -} - -public class TokenValidationHandlersTests -{ - private static readonly ActivitySource TestActivitySource = new("StellaOps.Authority.Tests.TokenValidation"); - - [Fact] - public async Task ValidateAccessTokenHandler_Rejects_WhenTokenRevoked() - { - var tokenStore = new TestTokenStore(); - tokenStore.Inserted = new AuthorityTokenDocument - { - TokenId = "token-1", - Status = "revoked", - ClientId = "concelier" - }; - - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var auditSink = new TestAuthEventSink(); - var sessionAccessor = new NullMongoSessionAccessor(); - var handler = new ValidateAccessTokenHandler( - tokenStore, - sessionAccessor, - new TestClientStore(CreateClient()), - CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(CreateClient())), - metadataAccessor, - auditSink, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var transaction = new OpenIddictServerTransaction - { - Options = new OpenIddictServerOptions(), - EndpointType = OpenIddictServerEndpointType.Token, - Request = new OpenIddictRequest() - }; - - var principal = CreatePrincipal("concelier", "token-1", "standard"); - var context = new 
OpenIddictServerEvents.ValidateTokenContext(transaction) - { - Principal = principal, - TokenId = "token-1" - }; - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error); - } - - [Fact] - public async Task ValidateAccessTokenHandler_AddsTenantClaim_FromTokenDocument() - { - var clientDocument = CreateClient(tenant: "tenant-alpha"); - var tokenStore = new TestTokenStore - { - Inserted = new AuthorityTokenDocument - { - TokenId = "token-tenant", - Status = "valid", - ClientId = clientDocument.ClientId, - Tenant = "tenant-alpha" - } - }; - - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var auditSink = new TestAuthEventSink(); - var sessionAccessor = new NullMongoSessionAccessor(); - var handler = new ValidateAccessTokenHandler( - tokenStore, - sessionAccessor, - new TestClientStore(clientDocument), - CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)), - metadataAccessor, - auditSink, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var transaction = new OpenIddictServerTransaction - { - Options = new OpenIddictServerOptions(), - EndpointType = OpenIddictServerEndpointType.Token, - Request = new OpenIddictRequest() - }; - - var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin); - var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) - { - Principal = principal, - TokenId = "token-tenant" - }; - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected); - Assert.Equal("tenant-alpha", principal.FindFirstValue(StellaOpsClaimTypes.Tenant)); - Assert.Equal("tenant-alpha", metadataAccessor.GetMetadata()?.Tenant); - Assert.Equal(StellaOpsTenancyDefaults.AnyProject, principal.FindFirstValue(StellaOpsClaimTypes.Project)); - Assert.Equal(StellaOpsTenancyDefaults.AnyProject, metadataAccessor.GetMetadata()?.Project); - 
Assert.Equal(StellaOpsTenancyDefaults.AnyProject, principal.FindFirstValue(StellaOpsClaimTypes.Project)); - Assert.Equal(StellaOpsTenancyDefaults.AnyProject, metadataAccessor.GetMetadata()?.Project); - } - - [Fact] - public async Task ValidateAccessTokenHandler_Rejects_WhenTenantDiffersFromToken() - { - var clientDocument = CreateClient(tenant: "tenant-alpha"); - var tokenStore = new TestTokenStore - { - Inserted = new AuthorityTokenDocument - { - TokenId = "token-tenant", - Status = "valid", - ClientId = clientDocument.ClientId, - Tenant = "tenant-alpha" - } - }; - - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var auditSink = new TestAuthEventSink(); - var sessionAccessor = new NullMongoSessionAccessor(); - var handler = new ValidateAccessTokenHandler( - tokenStore, - sessionAccessor, - new TestClientStore(clientDocument), - CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)), - metadataAccessor, - auditSink, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var transaction = new OpenIddictServerTransaction - { - Options = new OpenIddictServerOptions(), - EndpointType = OpenIddictServerEndpointType.Token, - Request = new OpenIddictRequest() - }; - - var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin); - principal.Identities.First().AddClaim(new Claim(StellaOpsClaimTypes.Tenant, "tenant-beta")); - var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) - { - Principal = principal, - TokenId = "token-tenant" - }; - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error); - Assert.Equal("The token tenant does not match the issued tenant.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateAccessTokenHandler_AssignsTenant_FromClientWhenTokenMissing() - { - var clientDocument = CreateClient(tenant: 
"tenant-alpha"); - var tokenStore = new TestTokenStore - { - Inserted = new AuthorityTokenDocument - { - TokenId = "token-tenant", - Status = "valid", - ClientId = clientDocument.ClientId - } - }; - - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var auditSink = new TestAuthEventSink(); - var sessionAccessor = new NullMongoSessionAccessor(); - var handler = new ValidateAccessTokenHandler( - tokenStore, - sessionAccessor, - new TestClientStore(clientDocument), - CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)), - metadataAccessor, - auditSink, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var transaction = new OpenIddictServerTransaction - { - Options = new OpenIddictServerOptions(), - EndpointType = OpenIddictServerEndpointType.Token, - Request = new OpenIddictRequest() - }; - - var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin); - var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) - { - Principal = principal, - TokenId = "token-tenant" - }; - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected); - Assert.Equal("tenant-alpha", principal.FindFirstValue(StellaOpsClaimTypes.Tenant)); - Assert.Equal("tenant-alpha", metadataAccessor.GetMetadata()?.Tenant); - } - - [Fact] - public async Task ValidateAccessTokenHandler_Rejects_WhenClientTenantDiffers() - { - var clientDocument = CreateClient(tenant: "tenant-beta"); - var tokenStore = new TestTokenStore - { - Inserted = new AuthorityTokenDocument - { - TokenId = "token-tenant", - Status = "valid", - ClientId = clientDocument.ClientId - } - }; - - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var auditSink = new TestAuthEventSink(); - var sessionAccessor = new NullMongoSessionAccessor(); - var handler = new ValidateAccessTokenHandler( - tokenStore, - sessionAccessor, - new TestClientStore(clientDocument), - 
CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)), - metadataAccessor, - auditSink, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var transaction = new OpenIddictServerTransaction - { - Options = new OpenIddictServerOptions(), - EndpointType = OpenIddictServerEndpointType.Token, - Request = new OpenIddictRequest() - }; - - var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin); - principal.Identities.First().AddClaim(new Claim(StellaOpsClaimTypes.Tenant, "tenant-alpha")); - var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) - { - Principal = principal, - TokenId = "token-tenant" - }; - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error); - Assert.Equal("The token tenant does not match the registered client tenant.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateAccessTokenHandler_EnrichesClaims_WhenProviderAvailable() - { - var clientDocument = CreateClient(); - var userDescriptor = new AuthorityUserDescriptor("user-1", "alice", displayName: "Alice", requiresPasswordReset: false); - - var plugin = CreatePlugin( - name: "standard", - supportsClientProvisioning: true, - descriptor: CreateDescriptor(clientDocument), - user: userDescriptor); - - var registry = CreateRegistryFromPlugins(plugin); - - var metadataAccessorSuccess = new TestRateLimiterMetadataAccessor(); - var auditSinkSuccess = new TestAuthEventSink(); - var sessionAccessor = new NullMongoSessionAccessor(); - var handler = new ValidateAccessTokenHandler( - new TestTokenStore(), - sessionAccessor, - new TestClientStore(clientDocument), - registry, - metadataAccessorSuccess, - auditSinkSuccess, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var transaction = new OpenIddictServerTransaction - { - Options = new 
OpenIddictServerOptions(), - EndpointType = OpenIddictServerEndpointType.Token, - Request = new OpenIddictRequest() - }; - - var principal = CreatePrincipal(clientDocument.ClientId, "token-123", plugin.Name, subject: userDescriptor.SubjectId); - var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) - { - Principal = principal - }; - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected); - Assert.Contains(principal.Claims, claim => claim.Type == "enriched" && claim.Value == "true"); - } - - [Fact] - public async Task ValidateAccessTokenHandler_AddsConfirmationClaim_ForMtlsToken() - { - var tokenDocument = new AuthorityTokenDocument - { - TokenId = "token-mtls", - Status = "valid", - ClientId = "mtls-client", - SenderConstraint = AuthoritySenderConstraintKinds.Mtls, - SenderKeyThumbprint = "thumb-print" - }; - - var tokenStore = new TestTokenStore - { - Inserted = tokenDocument - }; - - var clientDocument = CreateClient(); - var registry = CreateRegistry(withClientProvisioning: false, clientDescriptor: null); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var auditSink = new TestAuthEventSink(); - var sessionAccessor = new NullMongoSessionAccessor(); - var handler = new ValidateAccessTokenHandler( - tokenStore, - sessionAccessor, - new TestClientStore(clientDocument), - registry, - metadataAccessor, - auditSink, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var transaction = new OpenIddictServerTransaction - { - Options = new OpenIddictServerOptions(), - EndpointType = OpenIddictServerEndpointType.Introspection, - Request = new OpenIddictRequest() - }; - - var principal = CreatePrincipal(clientDocument.ClientId, tokenDocument.TokenId, clientDocument.Plugin); - var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) - { - Principal = principal, - TokenId = tokenDocument.TokenId - }; - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected); - 
var confirmation = context.Principal?.GetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType); - Assert.False(string.IsNullOrWhiteSpace(confirmation)); - using var json = JsonDocument.Parse(confirmation!); - Assert.Equal(tokenDocument.SenderKeyThumbprint, json.RootElement.GetProperty("x5t#S256").GetString()); - } - - [Fact] - public async Task ValidateAccessTokenHandler_EmitsReplayAudit_WhenStoreDetectsSuspectedReplay() - { - var tokenStore = new TestTokenStore(); - tokenStore.Inserted = new AuthorityTokenDocument - { - TokenId = "token-replay", - Status = "valid", - ClientId = "agent", - Devices = new List - { - new BsonDocument - { - { "remoteAddress", "10.0.0.1" }, - { "userAgent", "agent/1.0" }, - { "firstSeen", BsonDateTime.Create(DateTimeOffset.UtcNow.AddMinutes(-15)) }, - { "lastSeen", BsonDateTime.Create(DateTimeOffset.UtcNow.AddMinutes(-5)) }, - { "useCount", 2 } - } - } - }; - - tokenStore.UsageCallback = (remote, agent) => new TokenUsageUpdateResult(TokenUsageUpdateStatus.SuspectedReplay, remote, agent); - - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var metadata = metadataAccessor.GetMetadata(); - if (metadata is not null) - { - metadata.RemoteIp = "203.0.113.7"; - metadata.UserAgent = "agent/2.0"; - } - - var clientDocument = CreateClient(); - clientDocument.ClientId = "agent"; - var auditSink = new TestAuthEventSink(); - var registry = CreateRegistry(withClientProvisioning: false, clientDescriptor: null); - var sessionAccessorReplay = new NullMongoSessionAccessor(); - var handler = new ValidateAccessTokenHandler( - tokenStore, - sessionAccessorReplay, - new TestClientStore(clientDocument), - registry, - metadataAccessor, - auditSink, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var transaction = new OpenIddictServerTransaction - { - Options = new OpenIddictServerOptions(), - EndpointType = OpenIddictServerEndpointType.Introspection, - Request = new OpenIddictRequest() - }; - - var principal = 
CreatePrincipal("agent", "token-replay", "standard"); - var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) - { - Principal = principal, - TokenId = "token-replay" - }; - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected); - var replayEvent = Assert.Single(auditSink.Events, record => record.EventType == "authority.token.replay.suspected"); - Assert.Equal(AuthEventOutcome.Error, replayEvent.Outcome); - Assert.NotNull(replayEvent.Network); - Assert.Equal("203.0.113.7", replayEvent.Network?.RemoteAddress.Value); - Assert.Contains(replayEvent.Properties, property => property.Name == "token.devices.total"); - } -} - -public class AuthorityClientCertificateValidatorTests -{ - [Fact] - public async Task ValidateAsync_Rejects_WhenSanTypeNotAllowed() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - options.Security.SenderConstraints.Mtls.Enabled = true; - options.Security.SenderConstraints.Mtls.RequireChainValidation = false; - options.Security.SenderConstraints.Mtls.AllowedSanTypes.Clear(); - options.Security.SenderConstraints.Mtls.AllowedSanTypes.Add("uri"); - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost/test"; - - using var rsa = RSA.Create(2048); - var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - var sanBuilder = new SubjectAlternativeNameBuilder(); - sanBuilder.AddDnsName("client.mtls.test"); - request.CertificateExtensions.Add(sanBuilder.Build()); - using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(5)); - - var clientDocument = CreateClient(); - clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; - clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding - { - Thumbprint = 
Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)) - }); - - var httpContext = new DefaultHttpContext(); - httpContext.Connection.ClientCertificate = certificate; - - var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); - var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); - - Assert.False(result.Succeeded); - Assert.Equal("certificate_san_type", result.Error); - } - - [Fact] - public async Task ValidateAsync_AllowsBindingWithinRotationGrace() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - options.Security.SenderConstraints.Mtls.Enabled = true; - options.Security.SenderConstraints.Mtls.RequireChainValidation = false; - options.Security.SenderConstraints.Mtls.RotationGrace = TimeSpan.FromMinutes(5); - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost/test"; - - using var rsa = RSA.Create(2048); - var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - var sanBuilder = new SubjectAlternativeNameBuilder(); - sanBuilder.AddDnsName("client.mtls.test"); - request.CertificateExtensions.Add(sanBuilder.Build()); - using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(10)); - - var thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)); - - var clientDocument = CreateClient(); - clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; - clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding - { - Thumbprint = thumbprint, - NotBefore = TimeProvider.System.GetUtcNow().AddMinutes(2) - }); - - var httpContext = new DefaultHttpContext(); - httpContext.Connection.ClientCertificate = certificate; - - var validator = new 
AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); - var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); - - Assert.True(result.Succeeded); - Assert.Equal(thumbprint, result.HexThumbprint); - } - - [Fact] - public async Task ValidateAsync_Rejects_WhenBindingSubjectMismatch() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - options.Security.SenderConstraints.Mtls.Enabled = true; - options.Security.SenderConstraints.Mtls.RequireChainValidation = false; - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost/test"; - - using var rsa = RSA.Create(2048); - var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - var sanBuilder = new SubjectAlternativeNameBuilder(); - sanBuilder.AddDnsName("client.mtls.test"); - request.CertificateExtensions.Add(sanBuilder.Build()); - using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(5)); - - var clientDocument = CreateClient(); - clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; - clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding - { - Thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)), - Subject = "CN=different-client" - }); - - var httpContext = new DefaultHttpContext(); - httpContext.Connection.ClientCertificate = certificate; - - var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); - var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); - - Assert.False(result.Succeeded); - Assert.Equal("certificate_binding_subject_mismatch", result.Error); - } - - [Fact] - public async Task 
ValidateAsync_Rejects_WhenBindingSansMissing() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - options.Security.SenderConstraints.Mtls.Enabled = true; - options.Security.SenderConstraints.Mtls.RequireChainValidation = false; - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost/test"; - - using var rsa = RSA.Create(2048); - var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - var sanBuilder = new SubjectAlternativeNameBuilder(); - sanBuilder.AddDnsName("client.mtls.test"); - request.CertificateExtensions.Add(sanBuilder.Build()); - using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(5)); - - var clientDocument = CreateClient(); - clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; - clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding - { - Thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)), - SubjectAlternativeNames = new List { "spiffe://client" } - }); - - var httpContext = new DefaultHttpContext(); - httpContext.Connection.ClientCertificate = certificate; - - var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); - var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); - - Assert.False(result.Succeeded); - Assert.Equal("certificate_binding_san_mismatch", result.Error); - } -} - -internal sealed class TestClientStore : IAuthorityClientStore -{ - private readonly Dictionary clients = new(StringComparer.OrdinalIgnoreCase); - - public TestClientStore(params AuthorityClientDocument[] documents) - { - foreach (var document in documents) - { - clients[document.ClientId] = document; - } - } - - public ValueTask 
FindByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - clients.TryGetValue(clientId, out var document); - return ValueTask.FromResult(document); - } - - public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - clients[document.ClientId] = document; - return ValueTask.CompletedTask; - } - - public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(clients.Remove(clientId)); -} - -internal sealed class TestTokenStore : IAuthorityTokenStore -{ - public AuthorityTokenDocument? Inserted { get; set; } - - public Func? UsageCallback { get; set; } - - public ValueTask InsertAsync(AuthorityTokenDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - Inserted = document; - return ValueTask.CompletedTask; - } - - public ValueTask FindByTokenIdAsync(string tokenId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(Inserted is not null && string.Equals(Inserted.TokenId, tokenId, StringComparison.OrdinalIgnoreCase) ? Inserted : null); - - public ValueTask FindByReferenceIdAsync(string referenceId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(null); - - public ValueTask UpdateStatusAsync(string tokenId, string status, DateTimeOffset? revokedAt, string? reason, string? reasonDescription, IReadOnlyDictionary? metadata, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.CompletedTask; - - public ValueTask DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(0L); - - public ValueTask RecordUsageAsync(string tokenId, string? remoteAddress, string? 
userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(UsageCallback?.Invoke(remoteAddress, userAgent) ?? new TokenUsageUpdateResult(TokenUsageUpdateStatus.Recorded, remoteAddress, userAgent)); - - public ValueTask> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult>(Array.Empty()); -} - -internal sealed class TestClaimsEnricher : IClaimsEnricher -{ - public ValueTask EnrichAsync(ClaimsIdentity identity, AuthorityClaimsEnrichmentContext context, CancellationToken cancellationToken) - { - if (!identity.HasClaim(c => c.Type == "enriched")) - { - identity.AddClaim(new Claim("enriched", "true")); - } - - return ValueTask.CompletedTask; - } -} - -internal sealed class TestUserCredentialStore : IUserCredentialStore -{ - private readonly AuthorityUserDescriptor? user; - - public TestUserCredentialStore(AuthorityUserDescriptor? user) - { - this.user = user; - } - - public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.InvalidCredentials)); - - public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityPluginOperationResult.Failure("unsupported", "not implemented")); - - public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) - => ValueTask.FromResult(user); -} - -internal sealed class TestClientProvisioningStore : IClientProvisioningStore -{ - private readonly AuthorityClientDescriptor? descriptor; - - public TestClientProvisioningStore(AuthorityClientDescriptor? 
descriptor) - { - this.descriptor = descriptor; - } - - public ValueTask> CreateOrUpdateAsync(AuthorityClientRegistration registration, CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityPluginOperationResult.Failure("unsupported", "not implemented")); - - public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken) - => ValueTask.FromResult(descriptor); - - public ValueTask DeleteAsync(string clientId, CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityPluginOperationResult.Success()); -} - -internal sealed class TestIdentityProviderPlugin : IIdentityProviderPlugin -{ - public TestIdentityProviderPlugin( - AuthorityPluginContext context, - IUserCredentialStore credentialStore, - IClaimsEnricher claimsEnricher, - IClientProvisioningStore? clientProvisioning, - AuthorityIdentityProviderCapabilities capabilities) - { - Context = context; - Credentials = credentialStore; - ClaimsEnricher = claimsEnricher; - ClientProvisioning = clientProvisioning; - Capabilities = capabilities; - } - - public string Name => Context.Manifest.Name; - - public string Type => Context.Manifest.Type; - - public AuthorityPluginContext Context { get; } - - public IUserCredentialStore Credentials { get; } - - public IClaimsEnricher ClaimsEnricher { get; } - - public IClientProvisioningStore? 
ClientProvisioning { get; } - - public AuthorityIdentityProviderCapabilities Capabilities { get; } - - public ValueTask CheckHealthAsync(CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy()); -} - -internal sealed class TestAuthEventSink : IAuthEventSink -{ - public List Events { get; } = new(); - - public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) - { - Events.Add(record); - return ValueTask.CompletedTask; - } -} - -internal sealed class TestRateLimiterMetadataAccessor : IAuthorityRateLimiterMetadataAccessor -{ - private readonly AuthorityRateLimiterMetadata metadata = new(); - - public AuthorityRateLimiterMetadata? GetMetadata() => metadata; - - public void SetClientId(string? clientId) => metadata.ClientId = clientId; - - public void SetSubjectId(string? subjectId) => metadata.SubjectId = subjectId; - - public void SetTenant(string? tenant) - { - metadata.Tenant = string.IsNullOrWhiteSpace(tenant) ? null : tenant.Trim().ToLowerInvariant(); - metadata.SetTag("authority.tenant", metadata.Tenant); - } - - public void SetProject(string? project) - { - metadata.Project = string.IsNullOrWhiteSpace(project) ? null : project.Trim().ToLowerInvariant(); - metadata.SetTag("authority.project", metadata.Project); - } - - public void SetTag(string name, string? 
value) => metadata.SetTag(name, value); -} - -internal sealed class NoopCertificateValidator : IAuthorityClientCertificateValidator -{ - public ValueTask ValidateAsync(HttpContext httpContext, AuthorityClientDocument client, CancellationToken cancellationToken) - { - var binding = new AuthorityClientCertificateBinding - { - Thumbprint = "stub" - }; - - return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Success("stub", "stub", binding)); - } -} - -internal sealed class RecordingCertificateValidator : IAuthorityClientCertificateValidator -{ - public bool Invoked { get; private set; } - - public ValueTask ValidateAsync(HttpContext httpContext, AuthorityClientDocument client, CancellationToken cancellationToken) - { - Invoked = true; - - if (httpContext.Connection.ClientCertificate is null) - { - return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("client_certificate_required")); - } - - AuthorityClientCertificateBinding binding; - if (client.CertificateBindings.Count > 0) - { - binding = client.CertificateBindings[0]; - } - else - { - binding = new AuthorityClientCertificateBinding { Thumbprint = "stub" }; - } - - return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Success("stub", binding.Thumbprint, binding)); - } -} - -internal sealed class NullMongoSessionAccessor : IAuthorityMongoSessionAccessor -{ - public ValueTask GetSessionAsync(CancellationToken cancellationToken = default) - => ValueTask.FromResult(null!); - - public ValueTask DisposeAsync() => ValueTask.CompletedTask; -} - -internal static class TestHelpers -{ - public static StellaOpsAuthorityOptions CreateAuthorityOptions(Action? 
configure = null) - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost/test"; - - configure?.Invoke(options); - return options; - } - - public static AuthorityClientDocument CreateClient( - string clientId = "concelier", - string? secret = "s3cr3t!", - string clientType = "confidential", - string allowedGrantTypes = "client_credentials", - string allowedScopes = "jobs:read", - string allowedAudiences = "", - string? tenant = null) - { - var document = new AuthorityClientDocument - { - ClientId = clientId, - ClientType = clientType, - SecretHash = secret is null ? null : AuthoritySecretHasher.ComputeHash(secret), - Plugin = "standard", - Properties = new Dictionary(StringComparer.OrdinalIgnoreCase) - { - [AuthorityClientMetadataKeys.AllowedGrantTypes] = allowedGrantTypes, - [AuthorityClientMetadataKeys.AllowedScopes] = allowedScopes - } - }; - - if (!string.IsNullOrWhiteSpace(allowedAudiences)) - { - document.Properties[AuthorityClientMetadataKeys.Audiences] = allowedAudiences; - } - - var normalizedTenant = NormalizeTenant(tenant); - if (normalizedTenant is not null) - { - document.Properties[AuthorityClientMetadataKeys.Tenant] = normalizedTenant; - } - - return document; - } - - private static string? NormalizeTenant(string? value) - => string.IsNullOrWhiteSpace(value) ? null : value.Trim().ToLowerInvariant(); - - public static AuthorityClientDescriptor CreateDescriptor(AuthorityClientDocument document) - { - var allowedGrantTypes = document.Properties.TryGetValue(AuthorityClientMetadataKeys.AllowedGrantTypes, out var grants) ? grants?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty(); - var allowedScopes = document.Properties.TryGetValue(AuthorityClientMetadataKeys.AllowedScopes, out var scopes) ? 
scopes?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty(); - var allowedAudiences = document.Properties.TryGetValue(AuthorityClientMetadataKeys.Audiences, out var audiences) ? audiences?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty(); - - return new AuthorityClientDescriptor( - document.ClientId, - document.DisplayName, - confidential: string.Equals(document.ClientType, "confidential", StringComparison.OrdinalIgnoreCase), - allowedGrantTypes, - allowedScopes, - allowedAudiences, - redirectUris: Array.Empty(), - postLogoutRedirectUris: Array.Empty(), - properties: document.Properties); - } - - public static AuthorityIdentityProviderRegistry CreateRegistry(bool withClientProvisioning, AuthorityClientDescriptor? clientDescriptor) - { - var plugin = CreatePlugin( - name: "standard", - supportsClientProvisioning: withClientProvisioning, - descriptor: clientDescriptor, - user: null); - - return CreateRegistryFromPlugins(plugin); - } - - public static TestIdentityProviderPlugin CreatePlugin( - string name, - bool supportsClientProvisioning, - AuthorityClientDescriptor? descriptor, - AuthorityUserDescriptor? user) - { - var capabilities = supportsClientProvisioning - ? new[] { AuthorityPluginCapabilities.ClientProvisioning } - : Array.Empty(); - - var manifest = new AuthorityPluginManifest( - name, - "standard", - true, - null, - null, - capabilities, - new Dictionary(StringComparer.OrdinalIgnoreCase), - $"{name}.yaml"); - - var context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build()); - - return new TestIdentityProviderPlugin( - context, - new TestUserCredentialStore(user), - new TestClaimsEnricher(), - supportsClientProvisioning ? 
new TestClientProvisioningStore(descriptor) : null, - new AuthorityIdentityProviderCapabilities( - SupportsPassword: true, - SupportsMfa: false, - SupportsClientProvisioning: supportsClientProvisioning)); - } - - public static AuthorityIdentityProviderRegistry CreateRegistryFromPlugins(params IIdentityProviderPlugin[] plugins) - { - var services = new ServiceCollection(); - services.AddLogging(); - foreach (var plugin in plugins) - { - services.AddSingleton(plugin); - } - - var provider = services.BuildServiceProvider(); - return new AuthorityIdentityProviderRegistry(provider, NullLogger.Instance); - } - - public static OpenIddictServerTransaction CreateTokenTransaction(string clientId, string? secret, string? scope) - { - var request = new OpenIddictRequest - { - GrantType = OpenIddictConstants.GrantTypes.ClientCredentials, - ClientId = clientId, - ClientSecret = secret - }; - - if (!string.IsNullOrWhiteSpace(scope)) - { - request.Scope = scope; - } - - return new OpenIddictServerTransaction - { - EndpointType = OpenIddictServerEndpointType.Token, - Options = new OpenIddictServerOptions(), - Request = request - }; - } - - public static string ConvertThumbprintToString(object thumbprint) - => thumbprint switch - { - string value => value, - byte[] bytes => Base64UrlEncoder.Encode(bytes), - _ => throw new InvalidOperationException("Unsupported thumbprint representation.") - }; - - public static string CreateDpopProof(ECDsaSecurityKey key, string method, string url, long issuedAt, string? nonce = null) - { - var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(key); - jwk.KeyId ??= key.KeyId ?? Guid.NewGuid().ToString("N"); - - var signingCredentials = new SigningCredentials(key, SecurityAlgorithms.EcdsaSha256); - var header = new JwtHeader(signingCredentials) - { - ["typ"] = "dpop+jwt", - ["jwk"] = new Dictionary - { - ["kty"] = jwk.Kty, - ["crv"] = jwk.Crv, - ["x"] = jwk.X, - ["y"] = jwk.Y, - ["kid"] = jwk.Kid ?? 
jwk.KeyId - } - }; - - var payload = new JwtPayload - { - ["htm"] = method.ToUpperInvariant(), - ["htu"] = url, - ["iat"] = issuedAt, - ["jti"] = Guid.NewGuid().ToString("N") - }; - - if (!string.IsNullOrWhiteSpace(nonce)) - { - payload["nonce"] = nonce; - } - - var token = new JwtSecurityToken(header, payload); - return new JwtSecurityTokenHandler().WriteToken(token); - } - - public static X509Certificate2 CreateTestCertificate(string subjectName) - { - using var rsa = RSA.Create(2048); - var request = new CertificateRequest(subjectName, rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - return request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddHours(1)); - } - - public static ClaimsPrincipal CreatePrincipal(string clientId, string tokenId, string provider, string? subject = null) - { - var identity = new ClaimsIdentity(OpenIddictServerAspNetCoreDefaults.AuthenticationScheme); - identity.AddClaim(new Claim(OpenIddictConstants.Claims.ClientId, clientId)); - identity.AddClaim(new Claim(OpenIddictConstants.Claims.JwtId, tokenId)); - identity.AddClaim(new Claim(StellaOpsClaimTypes.IdentityProvider, provider)); - identity.AddClaim(new Claim(StellaOpsClaimTypes.Project, StellaOpsTenancyDefaults.AnyProject)); - - if (!string.IsNullOrWhiteSpace(subject)) - { - identity.AddClaim(new Claim(OpenIddictConstants.Claims.Subject, subject)); - } - - return new ClaimsPrincipal(identity); - } -} + + [Fact] + public async Task ValidateClientCredentials_RejectsOrchQuota_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "orch-admin", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:quota orch:read"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + 
TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:quota"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.QuotaReasonParameterName, "raise export center quota for tenant"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.QuotaTicketParameterName, "CHG-7721"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Orchestrator scopes require a tenant assignment.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsOrchQuota_WhenReasonMissing() + { + var clientDocument = CreateClient( + clientId: "orch-admin", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:quota orch:read", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:quota"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, 
context.Error); + Assert.Equal("Quota changes require 'quota_reason'.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsOrchQuota_WhenReasonTooLong() + { + var clientDocument = CreateClient( + clientId: "orch-admin", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:quota", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var longReason = new string('a', 257); + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:quota"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.QuotaReasonParameterName, longReason); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); + Assert.Equal("Quota reason must not exceed 256 characters.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsOrchQuota_WhenTicketTooLong() + { + var clientDocument = CreateClient( + clientId: "orch-admin", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:quota", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new 
TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:quota"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.QuotaReasonParameterName, "increase concurrency to unblock digests"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.QuotaTicketParameterName, new string('b', 129)); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); + Assert.Equal("Quota ticket must not exceed 128 characters.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsOrchQuota_WithReasonOnly() + { + var clientDocument = CreateClient( + clientId: "orch-admin", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:quota orch:read", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:quota"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.QuotaReasonParameterName, "grant five extra concurrent backfills"); + var context = new 
OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "orch:quota" }, grantedScopes); + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + var reason = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.QuotaReasonProperty]); + Assert.Equal("grant five extra concurrent backfills", reason); + Assert.False(context.Transaction.Properties.ContainsKey(AuthorityOpenIddictConstants.QuotaTicketProperty)); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsOrchQuota_WithReasonAndTicket() + { + var clientDocument = CreateClient( + clientId: "orch-admin", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:quota", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:quota"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.QuotaReasonParameterName, "temporary burst for export audit"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.QuotaTicketParameterName, "RFC-5541"); + var context = new 
OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "orch:quota" }, grantedScopes); + var reason = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.QuotaReasonProperty]); + Assert.Equal("temporary burst for export audit", reason); + var ticket = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.QuotaTicketProperty]); + Assert.Equal("RFC-5541", ticket); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsExportViewer_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "export-viewer", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "export.viewer"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + TestHelpers.CreateAuthorityOptions(), + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.viewer"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Export scopes require a tenant assignment.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.ExportViewer, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); 
+ } + + [Fact] + public async Task ValidateClientCredentials_AllowsExportViewer_WithTenant() + { + var clientDocument = CreateClient( + clientId: "export-viewer", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "export.viewer", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + TestHelpers.CreateAuthorityOptions(), + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.viewer"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "export.viewer" }, grantedScopes); + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsPacksRun_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "task-runner", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "packs.run packs.read"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new 
TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + TestHelpers.CreateAuthorityOptions(), + NullLogger.Instance); + + string? violationTag = null; + using var listener = new ActivityListener + { + ShouldListenTo = source => source.Name == TestActivitySource.Name, + Sample = (ref ActivityCreationOptions options) => ActivitySamplingResult.AllDataAndRecorded, + ActivityStopped = activity => + { + violationTag ??= activity.GetTagItem("authority.pack_scope_violation") as string; + } + }; + ActivitySource.AddActivityListener(listener); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "packs.run"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Pack scopes require a tenant assignment.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.PacksRun, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + Assert.Equal(StellaOpsScopes.PacksRun, violationTag); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsPacksRun_WithTenant() + { + var clientDocument = CreateClient( + clientId: "task-runner", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "packs.run packs.read", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + TestHelpers.CreateAuthorityOptions(), + NullLogger.Instance); + + var transaction = 
CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "packs.run"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "packs.run" }, grantedScopes); + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsExportAdmin_WhenReasonMissing() + { + var clientDocument = CreateClient( + clientId: "export-admin", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "export.admin", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + TestHelpers.CreateAuthorityOptions(), + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.admin"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminTicketParameterName, "INC-9001"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); + Assert.Equal("Export admin actions require 'export_reason'.", context.ErrorDescription); + } + + [Fact] + public async Task 
ValidateClientCredentials_RejectsExportAdmin_WhenTicketMissing() + { + var clientDocument = CreateClient( + clientId: "export-admin", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "export.admin", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + TestHelpers.CreateAuthorityOptions(), + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.admin"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminReasonParameterName, "Rotate encryption keys after incident postmortem"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); + Assert.Equal("Export admin actions require 'export_ticket'.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsExportAdmin_WithReasonAndTicket() + { + var clientDocument = CreateClient( + clientId: "export-admin", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "export.admin", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + 
TestHelpers.CreateAuthorityOptions(), + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.admin"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminReasonParameterName, "Rotate encryption keys after incident postmortem"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminTicketParameterName, "INC-9001"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "export.admin" }, grantedScopes); + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + var reason = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ExportAdminReasonProperty]); + Assert.Equal("Rotate encryption keys after incident postmortem", reason); + var ticket = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ExportAdminTicketProperty]); + Assert.Equal("INC-9001", ticket); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsGraphWrite_WhenServiceIdentityMissing() + { + var clientDocument = CreateClient( + clientId: "cartographer-service", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "graph:write graph:read", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new 
TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.UnauthorizedClient, context.Error); + Assert.Equal("Scope 'graph:write' is reserved for the Cartographer service identity.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsGraphWrite_WhenServiceIdentityMismatch() + { + var clientDocument = CreateClient( + clientId: "cartographer-service", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "graph:write graph:read", + tenant: "tenant-default"); + clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.PolicyEngine; + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.UnauthorizedClient, context.Error); + Assert.Equal("Scope 'graph:write' is reserved for the Cartographer service identity.", 
context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsGraphScopes_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "graph-api", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "graph:read graph:export"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Graph scopes require a tenant assignment.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsGraphRead_WithTenant() + { + var clientDocument = CreateClient( + clientId: "graph-api", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "graph:read graph:export", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + 
var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "graph:read" }, grantedScopes); + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsOrchRead_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "orch-dashboard", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:read"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Orchestrator scopes require a tenant assignment.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsOrchRead_WithTenant() + { + var clientDocument = CreateClient( + clientId: "orch-dashboard", 
+ secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:read", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "orch:read" }, grantedScopes); + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsAdvisoryScopes_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "concelier-ingestor", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "advisory:ingest advisory:read"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new 
HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:ingest"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Advisory scopes require a tenant assignment.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsVexScopes_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "excitor-ingestor", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "vex:ingest vex:read"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "vex:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("VEX scopes require a tenant assignment.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsAdvisoryScopes_WithTenant() + { + var clientDocument = CreateClient( + clientId: "concelier-ingestor", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "advisory:ingest advisory:read aoc:verify", + tenant: "tenant-default"); 
+ + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:read aoc:verify"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "advisory:read", "aoc:verify" }, grantedScopes); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsGraphWrite_ForCartographerServiceIdentity() + { + var clientDocument = CreateClient( + clientId: "cartographer-service", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "graph:write graph:read", + tenant: "tenant-default"); + clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.Cartographer; + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = 
CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "graph:write" }, grantedScopes); + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + } + + [Fact] + public async Task ValidateClientCredentials_EmitsTamperAuditEvent_WhenUnexpectedParametersPresent() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var sink = new TestAuthEventSink(); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + sink, + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + transaction.Request?.SetParameter("unexpected_param", "value"); + + await handler.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); + + var tamperEvent = Assert.Single(sink.Events, record => record.EventType == "authority.token.tamper"); + Assert.Contains(tamperEvent.Properties, property => + string.Equals(property.Name, "request.unexpected_parameter", StringComparison.OrdinalIgnoreCase) && + string.Equals(property.Value.Value, "unexpected_param", 
StringComparison.OrdinalIgnoreCase)); + } + + [Fact] + public async Task ValidateDpopProof_AllowsSenderConstrainedClient() + { + var options = TestHelpers.CreateAuthorityOptions(opts => + { + opts.Security.SenderConstraints.Dpop.Enabled = true; + opts.Security.SenderConstraints.Dpop.Nonce.Enabled = false; + }); + + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read"); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Dpop; + clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Dpop; + + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var securityKey = new ECDsaSecurityKey(ecdsa) + { + KeyId = Guid.NewGuid().ToString("N") + }; + var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(securityKey); + var expectedThumbprint = ConvertThumbprintToString(jwk.ComputeJwkThumbprint()); + + var clientStore = new TestClientStore(clientDocument); + var auditSink = new TestAuthEventSink(); + var rateMetadata = new TestRateLimiterMetadataAccessor(); + + var dpopValidator = new DpopProofValidator( + Options.Create(new DpopValidationOptions()), + new InMemoryDpopReplayCache(TimeProvider.System), + TimeProvider.System, + NullLogger.Instance); + + var nonceStore = new InMemoryDpopNonceStore(TimeProvider.System, NullLogger.Instance); + + var dpopHandler = new ValidateDpopProofHandler( + options, + clientStore, + dpopValidator, + nonceStore, + rateMetadata, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + transaction.Options = new OpenIddictServerOptions(); + + var httpContext = new DefaultHttpContext(); + httpContext.Request.Method = "POST"; + httpContext.Request.Scheme = "https"; + httpContext.Request.Host = new HostString("authority.test"); + httpContext.Request.Path = "/token"; + + var 
now = TimeProvider.System.GetUtcNow(); + var proof = TestHelpers.CreateDpopProof(securityKey, httpContext.Request.Method, httpContext.Request.GetDisplayUrl(), now.ToUnixTimeSeconds()); + httpContext.Request.Headers["DPoP"] = proof; + + transaction.Properties[typeof(HttpContext).FullName!] = httpContext; + + var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + await dpopHandler.HandleAsync(validateContext); + + Assert.False(validateContext.IsRejected); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var validateHandler = new ValidateClientCredentialsHandler( + clientStore, + registry, + TestActivitySource, + auditSink, + rateMetadata, + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + await validateHandler.HandleAsync(validateContext); + Assert.False(validateContext.IsRejected); + + var tokenStore = new TestTokenStore(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handleHandler = new HandleClientCredentialsHandler( + registry, + tokenStore, + sessionAccessor, + rateMetadata, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var handleContext = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); + await handleHandler.HandleAsync(handleContext); + Assert.True(handleContext.IsRequestHandled); + + var persistHandler = new PersistTokensHandler( + tokenStore, + sessionAccessor, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var signInContext = new OpenIddictServerEvents.ProcessSignInContext(transaction) + { + Principal = handleContext.Principal, + AccessTokenPrincipal = handleContext.Principal + }; + + await persistHandler.HandleAsync(signInContext); + + var confirmationClaim = handleContext.Principal?.GetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType); + 
Assert.False(string.IsNullOrWhiteSpace(confirmationClaim)); + + using (var confirmationJson = JsonDocument.Parse(confirmationClaim!)) + { + Assert.Equal(expectedThumbprint, confirmationJson.RootElement.GetProperty("jkt").GetString()); + } + + Assert.NotNull(tokenStore.Inserted); + Assert.Equal(AuthoritySenderConstraintKinds.Dpop, tokenStore.Inserted!.SenderConstraint); + Assert.Equal(expectedThumbprint, tokenStore.Inserted!.SenderKeyThumbprint); + } + + [Fact] + public async Task ValidateDpopProof_IssuesNonceChallenge_WhenNonceMissing() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Security.SenderConstraints.Dpop.Enabled = true; + options.Security.SenderConstraints.Dpop.Nonce.Enabled = true; + options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences.Clear(); + options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences.Add("signer"); + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + Assert.Contains("signer", options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences); + + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read", + allowedAudiences: "signer"); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Dpop; + clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Dpop; + + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var securityKey = new ECDsaSecurityKey(ecdsa) + { + KeyId = Guid.NewGuid().ToString("N") + }; + + var clientStore = new TestClientStore(clientDocument); + var auditSink = new TestAuthEventSink(); + var rateMetadata = new TestRateLimiterMetadataAccessor(); + + var dpopValidator = new DpopProofValidator( + Options.Create(new DpopValidationOptions()), + new InMemoryDpopReplayCache(TimeProvider.System), + 
TimeProvider.System, + NullLogger.Instance); + + var nonceStore = new InMemoryDpopNonceStore(TimeProvider.System, NullLogger.Instance); + + var dpopHandler = new ValidateDpopProofHandler( + options, + clientStore, + dpopValidator, + nonceStore, + rateMetadata, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + transaction.Options = new OpenIddictServerOptions(); + + var httpContext = new DefaultHttpContext(); + httpContext.Request.Method = "POST"; + httpContext.Request.Scheme = "https"; + httpContext.Request.Host = new HostString("authority.test"); + httpContext.Request.Path = "/token"; + + var now = TimeProvider.System.GetUtcNow(); + var proof = TestHelpers.CreateDpopProof(securityKey, httpContext.Request.Method, httpContext.Request.GetDisplayUrl(), now.ToUnixTimeSeconds()); + httpContext.Request.Headers["DPoP"] = proof; + + transaction.Properties[typeof(HttpContext).FullName!] 
= httpContext; + + var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + await dpopHandler.HandleAsync(validateContext); + + Assert.True(validateContext.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, validateContext.Error); + var authenticateHeader = Assert.Single(httpContext.Response.Headers.Select(header => header) + .Where(header => string.Equals(header.Key, "WWW-Authenticate", StringComparison.OrdinalIgnoreCase))).Value; + Assert.Contains("use_dpop_nonce", authenticateHeader.ToString()); + Assert.True(httpContext.Response.Headers.TryGetValue("DPoP-Nonce", out var nonceValues)); + Assert.False(StringValues.IsNullOrEmpty(nonceValues)); + Assert.Contains(auditSink.Events, record => record.EventType == "authority.dpop.proof.challenge"); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsMtlsClient_WithValidCertificate() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Security.SenderConstraints.Mtls.Enabled = true; + options.Security.SenderConstraints.Mtls.RequireChainValidation = false; + options.Security.SenderConstraints.Mtls.AllowedSanTypes.Clear(); + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read"); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; + clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Mtls; + + using var rsa = RSA.Create(2048); + var certificateRequest = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + using var certificate = certificateRequest.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddHours(1)); + var 
hexThumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)); + clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding + { + Thumbprint = hexThumbprint + }); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var auditSink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() }; + httpContextAccessor.HttpContext!.Connection.ClientCertificate = certificate; + + var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); + + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + auditSink, + metadataAccessor, + TimeProvider.System, + validator, + httpContextAccessor, + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, context.ErrorDescription ?? 
context.Error); + Assert.Equal(AuthoritySenderConstraintKinds.Mtls, context.Transaction.Properties[AuthorityOpenIddictConstants.SenderConstraintProperty]); + + var expectedBase64 = Base64UrlEncoder.Encode(certificate.GetCertHash(HashAlgorithmName.SHA256)); + Assert.Equal(expectedBase64, context.Transaction.Properties[AuthorityOpenIddictConstants.MtlsCertificateThumbprintProperty]); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsMtlsClient_WhenCertificateMissing() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Security.SenderConstraints.Mtls.Enabled = true; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read"); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; + clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Mtls; + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() }; + var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); + + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + validator, + httpContextAccessor, + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + 
Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + } + + [Fact] + public async Task ValidateClientCredentials_Rejects_WhenAudienceRequiresMtlsButClientConfiguredForDpop() + { + var options = TestHelpers.CreateAuthorityOptions(opts => + { + opts.Security.SenderConstraints.Mtls.Enabled = true; + opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Clear(); + opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Add("signer"); + }); + + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read", + allowedAudiences: "signer"); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Dpop; + clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Dpop; + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Requested audiences require mutual TLS sender constraint.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RequiresMtlsWhenAudienceMatchesEnforcement() + { + var options = TestHelpers.CreateAuthorityOptions(opts => + { + opts.Security.SenderConstraints.Mtls.Enabled = true; + 
opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Clear(); + opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Add("signer"); + }); + + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read", + allowedAudiences: "signer"); + clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding + { + Thumbprint = "DEADBEEF" + }); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var certificateValidator = new RecordingCertificateValidator(); + var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() }; + + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + certificateValidator, + httpContextAccessor, + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("client_certificate_required", context.ErrorDescription); + Assert.True(certificateValidator.Invoked); + } + + [Fact] + public async Task HandleClientCredentials_PersistsTokenAndEnrichesClaims() + { + var clientDocument = CreateClient( + secret: null, + clientType: "public", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:trigger", + allowedAudiences: "signer", + tenant: "Tenant-Alpha"); + + var descriptor = CreateDescriptor(clientDocument); + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: descriptor); + var tokenStore = new TestTokenStore(); + var sessionAccessor = new 
NullMongoSessionAccessor(); + var authSink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var options = TestHelpers.CreateAuthorityOptions(); + var validateHandler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + authSink, + metadataAccessor, + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, secret: null, scope: "jobs:trigger"); + transaction.Options.AccessTokenLifetime = TimeSpan.FromMinutes(30); + + var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + await validateHandler.HandleAsync(validateContext); + Assert.False(validateContext.IsRejected); + + var handler = new HandleClientCredentialsHandler( + registry, + tokenStore, + sessionAccessor, + metadataAccessor, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + var persistHandler = new PersistTokensHandler(tokenStore, sessionAccessor, TimeProvider.System, TestActivitySource, NullLogger.Instance); + + var context = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRequestHandled); + Assert.NotNull(context.Principal); + Assert.Contains("signer", context.Principal!.GetAudiences()); + + Assert.Contains(authSink.Events, record => record.EventType == "authority.client_credentials.grant" && record.Outcome == AuthEventOutcome.Success); + + var identityProviderClaim = context.Principal?.GetClaim(StellaOpsClaimTypes.IdentityProvider); + Assert.Equal(clientDocument.Plugin, identityProviderClaim); + + var principal = context.Principal ?? 
throw new InvalidOperationException("Principal missing"); + Assert.Equal("tenant-alpha", principal.FindFirstValue(StellaOpsClaimTypes.Tenant)); + var tokenId = principal.GetClaim(OpenIddictConstants.Claims.JwtId); + Assert.False(string.IsNullOrWhiteSpace(tokenId)); + + var signInContext = new OpenIddictServerEvents.ProcessSignInContext(transaction) + { + Principal = principal, + AccessTokenPrincipal = principal + }; + + await persistHandler.HandleAsync(signInContext); + + var persisted = Assert.IsType(tokenStore.Inserted); + Assert.Equal(tokenId, persisted.TokenId); + Assert.Equal(clientDocument.ClientId, persisted.ClientId); + Assert.Equal("valid", persisted.Status); + Assert.Equal("tenant-alpha", persisted.Tenant); + Assert.Equal(new[] { "jobs:trigger" }, persisted.Scope); + } +} + +public class TokenValidationHandlersTests +{ + private static readonly ActivitySource TestActivitySource = new("StellaOps.Authority.Tests.TokenValidation"); + + [Fact] + public async Task ValidateAccessTokenHandler_Rejects_WhenTokenRevoked() + { + var tokenStore = new TestTokenStore(); + tokenStore.Inserted = new AuthorityTokenDocument + { + TokenId = "token-1", + Status = "revoked", + ClientId = "concelier" + }; + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessor, + new TestClientStore(CreateClient()), + CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(CreateClient())), + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Token, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal("concelier", "token-1", "standard"); + var context = new 
OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = "token-1" + }; + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error); + } + + [Fact] + public async Task ValidateAccessTokenHandler_AddsTenantClaim_FromTokenDocument() + { + var clientDocument = CreateClient(tenant: "tenant-alpha"); + var tokenStore = new TestTokenStore + { + Inserted = new AuthorityTokenDocument + { + TokenId = "token-tenant", + Status = "valid", + ClientId = clientDocument.ClientId, + Tenant = "tenant-alpha" + } + }; + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessor, + new TestClientStore(clientDocument), + CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)), + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Token, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = "token-tenant" + }; + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected); + Assert.Equal("tenant-alpha", principal.FindFirstValue(StellaOpsClaimTypes.Tenant)); + Assert.Equal("tenant-alpha", metadataAccessor.GetMetadata()?.Tenant); + Assert.Equal(StellaOpsTenancyDefaults.AnyProject, principal.FindFirstValue(StellaOpsClaimTypes.Project)); + Assert.Equal(StellaOpsTenancyDefaults.AnyProject, metadataAccessor.GetMetadata()?.Project); + 
Assert.Equal(StellaOpsTenancyDefaults.AnyProject, principal.FindFirstValue(StellaOpsClaimTypes.Project)); + Assert.Equal(StellaOpsTenancyDefaults.AnyProject, metadataAccessor.GetMetadata()?.Project); + } + + [Fact] + public async Task ValidateAccessTokenHandler_Rejects_WhenTenantDiffersFromToken() + { + var clientDocument = CreateClient(tenant: "tenant-alpha"); + var tokenStore = new TestTokenStore + { + Inserted = new AuthorityTokenDocument + { + TokenId = "token-tenant", + Status = "valid", + ClientId = clientDocument.ClientId, + Tenant = "tenant-alpha" + } + }; + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessor, + new TestClientStore(clientDocument), + CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)), + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Token, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin); + principal.Identities.First().AddClaim(new Claim(StellaOpsClaimTypes.Tenant, "tenant-beta")); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = "token-tenant" + }; + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error); + Assert.Equal("The token tenant does not match the issued tenant.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateAccessTokenHandler_AssignsTenant_FromClientWhenTokenMissing() + { + var clientDocument = CreateClient(tenant: 
"tenant-alpha"); + var tokenStore = new TestTokenStore + { + Inserted = new AuthorityTokenDocument + { + TokenId = "token-tenant", + Status = "valid", + ClientId = clientDocument.ClientId + } + }; + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessor, + new TestClientStore(clientDocument), + CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)), + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Token, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = "token-tenant" + }; + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected); + Assert.Equal("tenant-alpha", principal.FindFirstValue(StellaOpsClaimTypes.Tenant)); + Assert.Equal("tenant-alpha", metadataAccessor.GetMetadata()?.Tenant); + } + + [Fact] + public async Task ValidateAccessTokenHandler_Rejects_WhenClientTenantDiffers() + { + var clientDocument = CreateClient(tenant: "tenant-beta"); + var tokenStore = new TestTokenStore + { + Inserted = new AuthorityTokenDocument + { + TokenId = "token-tenant", + Status = "valid", + ClientId = clientDocument.ClientId + } + }; + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessor, + new TestClientStore(clientDocument), + 
CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)), + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Token, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin); + principal.Identities.First().AddClaim(new Claim(StellaOpsClaimTypes.Tenant, "tenant-alpha")); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = "token-tenant" + }; + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error); + Assert.Equal("The token tenant does not match the registered client tenant.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateAccessTokenHandler_EnrichesClaims_WhenProviderAvailable() + { + var clientDocument = CreateClient(); + var userDescriptor = new AuthorityUserDescriptor("user-1", "alice", displayName: "Alice", requiresPasswordReset: false); + + var plugin = CreatePlugin( + name: "standard", + supportsClientProvisioning: true, + descriptor: CreateDescriptor(clientDocument), + user: userDescriptor); + + var registry = CreateRegistryFromPlugins(plugin); + + var metadataAccessorSuccess = new TestRateLimiterMetadataAccessor(); + var auditSinkSuccess = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + new TestTokenStore(), + sessionAccessor, + new TestClientStore(clientDocument), + registry, + metadataAccessorSuccess, + auditSinkSuccess, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new 
OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Token, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal(clientDocument.ClientId, "token-123", plugin.Name, subject: userDescriptor.SubjectId); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal + }; + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected); + Assert.Contains(principal.Claims, claim => claim.Type == "enriched" && claim.Value == "true"); + } + + [Fact] + public async Task ValidateAccessTokenHandler_AddsConfirmationClaim_ForMtlsToken() + { + var tokenDocument = new AuthorityTokenDocument + { + TokenId = "token-mtls", + Status = "valid", + ClientId = "mtls-client", + SenderConstraint = AuthoritySenderConstraintKinds.Mtls, + SenderKeyThumbprint = "thumb-print" + }; + + var tokenStore = new TestTokenStore + { + Inserted = tokenDocument + }; + + var clientDocument = CreateClient(); + var registry = CreateRegistry(withClientProvisioning: false, clientDescriptor: null); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessor, + new TestClientStore(clientDocument), + registry, + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Introspection, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal(clientDocument.ClientId, tokenDocument.TokenId, clientDocument.Plugin); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = tokenDocument.TokenId + }; + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected); + 
var confirmation = context.Principal?.GetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType); + Assert.False(string.IsNullOrWhiteSpace(confirmation)); + using var json = JsonDocument.Parse(confirmation!); + Assert.Equal(tokenDocument.SenderKeyThumbprint, json.RootElement.GetProperty("x5t#S256").GetString()); + } + + [Fact] + public async Task ValidateAccessTokenHandler_EmitsReplayAudit_WhenStoreDetectsSuspectedReplay() + { + var tokenStore = new TestTokenStore(); + tokenStore.Inserted = new AuthorityTokenDocument + { + TokenId = "token-replay", + Status = "valid", + ClientId = "agent", + Devices = new List + { + new BsonDocument + { + { "remoteAddress", "10.0.0.1" }, + { "userAgent", "agent/1.0" }, + { "firstSeen", BsonDateTime.Create(DateTimeOffset.UtcNow.AddMinutes(-15)) }, + { "lastSeen", BsonDateTime.Create(DateTimeOffset.UtcNow.AddMinutes(-5)) }, + { "useCount", 2 } + } + } + }; + + tokenStore.UsageCallback = (remote, agent) => new TokenUsageUpdateResult(TokenUsageUpdateStatus.SuspectedReplay, remote, agent); + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var metadata = metadataAccessor.GetMetadata(); + if (metadata is not null) + { + metadata.RemoteIp = "203.0.113.7"; + metadata.UserAgent = "agent/2.0"; + } + + var clientDocument = CreateClient(); + clientDocument.ClientId = "agent"; + var auditSink = new TestAuthEventSink(); + var registry = CreateRegistry(withClientProvisioning: false, clientDescriptor: null); + var sessionAccessorReplay = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessorReplay, + new TestClientStore(clientDocument), + registry, + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Introspection, + Request = new OpenIddictRequest() + }; + + var principal = 
CreatePrincipal("agent", "token-replay", "standard"); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = "token-replay" + }; + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected); + var replayEvent = Assert.Single(auditSink.Events, record => record.EventType == "authority.token.replay.suspected"); + Assert.Equal(AuthEventOutcome.Error, replayEvent.Outcome); + Assert.NotNull(replayEvent.Network); + Assert.Equal("203.0.113.7", replayEvent.Network?.RemoteAddress.Value); + Assert.Contains(replayEvent.Properties, property => property.Name == "token.devices.total"); + } +} + +public class AuthorityClientCertificateValidatorTests +{ + [Fact] + public async Task ValidateAsync_Rejects_WhenSanTypeNotAllowed() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Security.SenderConstraints.Mtls.Enabled = true; + options.Security.SenderConstraints.Mtls.RequireChainValidation = false; + options.Security.SenderConstraints.Mtls.AllowedSanTypes.Clear(); + options.Security.SenderConstraints.Mtls.AllowedSanTypes.Add("uri"); + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + + using var rsa = RSA.Create(2048); + var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + var sanBuilder = new SubjectAlternativeNameBuilder(); + sanBuilder.AddDnsName("client.mtls.test"); + request.CertificateExtensions.Add(sanBuilder.Build()); + using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(5)); + + var clientDocument = CreateClient(); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; + clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding + { + Thumbprint = 
Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)) + }); + + var httpContext = new DefaultHttpContext(); + httpContext.Connection.ClientCertificate = certificate; + + var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); + var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); + + Assert.False(result.Succeeded); + Assert.Equal("certificate_san_type", result.Error); + } + + [Fact] + public async Task ValidateAsync_AllowsBindingWithinRotationGrace() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Security.SenderConstraints.Mtls.Enabled = true; + options.Security.SenderConstraints.Mtls.RequireChainValidation = false; + options.Security.SenderConstraints.Mtls.RotationGrace = TimeSpan.FromMinutes(5); + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + + using var rsa = RSA.Create(2048); + var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + var sanBuilder = new SubjectAlternativeNameBuilder(); + sanBuilder.AddDnsName("client.mtls.test"); + request.CertificateExtensions.Add(sanBuilder.Build()); + using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(10)); + + var thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)); + + var clientDocument = CreateClient(); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; + clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding + { + Thumbprint = thumbprint, + NotBefore = TimeProvider.System.GetUtcNow().AddMinutes(2) + }); + + var httpContext = new DefaultHttpContext(); + httpContext.Connection.ClientCertificate = certificate; + + var validator = new 
AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); + var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); + + Assert.True(result.Succeeded); + Assert.Equal(thumbprint, result.HexThumbprint); + } + + [Fact] + public async Task ValidateAsync_Rejects_WhenBindingSubjectMismatch() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Security.SenderConstraints.Mtls.Enabled = true; + options.Security.SenderConstraints.Mtls.RequireChainValidation = false; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + + using var rsa = RSA.Create(2048); + var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + var sanBuilder = new SubjectAlternativeNameBuilder(); + sanBuilder.AddDnsName("client.mtls.test"); + request.CertificateExtensions.Add(sanBuilder.Build()); + using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(5)); + + var clientDocument = CreateClient(); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; + clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding + { + Thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)), + Subject = "CN=different-client" + }); + + var httpContext = new DefaultHttpContext(); + httpContext.Connection.ClientCertificate = certificate; + + var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); + var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); + + Assert.False(result.Succeeded); + Assert.Equal("certificate_binding_subject_mismatch", result.Error); + } + + [Fact] + public async Task 
ValidateAsync_Rejects_WhenBindingSansMissing() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Security.SenderConstraints.Mtls.Enabled = true; + options.Security.SenderConstraints.Mtls.RequireChainValidation = false; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + + using var rsa = RSA.Create(2048); + var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + var sanBuilder = new SubjectAlternativeNameBuilder(); + sanBuilder.AddDnsName("client.mtls.test"); + request.CertificateExtensions.Add(sanBuilder.Build()); + using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(5)); + + var clientDocument = CreateClient(); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; + clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding + { + Thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)), + SubjectAlternativeNames = new List { "spiffe://client" } + }); + + var httpContext = new DefaultHttpContext(); + httpContext.Connection.ClientCertificate = certificate; + + var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); + var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); + + Assert.False(result.Succeeded); + Assert.Equal("certificate_binding_san_mismatch", result.Error); + } +} + +internal sealed class TestClientStore : IAuthorityClientStore +{ + private readonly Dictionary clients = new(StringComparer.OrdinalIgnoreCase); + + public TestClientStore(params AuthorityClientDocument[] documents) + { + foreach (var document in documents) + { + clients[document.ClientId] = document; + } + } + + public ValueTask 
FindByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + clients.TryGetValue(clientId, out var document); + return ValueTask.FromResult(document); + } + + public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + clients[document.ClientId] = document; + return ValueTask.CompletedTask; + } + + public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(clients.Remove(clientId)); +} + +internal sealed class TestTokenStore : IAuthorityTokenStore +{ + public AuthorityTokenDocument? Inserted { get; set; } + + public Func? UsageCallback { get; set; } + + public ValueTask InsertAsync(AuthorityTokenDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + Inserted = document; + return ValueTask.CompletedTask; + } + + public ValueTask FindByTokenIdAsync(string tokenId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(Inserted is not null && string.Equals(Inserted.TokenId, tokenId, StringComparison.OrdinalIgnoreCase) ? Inserted : null); + + public ValueTask FindByReferenceIdAsync(string referenceId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(null); + + public ValueTask UpdateStatusAsync(string tokenId, string status, DateTimeOffset? revokedAt, string? reason, string? reasonDescription, IReadOnlyDictionary? metadata, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.CompletedTask; + + public ValueTask DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(0L); + + public ValueTask RecordUsageAsync(string tokenId, string? remoteAddress, string? 
userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(UsageCallback?.Invoke(remoteAddress, userAgent) ?? new TokenUsageUpdateResult(TokenUsageUpdateStatus.Recorded, remoteAddress, userAgent)); + + public ValueTask> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult>(Array.Empty()); + public ValueTask> ListByScopeAsync(string scope, string tenant, DateTimeOffset? issuedAfter, int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + if (Inserted is null) + { + return ValueTask.FromResult>(Array.Empty()); + } + + var scopeMatches = Inserted.Scope is not null && Inserted.Scope.Any(s => string.Equals(s, scope, StringComparison.OrdinalIgnoreCase)); + var tenantMatches = string.Equals(Inserted.Tenant, tenant, StringComparison.OrdinalIgnoreCase); + var issuedAfterMatches = !issuedAfter.HasValue || Inserted.CreatedAt >= issuedAfter.Value; + + if (scopeMatches && tenantMatches && issuedAfterMatches) + { + return ValueTask.FromResult>(new[] { Inserted }); + } + + return ValueTask.FromResult>(Array.Empty()); + } + +} + +internal sealed class TestClaimsEnricher : IClaimsEnricher +{ + public ValueTask EnrichAsync(ClaimsIdentity identity, AuthorityClaimsEnrichmentContext context, CancellationToken cancellationToken) + { + if (!identity.HasClaim(c => c.Type == "enriched")) + { + identity.AddClaim(new Claim("enriched", "true")); + } + + return ValueTask.CompletedTask; + } +} + +internal sealed class TestUserCredentialStore : IUserCredentialStore +{ + private readonly AuthorityUserDescriptor? user; + + public TestUserCredentialStore(AuthorityUserDescriptor? 
user) + { + this.user = user; + } + + public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.InvalidCredentials)); + + public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityPluginOperationResult.Failure("unsupported", "not implemented")); + + public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) + => ValueTask.FromResult(user); +} + +internal sealed class TestClientProvisioningStore : IClientProvisioningStore +{ + private readonly AuthorityClientDescriptor? descriptor; + + public TestClientProvisioningStore(AuthorityClientDescriptor? descriptor) + { + this.descriptor = descriptor; + } + + public ValueTask> CreateOrUpdateAsync(AuthorityClientRegistration registration, CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityPluginOperationResult.Failure("unsupported", "not implemented")); + + public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken) + => ValueTask.FromResult(descriptor); + + public ValueTask DeleteAsync(string clientId, CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityPluginOperationResult.Success()); +} + +internal sealed class TestIdentityProviderPlugin : IIdentityProviderPlugin +{ + public TestIdentityProviderPlugin( + AuthorityPluginContext context, + IUserCredentialStore credentialStore, + IClaimsEnricher claimsEnricher, + IClientProvisioningStore? 
clientProvisioning, + AuthorityIdentityProviderCapabilities capabilities) + { + Context = context; + Credentials = credentialStore; + ClaimsEnricher = claimsEnricher; + ClientProvisioning = clientProvisioning; + Capabilities = capabilities; + } + + public string Name => Context.Manifest.Name; + + public string Type => Context.Manifest.Type; + + public AuthorityPluginContext Context { get; } + + public IUserCredentialStore Credentials { get; } + + public IClaimsEnricher ClaimsEnricher { get; } + + public IClientProvisioningStore? ClientProvisioning { get; } + + public AuthorityIdentityProviderCapabilities Capabilities { get; } + + public ValueTask CheckHealthAsync(CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy()); +} + +internal sealed class TestAuthEventSink : IAuthEventSink +{ + public List Events { get; } = new(); + + public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) + { + Events.Add(record); + return ValueTask.CompletedTask; + } +} + +internal sealed class TestRateLimiterMetadataAccessor : IAuthorityRateLimiterMetadataAccessor +{ + private readonly AuthorityRateLimiterMetadata metadata = new(); + + public AuthorityRateLimiterMetadata? GetMetadata() => metadata; + + public void SetClientId(string? clientId) => metadata.ClientId = clientId; + + public void SetSubjectId(string? subjectId) => metadata.SubjectId = subjectId; + + public void SetTenant(string? tenant) + { + metadata.Tenant = string.IsNullOrWhiteSpace(tenant) ? null : tenant.Trim().ToLowerInvariant(); + metadata.SetTag("authority.tenant", metadata.Tenant); + } + + public void SetProject(string? project) + { + metadata.Project = string.IsNullOrWhiteSpace(project) ? null : project.Trim().ToLowerInvariant(); + metadata.SetTag("authority.project", metadata.Project); + } + + public void SetTag(string name, string? 
value) => metadata.SetTag(name, value); +} + +internal sealed class NoopCertificateValidator : IAuthorityClientCertificateValidator +{ + public ValueTask ValidateAsync(HttpContext httpContext, AuthorityClientDocument client, CancellationToken cancellationToken) + { + var binding = new AuthorityClientCertificateBinding + { + Thumbprint = "stub" + }; + + return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Success("stub", "stub", binding)); + } +} + +internal sealed class RecordingCertificateValidator : IAuthorityClientCertificateValidator +{ + public bool Invoked { get; private set; } + + public ValueTask ValidateAsync(HttpContext httpContext, AuthorityClientDocument client, CancellationToken cancellationToken) + { + Invoked = true; + + if (httpContext.Connection.ClientCertificate is null) + { + return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("client_certificate_required")); + } + + AuthorityClientCertificateBinding binding; + if (client.CertificateBindings.Count > 0) + { + binding = client.CertificateBindings[0]; + } + else + { + binding = new AuthorityClientCertificateBinding { Thumbprint = "stub" }; + } + + return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Success("stub", binding.Thumbprint, binding)); + } +} + +internal sealed class NullMongoSessionAccessor : IAuthorityMongoSessionAccessor +{ + public ValueTask GetSessionAsync(CancellationToken cancellationToken = default) + => ValueTask.FromResult(null!); + + public ValueTask DisposeAsync() => ValueTask.CompletedTask; +} + +public class ObservabilityIncidentTokenHandlerTests +{ + private static readonly ActivitySource ActivitySource = new("StellaOps.Authority.Tests"); + + [Fact] + public async Task ValidateAccessTokenHandler_Rejects_WhenObsIncidentNotFresh() + { + var clientDocument = CreateClient(tenant: "tenant-alpha"); + var tokenStore = new TestTokenStore + { + Inserted = new AuthorityTokenDocument + { + TokenId = "token-incident", + 
Status = "valid", + ClientId = clientDocument.ClientId, + Tenant = "tenant-alpha", + Scope = new List { StellaOpsScopes.ObservabilityIncident }, + CreatedAt = DateTimeOffset.UtcNow.AddMinutes(-15) + } + }; + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessor, + new TestClientStore(clientDocument), + CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)), + metadataAccessor, + auditSink, + TimeProvider.System, + ActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Token, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal(clientDocument.ClientId, "token-incident", clientDocument.Plugin); + principal.SetScopes(StellaOpsScopes.ObservabilityIncident); + var staleAuthTime = DateTimeOffset.UtcNow.AddMinutes(-10); + principal.SetClaim(OpenIddictConstants.Claims.AuthenticationTime, staleAuthTime.ToUnixTimeSeconds().ToString(CultureInfo.InvariantCulture)); + + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = "token-incident" + }; + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error); + } + + [Fact] + public async Task ValidateRefreshTokenHandler_RejectsObsIncidentScope() + { + var handler = new ValidateRefreshTokenGrantHandler(NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + EndpointType = OpenIddictServerEndpointType.Token, + Options = new OpenIddictServerOptions(), + Request = new OpenIddictRequest + { + GrantType = OpenIddictConstants.GrantTypes.RefreshToken + } + }; + + var principal = 
CreatePrincipal("cli-app", "refresh-token", "standard"); + principal.SetScopes(StellaOpsScopes.ObservabilityIncident); + + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction) + { + Principal = principal + }; + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidGrant, context.Error); + } +} + +internal static class TestHelpers +{ + public static StellaOpsAuthorityOptions CreateAuthorityOptions(Action? configure = null) + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + + configure?.Invoke(options); + return options; + } + + public static AuthorityClientDocument CreateClient( + string clientId = "concelier", + string? secret = "s3cr3t!", + string clientType = "confidential", + string allowedGrantTypes = "client_credentials", + string allowedScopes = "jobs:read", + string allowedAudiences = "", + string? tenant = null) + { + var document = new AuthorityClientDocument + { + ClientId = clientId, + ClientType = clientType, + SecretHash = secret is null ? null : AuthoritySecretHasher.ComputeHash(secret), + Plugin = "standard", + Properties = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + [AuthorityClientMetadataKeys.AllowedGrantTypes] = allowedGrantTypes, + [AuthorityClientMetadataKeys.AllowedScopes] = allowedScopes + } + }; + + if (!string.IsNullOrWhiteSpace(allowedAudiences)) + { + document.Properties[AuthorityClientMetadataKeys.Audiences] = allowedAudiences; + } + + var normalizedTenant = NormalizeTenant(tenant); + if (normalizedTenant is not null) + { + document.Properties[AuthorityClientMetadataKeys.Tenant] = normalizedTenant; + } + + return document; + } + + private static string? NormalizeTenant(string? 
value) + => string.IsNullOrWhiteSpace(value) ? null : value.Trim().ToLowerInvariant(); + + public static AuthorityClientDescriptor CreateDescriptor(AuthorityClientDocument document) + { + var allowedGrantTypes = document.Properties.TryGetValue(AuthorityClientMetadataKeys.AllowedGrantTypes, out var grants) ? grants?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty(); + var allowedScopes = document.Properties.TryGetValue(AuthorityClientMetadataKeys.AllowedScopes, out var scopes) ? scopes?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty(); + var allowedAudiences = document.Properties.TryGetValue(AuthorityClientMetadataKeys.Audiences, out var audiences) ? audiences?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty(); + + return new AuthorityClientDescriptor( + document.ClientId, + document.DisplayName, + confidential: string.Equals(document.ClientType, "confidential", StringComparison.OrdinalIgnoreCase), + allowedGrantTypes, + allowedScopes, + allowedAudiences, + redirectUris: Array.Empty(), + postLogoutRedirectUris: Array.Empty(), + properties: document.Properties); + } + + public static AuthorityIdentityProviderRegistry CreateRegistry(bool withClientProvisioning, AuthorityClientDescriptor? clientDescriptor) + { + var plugin = CreatePlugin( + name: "standard", + supportsClientProvisioning: withClientProvisioning, + descriptor: clientDescriptor, + user: null); + + return CreateRegistryFromPlugins(plugin); + } + + public static TestIdentityProviderPlugin CreatePlugin( + string name, + bool supportsClientProvisioning, + AuthorityClientDescriptor? descriptor, + AuthorityUserDescriptor? user) + { + var capabilities = supportsClientProvisioning + ? 
new[] { AuthorityPluginCapabilities.ClientProvisioning } + : Array.Empty(); + + var manifest = new AuthorityPluginManifest( + name, + "standard", + true, + null, + null, + capabilities, + new Dictionary(StringComparer.OrdinalIgnoreCase), + $"{name}.yaml"); + + var context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build()); + + return new TestIdentityProviderPlugin( + context, + new TestUserCredentialStore(user), + new TestClaimsEnricher(), + supportsClientProvisioning ? new TestClientProvisioningStore(descriptor) : null, + new AuthorityIdentityProviderCapabilities( + SupportsPassword: true, + SupportsMfa: false, + SupportsClientProvisioning: supportsClientProvisioning)); + } + + public static AuthorityIdentityProviderRegistry CreateRegistryFromPlugins(params IIdentityProviderPlugin[] plugins) + { + var services = new ServiceCollection(); + services.AddLogging(); + foreach (var plugin in plugins) + { + services.AddSingleton(plugin); + } + + var provider = services.BuildServiceProvider(); + return new AuthorityIdentityProviderRegistry(provider, NullLogger.Instance); + } + + public static OpenIddictServerTransaction CreateTokenTransaction(string clientId, string? secret, string? 
scope) + { + var request = new OpenIddictRequest + { + GrantType = OpenIddictConstants.GrantTypes.ClientCredentials, + ClientId = clientId, + ClientSecret = secret + }; + + if (!string.IsNullOrWhiteSpace(scope)) + { + request.Scope = scope; + } + + return new OpenIddictServerTransaction + { + EndpointType = OpenIddictServerEndpointType.Token, + Options = new OpenIddictServerOptions(), + Request = request + }; + } + + public static string ConvertThumbprintToString(object thumbprint) + => thumbprint switch + { + string value => value, + byte[] bytes => Base64UrlEncoder.Encode(bytes), + _ => throw new InvalidOperationException("Unsupported thumbprint representation.") + }; + + public static string CreateDpopProof(ECDsaSecurityKey key, string method, string url, long issuedAt, string? nonce = null) + { + var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(key); + jwk.KeyId ??= key.KeyId ?? Guid.NewGuid().ToString("N"); + + var signingCredentials = new SigningCredentials(key, SecurityAlgorithms.EcdsaSha256); + var header = new JwtHeader(signingCredentials) + { + ["typ"] = "dpop+jwt", + ["jwk"] = new Dictionary + { + ["kty"] = jwk.Kty, + ["crv"] = jwk.Crv, + ["x"] = jwk.X, + ["y"] = jwk.Y, + ["kid"] = jwk.Kid ?? 
jwk.KeyId + } + }; + + var payload = new JwtPayload + { + ["htm"] = method.ToUpperInvariant(), + ["htu"] = url, + ["iat"] = issuedAt, + ["jti"] = Guid.NewGuid().ToString("N") + }; + + if (!string.IsNullOrWhiteSpace(nonce)) + { + payload["nonce"] = nonce; + } + + var token = new JwtSecurityToken(header, payload); + return new JwtSecurityTokenHandler().WriteToken(token); + } + + public static X509Certificate2 CreateTestCertificate(string subjectName) + { + using var rsa = RSA.Create(2048); + var request = new CertificateRequest(subjectName, rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + return request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddHours(1)); + } + + public static ClaimsPrincipal CreatePrincipal(string clientId, string tokenId, string provider, string? subject = null) + { + var identity = new ClaimsIdentity(OpenIddictServerAspNetCoreDefaults.AuthenticationScheme); + identity.AddClaim(new Claim(OpenIddictConstants.Claims.ClientId, clientId)); + identity.AddClaim(new Claim(OpenIddictConstants.Claims.JwtId, tokenId)); + identity.AddClaim(new Claim(StellaOpsClaimTypes.IdentityProvider, provider)); + identity.AddClaim(new Claim(StellaOpsClaimTypes.Project, StellaOpsTenancyDefaults.AnyProject)); + + if (!string.IsNullOrWhiteSpace(subject)) + { + identity.AddClaim(new Claim(OpenIddictConstants.Claims.Subject, subject)); + } + + return new ClaimsPrincipal(identity); + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/DiscoveryMetadataTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/DiscoveryMetadataTests.cs new file mode 100644 index 00000000..489bbc52 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/DiscoveryMetadataTests.cs @@ -0,0 +1,63 @@ +using System.Linq; +using System.Net; +using System.Text.Json; +using StellaOps.Authority.Tests.Infrastructure; +using StellaOps.Auth.Abstractions; +using 
Xunit; + +namespace StellaOps.Authority.Tests.OpenIddict; + +public sealed class DiscoveryMetadataTests : IClassFixture +{ + private readonly AuthorityWebApplicationFactory factory; + + public DiscoveryMetadataTests(AuthorityWebApplicationFactory factory) + { + this.factory = factory; + } + + [Fact] + public async Task OpenIdDiscovery_IncludesAdvisoryAiMetadata() + { + using var client = factory.CreateClient(); + + using var response = await client.GetAsync("/.well-known/openid-configuration").ConfigureAwait(false); + + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + var payload = await response.Content.ReadAsStringAsync().ConfigureAwait(false); + using var document = JsonDocument.Parse(payload); + + var root = document.RootElement; + Assert.True(root.TryGetProperty("stellaops_advisory_ai_scopes_supported", out var scopesNode)); + + var scopes = scopesNode.EnumerateArray().Select(element => element.GetString()).ToArray(); + Assert.Contains(StellaOpsScopes.AdvisoryAiView, scopes); + Assert.Contains(StellaOpsScopes.AdvisoryAiOperate, scopes); + Assert.Contains(StellaOpsScopes.AdvisoryAiAdmin, scopes); + + Assert.True(root.TryGetProperty("stellaops_advisory_ai_remote_inference", out var remoteNode)); + Assert.False(remoteNode.GetProperty("enabled").GetBoolean()); + Assert.True(remoteNode.GetProperty("require_tenant_consent").GetBoolean()); + + var profiles = remoteNode.GetProperty("allowed_profiles").EnumerateArray().ToArray(); + Assert.Empty(profiles); + + Assert.True(root.TryGetProperty("stellaops_airgap_scopes_supported", out var airgapNode)); + var airgapScopes = airgapNode.EnumerateArray().Select(element => element.GetString()).ToArray(); + Assert.Contains(StellaOpsScopes.AirgapSeal, airgapScopes); + Assert.Contains(StellaOpsScopes.AirgapImport, airgapScopes); + Assert.Contains(StellaOpsScopes.AirgapStatusRead, airgapScopes); + + Assert.True(root.TryGetProperty("stellaops_observability_scopes_supported", out var observabilityNode)); + var 
observabilityScopes = observabilityNode.EnumerateArray().Select(element => element.GetString()).ToArray(); + Assert.Contains(StellaOpsScopes.ObservabilityRead, observabilityScopes); + Assert.Contains(StellaOpsScopes.TimelineRead, observabilityScopes); + Assert.Contains(StellaOpsScopes.TimelineWrite, observabilityScopes); + Assert.Contains(StellaOpsScopes.EvidenceCreate, observabilityScopes); + Assert.Contains(StellaOpsScopes.EvidenceRead, observabilityScopes); + Assert.Contains(StellaOpsScopes.EvidenceHold, observabilityScopes); + Assert.Contains(StellaOpsScopes.AttestRead, observabilityScopes); + Assert.Contains(StellaOpsScopes.ObservabilityIncident, observabilityScopes); + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/LegacyAuthDeprecationTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/LegacyAuthDeprecationTests.cs new file mode 100644 index 00000000..aaf6ea9a --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/LegacyAuthDeprecationTests.cs @@ -0,0 +1,112 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using StellaOps.Authority.Tests.Infrastructure; +using StellaOps.Cryptography.Audit; +using Xunit; + +namespace StellaOps.Authority.Tests.OpenIddict; + +public sealed class LegacyAuthDeprecationTests : IClassFixture +{ + private static readonly string ExpectedDeprecationHeader = new DateTimeOffset(2025, 11, 1, 0, 0, 0, TimeSpan.Zero) + .UtcDateTime.ToString("r", CultureInfo.InvariantCulture); + + private static readonly string ExpectedSunsetHeader = new DateTimeOffset(2026, 5, 1, 0, 0, 0, TimeSpan.Zero) + .UtcDateTime.ToString("r", CultureInfo.InvariantCulture); + + private 
static readonly string ExpectedSunsetIso = new DateTimeOffset(2026, 5, 1, 0, 0, 0, TimeSpan.Zero) + .ToString("O", CultureInfo.InvariantCulture); + + private readonly AuthorityWebApplicationFactory factory; + + public LegacyAuthDeprecationTests(AuthorityWebApplicationFactory factory) + => this.factory = factory ?? throw new ArgumentNullException(nameof(factory)); + + [Fact] + public async Task LegacyTokenEndpoint_IncludesDeprecationHeaders() + { + using var client = factory.CreateClient(); + + using var response = await client.PostAsync( + "/oauth/token", + new FormUrlEncodedContent(new Dictionary + { + ["grant_type"] = "client_credentials" + })).ConfigureAwait(false); + + Assert.NotNull(response); + Assert.True(response.Headers.TryGetValues("Deprecation", out var deprecationValues)); + Assert.Contains(ExpectedDeprecationHeader, deprecationValues); + + Assert.True(response.Headers.TryGetValues("Sunset", out var sunsetValues)); + Assert.Contains(ExpectedSunsetHeader, sunsetValues); + + Assert.True(response.Headers.TryGetValues("Warning", out var warningValues)); + Assert.Contains(warningValues, warning => warning.Contains("Legacy Authority endpoint", StringComparison.OrdinalIgnoreCase)); + + Assert.True(response.Headers.TryGetValues("Link", out var linkValues)); + Assert.Contains(linkValues, value => value.Contains("rel=\"sunset\"", StringComparison.OrdinalIgnoreCase)); + } + + [Fact] + public async Task LegacyTokenEndpoint_EmitsAuditEvent() + { + var sink = new RecordingAuthEventSink(); + + using var customFactory = factory.WithWebHostBuilder(builder => + { + builder.ConfigureServices(services => + { + services.RemoveAll(); + services.AddSingleton(sink); + }); + }); + + using var client = customFactory.CreateClient(); + + using var response = await client.PostAsync( + "/oauth/token", + new FormUrlEncodedContent(new Dictionary + { + ["grant_type"] = "client_credentials" + })).ConfigureAwait(false); + + Assert.NotNull(response); + + var record = 
Assert.Single(sink.Events); + Assert.Equal("authority.api.legacy_endpoint", record.EventType); + + Assert.Contains(record.Properties, property => + string.Equals(property.Name, "legacy.endpoint.original", StringComparison.Ordinal) && + string.Equals(property.Value.Value, "/oauth/token", StringComparison.Ordinal)); + + Assert.Contains(record.Properties, property => + string.Equals(property.Name, "legacy.endpoint.canonical", StringComparison.Ordinal) && + string.Equals(property.Value.Value, "/token", StringComparison.Ordinal)); + + Assert.Contains(record.Properties, property => + string.Equals(property.Name, "legacy.sunset_at", StringComparison.Ordinal) && + string.Equals(property.Value.Value, ExpectedSunsetIso, StringComparison.Ordinal)); + } + + private sealed class RecordingAuthEventSink : IAuthEventSink + { + private readonly ConcurrentQueue events = new(); + + public IReadOnlyCollection Events => events.ToArray(); + + public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) + { + events.Enqueue(record); + return ValueTask.CompletedTask; + } + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs index 79e8c509..a32abced 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs @@ -1,516 +1,592 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Globalization; -using System.Security.Claims; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using MongoDB.Driver; -using OpenIddict.Abstractions; -using OpenIddict.Server; -using 
OpenIddict.Server.AspNetCore; -using StellaOps.Authority.OpenIddict; -using StellaOps.Authority.OpenIddict.Handlers; -using StellaOps.Authority.Plugins.Abstractions; -using StellaOps.Authority.RateLimiting; -using StellaOps.Authority.Storage.Mongo.Documents; -using StellaOps.Authority.Storage.Mongo.Stores; -using StellaOps.Cryptography.Audit; -using StellaOps.Configuration; -using StellaOps.Auth.Abstractions; -using Xunit; - -namespace StellaOps.Authority.Tests.OpenIddict; - -public class PasswordGrantHandlersTests -{ - private static readonly ActivitySource TestActivitySource = new("StellaOps.Authority.Tests"); - - [Fact] - public async Task HandlePasswordGrant_EmitsSuccessAuditEvent() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore()); - var clientStore = new StubClientStore(CreateClientDocument()); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!"); - - await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); - await handle.HandleAsync(new OpenIddictServerEvents.HandleTokenRequestContext(transaction)); - - var successEvent = Assert.Single(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Success); - Assert.Equal("tenant-alpha", successEvent.Tenant.Value); - - var metadata = metadataAccessor.GetMetadata(); - Assert.Equal("tenant-alpha", metadata?.Tenant); - } - - [Fact] - public 
async Task HandlePasswordGrant_EmitsFailureAuditEvent() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new FailureCredentialStore()); - var clientStore = new StubClientStore(CreateClientDocument()); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "BadPassword!"); - - await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); - await handle.HandleAsync(new OpenIddictServerEvents.HandleTokenRequestContext(transaction)); - - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); - } - - [Fact] - public async Task ValidatePasswordGrant_RejectsAdvisoryReadWithoutAocVerify() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore()); - var clientStore = new StubClientStore(CreateClientDocument("advisory:read aoc:verify")); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!", "advisory:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await 
validate.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); - Assert.Equal("Scope 'aoc:verify' is required when requesting advisory/vex read scopes.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); - } - - [Fact] - public async Task ValidatePasswordGrant_RejectsSignalsScopeWithoutAocVerify() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore()); - var clientStore = new StubClientStore(CreateClientDocument("signals:write signals:read signals:admin aoc:verify")); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!", "signals:write"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await validate.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); - Assert.Equal("Scope 'aoc:verify' is required when requesting signals scopes.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); - } - - [Fact] - public async Task ValidatePasswordGrant_RejectsPolicyAuthorWithoutTenant() - { - var sink 
= new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore()); - var clientDocument = CreateClientDocument("policy:author"); - clientDocument.Properties.Remove(AuthorityClientMetadataKeys.Tenant); - var clientStore = new StubClientStore(clientDocument); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!", "policy:author"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await validate.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("Policy Studio scopes require a tenant assignment.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.PolicyAuthor, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); - } - - [Fact] - public async Task ValidatePasswordGrant_AllowsPolicyAuthor() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore()); - var clientStore = new StubClientStore(CreateClientDocument("policy:author")); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", 
"Password1!", "policy:author"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await validate.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Success); - } - - [Fact] - public async Task HandlePasswordGrant_EmitsLockoutAuditEvent() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new LockoutCredentialStore()); - var clientStore = new StubClientStore(CreateClientDocument()); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Locked!"); - - await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); - await handle.HandleAsync(new OpenIddictServerEvents.HandleTokenRequestContext(transaction)); - - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.LockedOut); - } - - [Fact] - public async Task ValidatePasswordGrant_EmitsTamperAuditEvent_WhenUnexpectedParametersPresent() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore()); - var clientStore = new StubClientStore(CreateClientDocument()); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = 
Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!"); - transaction.Request?.SetParameter("unexpected_param", "value"); - - await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); - - var tamperEvent = Assert.Single(sink.Events, record => record.EventType == "authority.token.tamper"); - Assert.Equal(AuthEventOutcome.Failure, tamperEvent.Outcome); - Assert.Contains(tamperEvent.Properties, property => - string.Equals(property.Name, "request.unexpected_parameter", StringComparison.OrdinalIgnoreCase) && - string.Equals(property.Value.Value, "unexpected_param", StringComparison.OrdinalIgnoreCase)); - } - - [Fact] - public async Task ValidatePasswordGrant_RejectsExceptionsApprove_WhenMfaRequiredAndProviderLacksSupport() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore(), supportsMfa: false); - var clientStore = new StubClientStore(CreateClientDocument("exceptions:approve")); - var authorityOptions = CreateAuthorityOptions(opts => - { - opts.Exceptions.RoutingTemplates.Add(new AuthorityExceptionRoutingTemplateOptions - { - Id = "secops", - AuthorityRouteId = "approvals/secops", - RequireMfa = true - }); - }); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!", "exceptions:approve"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await validate.HandleAsync(context); - - Assert.True(context.IsRejected); - 
Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); - Assert.Equal("Exception approval scope requires an MFA-capable identity provider.", context.ErrorDescription); - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); - } - - [Fact] - public async Task HandlePasswordGrant_AllowsExceptionsApprove_WhenMfaSupported() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore(), supportsMfa: true); - var clientStore = new StubClientStore(CreateClientDocument("exceptions:approve")); - var authorityOptions = CreateAuthorityOptions(opts => - { - opts.Exceptions.RoutingTemplates.Add(new AuthorityExceptionRoutingTemplateOptions - { - Id = "secops", - AuthorityRouteId = "approvals/secops", - RequireMfa = true - }); - }); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!", "exceptions:approve"); - var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - await validate.HandleAsync(validateContext); - Assert.False(validateContext.IsRejected); - - var handleContext = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); - await handle.HandleAsync(handleContext); - - Assert.False(handleContext.IsRejected); - Assert.NotNull(handleContext.Principal); - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Success); - } - - private static 
AuthorityIdentityProviderRegistry CreateRegistry(IUserCredentialStore store, bool supportsMfa = false) - { - var plugin = new StubIdentityProviderPlugin("stub", store, supportsMfa); - - var services = new ServiceCollection(); - services.AddLogging(); - services.AddSingleton(plugin); - var provider = services.BuildServiceProvider(); - - return new AuthorityIdentityProviderRegistry(provider, NullLogger.Instance); - } - - private static OpenIddictServerTransaction CreatePasswordTransaction(string username, string password, string scope = "jobs:trigger") - { - var request = new OpenIddictRequest - { - GrantType = OpenIddictConstants.GrantTypes.Password, - Username = username, - Password = password, - ClientId = "cli-app", - Scope = scope - }; - - return new OpenIddictServerTransaction - { - EndpointType = OpenIddictServerEndpointType.Token, - Options = new OpenIddictServerOptions(), - Request = request - }; - } - - private static StellaOpsAuthorityOptions CreateAuthorityOptions(Action? configure = null) - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; - - configure?.Invoke(options); - return options; - } - - private static AuthorityClientDocument CreateClientDocument(string allowedScopes = "jobs:trigger") - { - var document = new AuthorityClientDocument - { - ClientId = "cli-app", - ClientType = "public" - }; - - document.Properties[AuthorityClientMetadataKeys.AllowedGrantTypes] = "password"; - document.Properties[AuthorityClientMetadataKeys.AllowedScopes] = allowedScopes; - document.Properties[AuthorityClientMetadataKeys.Tenant] = "tenant-alpha"; - - return document; - } - - private sealed class StubIdentityProviderPlugin : IIdentityProviderPlugin - { - public StubIdentityProviderPlugin(string name, IUserCredentialStore store, bool supportsMfa) - { 
- Name = name; - Type = "stub"; - var capabilities = supportsMfa - ? new[] { AuthorityPluginCapabilities.Password, AuthorityPluginCapabilities.Mfa } - : new[] { AuthorityPluginCapabilities.Password }; - var manifest = new AuthorityPluginManifest( - Name: name, - Type: "stub", - Enabled: true, - AssemblyName: null, - AssemblyPath: null, - Capabilities: capabilities, - Metadata: new Dictionary(StringComparer.OrdinalIgnoreCase), - ConfigPath: $"{name}.yaml"); - Context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build()); - Credentials = store; - ClaimsEnricher = new NoopClaimsEnricher(); - Capabilities = new AuthorityIdentityProviderCapabilities(SupportsPassword: true, SupportsMfa: supportsMfa, SupportsClientProvisioning: false); - } - - public string Name { get; } - public string Type { get; } - public AuthorityPluginContext Context { get; } - public IUserCredentialStore Credentials { get; } - public IClaimsEnricher ClaimsEnricher { get; } - public IClientProvisioningStore? 
ClientProvisioning => null; - public AuthorityIdentityProviderCapabilities Capabilities { get; } - - public ValueTask CheckHealthAsync(CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy()); - } - - private sealed class NoopClaimsEnricher : IClaimsEnricher - { - public ValueTask EnrichAsync(ClaimsIdentity identity, AuthorityClaimsEnrichmentContext context, CancellationToken cancellationToken) - => ValueTask.CompletedTask; - } - - private sealed class SuccessCredentialStore : IUserCredentialStore - { - public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) - { - var descriptor = new AuthorityUserDescriptor("subject", username, "User", requiresPasswordReset: false); - return ValueTask.FromResult(AuthorityCredentialVerificationResult.Success(descriptor)); - } - - public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) - => ValueTask.FromResult(null); - } - - private sealed class FailureCredentialStore : IUserCredentialStore - { - public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.InvalidCredentials, "Invalid username or password.")); - - public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) - => ValueTask.FromResult(null); - } - - private sealed class LockoutCredentialStore : IUserCredentialStore - { - public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) - { - var retry = 
TimeSpan.FromMinutes(5); - var properties = new[] - { - new AuthEventProperty - { - Name = "plugin.lockout_until", - Value = ClassifiedString.Public(timeProvider.GetUtcNow().Add(retry).ToString("O", CultureInfo.InvariantCulture)) - } - }; - - return ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure( - AuthorityCredentialFailureCode.LockedOut, - "Account locked.", - retry, - properties)); - } - - public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) - => ValueTask.FromResult(null); - } - - private sealed class StubClientStore : IAuthorityClientStore - { - private readonly AuthorityClientDocument document; - - public StubClientStore(AuthorityClientDocument document) - { - this.document = document; - } - - public Task> ListAsync(CancellationToken cancellationToken) - => Task.FromResult>(new[] { document }); - - public Task FindAsync(string id, CancellationToken cancellationToken) - => Task.FromResult(id == document.Id ? document : null); - - public Task FindByClientIdAsync(string clientId, CancellationToken cancellationToken) - => Task.FromResult(string.Equals(clientId, document.ClientId, StringComparison.Ordinal) ? 
document : null); - - public Task InsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public Task UpdateAsync(string id, UpdateDefinition update, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public Task DeleteAsync(string id, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public Task ExistsAsync(string id, CancellationToken cancellationToken) - => throw new NotImplementedException(); - } - - private sealed class TestAuthEventSink : IAuthEventSink - { - public List Events { get; } = new(); - - public Task WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) - { - Events.Add(record); - return Task.CompletedTask; - } - } - - private sealed class TestRateLimiterMetadataAccessor : IAuthorityRateLimiterMetadataAccessor - { - private AuthorityRateLimiterMetadata? metadata; - - public AuthorityRateLimiterMetadata? GetMetadata() => metadata; - - public void SetClientId(string? clientId) - { - metadata ??= new AuthorityRateLimiterMetadata(); - metadata.ClientId = clientId; - } - - public void SetTenant(string? tenant) - { - metadata ??= new AuthorityRateLimiterMetadata(); - metadata.Tenant = tenant; - } - - public void SetProject(string? 
project) - { - metadata ??= new AuthorityRateLimiterMetadata(); - metadata.Project = project; - } - - public void Clear() - { - metadata = null; - } - } - - private sealed class SuccessCredentialStore : IUserCredentialStore - { - public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) - { - var descriptor = new AuthorityUserDescriptor("subject", username, "User", requiresPasswordReset: false); - return ValueTask.FromResult(AuthorityCredentialVerificationResult.Success(descriptor)); - } - - public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) - => ValueTask.FromResult(null); - } -} +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Security.Claims; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using OpenIddict.Abstractions; +using OpenIddict.Server; +using OpenIddict.Server.AspNetCore; +using StellaOps.Authority.OpenIddict; +using StellaOps.Authority.OpenIddict.Handlers; +using StellaOps.Authority.Plugins.Abstractions; +using StellaOps.Authority.RateLimiting; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Stores; +using StellaOps.Cryptography.Audit; +using StellaOps.Configuration; +using StellaOps.Auth.Abstractions; +using Xunit; + +namespace StellaOps.Authority.Tests.OpenIddict; + +public class PasswordGrantHandlersTests +{ + private static readonly ActivitySource TestActivitySource = new("StellaOps.Authority.Tests"); + + [Fact] + public async Task HandlePasswordGrant_EmitsSuccessAuditEvent() + { + 
var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument()); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, NullLogger.Instance); + var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!"); + + await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); + await handle.HandleAsync(new OpenIddictServerEvents.HandleTokenRequestContext(transaction)); + + var successEvent = Assert.Single(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Success); + Assert.Equal("tenant-alpha", successEvent.Tenant.Value); + + var metadata = metadataAccessor.GetMetadata(); + Assert.Equal("tenant-alpha", metadata?.Tenant); + } + + [Fact] + public async Task HandlePasswordGrant_EmitsFailureAuditEvent() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new FailureCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument()); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, NullLogger.Instance); + var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "BadPassword!"); + + await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); + await handle.HandleAsync(new 
OpenIddictServerEvents.HandleTokenRequestContext(transaction)); + + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); + } + + [Fact] + public async Task ValidatePasswordGrant_RejectsAdvisoryReadWithoutAocVerify() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument("advisory:read aoc:verify")); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "advisory:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await validate.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); + Assert.Equal("Scope 'aoc:verify' is required when requesting advisory/advisory-ai/vex read scopes.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); + } + + [Fact] + public async Task ValidatePasswordGrant_RejectsObsIncidentWithoutReason() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument("obs:incident")); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", 
"Password1!", "obs:incident"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await validate.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); + Assert.Contains("incident_reason", context.ErrorDescription); + } + + [Fact] + public async Task HandlePasswordGrant_AddsIncidentReasonAndAuthTime() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientDocument = CreateClientDocument("obs:incident"); + var clientStore = new StubClientStore(clientDocument); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, NullLogger.Instance); + var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "obs:incident"); + transaction.Request.SetParameter("incident_reason", "Sev1 drill activation"); + + var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + await validate.HandleAsync(validateContext); + Assert.False(validateContext.IsRejected); + + var handleContext = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); + await handle.HandleAsync(handleContext); + + Assert.False(handleContext.IsRejected); + var principal = Assert.IsType(handleContext.Principal); + Assert.Equal("Sev1 drill activation", principal.GetClaim(StellaOpsClaimTypes.IncidentReason)); + var authTimeClaim = principal.GetClaim(OpenIddictConstants.Claims.AuthenticationTime); + Assert.False(string.IsNullOrWhiteSpace(authTimeClaim)); + } + + [Fact] + public async Task ValidatePasswordGrant_RejectsAdvisoryAiViewWithoutAocVerify() + { + var sink = new TestAuthEventSink(); + 
var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument("advisory-ai:view aoc:verify")); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "advisory-ai:view"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await validate.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); + Assert.Equal("Scope 'aoc:verify' is required when requesting advisory/advisory-ai/vex read scopes.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); + } + + [Fact] + public async Task ValidatePasswordGrant_RejectsAdvisoryAiScopeWithoutTenant() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientDocument = CreateClientDocument("advisory-ai:view"); + clientDocument.Properties.Remove(AuthorityClientMetadataKeys.Tenant); + var clientStore = new StubClientStore(clientDocument); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "advisory-ai:view"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await validate.HandleAsync(context); + + Assert.True(context.IsRejected); + 
Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Advisory AI scopes require a tenant assignment.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.AdvisoryAiView, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); + } + + [Fact] + public async Task ValidatePasswordGrant_RejectsSignalsScopeWithoutAocVerify() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument("signals:write signals:read signals:admin aoc:verify")); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "signals:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await validate.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); + Assert.Equal("Scope 'aoc:verify' is required when requesting signals scopes.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); + } + + [Fact] + public async Task ValidatePasswordGrant_RejectsPolicyAuthorWithoutTenant() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientDocument = CreateClientDocument("policy:author"); 
+ clientDocument.Properties.Remove(AuthorityClientMetadataKeys.Tenant); + var clientStore = new StubClientStore(clientDocument); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "policy:author"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await validate.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Policy Studio scopes require a tenant assignment.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.PolicyAuthor, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); + } + + [Fact] + public async Task ValidatePasswordGrant_AllowsPolicyAuthor() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument("policy:author")); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "policy:author"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await validate.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Success); + } + + [Fact] + public async Task HandlePasswordGrant_EmitsLockoutAuditEvent() + { + 
var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new LockoutCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument()); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, NullLogger.Instance); + var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Locked!"); + + await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); + await handle.HandleAsync(new OpenIddictServerEvents.HandleTokenRequestContext(transaction)); + + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.LockedOut); + } + + [Fact] + public async Task ValidatePasswordGrant_EmitsTamperAuditEvent_WhenUnexpectedParametersPresent() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument()); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!"); + transaction.Request?.SetParameter("unexpected_param", "value"); + + await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); + + var tamperEvent = Assert.Single(sink.Events, record => record.EventType == "authority.token.tamper"); + Assert.Equal(AuthEventOutcome.Failure, tamperEvent.Outcome); + Assert.Contains(tamperEvent.Properties, property => + string.Equals(property.Name, "request.unexpected_parameter", 
StringComparison.OrdinalIgnoreCase) && + string.Equals(property.Value.Value, "unexpected_param", StringComparison.OrdinalIgnoreCase)); + } + + [Fact] + public async Task ValidatePasswordGrant_RejectsExceptionsApprove_WhenMfaRequiredAndProviderLacksSupport() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore(), supportsMfa: false); + var clientStore = new StubClientStore(CreateClientDocument("exceptions:approve")); + var authorityOptions = CreateAuthorityOptions(opts => + { + opts.Exceptions.RoutingTemplates.Add(new AuthorityExceptionRoutingTemplateOptions + { + Id = "secops", + AuthorityRouteId = "approvals/secops", + RequireMfa = true + }); + }); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "exceptions:approve"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await validate.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); + Assert.Equal("Exception approval scope requires an MFA-capable identity provider.", context.ErrorDescription); + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); + } + + [Fact] + public async Task HandlePasswordGrant_AllowsExceptionsApprove_WhenMfaSupported() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore(), supportsMfa: true); + var clientStore = new StubClientStore(CreateClientDocument("exceptions:approve")); + var authorityOptions = CreateAuthorityOptions(opts => + { + opts.Exceptions.RoutingTemplates.Add(new 
AuthorityExceptionRoutingTemplateOptions + { + Id = "secops", + AuthorityRouteId = "approvals/secops", + RequireMfa = true + }); + }); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, NullLogger.Instance); + var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "exceptions:approve"); + var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + await validate.HandleAsync(validateContext); + Assert.False(validateContext.IsRejected); + + var handleContext = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); + await handle.HandleAsync(handleContext); + + Assert.False(handleContext.IsRejected); + Assert.NotNull(handleContext.Principal); + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Success); + } + + private static AuthorityIdentityProviderRegistry CreateRegistry(IUserCredentialStore store, bool supportsMfa = false) + { + var plugin = new StubIdentityProviderPlugin("stub", store, supportsMfa); + + var services = new ServiceCollection(); + services.AddLogging(); + services.AddSingleton(plugin); + var provider = services.BuildServiceProvider(); + + return new AuthorityIdentityProviderRegistry(provider, NullLogger.Instance); + } + + private static OpenIddictServerTransaction CreatePasswordTransaction(string username, string password, string scope = "jobs:trigger") + { + var request = new OpenIddictRequest + { + GrantType = OpenIddictConstants.GrantTypes.Password, + Username = username, + Password = password, + ClientId = "cli-app", + Scope = scope + }; + + return new OpenIddictServerTransaction + { + EndpointType = OpenIddictServerEndpointType.Token, + Options = new 
OpenIddictServerOptions(), + Request = request + }; + } + + private static StellaOpsAuthorityOptions CreateAuthorityOptions(Action? configure = null) + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; + + configure?.Invoke(options); + return options; + } + + private static AuthorityClientDocument CreateClientDocument(string allowedScopes = "jobs:trigger") + { + var document = new AuthorityClientDocument + { + ClientId = "cli-app", + ClientType = "public" + }; + + document.Properties[AuthorityClientMetadataKeys.AllowedGrantTypes] = "password"; + document.Properties[AuthorityClientMetadataKeys.AllowedScopes] = allowedScopes; + document.Properties[AuthorityClientMetadataKeys.Tenant] = "tenant-alpha"; + + return document; + } + + private sealed class StubIdentityProviderPlugin : IIdentityProviderPlugin + { + public StubIdentityProviderPlugin(string name, IUserCredentialStore store, bool supportsMfa) + { + Name = name; + Type = "stub"; + var capabilities = supportsMfa + ? 
new[] { AuthorityPluginCapabilities.Password, AuthorityPluginCapabilities.Mfa } + : new[] { AuthorityPluginCapabilities.Password }; + var manifest = new AuthorityPluginManifest( + Name: name, + Type: "stub", + Enabled: true, + AssemblyName: null, + AssemblyPath: null, + Capabilities: capabilities, + Metadata: new Dictionary(StringComparer.OrdinalIgnoreCase), + ConfigPath: $"{name}.yaml"); + Context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build()); + Credentials = store; + ClaimsEnricher = new NoopClaimsEnricher(); + Capabilities = new AuthorityIdentityProviderCapabilities(SupportsPassword: true, SupportsMfa: supportsMfa, SupportsClientProvisioning: false); + } + + public string Name { get; } + public string Type { get; } + public AuthorityPluginContext Context { get; } + public IUserCredentialStore Credentials { get; } + public IClaimsEnricher ClaimsEnricher { get; } + public IClientProvisioningStore? ClientProvisioning => null; + public AuthorityIdentityProviderCapabilities Capabilities { get; } + + public ValueTask CheckHealthAsync(CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy()); + } + + private sealed class NoopClaimsEnricher : IClaimsEnricher + { + public ValueTask EnrichAsync(ClaimsIdentity identity, AuthorityClaimsEnrichmentContext context, CancellationToken cancellationToken) + => ValueTask.CompletedTask; + } + + private sealed class SuccessCredentialStore : IUserCredentialStore + { + public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) + { + var descriptor = new AuthorityUserDescriptor("subject", username, "User", requiresPasswordReset: false); + return ValueTask.FromResult(AuthorityCredentialVerificationResult.Success(descriptor)); + } + + public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public ValueTask 
FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) + => ValueTask.FromResult(null); + } + + private sealed class FailureCredentialStore : IUserCredentialStore + { + public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.InvalidCredentials, "Invalid username or password.")); + + public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) + => ValueTask.FromResult(null); + } + + private sealed class LockoutCredentialStore : IUserCredentialStore + { + public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) + { + var retry = TimeSpan.FromMinutes(5); + var properties = new[] + { + new AuthEventProperty + { + Name = "plugin.lockout_until", + Value = ClassifiedString.Public(TimeProvider.System.GetUtcNow().Add(retry).ToString("O", CultureInfo.InvariantCulture)) + } + }; + + return ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure( + AuthorityCredentialFailureCode.LockedOut, + "Account locked.", + retry, + properties)); + } + + public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) + => ValueTask.FromResult(null); + } + + private sealed class StubClientStore : IAuthorityClientStore + { + private AuthorityClientDocument? document; + + public StubClientStore(AuthorityClientDocument document) + { + this.document = document ?? 
throw new ArgumentNullException(nameof(document)); + } + + public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + var result = document is not null && string.Equals(clientId, document.ClientId, StringComparison.Ordinal) + ? document + : null; + return ValueTask.FromResult(result); + } + + public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + this.document = document ?? throw new ArgumentNullException(nameof(document)); + return ValueTask.CompletedTask; + } + + public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + if (document is not null && string.Equals(clientId, document.ClientId, StringComparison.Ordinal)) + { + document = null; + return ValueTask.FromResult(true); + } + + return ValueTask.FromResult(false); + } + } + + private sealed class TestAuthEventSink : IAuthEventSink + { + public List Events { get; } = new(); + + public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) + { + Events.Add(record); + return ValueTask.CompletedTask; + } + } + + private sealed class TestRateLimiterMetadataAccessor : IAuthorityRateLimiterMetadataAccessor + { + private AuthorityRateLimiterMetadata? metadata; + + public AuthorityRateLimiterMetadata? GetMetadata() => metadata; + + public void SetClientId(string? clientId) + { + metadata ??= new AuthorityRateLimiterMetadata(); + metadata.ClientId = clientId; + } + + public void SetSubjectId(string? subjectId) + { + metadata ??= new AuthorityRateLimiterMetadata(); + metadata.SubjectId = subjectId; + } + + public void SetTenant(string? tenant) + { + metadata ??= new AuthorityRateLimiterMetadata(); + metadata.Tenant = tenant; + } + + public void SetProject(string? 
project) + { + metadata ??= new AuthorityRateLimiterMetadata(); + metadata.Project = project; + } + + public void SetTag(string name, string? value) + { + metadata ??= new AuthorityRateLimiterMetadata(); + metadata.SetTag(name, value); + } + + public void Clear() + { + metadata = null; + } + } + +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Permalinks/VulnPermalinkServiceTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Permalinks/VulnPermalinkServiceTests.cs index 0a8cb197..7ecaf14b 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Permalinks/VulnPermalinkServiceTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Permalinks/VulnPermalinkServiceTests.cs @@ -108,19 +108,21 @@ public sealed class VulnPermalinkServiceTests private static ServiceProvider BuildProvider(string basePath, StellaOpsAuthorityOptions options, TimeProvider timeProvider) { - var services = new ServiceCollection(); - services.AddLogging(builder => builder.SetMinimumLevel(LogLevel.Debug)); - services.AddSingleton(new TestHostEnvironment(basePath)); - services.AddSingleton(options); - services.AddSingleton>(Options.Create(options)); - services.AddSingleton(timeProvider); - services.AddStellaOpsCrypto(); - services.TryAddEnumerable(ServiceDescriptor.Singleton()); - services.AddSingleton(); - services.AddSingleton(); - - return services.BuildServiceProvider(); - } + var services = new ServiceCollection(); + services.AddLogging(builder => builder.SetMinimumLevel(LogLevel.Debug)); + services.AddSingleton(new TestHostEnvironment(basePath)); + services.AddSingleton(options); + services.AddSingleton>(Options.Create(options)); + services.AddSingleton(timeProvider); + services.AddMemoryCache(); + services.AddStellaOpsCrypto(); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + + return services.BuildServiceProvider(); + 
} private static void CreateEcPrivateKey(string path) { diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Signing/AuthorityJwksServiceTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Signing/AuthorityJwksServiceTests.cs index c3f48936..7381b7ed 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Signing/AuthorityJwksServiceTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Signing/AuthorityJwksServiceTests.cs @@ -13,6 +13,8 @@ using StellaOps.Authority.Signing; using StellaOps.Configuration; using StellaOps.Cryptography; using Xunit; +using CryptoProvider = StellaOps.Cryptography.ICryptoProvider; +using CryptoProviderRegistry = StellaOps.Cryptography.ICryptoProviderRegistry; namespace StellaOps.Authority.Tests.Signing; @@ -102,24 +104,24 @@ public sealed class AuthorityJwksServiceTests }; } - private sealed class TestRegistry : ICryptoProviderRegistry + private sealed class TestRegistry : CryptoProviderRegistry { - private readonly IReadOnlyCollection providers; + private readonly IReadOnlyCollection providers; - public TestRegistry(ICryptoProvider provider) + public TestRegistry(CryptoProvider provider) { providers = new[] { provider }; } - public IReadOnlyCollection Providers => providers; + public IReadOnlyCollection Providers => providers; - public bool TryResolve(string preferredProvider, out ICryptoProvider provider) + public bool TryResolve(string preferredProvider, out CryptoProvider provider) { provider = providers.First(); return true; } - public ICryptoProvider ResolveOrThrow(CryptoCapability capability, string algorithmId) + public CryptoProvider ResolveOrThrow(CryptoCapability capability, string algorithmId) => providers.First(); public CryptoSignerResolution ResolveSigner( @@ -133,7 +135,7 @@ public sealed class AuthorityJwksServiceTests } } - private sealed class TestCryptoProvider : ICryptoProvider + private sealed class TestCryptoProvider : CryptoProvider { 
private readonly Dictionary keys = new(StringComparer.OrdinalIgnoreCase); private int counter; @@ -197,10 +199,11 @@ public sealed class AuthorityJwksServiceTests public CryptoSigningKey ToSigningKey() { + var ecParameters = Parameters; return new CryptoSigningKey( new CryptoKeyReference(KeyId, "test"), SignatureAlgorithms.Es256, - in Parameters, + in ecParameters, DateTimeOffset.UtcNow, metadata: new Dictionary(StringComparer.OrdinalIgnoreCase) { diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/AdvisoryAi/AdvisoryAiRemoteInferenceLogRequest.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/AdvisoryAi/AdvisoryAiRemoteInferenceLogRequest.cs new file mode 100644 index 00000000..243e31f1 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/AdvisoryAi/AdvisoryAiRemoteInferenceLogRequest.cs @@ -0,0 +1,22 @@ +using System.Collections.Generic; + +namespace StellaOps.Authority.AdvisoryAi; + +internal sealed class AdvisoryAiRemoteInferenceLogRequest +{ + public string? TaskId { get; init; } + + public string? TaskType { get; init; } + + public string? Profile { get; init; } + + public string? ModelId { get; init; } + + public string? Prompt { get; init; } + + public string? ContextDigest { get; init; } + + public string? OutputHash { get; init; } + + public IDictionary? 
Metadata { get; init; } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/AdvisoryAi/AuthorityAdvisoryAiConsentEvaluator.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/AdvisoryAi/AuthorityAdvisoryAiConsentEvaluator.cs new file mode 100644 index 00000000..61a29c22 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/AdvisoryAi/AuthorityAdvisoryAiConsentEvaluator.cs @@ -0,0 +1,151 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.Extensions.Options; +using StellaOps.Configuration; + +namespace StellaOps.Authority.AdvisoryAi; + +internal interface IAuthorityAdvisoryAiConsentEvaluator +{ + AdvisoryAiRemoteInferenceSnapshot GetSnapshot(); + + bool TryNormalizeProfile(string? profile, out string normalizedProfile); + + AuthorityTenantRemoteInferenceConsentResult EvaluateTenant(string? tenantId); +} + +internal sealed class AuthorityAdvisoryAiConsentEvaluator : IAuthorityAdvisoryAiConsentEvaluator +{ + private static readonly StringComparer TenantComparer = StringComparer.Ordinal; + private static readonly StringComparer ProfileComparer = StringComparer.OrdinalIgnoreCase; + + private readonly IOptionsMonitor optionsMonitor; + + public AuthorityAdvisoryAiConsentEvaluator(IOptionsMonitor optionsMonitor) + { + this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); + } + + public AdvisoryAiRemoteInferenceSnapshot GetSnapshot() + { + var options = optionsMonitor.CurrentValue ?? throw new InvalidOperationException("Authority configuration is not available."); + var remote = options.AdvisoryAi.RemoteInference; + + IReadOnlyList allowedProfiles = remote.AllowedProfiles.Count == 0 + ? 
Array.Empty() + : remote.AllowedProfiles + .Where(static profile => !string.IsNullOrWhiteSpace(profile)) + .Select(static profile => profile.Trim()) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + return new AdvisoryAiRemoteInferenceSnapshot(remote.Enabled, remote.RequireTenantConsent, allowedProfiles); + } + + public bool TryNormalizeProfile(string? profile, out string normalizedProfile) + { + normalizedProfile = string.Empty; + + if (string.IsNullOrWhiteSpace(profile)) + { + return false; + } + + var candidate = profile.Trim(); + var snapshot = GetSnapshot(); + + if (!snapshot.Enabled) + { + return false; + } + + if (snapshot.AllowedProfiles.Count == 0) + { + normalizedProfile = candidate; + return true; + } + + foreach (var allowed in snapshot.AllowedProfiles) + { + if (ProfileComparer.Equals(candidate, allowed)) + { + normalizedProfile = allowed; + return true; + } + } + + return false; + } + + public AuthorityTenantRemoteInferenceConsentResult EvaluateTenant(string? tenantId) + { + var options = optionsMonitor.CurrentValue ?? throw new InvalidOperationException("Authority configuration is not available."); + var remote = options.AdvisoryAi.RemoteInference; + + if (!remote.Enabled) + { + return AuthorityTenantRemoteInferenceConsentResult.CreateDisabled(); + } + + var normalizedTenant = string.IsNullOrWhiteSpace(tenantId) + ? 
null + : tenantId.Trim().ToLowerInvariant(); + + if (!remote.RequireTenantConsent) + { + return AuthorityTenantRemoteInferenceConsentResult.CreateAllowed(null, null, null); + } + + if (string.IsNullOrWhiteSpace(normalizedTenant)) + { + return AuthorityTenantRemoteInferenceConsentResult.CreateDenied( + "tenant_missing", + "Token is missing tenant claim required for remote inference."); + } + + var tenant = options.Tenants.FirstOrDefault(t => TenantComparer.Equals(t.Id, normalizedTenant)); + if (tenant is null) + { + return AuthorityTenantRemoteInferenceConsentResult.CreateDenied( + "tenant_unknown", + $"Tenant '{normalizedTenant}' is not registered."); + } + + var consent = tenant.AdvisoryAi.RemoteInference; + if (!consent.ConsentGranted) + { + return AuthorityTenantRemoteInferenceConsentResult.CreateDenied( + "remote_inference_consent_required", + "Tenant must record remote inference consent before remote inference can be invoked."); + } + + return AuthorityTenantRemoteInferenceConsentResult.CreateAllowed( + consent.ConsentVersion, + consent.ConsentedAt, + consent.ConsentedBy); + } +} + +internal readonly record struct AdvisoryAiRemoteInferenceSnapshot( + bool Enabled, + bool RequireTenantConsent, + IReadOnlyList AllowedProfiles); + +internal sealed record AuthorityTenantRemoteInferenceConsentResult( + bool Allowed, + string? ErrorCode, + string? ErrorMessage, + string? ConsentVersion, + DateTimeOffset? ConsentedAt, + string? ConsentedBy) +{ + public static AuthorityTenantRemoteInferenceConsentResult CreateDisabled() => + new(false, "remote_inference_disabled", "Remote inference is disabled by configuration.", null, null, null); + + public static AuthorityTenantRemoteInferenceConsentResult CreateDenied(string errorCode, string errorMessage) => + new(false, errorCode, errorMessage, null, null, null); + + public static AuthorityTenantRemoteInferenceConsentResult CreateAllowed(string? consentVersion, DateTimeOffset? consentedAt, string? 
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net.Mime;
using System.Security.Claims;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using OpenIddict.Abstractions;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.ServerIntegration;
using StellaOps.Authority.Console;

namespace StellaOps.Authority.Airgap;

/// <summary>
/// Maps the <c>/authority/audit/airgap</c> minimal-API endpoints that list and record
/// air-gapped bundle import audit events for the tenant resolved from the tenant header.
/// </summary>
/// <remarks>
/// NOTE(review): generic type arguments in this file were reconstructed after being stripped
/// during patch extraction (e.g. <c>HashSet&lt;string&gt;</c>, <c>Task&lt;IResult&gt;</c>,
/// <c>Produces&lt;TResponse&gt;</c>); confirm against the original commit.
/// </remarks>
internal static class AirgapAuditEndpointExtensions
{
    // Statuses accepted by both the list filter and the record endpoint (case-insensitive).
    private static readonly HashSet<string> AllowedStatuses = new(StringComparer.OrdinalIgnoreCase)
    {
        "started",
        "completed",
        "failed",
        "replayed",
        "validated"
    };

    private const int MaxBundleIdLength = 256;
    private const int MaxReasonLength = 512;
    private const int MaxMetadataEntries = 16;
    private const int MaxMetadataKeyLength = 64;
    private const int MaxMetadataValueLength = 512;
    private const int DefaultPageSize = 50;
    private const int MaxPageSize = 200;

    /// <summary>Registers the airgap audit endpoints on <paramref name="app"/>.</summary>
    public static void MapAirgapAuditEndpoints(this WebApplication app)
    {
        ArgumentNullException.ThrowIfNull(app);

        var group = app.MapGroup("/authority/audit/airgap")
            .RequireAuthorization()
            .WithTags("AuthorityAirgapAudit");

        // Every endpoint in the group requires the tenant header; the filter rejects
        // requests without it before the handlers run.
        group.AddEndpointFilter(new TenantHeaderFilter());

        group.MapGet("/", GetAuditAsync)
            .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.AirgapStatusRead))
            .WithName("GetAirgapAudit")
            .WithSummary("List air-gapped bundle import audit records for the current tenant.")
            .Produces<AirgapAuditListResponse>(StatusCodes.Status200OK, MediaTypeNames.Application.Json)
            .ProducesProblem(StatusCodes.Status400BadRequest)
            .ProducesProblem(StatusCodes.Status401Unauthorized)
            .ProducesProblem(StatusCodes.Status403Forbidden);

        group.MapPost("/", RecordAuditAsync)
            .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.AirgapImport))
            .WithName("RecordAirgapAudit")
            .WithSummary("Record an audit entry for an air-gapped bundle import action.")
            .Accepts<AirgapAuditRecordRequest>(MediaTypeNames.Application.Json)
            .Produces<AirgapAuditRecordResponse>(StatusCodes.Status201Created, MediaTypeNames.Application.Json)
            .ProducesProblem(StatusCodes.Status400BadRequest)
            .ProducesProblem(StatusCodes.Status401Unauthorized)
            .ProducesProblem(StatusCodes.Status403Forbidden);
    }

    /// <summary>
    /// Lists audit entries for the caller's tenant, optionally filtered by bundle id,
    /// status, or trace id, with cursor-based paging.
    /// </summary>
    private static async Task<IResult> GetAuditAsync(
        HttpContext httpContext,
        IAuthorityAirgapAuditService auditService,
        [FromQuery(Name = "bundleId")] string? bundleId,
        [FromQuery(Name = "status")] string? status,
        [FromQuery(Name = "traceId")] string? traceId,
        [FromQuery(Name = "cursor")] string? cursor,
        [FromQuery(Name = "pageSize")] int? pageSize,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(httpContext);
        ArgumentNullException.ThrowIfNull(auditService);

        var tenant = TenantHeaderFilter.GetTenant(httpContext);
        if (string.IsNullOrWhiteSpace(tenant))
        {
            return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." });
        }

        var normalizedStatus = NormalizeStatus(status);
        if (normalizedStatus is not null && !AllowedStatuses.Contains(normalizedStatus))
        {
            return Results.BadRequest(new { error = "invalid_status", message = $"Status '{status}' is not allowed." });
        }

        var effectivePageSize = pageSize.GetValueOrDefault(DefaultPageSize);
        if (effectivePageSize <= 0 || effectivePageSize > MaxPageSize)
        {
            return Results.BadRequest(new { error = "invalid_page_size", message = $"pageSize must be between 1 and {MaxPageSize}." });
        }

        var search = new AirgapAuditSearch(
            Tenant: tenant,
            BundleId: Normalize(bundleId),
            Status: normalizedStatus,
            TraceId: Normalize(traceId),
            Cursor: Normalize(cursor),
            Limit: effectivePageSize);

        var page = await auditService.QueryAsync(search, cancellationToken).ConfigureAwait(false);
        var response = new AirgapAuditListResponse(
            page.Items.Select(MapListItem).ToArray(),
            page.NextCursor);

        return Results.Ok(response);
    }

    /// <summary>
    /// Validates and persists a single audit record; the actor identity is taken from the
    /// authenticated principal, the trace id from the current <see cref="Activity"/>.
    /// </summary>
    private static async Task<IResult> RecordAuditAsync(
        HttpContext httpContext,
        AirgapAuditRecordRequest request,
        IAuthorityAirgapAuditService auditService,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(httpContext);
        ArgumentNullException.ThrowIfNull(auditService);

        if (request is null)
        {
            return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." });
        }

        var tenant = TenantHeaderFilter.GetTenant(httpContext);
        if (string.IsNullOrWhiteSpace(tenant))
        {
            return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." });
        }

        var bundleId = Normalize(request.BundleId);
        if (bundleId is null)
        {
            return Results.BadRequest(new { error = "bundle_id_required", message = "bundleId is required." });
        }

        if (bundleId.Length > MaxBundleIdLength)
        {
            return Results.BadRequest(new { error = "bundle_id_too_long", message = $"bundleId must be {MaxBundleIdLength} characters or fewer." });
        }

        var status = NormalizeStatus(request.Status);
        if (status is null)
        {
            return Results.BadRequest(new { error = "status_required", message = "status is required." });
        }

        if (!AllowedStatuses.Contains(status))
        {
            return Results.BadRequest(new { error = "invalid_status", message = $"Status '{request.Status}' is not allowed." });
        }

        string? reason = Normalize(request.Reason);
        if (reason is not null && reason.Length > MaxReasonLength)
        {
            return Results.BadRequest(new { error = "reason_too_long", message = $"reason must be {MaxReasonLength} characters or fewer." });
        }

        // Null result signals a validation failure (too many entries, bad key, duplicate, oversized value).
        var metadata = ValidateMetadata(request.Metadata);
        if (metadata is null)
        {
            return Results.BadRequest(new { error = "invalid_metadata", message = "metadata is invalid or exceeds limits." });
        }

        var subjectId = Normalize(httpContext.User.FindFirstValue(StellaOpsClaimTypes.Subject));
        var username = Normalize(httpContext.User.FindFirstValue(OpenIddictConstants.Claims.PreferredUsername));
        var displayName = Normalize(httpContext.User.FindFirstValue(OpenIddictConstants.Claims.Name));
        var clientId = Normalize(httpContext.User.FindFirstValue(StellaOpsClaimTypes.ClientId));

        // Prefer the distributed trace id; fall back to the per-request identifier.
        var traceId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier;

        var record = new AirgapAuditRecord(
            Tenant: tenant,
            BundleId: bundleId,
            Status: status,
            Reason: reason,
            TraceId: traceId,
            SubjectId: subjectId,
            Username: username,
            DisplayName: displayName,
            ClientId: clientId,
            Metadata: metadata);

        var entry = await auditService.RecordAsync(record, cancellationToken).ConfigureAwait(false);
        var response = MapRecordResponse(entry);

        return Results.Created($"/authority/audit/airgap/{entry.Id}", response);
    }

    /// <summary>
    /// Normalizes caller-supplied metadata. Returns an empty dictionary for absent metadata,
    /// a trimmed copy when valid, or <c>null</c> when any limit is violated.
    /// </summary>
    private static IReadOnlyDictionary<string, string?>? ValidateMetadata(IReadOnlyDictionary<string, string?>? metadata)
    {
        if (metadata is null || metadata.Count == 0)
        {
            return new Dictionary<string, string?>(StringComparer.Ordinal);
        }

        if (metadata.Count > MaxMetadataEntries)
        {
            return null;
        }

        var dictionary = new Dictionary<string, string?>(metadata.Count, StringComparer.Ordinal);

        foreach (var (key, value) in metadata)
        {
            var normalizedKey = Normalize(key);
            if (normalizedKey is null || normalizedKey.Length > MaxMetadataKeyLength)
            {
                return null;
            }

            // Keys that differ only by surrounding whitespace collapse to the same
            // normalized key; treat that as a duplicate rather than silently overwriting.
            if (dictionary.ContainsKey(normalizedKey))
            {
                return null;
            }

            if (value is { Length: > MaxMetadataValueLength })
            {
                return null;
            }

            dictionary[normalizedKey] = string.IsNullOrWhiteSpace(value) ? null : value.Trim();
        }

        return dictionary;
    }

    private static AirgapAuditListItem MapListItem(AirgapAuditEntry entry)
    {
        return new AirgapAuditListItem(
            entry.Id,
            entry.Tenant,
            entry.SubjectId,
            entry.Username,
            entry.DisplayName,
            entry.ClientId,
            entry.BundleId,
            entry.Status,
            entry.Reason,
            entry.TraceId,
            entry.OccurredAt,
            entry.Metadata);
    }

    private static AirgapAuditRecordResponse MapRecordResponse(AirgapAuditEntry entry)
    {
        return new AirgapAuditRecordResponse(
            entry.Id,
            entry.Tenant,
            entry.BundleId,
            entry.Status,
            entry.Reason,
            entry.TraceId,
            entry.SubjectId,
            entry.Username,
            entry.DisplayName,
            entry.ClientId,
            entry.OccurredAt,
            entry.Metadata);
    }

    /// <summary>Trims the value; whitespace-only or null input becomes <c>null</c>.</summary>
    private static string? Normalize(string? value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return null;
        }

        return value.Trim();
    }

    /// <summary>Trims and lower-cases a status so it can be matched and stored canonically.</summary>
    private static string? NormalizeStatus(string? value)
    {
        var normalized = Normalize(value);
        return normalized?.ToLowerInvariant();
    }
}

/// <summary>Request body for <c>POST /authority/audit/airgap</c>.</summary>
internal sealed record AirgapAuditRecordRequest
{
    public required string BundleId { get; init; }

    public required string Status { get; init; }

    public string? Reason { get; init; }

    public IReadOnlyDictionary<string, string?>? Metadata { get; init; }
}

/// <summary>Response body returned after an audit entry is recorded.</summary>
internal sealed record AirgapAuditRecordResponse(
    string Id,
    string Tenant,
    string BundleId,
    string Status,
    string? Reason,
    string? TraceId,
    string? SubjectId,
    string? Username,
    string? DisplayName,
    string? ClientId,
    DateTimeOffset OccurredAt,
    IReadOnlyDictionary<string, string?> Metadata);

/// <summary>Paged list response; <see cref="NextCursor"/> is null on the last page.</summary>
internal sealed record AirgapAuditListResponse(
    IReadOnlyList<AirgapAuditListItem> Items,
    string? NextCursor);

/// <summary>One audit entry as surfaced by the list endpoint.</summary>
internal sealed record AirgapAuditListItem(
    string Id,
    string Tenant,
    string? SubjectId,
    string? Username,
    string? DisplayName,
    string? ClientId,
    string BundleId,
    string Status,
    string? Reason,
    string? TraceId,
    DateTimeOffset OccurredAt,
    IReadOnlyDictionary<string, string?> Metadata);
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Authority.Storage.Mongo.Documents;
using StellaOps.Authority.Storage.Mongo.Stores;

namespace StellaOps.Authority.Airgap;

/// <summary>
/// Application-level service for recording and querying airgap bundle import audit
/// entries on top of the Mongo-backed <c>IAuthorityAirgapAuditStore</c>.
/// </summary>
/// <remarks>
/// NOTE(review): generic type arguments were reconstructed after being stripped during
/// patch extraction (e.g. <c>ValueTask&lt;AirgapAuditEntry&gt;</c>); confirm against the
/// original commit.
/// </remarks>
internal interface IAuthorityAirgapAuditService
{
    /// <summary>Persists a new audit entry, stamping it with the current UTC time.</summary>
    ValueTask<AirgapAuditEntry> RecordAsync(AirgapAuditRecord record, CancellationToken cancellationToken);

    /// <summary>Returns a page of audit entries matching the search filters.</summary>
    ValueTask<AirgapAuditPage> QueryAsync(AirgapAuditSearch search, CancellationToken cancellationToken);
}

internal sealed class AuthorityAirgapAuditService : IAuthorityAirgapAuditService
{
    private readonly IAuthorityAirgapAuditStore store;
    private readonly TimeProvider timeProvider;

    public AuthorityAirgapAuditService(
        IAuthorityAirgapAuditStore store,
        TimeProvider timeProvider)
    {
        this.store = store ?? throw new ArgumentNullException(nameof(store));
        this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
    }

    public async ValueTask<AirgapAuditEntry> RecordAsync(AirgapAuditRecord record, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(record);

        var document = new AuthorityAirgapAuditDocument
        {
            Tenant = record.Tenant,
            SubjectId = record.SubjectId,
            Username = record.Username,
            DisplayName = record.DisplayName,
            ClientId = record.ClientId,
            BundleId = record.BundleId,
            Status = record.Status,
            Reason = record.Reason,
            TraceId = record.TraceId,
            // The service, not the caller, decides the timestamp (injected TimeProvider
            // keeps this testable and deterministic).
            OccurredAt = timeProvider.GetUtcNow()
        };

        if (record.Metadata.Count > 0)
        {
            var properties = new List<AuthorityAirgapAuditPropertyDocument>(record.Metadata.Count);
            foreach (var (name, value) in record.Metadata)
            {
                properties.Add(new AuthorityAirgapAuditPropertyDocument
                {
                    Name = name,
                    Value = value
                });
            }

            document.Properties = properties;
        }

        await store.InsertAsync(document, cancellationToken).ConfigureAwait(false);
        // Map the inserted document back so the caller sees the store-assigned id/timestamp.
        return Map(document);
    }

    public async ValueTask<AirgapAuditPage> QueryAsync(AirgapAuditSearch search, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(search);

        var query = new AuthorityAirgapAuditQuery
        {
            Tenant = search.Tenant,
            BundleId = search.BundleId,
            Status = search.Status,
            TraceId = search.TraceId,
            AfterId = search.Cursor,
            Limit = search.Limit
        };

        var result = await store.QueryAsync(query, cancellationToken).ConfigureAwait(false);
        var items = result.Items.Select(Map).ToImmutableArray();
        return new AirgapAuditPage(items, result.NextCursor);
    }

    /// <summary>Converts a persisted document to the immutable service-level entry.</summary>
    private static AirgapAuditEntry Map(AuthorityAirgapAuditDocument document)
    {
        IReadOnlyDictionary<string, string?> metadata = document.Properties is { Count: > 0 }
            ? document.Properties.ToDictionary(
                property => property.Name,
                property => property.Value,
                StringComparer.Ordinal)
            : ImmutableDictionary<string, string?>.Empty;

        return new AirgapAuditEntry(
            document.Id,
            document.Tenant,
            document.SubjectId,
            document.Username,
            document.DisplayName,
            document.ClientId,
            document.BundleId,
            document.Status,
            document.Reason,
            document.TraceId,
            document.OccurredAt,
            metadata);
    }
}

/// <summary>Validated input for <see cref="IAuthorityAirgapAuditService.RecordAsync"/>.</summary>
internal sealed record AirgapAuditRecord(
    string Tenant,
    string BundleId,
    string Status,
    string? Reason,
    string? TraceId,
    string? SubjectId,
    string? Username,
    string? DisplayName,
    string? ClientId,
    IReadOnlyDictionary<string, string?> Metadata);

/// <summary>Filter set for <see cref="IAuthorityAirgapAuditService.QueryAsync"/>.</summary>
internal sealed record AirgapAuditSearch(
    string Tenant,
    string? BundleId,
    string? Status,
    string? TraceId,
    string? Cursor,
    int Limit);

/// <summary>Immutable projection of a stored audit document.</summary>
internal sealed record AirgapAuditEntry(
    string Id,
    string Tenant,
    string? SubjectId,
    string? Username,
    string? DisplayName,
    string? ClientId,
    string BundleId,
    string Status,
    string? Reason,
    string? TraceId,
    DateTimeOffset OccurredAt,
    IReadOnlyDictionary<string, string?> Metadata);

/// <summary>One page of entries plus the cursor for the next page (null when exhausted).</summary>
internal sealed record AirgapAuditPage(
    IReadOnlyList<AirgapAuditEntry> Items,
    string? NextCursor);
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Microsoft.Net.Http.Headers;
using StellaOps.Configuration;
using StellaOps.Cryptography.Audit;

namespace StellaOps.Authority;

/// <summary>
/// Rewrites legacy OAuth paths (<c>/oauth/token</c>, <c>/oauth/introspect</c>,
/// <c>/oauth/revoke</c>) to their canonical equivalents, emits RFC 8594-style
/// <c>Deprecation</c>/<c>Sunset</c> headers, and records an audit event per legacy hit.
/// </summary>
/// <remarks>
/// NOTE(review): generic type arguments were reconstructed after being stripped during
/// patch extraction (e.g. <c>IReadOnlyDictionary&lt;PathString, PathString&gt;</c>,
/// <c>ILogger&lt;LegacyAuthDeprecationMiddleware&gt;</c>); confirm against the original commit.
/// </remarks>
internal sealed class LegacyAuthDeprecationMiddleware
{
    private const string LegacyEventType = "authority.api.legacy_endpoint";
    private const string SunsetHeaderName = "Sunset";

    // Legacy path -> canonical path, matched case-insensitively and trailing-slash-insensitively.
    private static readonly IReadOnlyDictionary<PathString, PathString> LegacyEndpointMap =
        new Dictionary<PathString, PathString>(PathStringComparer.Instance)
        {
            [new PathString("/oauth/token")] = new PathString("/token"),
            [new PathString("/oauth/introspect")] = new PathString("/introspect"),
            [new PathString("/oauth/revoke")] = new PathString("/revoke")
        };

    private readonly RequestDelegate next;
    private readonly AuthorityLegacyAuthEndpointOptions options;
    private readonly IAuthEventSink auditSink;
    private readonly TimeProvider clock;
    private readonly ILogger<LegacyAuthDeprecationMiddleware> logger;

    public LegacyAuthDeprecationMiddleware(
        RequestDelegate next,
        IOptions<StellaOpsAuthorityOptions> authorityOptions,
        IAuthEventSink auditSink,
        TimeProvider clock,
        ILogger<LegacyAuthDeprecationMiddleware> logger)
    {
        this.next = next ?? throw new ArgumentNullException(nameof(next));
        if (authorityOptions is null)
        {
            throw new ArgumentNullException(nameof(authorityOptions));
        }

        options = authorityOptions.Value.ApiLifecycle.LegacyAuth ??
            throw new InvalidOperationException("Authority legacy auth endpoint options are not configured.");
        this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink));
        this.clock = clock ?? throw new ArgumentNullException(nameof(clock));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task InvokeAsync(HttpContext context)
    {
        ArgumentNullException.ThrowIfNull(context);

        if (!options.Enabled)
        {
            await next(context).ConfigureAwait(false);
            return;
        }

        if (!TryResolveLegacyPath(context.Request.Path, out var canonicalPath))
        {
            await next(context).ConfigureAwait(false);
            return;
        }

        var originalPath = context.Request.Path;
        // Rewrite in place so downstream routing serves the canonical endpoint.
        context.Request.Path = canonicalPath;

        logger.LogInformation(
            "Legacy Authority endpoint {OriginalPath} invoked; routing to {CanonicalPath} and emitting deprecation headers.",
            originalPath,
            canonicalPath);

        // Headers must be set before the response starts.
        AppendDeprecationHeaders(context.Response);

        await next(context).ConfigureAwait(false);

        // Audit after the pipeline so the final status code can be captured.
        await EmitAuditAsync(context, originalPath, canonicalPath).ConfigureAwait(false);
    }

    private static bool TryResolveLegacyPath(PathString path, out PathString canonicalPath)
    {
        if (LegacyEndpointMap.TryGetValue(Normalize(path), out canonicalPath))
        {
            return true;
        }

        canonicalPath = PathString.Empty;
        return false;
    }

    /// <summary>Lower-cases and strips trailing slashes so lookups are canonical.</summary>
    private static PathString Normalize(PathString value)
    {
        if (!value.HasValue)
        {
            return PathString.Empty;
        }

        var trimmed = value.Value!.TrimEnd('/');
        return new PathString(trimmed.Length == 0 ? "/" : trimmed.ToLowerInvariant());
    }

    private void AppendDeprecationHeaders(HttpResponse response)
    {
        if (response.HasStarted)
        {
            return;
        }

        var deprecation = FormatHttpDate(options.DeprecationDate);
        response.Headers["Deprecation"] = deprecation;

        var sunset = FormatHttpDate(options.SunsetDate);
        response.Headers[SunsetHeaderName] = sunset;

        if (!string.IsNullOrWhiteSpace(options.DocumentationUrl))
        {
            var linkValue = $"<{options.DocumentationUrl}>; rel=\"sunset\"";
            response.Headers.Append(HeaderNames.Link, linkValue);
        }

        var warning = $"299 - \"Legacy Authority endpoint will be removed after {sunset}. Migrate to canonical endpoints before the sunset date.\"";
        response.Headers[HeaderNames.Warning] = warning;
    }

    private async Task EmitAuditAsync(HttpContext context, PathString originalPath, PathString canonicalPath)
    {
        // Auditing is best-effort: a sink failure must never fail the request.
        try
        {
            var correlation = Activity.Current?.TraceId.ToString() ?? context.TraceIdentifier;

            var network = BuildNetwork(context);

            var record = new AuthEventRecord
            {
                EventType = LegacyEventType,
                OccurredAt = clock.GetUtcNow(),
                CorrelationId = correlation,
                Outcome = AuthEventOutcome.Success,
                Reason = null,
                Subject = null,
                Client = null,
                Tenant = ClassifiedString.Empty,
                Project = ClassifiedString.Empty,
                Scopes = Array.Empty<string>(),
                Network = network,
                Properties = BuildProperties(
                    ("legacy.endpoint.original", originalPath.Value),
                    ("legacy.endpoint.canonical", canonicalPath.Value),
                    ("legacy.deprecation_at", options.DeprecationDate.ToString("O", CultureInfo.InvariantCulture)),
                    ("legacy.sunset_at", options.SunsetDate.ToString("O", CultureInfo.InvariantCulture)),
                    ("http.status_code", context.Response.StatusCode.ToString(CultureInfo.InvariantCulture)))
            };

            await auditSink.WriteAsync(record, context.RequestAborted).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            logger.LogWarning(ex, "Failed to emit legacy auth endpoint audit event.");
        }
    }

    /// <summary>Captures caller network details; returns null when nothing is known.</summary>
    private static AuthEventNetwork? BuildNetwork(HttpContext context)
    {
        var remote = context.Connection.RemoteIpAddress?.ToString();
        var forwarded = context.Request.Headers["X-Forwarded-For"].ToString();
        var userAgent = context.Request.Headers.UserAgent.ToString();

        if (string.IsNullOrWhiteSpace(remote) &&
            string.IsNullOrWhiteSpace(forwarded) &&
            string.IsNullOrWhiteSpace(userAgent))
        {
            return null;
        }

        return new AuthEventNetwork
        {
            // Network identifiers are classified as personal data for audit redaction.
            RemoteAddress = ClassifiedString.Personal(Normalize(remote)),
            ForwardedFor = ClassifiedString.Personal(Normalize(forwarded)),
            UserAgent = ClassifiedString.Personal(Normalize(userAgent))
        };
    }

    private static string? Normalize(string? value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return null;
        }

        var trimmed = value.Trim();
        return trimmed.Length == 0 ? null : trimmed;
    }

    private static IReadOnlyList<AuthEventProperty> BuildProperties(params (string Name, string? Value)[] entries)
    {
        if (entries.Length == 0)
        {
            return Array.Empty<AuthEventProperty>();
        }

        var list = new List<AuthEventProperty>(entries.Length);
        foreach (var (name, value) in entries)
        {
            if (string.IsNullOrWhiteSpace(name))
            {
                continue;
            }

            list.Add(new AuthEventProperty
            {
                Name = name,
                Value = string.IsNullOrWhiteSpace(value)
                    ? ClassifiedString.Empty
                    : ClassifiedString.Public(value)
            });
        }

        return list.Count == 0 ? Array.Empty<AuthEventProperty>() : list;
    }

    /// <summary>RFC 7231 HTTP-date ("r" format) as required by Deprecation/Sunset headers.</summary>
    private static string FormatHttpDate(DateTimeOffset value)
    {
        return value.UtcDateTime.ToString("r", CultureInfo.InvariantCulture);
    }

    /// <summary>Equality for PathString keyed on the normalized (lower-case, no trailing slash) form.</summary>
    private sealed class PathStringComparer : IEqualityComparer<PathString>
    {
        public static readonly PathStringComparer Instance = new();

        public bool Equals(PathString x, PathString y)
        {
            return string.Equals(Normalize(x).Value, Normalize(y).Value, StringComparison.Ordinal);
        }

        public int GetHashCode(PathString obj)
        {
            return Normalize(obj).Value?.GetHashCode(StringComparison.Ordinal) ?? 0;
        }
    }
}

/// <summary>Registers <see cref="LegacyAuthDeprecationMiddleware"/> in the pipeline.</summary>
internal static class LegacyAuthDeprecationExtensions
{
    public static IApplicationBuilder UseLegacyAuthDeprecation(this IApplicationBuilder app)
    {
        ArgumentNullException.ThrowIfNull(app);
        return app.UseMiddleware<LegacyAuthDeprecationMiddleware>();
    }
}
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;

namespace StellaOps.Authority.Notifications.Ack;

/// <summary>
/// DSSE-style envelope carrying a base64url-encoded ack-token payload and its signatures.
/// </summary>
/// <remarks>
/// NOTE(review): generic type arguments were reconstructed after being stripped during
/// patch extraction (e.g. <c>List&lt;AckTokenSignature&gt;</c>); confirm against the
/// original commit.
/// </remarks>
internal sealed class AckTokenEnvelope
{
    [JsonPropertyName("payloadType")]
    public string? PayloadType { get; set; }

    [JsonPropertyName("payload")]
    public string? Payload { get; set; }

    [JsonPropertyName("signatures")]
    public List<AckTokenSignature> Signatures { get; set; } = new();
}

/// <summary>One detached signature over the envelope's pre-authentication encoding.</summary>
internal sealed class AckTokenSignature
{
    [JsonPropertyName("keyid")]
    public string? KeyId { get; set; }

    [JsonPropertyName("sig")]
    public string? Signature { get; set; }

    // Non-standard extension to the DSSE signature object; omitted when null.
    [JsonPropertyName("algorithm")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Algorithm { get; set; }
}

/// <summary>Input for issuing an ack token bound to a specific delivery and webhook.</summary>
internal sealed class AckTokenIssueRequest
{
    public string Tenant { get; set; } = string.Empty;
    public string NotificationId { get; set; } = string.Empty;
    public string DeliveryId { get; set; } = string.Empty;
    public string Channel { get; set; } = string.Empty;
    public string WebhookUrl { get; set; } = string.Empty;
    public string[]? Actions { get; set; }
    public bool AllowEscalation { get; set; }
    public TimeSpan? Lifetime { get; set; }
    public string? Nonce { get; set; }
    public Dictionary<string, string>? Metadata { get; set; }
}

/// <summary>Input for verifying an ack token for a given action.</summary>
internal sealed class AckTokenVerifyRequest
{
    public AckTokenEnvelope? Envelope { get; set; }
    public string Action { get; set; } = "ack";
    public string? ExpectedTenant { get; set; }
}

internal sealed record AckTokenIssueResult(
    AckTokenEnvelope Envelope,
    AckTokenPayload Payload,
    string KeyId);

internal sealed record AckTokenVerificationResult(
    AckTokenPayload Payload,
    string KeyId,
    bool SignatureValid);

internal sealed record AckTokenIssueResponse(
    string PayloadType,
    string Payload,
    IReadOnlyCollection<AckTokenSignatureResponse> Signatures,
    DateTimeOffset IssuedAt,
    DateTimeOffset ExpiresAt,
    string Nonce);

internal sealed record AckTokenSignatureResponse(
    string KeyId,
    string Signature,
    string Algorithm);

internal sealed record AckTokenVerifyResponse(
    string Tenant,
    string NotificationId,
    string DeliveryId,
    string Channel,
    IReadOnlyCollection<string> Actions,
    bool EscalationAllowed,
    DateTimeOffset ExpiresAt,
    string Nonce);
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text.Json;

namespace StellaOps.Authority.Notifications.Ack;

/// <summary>
/// Immutable ack-token payload with a deterministic (canonical) JSON serialization:
/// fixed property order, sorted actions, and sorted metadata keys, so the signed bytes
/// are reproducible on both issue and verify.
/// </summary>
/// <remarks>
/// NOTE(review): generic type arguments were reconstructed after being stripped during
/// patch extraction (e.g. <c>ArrayBufferWriter&lt;byte&gt;</c>,
/// <c>IReadOnlyList&lt;string&gt;</c>); confirm against the original commit.
/// </remarks>
internal sealed class AckTokenPayload
{
    private static readonly JsonWriterOptions WriterOptions = new()
    {
        Indented = false,
        SkipValidation = false
    };

    public AckTokenPayload(
        string version,
        string issuer,
        string tenant,
        string notificationId,
        string deliveryId,
        string channel,
        string webhook,
        string nonce,
        DateTimeOffset issuedAt,
        DateTimeOffset expiresAt,
        IReadOnlyList<string> actions,
        bool escalationAllowed,
        string? escalationScope,
        IReadOnlyDictionary<string, string>? metadata)
    {
        Version = version;
        Issuer = issuer;
        Tenant = tenant;
        NotificationId = notificationId;
        DeliveryId = deliveryId;
        Channel = channel;
        Webhook = webhook;
        Nonce = nonce;
        IssuedAt = issuedAt;
        ExpiresAt = expiresAt;
        Actions = actions;
        EscalationAllowed = escalationAllowed;
        EscalationScope = escalationScope;
        Metadata = metadata;
    }

    public string Version { get; }

    public string Issuer { get; }

    public string Tenant { get; }

    public string NotificationId { get; }

    public string DeliveryId { get; }

    public string Channel { get; }

    public string Webhook { get; }

    public string Nonce { get; }

    public DateTimeOffset IssuedAt { get; }

    public DateTimeOffset ExpiresAt { get; }

    public IReadOnlyList<string> Actions { get; }

    public bool EscalationAllowed { get; }

    public string? EscalationScope { get; }

    public IReadOnlyDictionary<string, string>? Metadata { get; }

    /// <summary>Serializes the payload to its canonical UTF-8 JSON bytes.</summary>
    public byte[] ToCanonicalJson()
    {
        var buffer = new ArrayBufferWriter<byte>();
        using (var writer = new Utf8JsonWriter(buffer, WriterOptions))
        {
            WriteCanonicalJson(writer);
        }

        return buffer.WrittenSpan.ToArray();
    }

    /// <summary>
    /// Writes the canonical form: fixed key order, timestamps as round-trip ("O") UTC,
    /// actions sorted ordinally, optional fields omitted when absent.
    /// </summary>
    public void WriteCanonicalJson(Utf8JsonWriter writer)
    {
        ArgumentNullException.ThrowIfNull(writer);

        writer.WriteStartObject();
        writer.WriteString("version", Version);
        writer.WriteString("issuer", Issuer);
        writer.WriteString("tenant", Tenant);
        writer.WriteString("notificationId", NotificationId);
        writer.WriteString("deliveryId", DeliveryId);
        writer.WriteString("channel", Channel);
        writer.WriteString("webhook", Webhook);
        writer.WriteString("nonce", Nonce);
        writer.WriteString("issuedAt", IssuedAt.UtcDateTime.ToString("O"));
        writer.WriteString("expiresAt", ExpiresAt.UtcDateTime.ToString("O"));

        writer.WritePropertyName("actions");
        writer.WriteStartArray();
        // Ordinal sort keeps the signed bytes stable regardless of input order.
        foreach (var action in Actions.OrderBy(static a => a, StringComparer.Ordinal))
        {
            writer.WriteStringValue(action);
        }
        writer.WriteEndArray();

        writer.WriteBoolean("escalationAllowed", EscalationAllowed);

        if (!string.IsNullOrWhiteSpace(EscalationScope))
        {
            writer.WriteString("escalationScope", EscalationScope);
        }

        if (Metadata is { Count: > 0 })
        {
            writer.WritePropertyName("metadata");
            writer.WriteStartObject();
            foreach (var entry in Metadata.OrderBy(static pair => pair.Key, StringComparer.Ordinal))
            {
                writer.WriteString(entry.Key, entry.Value);
            }
            writer.WriteEndObject();
        }

        writer.WriteEndObject();
    }

    /// <summary>
    /// Parses and validates canonical payload bytes. Throws
    /// <see cref="InvalidOperationException"/> on any missing/invalid required field.
    /// Actions are trimmed and lower-cased; at least one action is required.
    /// </summary>
    public static AckTokenPayload Parse(ReadOnlySpan<byte> json)
    {
        using var document = JsonDocument.Parse(json.ToArray());
        var root = document.RootElement;

        string RequireString(string property)
        {
            if (!root.TryGetProperty(property, out var element) || element.ValueKind != JsonValueKind.String)
            {
                throw new InvalidOperationException($"Ack token payload is missing required property '{property}'.");
            }

            var value = element.GetString();
            if (string.IsNullOrWhiteSpace(value))
            {
                throw new InvalidOperationException($"Ack token payload property '{property}' cannot be empty.");
            }

            return value!;
        }

        var version = RequireString("version");
        var issuer = RequireString("issuer");
        var tenant = RequireString("tenant");
        var notificationId = RequireString("notificationId");
        var deliveryId = RequireString("deliveryId");
        var channel = RequireString("channel");
        var webhook = RequireString("webhook");
        var nonce = RequireString("nonce");

        var issuedAt = ParseTimestamp(root, "issuedAt");
        var expiresAt = ParseTimestamp(root, "expiresAt");

        if (!root.TryGetProperty("actions", out var actionsElement) || actionsElement.ValueKind != JsonValueKind.Array)
        {
            throw new InvalidOperationException("Ack token payload must contain an 'actions' array.");
        }

        var actions = new List<string>();
        foreach (var item in actionsElement.EnumerateArray())
        {
            if (item.ValueKind != JsonValueKind.String)
            {
                throw new InvalidOperationException("Ack token payload actions must be strings.");
            }

            var value = item.GetString();
            if (!string.IsNullOrWhiteSpace(value))
            {
                actions.Add(value.Trim().ToLowerInvariant());
            }
        }

        if (actions.Count == 0)
        {
            throw new InvalidOperationException("Ack token payload must contain at least one action.");
        }

        var escalationAllowed = root.TryGetProperty("escalationAllowed", out var flagElement) &&
            flagElement.ValueKind == JsonValueKind.True;

        string? escalationScope = null;
        if (root.TryGetProperty("escalationScope", out var scopeElement) && scopeElement.ValueKind == JsonValueKind.String)
        {
            var value = scopeElement.GetString();
            if (!string.IsNullOrWhiteSpace(value))
            {
                escalationScope = value.Trim();
            }
        }

        // Only string-valued metadata entries survive parsing; anything else is dropped.
        IReadOnlyDictionary<string, string>? metadata = null;
        if (root.TryGetProperty("metadata", out var metadataElement) && metadataElement.ValueKind == JsonValueKind.Object)
        {
            var dictionary = new Dictionary<string, string>(StringComparer.Ordinal);
            foreach (var property in metadataElement.EnumerateObject())
            {
                if (property.Value.ValueKind == JsonValueKind.String)
                {
                    dictionary[property.Name] = property.Value.GetString() ?? string.Empty;
                }
            }

            if (dictionary.Count > 0)
            {
                metadata = dictionary;
            }
        }

        return new AckTokenPayload(
            version,
            issuer,
            tenant,
            notificationId,
            deliveryId,
            channel,
            webhook,
            nonce,
            issuedAt,
            expiresAt,
            actions,
            escalationAllowed,
            escalationScope,
            metadata);
    }

    private static DateTimeOffset ParseTimestamp(JsonElement root, string property)
    {
        var value = root.TryGetProperty(property, out var element) && element.ValueKind == JsonValueKind.String
            ? element.GetString()
            : null;

        // Round-trip ("O") values are culture-invariant; parse with the invariant culture
        // so host locale can never affect token validation.
        if (string.IsNullOrWhiteSpace(value) ||
            !DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, System.Globalization.DateTimeStyles.RoundtripKind, out var timestamp))
        {
            throw new InvalidOperationException($"Ack token payload property '{property}' is missing or invalid.");
        }

        return timestamp;
    }
}
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;

namespace StellaOps.Authority.Notifications.Ack;

/// <summary>
/// Builds the pre-authentication encoding (PAE) that ack-token signatures cover.
/// </summary>
/// <remarks>
/// NOTE(review): this layout ("DSSEv1" prefix, little-endian int64 field count, then each
/// field as a little-endian int64 length + bytes) matches the early binary draft of the
/// DSSE PAE, not the final DSSE v1 spec (which uses ASCII decimal lengths separated by
/// spaces). Signer and verifier inside this codebase agree, but interop with standard
/// DSSE tooling would require the ASCII form — confirm intent before changing.
/// Generic type arguments (<c>ReadOnlySpan&lt;byte&gt;</c>, <c>Span&lt;byte&gt;</c>) were
/// reconstructed after being stripped during patch extraction.
/// </remarks>
internal static class AckTokenSigningUtilities
{
    private static readonly byte[] Prefix = Encoding.ASCII.GetBytes("DSSEv1");

    /// <summary>
    /// Computes PAE(payloadType, payload) over exactly two fields.
    /// </summary>
    /// <param name="payloadType">Envelope payload type; must be non-empty.</param>
    /// <param name="payload">Raw payload bytes (canonical JSON).</param>
    /// <returns>The byte sequence to sign/verify.</returns>
    public static byte[] CreatePreAuthenticationEncoding(string payloadType, ReadOnlySpan<byte> payload)
    {
        ArgumentException.ThrowIfNullOrEmpty(payloadType);

        var payloadTypeBytes = Encoding.UTF8.GetBytes(payloadType);
        // Exact capacity: prefix + field count + (length + bytes) per field.
        var totalLength = Prefix.Length +
            sizeof(long) +                            // number of fields
            sizeof(long) + payloadTypeBytes.Length +  // field 1: payload type
            sizeof(long) + payload.Length;            // field 2: payload

        using var stream = new MemoryStream(totalLength);
        stream.Write(Prefix);
        WriteInt64(stream, 2);
        WriteField(stream, payloadTypeBytes);
        WriteField(stream, payload);
        return stream.ToArray();
    }

    private static void WriteField(Stream stream, ReadOnlySpan<byte> value)
    {
        WriteInt64(stream, value.Length);
        stream.Write(value);
    }

    private static void WriteInt64(Stream stream, long value)
    {
        Span<byte> buffer = stackalloc byte[8];
        BinaryPrimitives.WriteInt64LittleEndian(buffer, value);
        stream.Write(buffer);
    }
}
a/src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AuthorityAckTokenIssuer.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AuthorityAckTokenIssuer.cs
new file mode 100644
index 00000000..11fdcfe8
--- /dev/null
+++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AuthorityAckTokenIssuer.cs
@@ -0,0 +1,206 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Configuration;

namespace StellaOps.Authority.Notifications.Ack;

/// <summary>
/// Issues signed acknowledgement (ack) tokens for notification webhook deliveries.
/// The canonical JSON payload is signed over a pre-authentication encoding (PAE)
/// produced by <c>AckTokenSigningUtilities</c>, and the result is wrapped in an
/// <c>AckTokenEnvelope</c> carrying base64url payload and signature.
/// </summary>
internal sealed class AuthorityAckTokenIssuer
{
    private readonly AuthorityAckTokenKeyManager keyManager;
    private readonly AuthorityWebhookAllowlistEvaluator allowlistEvaluator;
    private readonly StellaOpsAuthorityOptions authorityOptions;
    private readonly TimeProvider timeProvider;
    private readonly ILogger<AuthorityAckTokenIssuer> logger;

    public AuthorityAckTokenIssuer(
        AuthorityAckTokenKeyManager keyManager,
        AuthorityWebhookAllowlistEvaluator allowlistEvaluator,
        IOptions<StellaOpsAuthorityOptions> authorityOptions,
        TimeProvider timeProvider,
        ILogger<AuthorityAckTokenIssuer> logger)
    {
        this.keyManager = keyManager ?? throw new ArgumentNullException(nameof(keyManager));
        this.allowlistEvaluator = allowlistEvaluator ?? throw new ArgumentNullException(nameof(allowlistEvaluator));
        this.authorityOptions = authorityOptions?.Value ?? throw new ArgumentNullException(nameof(authorityOptions));
        this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Validates the request against notification/ack configuration and produces a signed ack token.
    /// </summary>
    /// <param name="request">Issue request (tenant, notification/delivery ids, channel, webhook, optional lifetime/actions/metadata).</param>
    /// <param name="requesterHasEscalateScope">Whether the caller holds the notify.escalate scope; required for escalation when RequireAdminScope is set.</param>
    /// <param name="cancellationToken">Cancellation token flowed to the signing operation.</param>
    /// <exception cref="InvalidOperationException">Thrown for any configuration or validation failure (disabled feature, bad URL, lifetime out of range, disallowed escalation).</exception>
    public async Task<AckTokenIssueResult> IssueAsync(
        AckTokenIssueRequest request,
        bool requesterHasEscalateScope,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);

        var ackOptions = authorityOptions.Notifications.AckTokens;
        if (!ackOptions.Enabled)
        {
            throw new InvalidOperationException("Ack tokens are disabled. Enable notifications.ackTokens to issue tokens.");
        }

        var issuer = authorityOptions.Issuer ?? throw new InvalidOperationException("Authority issuer configuration is required.");

        var tenant = Require(request.Tenant, nameof(request.Tenant));
        var notificationId = Require(request.NotificationId, nameof(request.NotificationId));
        var deliveryId = Require(request.DeliveryId, nameof(request.DeliveryId));
        var channel = Require(request.Channel, nameof(request.Channel));
        var webhookUrl = Require(request.WebhookUrl, nameof(request.WebhookUrl));
        // A caller-supplied nonce is trimmed and reused; otherwise a fresh random nonce is minted.
        var normalizedNonce = string.IsNullOrWhiteSpace(request.Nonce)
            ? Guid.NewGuid().ToString("N")
            : request.Nonce!.Trim();

        if (!Uri.TryCreate(webhookUrl, UriKind.Absolute, out var webhookUri))
        {
            throw new InvalidOperationException("Webhook URL must be an absolute URI.");
        }

        // Enforce the notifications.webhooks host/scheme allowlist before signing anything.
        allowlistEvaluator.EnsureAllowed(webhookUri);

        var lifetime = request.Lifetime ?? ackOptions.DefaultLifetime;
        if (lifetime <= TimeSpan.Zero)
        {
            throw new InvalidOperationException("Requested lifetime must be greater than zero.");
        }

        if (lifetime > ackOptions.MaxLifetime)
        {
            throw new InvalidOperationException($"Requested lifetime exceeds the configured maximum ({ackOptions.MaxLifetime}).");
        }

        // Actions are normalised (trimmed, lowercase, de-duplicated, sorted) and "ack" is always guaranteed first.
        var normalizedActions = NormalizeActions(request.Actions);
        if (normalizedActions.Count == 0)
        {
            normalizedActions.Add("ack");
        }

        if (!normalizedActions.Contains("ack", StringComparer.Ordinal))
        {
            normalizedActions.Insert(0, "ack");
        }

        var escalationOptions = authorityOptions.Notifications.Escalation;
        var escalationAllowed = request.AllowEscalation;
        if (escalationAllowed)
        {
            if (string.IsNullOrWhiteSpace(escalationOptions.Scope))
            {
                throw new InvalidOperationException("Escalation scope configuration is missing.");
            }

            if (!normalizedActions.Contains("escalate", StringComparer.Ordinal))
            {
                normalizedActions.Add("escalate");
            }

            if (escalationOptions.RequireAdminScope && !requesterHasEscalateScope)
            {
                throw new InvalidOperationException("Escalation is not permitted without the notify.escalate scope.");
            }
        }

        // Metadata entries with blank keys are dropped; an empty result collapses to null.
        IReadOnlyDictionary<string, string>? metadata = null;
        if (request.Metadata is { Count: > 0 })
        {
            metadata = request.Metadata
                .Where(static pair => !string.IsNullOrWhiteSpace(pair.Key))
                .ToDictionary(
                    static pair => pair.Key.Trim(),
                    static pair => pair.Value ?? string.Empty,
                    StringComparer.Ordinal);

            if (metadata.Count == 0)
            {
                metadata = null;
            }
        }

        var issuedAt = timeProvider.GetUtcNow();
        var expiresAt = issuedAt.Add(lifetime);
        var payload = new AckTokenPayload(
            version: "1.0",
            issuer: issuer.ToString(),
            tenant: tenant,
            notificationId: notificationId,
            deliveryId: deliveryId,
            channel: channel,
            webhook: webhookUri.ToString(),
            nonce: normalizedNonce,
            issuedAt: issuedAt,
            expiresAt: expiresAt,
            actions: normalizedActions,
            escalationAllowed: escalationAllowed,
            escalationScope: escalationAllowed ? escalationOptions.Scope : null,
            metadata: metadata);

        var canonicalPayload = payload.ToCanonicalJson();
        var pae = AckTokenSigningUtilities.CreatePreAuthenticationEncoding(ackOptions.PayloadType, canonicalPayload);

        var signer = keyManager.GetActiveSigner();
        var signature = await signer.Signer.SignAsync(pae, cancellationToken).ConfigureAwait(false);

        var envelope = new AckTokenEnvelope
        {
            PayloadType = ackOptions.PayloadType,
            Payload = Base64UrlEncoder.Encode(canonicalPayload),
            Signatures =
            {
                new AckTokenSignature
                {
                    KeyId = signer.Signer.KeyId,
                    Signature = Base64UrlEncoder.Encode(signature),
                    Algorithm = signer.Signer.AlgorithmId
                }
            }
        };

        logger.LogInformation("Issued ack token for notification {NotificationId} (tenant {Tenant}).", notificationId, tenant);
        return new AckTokenIssueResult(envelope, payload, signer.Signer.KeyId);
    }

    /// <summary>Trims a required string property, rejecting null/blank values.</summary>
    private static string Require(string value, string propertyName)
    {
        ArgumentNullException.ThrowIfNull(value, propertyName);
        var trimmed = value.Trim();
        if (string.IsNullOrWhiteSpace(trimmed))
        {
            throw new InvalidOperationException($"Property '{propertyName}' is required.");
        }

        return trimmed;
    }

    /// <summary>Trims, lowercases, de-duplicates, and ordinally sorts the requested action names.</summary>
    private static List<string> NormalizeActions(IEnumerable<string>? actions)
    {
        if (actions is null)
        {
            return new List<string>();
        }

        var distinct = new HashSet<string>(StringComparer.Ordinal);
        foreach (var action in actions)
        {
            if (string.IsNullOrWhiteSpace(action))
            {
                continue;
            }

            var normalized = action.Trim().ToLowerInvariant();
            if (normalized.Length == 0)
            {
                continue;
            }

            distinct.Add(normalized);
        }

        return distinct.OrderBy(static value => value, StringComparer.Ordinal).ToList();
    }
}
diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AuthorityAckTokenKeyManager.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AuthorityAckTokenKeyManager.cs
new file mode 100644
index 00000000..086e276e
--- /dev/null
+++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AuthorityAckTokenKeyManager.cs
@@ -0,0 +1,397 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Signing;
using StellaOps.Configuration;
using StellaOps.Cryptography;

namespace StellaOps.Authority.Notifications.Ack;

/// <summary>
/// Owns the active and retired signing keys used for ack tokens: loads them from
/// configuration at startup, supports rotation, and resolves signers for issue/verify.
/// All mutating operations are serialised on a private lock.
/// </summary>
internal sealed class AuthorityAckTokenKeyManager
{
    private readonly object syncRoot = new();
    private readonly ICryptoProviderRegistry registry;
    private readonly IReadOnlyList<IAuthoritySigningKeySource> keySources;
    private readonly StellaOpsAuthorityOptions authorityOptions;
    private readonly string basePath;
    private readonly ILogger<AuthorityAckTokenKeyManager> logger;
    private readonly AuthorityJwksService jwksService;
    // Retired keys remain resolvable so previously-issued tokens can still be verified.
    private readonly Dictionary<string, RegisteredAckKey> retiredKeys = new(StringComparer.OrdinalIgnoreCase);
    private RegisteredAckKey? activeKey;

    public AuthorityAckTokenKeyManager(
        ICryptoProviderRegistry registry,
        IEnumerable<IAuthoritySigningKeySource> keySources,
        IOptions<StellaOpsAuthorityOptions> authorityOptions,
        IHostEnvironment environment,
        ILogger<AuthorityAckTokenKeyManager> logger,
        AuthorityJwksService jwksService)
    {
        this.registry = registry ??
throw new ArgumentNullException(nameof(registry)); + this.keySources = (keySources ?? throw new ArgumentNullException(nameof(keySources))).ToArray(); + if (this.keySources.Count == 0) + { + throw new InvalidOperationException("At least one Authority signing key source must be registered."); + } + + this.authorityOptions = authorityOptions?.Value ?? throw new ArgumentNullException(nameof(authorityOptions)); + basePath = environment?.ContentRootPath ?? throw new ArgumentNullException(nameof(environment)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + this.jwksService = jwksService ?? throw new ArgumentNullException(nameof(jwksService)); + + LoadInitialKeys(); + } + + public SigningRotationResult Rotate(SigningRotationRequest request) + { + ArgumentNullException.ThrowIfNull(request); + + lock (syncRoot) + { + var ackOptions = authorityOptions.Notifications.AckTokens; + if (!ackOptions.Enabled) + { + throw new InvalidOperationException("Ack token signing is disabled. Enable notifications.ackTokens before rotating keys."); + } + + var keyId = NormaliseKeyId(request.KeyId); + var location = NormaliseLocation(request.Location); + var algorithm = NormaliseAlgorithm(string.IsNullOrWhiteSpace(request.Algorithm) ? ackOptions.Algorithm : request.Algorithm); + var source = NormaliseSource(string.IsNullOrWhiteSpace(request.Source) ? ackOptions.KeySource : request.Source); + var providerName = NormaliseProvider(request.Provider ?? 
ackOptions.Provider); + + var metadata = BuildMetadata(AuthoritySigningKeyStatus.Active, ackOptions.KeyUse, request.Metadata); + + var provider = ResolveProvider(providerName, algorithm); + var loader = ResolveSource(source); + var loadRequest = new AuthoritySigningKeyRequest( + keyId, + algorithm, + source, + location, + AuthoritySigningKeyStatus.Active, + basePath, + provider.Name, + additionalMetadata: metadata); + + var newKey = loader.Load(loadRequest); + provider.UpsertSigningKey(newKey); + + if (retiredKeys.Remove(keyId)) + { + logger.LogInformation("Promoted retired ack token key {KeyId} to active status.", keyId); + } + + string? previousKeyId = null; + if (activeKey is not null) + { + previousKeyId = activeKey.Key.Reference.KeyId; + if (!string.Equals(previousKeyId, keyId, StringComparison.OrdinalIgnoreCase)) + { + RetireCurrentActive(); + } + } + + activeKey = new RegisteredAckKey(newKey, provider.Name, source, location); + ackOptions.ActiveKeyId = keyId; + ackOptions.KeyPath = location; + ackOptions.KeySource = source; + ackOptions.Provider = provider.Name; + RemoveAdditionalOption(keyId); + + logger.LogInformation("Ack token signing key rotated. 
Active key is now {KeyId} via provider {Provider}.", keyId, provider.Name); + jwksService.Invalidate(); + + return new SigningRotationResult( + keyId, + provider.Name, + source, + location, + previousKeyId, + retiredKeys.Keys.ToArray()); + } + } + + public CryptoSignerResolution GetActiveSigner() + { + lock (syncRoot) + { + if (activeKey is null) + { + throw new InvalidOperationException("Ack token signing is not configured."); + } + + return ResolveSigner(activeKey); + } + } + + public bool TryResolveSigner(string keyId, out CryptoSignerResolution resolution) + { + ArgumentException.ThrowIfNullOrEmpty(keyId); + + lock (syncRoot) + { + if (activeKey is not null && string.Equals(activeKey.Key.Reference.KeyId, keyId, StringComparison.OrdinalIgnoreCase)) + { + resolution = ResolveSigner(activeKey); + return true; + } + + if (retiredKeys.TryGetValue(keyId, out var retired)) + { + resolution = ResolveSigner(retired); + return true; + } + } + + resolution = default!; + return false; + } + + private void LoadInitialKeys() + { + var ackOptions = authorityOptions.Notifications.AckTokens; + if (!ackOptions.Enabled) + { + logger.LogInformation("Ack token signing is disabled; issuance and verification endpoints will reject requests."); + return; + } + + var algorithm = NormaliseAlgorithm(ackOptions.Algorithm); + var source = NormaliseSource(ackOptions.KeySource); + var metadata = BuildMetadata(AuthoritySigningKeyStatus.Active, ackOptions.KeyUse, null); + + var activeRequest = new AuthoritySigningKeyRequest( + NormaliseKeyId(ackOptions.ActiveKeyId), + algorithm, + source, + NormaliseLocation(ackOptions.KeyPath), + AuthoritySigningKeyStatus.Active, + basePath, + NormaliseProvider(ackOptions.Provider), + additionalMetadata: metadata); + + activeKey = LoadAndRegister(activeRequest); + ackOptions.KeySource = source; + ackOptions.Provider = activeKey.ProviderName; + + foreach (var additional in ackOptions.AdditionalKeys) + { + var keyId = (additional.KeyId ?? 
string.Empty).Trim(); + if (string.IsNullOrWhiteSpace(keyId)) + { + logger.LogWarning("Skipped additional ack token key with empty keyId."); + continue; + } + + if (activeKey.Key.Reference.KeyId.Equals(keyId, StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + var additionalLocation = additional.Path?.Trim(); + if (string.IsNullOrWhiteSpace(additionalLocation)) + { + logger.LogWarning("Additional ack token key {KeyId} is missing a path. Skipping.", keyId); + continue; + } + + var additionalSource = NormaliseSource(additional.Source ?? source); + var request = new AuthoritySigningKeyRequest( + keyId, + algorithm, + additionalSource, + additionalLocation, + AuthoritySigningKeyStatus.Retired, + basePath, + NormaliseProvider(ackOptions.Provider), + additionalMetadata: BuildMetadata(AuthoritySigningKeyStatus.Retired, ackOptions.KeyUse, null)); + + try + { + var registration = LoadAndRegister(request); + retiredKeys[registration.Key.Reference.KeyId] = registration; + additional.Source = additionalSource; + } + catch (Exception ex) + { + logger.LogWarning(ex, "Failed to load retired ack token key {KeyId}. 
It will be ignored for verification.", keyId); + } + } + + jwksService.Invalidate(); + } + + private RegisteredAckKey LoadAndRegister(AuthoritySigningKeyRequest request) + { + var loader = ResolveSource(request.Source); + var provider = ResolveProvider(request.Provider, request.Algorithm); + var key = loader.Load(request); + provider.UpsertSigningKey(key); + + logger.LogDebug("Loaded ack token key {KeyId} (status {Status}) via provider {Provider}.", key.Reference.KeyId, request.Status, provider.Name); + + return new RegisteredAckKey(key, provider.Name, request.Source, request.Location); + } + + private void RetireCurrentActive() + { + if (activeKey is null) + { + return; + } + + var ackOptions = authorityOptions.Notifications.AckTokens; + var previous = activeKey; + var metadata = BuildMetadata(AuthoritySigningKeyStatus.Retired, ackOptions.KeyUse, previous.Key.Metadata); + + var privateParameters = previous.Key.PrivateParameters; + var retiredKey = new CryptoSigningKey( + previous.Key.Reference, + previous.Key.AlgorithmId, + in privateParameters, + previous.Key.CreatedAt, + previous.Key.ExpiresAt, + metadata); + + var provider = ResolveProvider(previous.ProviderName, retiredKey.AlgorithmId); + provider.UpsertSigningKey(retiredKey); + + var registration = new RegisteredAckKey(retiredKey, provider.Name, previous.Source, previous.Location); + retiredKeys[registration.Key.Reference.KeyId] = registration; + UpsertAdditionalOption(registration); + + logger.LogInformation("Moved ack token key {KeyId} to retired set (provider {Provider}).", registration.Key.Reference.KeyId, provider.Name); + } + + private void RemoveAdditionalOption(string keyId) + { + var additional = authorityOptions.Notifications.AckTokens.AdditionalKeys; + for (var index = additional.Count - 1; index >= 0; index--) + { + if (string.Equals(additional[index].KeyId, keyId, StringComparison.OrdinalIgnoreCase)) + { + additional.RemoveAt(index); + } + } + } + + private void 
UpsertAdditionalOption(RegisteredAckKey registration) + { + var additional = authorityOptions.Notifications.AckTokens.AdditionalKeys; + var existing = additional.FirstOrDefault(key => + string.Equals(key.KeyId, registration.Key.Reference.KeyId, StringComparison.OrdinalIgnoreCase)); + if (existing is not null) + { + additional.Remove(existing); + } + + additional.Add(new AuthoritySigningAdditionalKeyOptions + { + KeyId = registration.Key.Reference.KeyId, + Path = registration.Location, + Source = registration.Source + }); + } + + private CryptoSignerResolution ResolveSigner(RegisteredAckKey key) + { + var resolution = registry.ResolveSigner( + CryptoCapability.Signing, + key.Key.AlgorithmId, + key.Key.Reference, + key.ProviderName); + + return resolution; + } + + private IAuthoritySigningKeySource ResolveSource(string source) + { + foreach (var loader in keySources) + { + if (loader.CanLoad(source)) + { + return loader; + } + } + + throw new InvalidOperationException($"Unknown ack token key source '{source}'."); + } + + private ICryptoProvider ResolveProvider(string? providerHint, string algorithmId) + { + if (!string.IsNullOrWhiteSpace(providerHint) && registry.TryResolve(providerHint, out var provider)) + { + if (!provider.Supports(CryptoCapability.Signing, algorithmId)) + { + throw new InvalidOperationException($"Crypto provider '{provider.Name}' does not support algorithm '{algorithmId}'."); + } + + return provider; + } + + return registry.ResolveOrThrow(CryptoCapability.Signing, algorithmId); + } + + private static string NormaliseKeyId(string? value) + { + var keyId = (value ?? string.Empty).Trim(); + if (string.IsNullOrWhiteSpace(keyId)) + { + throw new InvalidOperationException("Ack token key rotation requires a keyId."); + } + + return keyId; + } + + private static string NormaliseLocation(string? path) + { + var location = (path ?? 
string.Empty).Trim(); + if (string.IsNullOrWhiteSpace(location)) + { + throw new InvalidOperationException("Ack token key rotation requires a key path/location."); + } + + return location; + } + + private static string NormaliseAlgorithm(string? algorithm) + { + return string.IsNullOrWhiteSpace(algorithm) + ? SignatureAlgorithms.Es256 + : algorithm.Trim(); + } + + private static string NormaliseSource(string? source) + { + return string.IsNullOrWhiteSpace(source) ? "file" : source.Trim(); + } + + private static string? NormaliseProvider(string? provider) + { + return string.IsNullOrWhiteSpace(provider) ? null : provider.Trim(); + } + + private static IReadOnlyDictionary BuildMetadata( + string status, + string use, + IReadOnlyDictionary? existing) + { + var metadata = existing is null + ? new Dictionary(StringComparer.OrdinalIgnoreCase) + : new Dictionary(existing, StringComparer.OrdinalIgnoreCase); + + metadata["status"] = status; + metadata["use"] = use; + return metadata; + } + + private sealed record RegisteredAckKey( + CryptoSigningKey Key, + string ProviderName, + string Source, + string Location); +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AuthorityAckTokenVerifier.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AuthorityAckTokenVerifier.cs new file mode 100644 index 00000000..1222e6e0 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/Ack/AuthorityAckTokenVerifier.cs @@ -0,0 +1,143 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Options; +using Microsoft.IdentityModel.Tokens; +using StellaOps.Configuration; + +namespace StellaOps.Authority.Notifications.Ack; + +internal sealed class AuthorityAckTokenVerifier +{ + private readonly AuthorityAckTokenKeyManager keyManager; + private readonly StellaOpsAuthorityOptions authorityOptions; + private readonly 
TimeProvider timeProvider;

    public AuthorityAckTokenVerifier(
        AuthorityAckTokenKeyManager keyManager,
        IOptions<StellaOpsAuthorityOptions> authorityOptions,
        TimeProvider timeProvider)
    {
        this.keyManager = keyManager ?? throw new ArgumentNullException(nameof(keyManager));
        this.authorityOptions = authorityOptions?.Value ?? throw new ArgumentNullException(nameof(authorityOptions));
        this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
    }

    /// <summary>
    /// Verifies an ack-token envelope and confirms it permits <paramref name="expectedAction"/>.
    /// </summary>
    /// <param name="envelope">The signed envelope (payloadType + base64url payload + signatures).</param>
    /// <param name="expectedAction">Action the caller is attempting (e.g. "ack" or "escalate"); compared case-insensitively via lowercase normalisation.</param>
    /// <param name="expectedTenant">Optional tenant that must match the payload tenant exactly (ordinal).</param>
    /// <param name="cancellationToken">Cancellation token flowed to signature verification.</param>
    /// <exception cref="InvalidOperationException">Thrown for every validation failure (disabled feature, malformed envelope, bad signature, expiry, tenant/action mismatch).</exception>
    public async Task<AckTokenVerificationResult> VerifyAsync(
        AckTokenEnvelope envelope,
        string expectedAction,
        string? expectedTenant,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(envelope);
        ArgumentException.ThrowIfNullOrEmpty(expectedAction);

        var ackOptions = authorityOptions.Notifications.AckTokens;
        if (!ackOptions.Enabled)
        {
            throw new InvalidOperationException("Ack tokens are disabled.");
        }

        if (string.IsNullOrWhiteSpace(envelope.PayloadType))
        {
            throw new InvalidOperationException("Ack token envelope is missing payloadType.");
        }

        if (!string.Equals(envelope.PayloadType, ackOptions.PayloadType, StringComparison.Ordinal))
        {
            throw new InvalidOperationException($"Unexpected payloadType '{envelope.PayloadType}'. Expected '{ackOptions.PayloadType}'.");
        }

        if (string.IsNullOrWhiteSpace(envelope.Payload))
        {
            throw new InvalidOperationException("Ack token envelope is missing payload.");
        }

        if (envelope.Signatures.Count == 0)
        {
            throw new InvalidOperationException("Ack token envelope must include at least one signature.");
        }

        byte[] payloadBytes;
        try
        {
            payloadBytes = Base64UrlEncoder.DecodeBytes(envelope.Payload);
        }
        catch (FormatException ex)
        {
            throw new InvalidOperationException("Ack token payload is not valid base64url.", ex);
        }

        var payload = AckTokenPayload.Parse(payloadBytes);

        if (authorityOptions.Issuer is not null &&
            !string.Equals(payload.Issuer, authorityOptions.Issuer.ToString(), StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException("Ack token issuer does not match Authority configuration.");
        }

        if (!string.IsNullOrWhiteSpace(expectedTenant) &&
            !string.Equals(payload.Tenant, expectedTenant, StringComparison.Ordinal))
        {
            throw new InvalidOperationException("Ack token tenant mismatch.");
        }

        // Signature is verified over the same PAE used at issuance.
        var pae = AckTokenSigningUtilities.CreatePreAuthenticationEncoding(envelope.PayloadType, payloadBytes);
        var verifiedKeyId = await VerifySignaturesAsync(envelope.Signatures, pae, cancellationToken).ConfigureAwait(false);

        if (verifiedKeyId is null)
        {
            throw new InvalidOperationException("Ack token signature validation failed.");
        }

        // Expiry is checked after signature validation so a forged expiry cannot change the error surface.
        if (payload.ExpiresAt <= timeProvider.GetUtcNow())
        {
            throw new InvalidOperationException("Ack token has expired.");
        }

        var normalizedAction = expectedAction.Trim().ToLowerInvariant();
        if (!payload.Actions.Contains(normalizedAction, StringComparer.Ordinal))
        {
            throw new InvalidOperationException($"Ack token does not permit action '{normalizedAction}'.");
        }

        return new AckTokenVerificationResult(payload, verifiedKeyId, true);
    }

    /// <summary>
    /// Tries each signature against any resolvable key (active or retired); returns the
    /// key id of the first signature that verifies, or null when none do.
    /// Malformed entries (missing fields, bad base64url, unknown key) are skipped silently.
    /// </summary>
    private async Task<string?> VerifySignaturesAsync(
        IReadOnlyCollection<AckTokenSignature> signatures,
        ReadOnlyMemory<byte> pae,
        CancellationToken cancellationToken)
    {
        foreach (var signature in signatures)
        {
            if (signature.KeyId is null || signature.Signature is null)
            {
                continue;
            }

            if (!keyManager.TryResolveSigner(signature.KeyId, out var signer))
            {
                continue;
            }

            byte[] signatureBytes;
            try
            {
                signatureBytes = Base64UrlEncoder.DecodeBytes(signature.Signature);
            }
            catch (FormatException)
            {
                continue;
            }

            if (await signer.Signer.VerifyAsync(pae, signatureBytes, cancellationToken).ConfigureAwait(false))
            {
                return signer.Signer.KeyId;
            }
        }

        return null;
    }
}
diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/AuthorityWebhookAllowlistEvaluator.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/AuthorityWebhookAllowlistEvaluator.cs
new file mode 100644
index 00000000..f6b07bfc
--- /dev/null
+++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Notifications/AuthorityWebhookAllowlistEvaluator.cs
@@ -0,0 +1,107 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Options;
using StellaOps.Configuration;

namespace StellaOps.Authority.Notifications;

/// <summary>
/// Enforces the notifications.webhooks allowlist (hosts, optional ports, schemes)
/// against outgoing webhook URIs.
/// </summary>
internal sealed class AuthorityWebhookAllowlistEvaluator
{
    private readonly AuthorityWebhookAllowlistOptions options;
    private readonly IReadOnlyList<string> allowedHosts;
    private readonly HashSet<string> allowedSchemes;

    public AuthorityWebhookAllowlistEvaluator(IOptions<StellaOpsAuthorityOptions> authorityOptions)
    {
        ArgumentNullException.ThrowIfNull(authorityOptions);

        var notifications = authorityOptions.Value.Notifications ?? throw new InvalidOperationException("Authority notifications configuration is missing.");
        options = notifications.Webhooks ??
throw new InvalidOperationException("Authority webhook configuration is missing."); + + allowedHosts = options.AllowedHosts + .Select(static host => host.Trim()) + .Where(static host => host.Length > 0) + .ToArray(); + + allowedSchemes = options.AllowedSchemes + .Select(static scheme => scheme.Trim().ToLowerInvariant()) + .Where(static scheme => scheme.Length > 0) + .ToHashSet(StringComparer.OrdinalIgnoreCase); + } + + public void EnsureAllowed(Uri uri) + { + ArgumentNullException.ThrowIfNull(uri); + + if (!options.Enabled) + { + return; + } + + if (!uri.IsAbsoluteUri) + { + throw new InvalidOperationException("Webhook URL must be an absolute URI."); + } + + if (allowedSchemes.Count > 0 && !allowedSchemes.Contains(uri.Scheme)) + { + throw new InvalidOperationException($"Webhook scheme '{uri.Scheme}' is not permitted. Allowed schemes: {string.Join(", ", allowedSchemes)}."); + } + + if (allowedHosts.Count == 0) + { + throw new InvalidOperationException("Webhook allowlist is empty; configure notifications.webhooks.allowedHosts or disable allowlist enforcement."); + } + + var host = uri.Host; + var port = uri.IsDefaultPort ? (int?)null : uri.Port; + + foreach (var entry in allowedHosts) + { + if (Matches(entry, host, port)) + { + return; + } + } + + throw new InvalidOperationException($"Webhook host '{host}' is not present in the allowlist. Configure notifications.webhooks.allowedHosts to permit it."); + } + + private static bool Matches(string pattern, string host, int? port) + { + if (string.IsNullOrWhiteSpace(pattern)) + { + return false; + } + + var trimmed = pattern.Trim(); + string? patternHost = trimmed; + int? 
patternPort = null; + + var colonIndex = trimmed.LastIndexOf(':'); + if (colonIndex > 0 && colonIndex < trimmed.Length - 1 && trimmed.IndexOf(']') == -1) + { + var potentialPort = trimmed[(colonIndex + 1)..]; + if (int.TryParse(potentialPort, out var parsedPort) && parsedPort > 0) + { + patternPort = parsedPort; + patternHost = trimmed[..colonIndex]; + } + } + + if (patternPort.HasValue && port.HasValue && patternPort.Value != port.Value) + { + return false; + } + + if (patternHost.StartsWith("*.", StringComparison.Ordinal)) + { + var suffix = patternHost[1..]; + return host.Length > suffix.Length && + host.EndsWith(suffix, StringComparison.OrdinalIgnoreCase); + } + + return string.Equals(patternHost, host, StringComparison.OrdinalIgnoreCase); + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/Observability/IncidentAuditEndpointExtensions.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Observability/IncidentAuditEndpointExtensions.cs new file mode 100644 index 00000000..5c205999 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Observability/IncidentAuditEndpointExtensions.cs @@ -0,0 +1,102 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.Mime; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.ServerIntegration; +using StellaOps.Authority.Console; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Stores; + +namespace StellaOps.Authority.Observability; + +internal static class IncidentAuditEndpointExtensions +{ + private const int DefaultPageSize = 50; + private const int MaxPageSize = 200; + + public static void MapIncidentAuditEndpoints(this WebApplication app) + { + ArgumentNullException.ThrowIfNull(app); + + var group = 
app.MapGroup("/authority/audit/incident")
            .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.ObservabilityRead))
            .WithTags("AuthorityIncidentAudit");

        group.AddEndpointFilter(new TenantHeaderFilter());

        group.MapGet("/", GetIncidentAuditAsync)
            .WithName("GetIncidentAudit")
            .WithSummary("List recent obs:incident token issuances for auditors.")
            // NOTE(review): generic response type inferred from the handler's 200 body — confirm against original.
            .Produces<IncidentAuditResponse>(StatusCodes.Status200OK, MediaTypeNames.Application.Json)
            .ProducesProblem(StatusCodes.Status400BadRequest)
            .ProducesProblem(StatusCodes.Status401Unauthorized)
            .ProducesProblem(StatusCodes.Status403Forbidden);
    }

    /// <summary>
    /// Lists obs:incident token issuances for the tenant given in the tenant header,
    /// optionally bounded by <paramref name="since"/> and capped at MaxPageSize entries.
    /// </summary>
    private static async Task<IResult> GetIncidentAuditAsync(
        HttpContext httpContext,
        IAuthorityTokenStore tokenStore,
        [FromQuery(Name = "since")] DateTimeOffset? since,
        [FromQuery(Name = "limit")] int? limit,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(httpContext);
        ArgumentNullException.ThrowIfNull(tokenStore);

        var tenant = TenantHeaderFilter.GetTenant(httpContext);
        if (string.IsNullOrWhiteSpace(tenant))
        {
            return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." });
        }

        // Clamp the caller-supplied page size into [1, MaxPageSize]; default when omitted.
        var effectiveLimit = limit.HasValue ? Math.Clamp(limit.Value, 1, MaxPageSize) : DefaultPageSize;

        IReadOnlyList<AuthorityTokenDocument> documents;
        try
        {
            documents = await tokenStore.ListByScopeAsync(
                StellaOpsScopes.ObservabilityIncident,
                tenant,
                since,
                effectiveLimit,
                cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            return Results.Problem(
                statusCode: StatusCodes.Status500InternalServerError,
                title: "incident_audit_query_failed",
                detail: ex.Message);
        }

        var items = documents
            .Select(doc => new IncidentAuditEntry(
                doc.TokenId,
                doc.ClientId,
                doc.SubjectId,
                doc.Tenant,
                doc.IncidentReason,
                doc.CreatedAt,
                doc.ExpiresAt))
            .ToArray();

        return Results.Ok(new IncidentAuditResponse(items));
    }
}

internal sealed record IncidentAuditResponse(IncidentAuditEntry[] Items);

internal sealed record IncidentAuditEntry(
    string TokenId,
    string? ClientId,
    string? SubjectId,
    string? Tenant,
    string? Reason,
    DateTimeOffset IssuedAt,
    DateTimeOffset? ExpiresAt);
diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenApi/AuthorityOpenApiDocumentProvider.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenApi/AuthorityOpenApiDocumentProvider.cs
index fd375d39..3a95832e 100644
--- a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenApi/AuthorityOpenApiDocumentProvider.cs
+++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenApi/AuthorityOpenApiDocumentProvider.cs
@@ -103,7 +103,7 @@ internal sealed class AuthorityOpenApiDocumentProvider
 rootNode.Children[new YamlScalarNode("info")] = infoNode;
 }
- var serviceName = "StellaOps.Authority";
+ var serviceName = "authority";
 var buildVersion = ResolveBuildVersion();
 ApplyInfoMetadata(infoNode, serviceName, buildVersion, grants, scopes);
diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenApi/OpenApiDiscoveryEndpointExtensions.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenApi/OpenApiDiscoveryEndpointExtensions.cs
index
fb493656..d70cc657 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenApi/OpenApiDiscoveryEndpointExtensions.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenApi/OpenApiDiscoveryEndpointExtensions.cs @@ -3,6 +3,7 @@ using System.Globalization; using System.Linq; using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Primitives; using Microsoft.Net.Http.Headers; @@ -20,13 +21,14 @@ internal static class OpenApiDiscoveryEndpointExtensions { ArgumentNullException.ThrowIfNull(endpoints); - var builder = endpoints.MapGet("/.well-known/openapi", async (HttpContext context, AuthorityOpenApiDocumentProvider provider, CancellationToken cancellationToken) => + var builder = endpoints.MapGet("/.well-known/openapi", async (HttpContext context, [FromServices] AuthorityOpenApiDocumentProvider provider, CancellationToken cancellationToken) => { var snapshot = await provider.GetDocumentAsync(cancellationToken).ConfigureAwait(false); var preferYaml = ShouldReturnYaml(context.Request.GetTypedHeaders().Accept); var payload = preferYaml ? snapshot.Yaml : snapshot.Json; var mediaType = preferYaml ? 
YamlMediaType : JsonMediaType; + var contentType = string.Create(CultureInfo.InvariantCulture, $"{mediaType}; charset=utf-8"); ApplyMetadataHeaders(context.Response, snapshot); @@ -37,7 +39,7 @@ internal static class OpenApiDiscoveryEndpointExtensions } context.Response.StatusCode = StatusCodes.Status200OK; - context.Response.ContentType = mediaType; + context.Response.ContentType = contentType; await context.Response.WriteAsync(payload, cancellationToken).ConfigureAwait(false); }); diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityOpenIddictConstants.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityOpenIddictConstants.cs index 11fc9851..c0addbb0 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityOpenIddictConstants.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityOpenIddictConstants.cs @@ -15,6 +15,7 @@ internal static class AuthorityOpenIddictConstants internal const string AuditRequestedScopesProperty = "authority:audit_requested_scopes"; internal const string AuditGrantedScopesProperty = "authority:audit_granted_scopes"; internal const string AuditInvalidScopeProperty = "authority:audit_invalid_scope"; + internal const string AuditSuccessRecordedProperty = "authority:audit_success_recorded"; internal const string ClientSenderConstraintProperty = "authority:client_sender_constraint"; internal const string SenderConstraintProperty = "authority:sender_constraint"; internal const string DpopKeyThumbprintProperty = "authority:dpop_thumbprint"; @@ -35,4 +36,10 @@ internal static class AuthorityOpenIddictConstants internal const string ExportAdminTicketProperty = "authority:export_admin_ticket"; internal const string ExportAdminReasonParameterName = "export_reason"; internal const string ExportAdminTicketParameterName = "export_ticket"; + internal const string IncidentReasonProperty = "authority:incident_reason"; + internal const 
string IncidentReasonParameterName = "incident_reason"; + internal const string QuotaReasonProperty = "authority:quota_reason"; + internal const string QuotaTicketProperty = "authority:quota_ticket"; + internal const string QuotaReasonParameterName = "quota_reason"; + internal const string QuotaTicketParameterName = "quota_ticket"; } diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs index 2ace328c..b1d6734f 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs @@ -288,18 +288,27 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle var graphScopesRequested = hasGraphRead || hasGraphWrite || hasGraphExport || hasGraphSimulate; var hasOrchRead = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.OrchRead) >= 0; var hasOrchOperate = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.OrchOperate) >= 0; + var hasOrchQuota = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.OrchQuota) >= 0; var hasExportViewer = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.ExportViewer) >= 0; var hasExportOperator = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.ExportOperator) >= 0; var hasExportAdmin = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.ExportAdmin) >= 0; var exportScopesRequested = hasExportViewer || hasExportOperator || hasExportAdmin; var hasAdvisoryIngest = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.AdvisoryIngest) >= 0; var hasAdvisoryRead = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.AdvisoryRead) >= 
0; + var hasAdvisoryAiView = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.AdvisoryAiView) >= 0; + var hasAdvisoryAiOperate = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.AdvisoryAiOperate) >= 0; + var hasAdvisoryAiAdmin = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.AdvisoryAiAdmin) >= 0; + var advisoryAiScopesRequested = hasAdvisoryAiView || hasAdvisoryAiOperate || hasAdvisoryAiAdmin; var hasVexIngest = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.VexIngest) >= 0; var hasVexRead = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.VexRead) >= 0; var hasVulnRead = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.VulnRead) >= 0; + var hasObservabilityIncident = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.ObservabilityIncident) >= 0; var hasSignalsRead = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.SignalsRead) >= 0; var hasSignalsWrite = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.SignalsWrite) >= 0; var hasSignalsAdmin = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.SignalsAdmin) >= 0; + var hasAirgapSeal = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.AirgapSeal) >= 0; + var hasAirgapImport = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.AirgapImport) >= 0; + var hasAirgapStatusRead = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.AirgapStatusRead) >= 0; var signalsScopesRequested = hasSignalsRead || hasSignalsWrite || hasSignalsAdmin; var hasPolicyAuthor = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.PolicyAuthor) >= 0; var hasPolicyReview = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.PolicyReview) >= 0; @@ -309,6 +318,11 @@ internal sealed class 
ValidateClientCredentialsHandler : IOpenIddictServerHandle var hasPolicyRun = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.PolicyRun) >= 0; var hasPolicyActivate = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.PolicyActivate) >= 0; var hasPolicySimulate = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.PolicySimulate) >= 0; + var hasPacksRead = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.PacksRead) >= 0; + var hasPacksWrite = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.PacksWrite) >= 0; + var hasPacksRun = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.PacksRun) >= 0; + var hasPacksApprove = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.PacksApprove) >= 0; + var packsScopesRequested = hasPacksRead || hasPacksWrite || hasPacksRun || hasPacksApprove; var policyStudioScopesRequested = hasPolicyAuthor || hasPolicyReview || hasPolicyOperate @@ -320,6 +334,34 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle || grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.PolicyRead) >= 0; var hasAocVerify = grantedScopes.Length > 0 && Array.IndexOf(grantedScopes, StellaOpsScopes.AocVerify) >= 0; + if (hasObservabilityIncident) + { + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = StellaOpsScopes.ObservabilityIncident; + context.Reject(OpenIddictConstants.Errors.InvalidScope, "Scope 'obs:incident' requires interactive authentication and cannot be issued via client credentials."); + activity?.SetTag("authority.incident_scope_rejected", true); + logger.LogWarning("Client credentials validation failed for {ClientId}: obs:incident requires interactive authentication.", document.ClientId); + return; + } + + if (packsScopesRequested && !EnsureTenantAssigned()) + { + var packsScopeForAudit = hasPacksWrite 
+ ? StellaOpsScopes.PacksWrite + : hasPacksRun + ? StellaOpsScopes.PacksRun + : hasPacksApprove + ? StellaOpsScopes.PacksApprove + : StellaOpsScopes.PacksRead; + + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = packsScopeForAudit; + activity?.SetTag("authority.pack_scope_violation", packsScopeForAudit); + context.Reject(OpenIddictConstants.Errors.InvalidClient, "Pack scopes require a tenant assignment."); + logger.LogWarning( + "Client credentials validation failed for {ClientId}: pack scopes require tenant assignment.", + document.ClientId); + return; + } + if (exportScopesRequested && !EnsureTenantAssigned()) { var exportScopeForAudit = hasExportAdmin @@ -354,20 +396,54 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle return; } - if ((hasAdvisoryIngest || hasAdvisoryRead) && !EnsureTenantAssigned()) + if ((hasAdvisoryIngest || hasAdvisoryRead || advisoryAiScopesRequested) && !EnsureTenantAssigned()) { - var advisoryScope = hasAdvisoryIngest ? StellaOpsScopes.AdvisoryIngest : StellaOpsScopes.AdvisoryRead; + var advisoryScope = hasAdvisoryIngest + ? StellaOpsScopes.AdvisoryIngest + : hasAdvisoryRead + ? StellaOpsScopes.AdvisoryRead + : hasAdvisoryAiAdmin + ? StellaOpsScopes.AdvisoryAiAdmin + : hasAdvisoryAiOperate + ? StellaOpsScopes.AdvisoryAiOperate + : StellaOpsScopes.AdvisoryAiView; + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = advisoryScope; - context.Reject(OpenIddictConstants.Errors.InvalidClient, "Advisory scopes require a tenant assignment."); + var errorMessage = advisoryAiScopesRequested + ? "Advisory AI scopes require a tenant assignment." 
+ : "Advisory scopes require a tenant assignment."; + context.Reject(OpenIddictConstants.Errors.InvalidClient, errorMessage); logger.LogWarning( - "Client credentials validation failed for {ClientId}: advisory scopes require tenant assignment.", + "Client credentials validation failed for {ClientId}: {ScopeType} scopes require tenant assignment.", + document.ClientId, + advisoryAiScopesRequested ? "advisory AI" : "advisory"); + return; + } + + if ((hasAirgapSeal || hasAirgapImport || hasAirgapStatusRead) && !EnsureTenantAssigned()) + { + var invalidScope = hasAirgapSeal + ? StellaOpsScopes.AirgapSeal + : hasAirgapImport + ? StellaOpsScopes.AirgapImport + : StellaOpsScopes.AirgapStatusRead; + + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = invalidScope; + activity?.SetTag("authority.airgap_scope_violation", "tenant_required"); + context.Reject(OpenIddictConstants.Errors.InvalidClient, "Air-gap scopes require a tenant assignment."); + logger.LogWarning( + "Client credentials validation failed for {ClientId}: air-gap scopes require tenant assignment.", document.ClientId); return; } - if ((hasOrchRead || hasOrchOperate) && !EnsureTenantAssigned()) + if ((hasOrchRead || hasOrchOperate || hasOrchQuota) && !EnsureTenantAssigned()) { - var invalidScope = hasOrchOperate ? StellaOpsScopes.OrchOperate : StellaOpsScopes.OrchRead; + var invalidScope = hasOrchQuota + ? StellaOpsScopes.OrchQuota + : hasOrchOperate + ? 
StellaOpsScopes.OrchOperate + : StellaOpsScopes.OrchRead; context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = invalidScope; context.Reject(OpenIddictConstants.Errors.InvalidClient, "Orchestrator scopes require a tenant assignment."); logger.LogWarning( @@ -416,6 +492,43 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle activity?.SetTag("authority.operator_ticket_present", true); } + if (hasOrchQuota) + { + var quotaReasonRaw = context.Request.GetParameter(AuthorityOpenIddictConstants.QuotaReasonParameterName)?.Value?.ToString(); + var quotaReason = NormalizeMetadata(quotaReasonRaw); + if (string.IsNullOrWhiteSpace(quotaReason)) + { + context.Reject(OpenIddictConstants.Errors.InvalidRequest, "Quota changes require 'quota_reason'."); + logger.LogWarning("Client credentials validation failed for {ClientId}: quota_reason missing.", document.ClientId); + return; + } + + if (quotaReason.Length > 256) + { + context.Reject(OpenIddictConstants.Errors.InvalidRequest, "Quota reason must not exceed 256 characters."); + logger.LogWarning("Client credentials validation failed for {ClientId}: quota_reason exceeded length limit.", document.ClientId); + return; + } + + var quotaTicketRaw = context.Request.GetParameter(AuthorityOpenIddictConstants.QuotaTicketParameterName)?.Value?.ToString(); + var quotaTicket = NormalizeMetadata(quotaTicketRaw); + if (!string.IsNullOrWhiteSpace(quotaTicket) && quotaTicket.Length > 128) + { + context.Reject(OpenIddictConstants.Errors.InvalidRequest, "Quota ticket must not exceed 128 characters."); + logger.LogWarning("Client credentials validation failed for {ClientId}: quota_ticket exceeded length limit.", document.ClientId); + return; + } + + context.Transaction.Properties[AuthorityOpenIddictConstants.QuotaReasonProperty] = quotaReason; + activity?.SetTag("authority.quota_reason_present", true); + + if (!string.IsNullOrWhiteSpace(quotaTicket)) + { + 
context.Transaction.Properties[AuthorityOpenIddictConstants.QuotaTicketProperty] = quotaTicket; + activity?.SetTag("authority.quota_ticket_present", true); + } + } + if (hasExportAdmin) { var reasonRaw = context.Request.GetParameter(AuthorityOpenIddictConstants.ExportAdminReasonParameterName)?.Value?.ToString(); @@ -477,13 +590,20 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle return; } - if ((hasAdvisoryRead || hasVexRead) && !hasAocVerify) + if ((hasAdvisoryRead || hasVexRead || advisoryAiScopesRequested) && !hasAocVerify) { context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = StellaOpsScopes.AocVerify; - activity?.SetTag("authority.aoc_scope_violation", "advisory_vex_requires_aoc"); - context.Reject(OpenIddictConstants.Errors.InvalidScope, "Scope 'aoc:verify' is required when requesting advisory/vex read scopes."); + var violationTag = advisoryAiScopesRequested && !hasAdvisoryRead && !hasVexRead + ? "advisory_ai_requires_aoc" + : "advisory_vex_requires_aoc"; + activity?.SetTag("authority.aoc_scope_violation", violationTag); + + var errorMessage = advisoryAiScopesRequested + ? "Scope 'aoc:verify' is required when requesting advisory/advisory-ai/vex read scopes." 
+ : "Scope 'aoc:verify' is required when requesting advisory/vex read scopes."; + context.Reject(OpenIddictConstants.Errors.InvalidScope, errorMessage); logger.LogWarning( - "Client credentials validation failed for {ClientId}: advisory/vex read scopes require aoc:verify.", + "Client credentials validation failed for {ClientId}: advisory/advisory-ai/vex read scopes require aoc:verify.", document.ClientId); return; } @@ -657,6 +777,28 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle }); } + if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.QuotaReasonProperty, out var quotaReasonObj) && + quotaReasonObj is string quotaReason && + !string.IsNullOrWhiteSpace(quotaReason)) + { + extraProperties.Add(new AuthEventProperty + { + Name = "quota.reason", + Value = ClassifiedString.Sensitive(quotaReason) + }); + } + + if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.QuotaTicketProperty, out var quotaTicketObj) && + quotaTicketObj is string quotaTicket && + !string.IsNullOrWhiteSpace(quotaTicket)) + { + extraProperties.Add(new AuthEventProperty + { + Name = "quota.ticket", + Value = ClassifiedString.Sensitive(quotaTicket) + }); + } + var record = ClientCredentialsAuditHelper.CreateRecord( timeProvider, context.Transaction, @@ -917,6 +1059,20 @@ internal sealed class HandleClientCredentialsHandler : IOpenIddictServerHandler< identity.SetClaim(StellaOpsClaimTypes.OperatorTicket, operatorTicketValueString); } + if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.QuotaReasonProperty, out var quotaReasonValue) && + quotaReasonValue is string quotaReasonValueString && + !string.IsNullOrWhiteSpace(quotaReasonValueString)) + { + identity.SetClaim(StellaOpsClaimTypes.QuotaReason, quotaReasonValueString); + } + + if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.QuotaTicketProperty, out var quotaTicketValue) && + quotaTicketValue is string 
quotaTicketValueString && + !string.IsNullOrWhiteSpace(quotaTicketValueString)) + { + identity.SetClaim(StellaOpsClaimTypes.QuotaTicket, quotaTicketValueString); + } + var (providerHandle, descriptor) = await ResolveProviderAsync(context, document).ConfigureAwait(false); if (context.IsRejected) { @@ -1289,4 +1445,4 @@ internal static class ClientCredentialHandlerHelpers return null; } -} +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/DiscoveryHandlers.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/DiscoveryHandlers.cs new file mode 100644 index 00000000..1deef479 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/DiscoveryHandlers.cs @@ -0,0 +1,77 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using System.Text.Json; +using Microsoft.Extensions.Options; +using OpenIddict.Abstractions; +using OpenIddict.Server; +using StellaOps.Auth.Abstractions; +using StellaOps.Configuration; + +namespace StellaOps.Authority.OpenIddict.Handlers; + +internal sealed class ConfigureAuthorityDiscoveryHandler : IOpenIddictServerHandler +{ + private readonly IOptionsMonitor optionsMonitor; + + public ConfigureAuthorityDiscoveryHandler(IOptionsMonitor optionsMonitor) + { + this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); + } + + public ValueTask HandleAsync(OpenIddictServerEvents.HandleConfigurationRequestContext context) + { + ArgumentNullException.ThrowIfNull(context); + + var authorityOptions = optionsMonitor.CurrentValue ?? 
throw new InvalidOperationException("Authority configuration is not available."); + + context.Metadata["stellaops_advisory_ai_scopes_supported"] = new[] + { + StellaOpsScopes.AdvisoryAiView, + StellaOpsScopes.AdvisoryAiOperate, + StellaOpsScopes.AdvisoryAiAdmin + }; + + context.Metadata["stellaops_airgap_scopes_supported"] = new[] + { + StellaOpsScopes.AirgapSeal, + StellaOpsScopes.AirgapImport, + StellaOpsScopes.AirgapStatusRead + }; + + context.Metadata["stellaops_notify_scopes_supported"] = new[] + { + StellaOpsScopes.NotifyViewer, + StellaOpsScopes.NotifyOperator, + StellaOpsScopes.NotifyAdmin + }; + + context.Metadata["stellaops_observability_scopes_supported"] = new[] + { + StellaOpsScopes.ObservabilityRead, + StellaOpsScopes.TimelineRead, + StellaOpsScopes.TimelineWrite, + StellaOpsScopes.EvidenceCreate, + StellaOpsScopes.EvidenceRead, + StellaOpsScopes.EvidenceHold, + StellaOpsScopes.AttestRead, + StellaOpsScopes.ObservabilityIncident + }; + + var remote = authorityOptions.AdvisoryAi.RemoteInference; + + var remoteMetadata = JsonSerializer.SerializeToElement(new + { + enabled = remote.Enabled, + require_tenant_consent = remote.RequireTenantConsent, + allowed_profiles = remote.AllowedProfiles.Count == 0 + ? 
Array.Empty() + : remote.AllowedProfiles.ToArray() + }); + + context.Metadata["stellaops_advisory_ai_remote_inference"] = new OpenIddictParameter(remoteMetadata); + + return ValueTask.CompletedTask; + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs index 9ab8fa7b..0d1ddc0c 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs @@ -5,18 +5,18 @@ using System.Globalization; using System.Linq; using System.Security.Claims; using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.Logging; -using OpenIddict.Abstractions; -using OpenIddict.Extensions; -using OpenIddict.Server; -using OpenIddict.Server.AspNetCore; -using StellaOps.Auth.Abstractions; -using StellaOps.Authority.OpenIddict; -using StellaOps.Authority.Plugins.Abstractions; -using StellaOps.Authority.RateLimiting; -using StellaOps.Authority.Storage.Mongo.Documents; -using StellaOps.Authority.Storage.Mongo.Stores; -using StellaOps.Cryptography.Audit; +using Microsoft.Extensions.Logging; +using OpenIddict.Abstractions; +using OpenIddict.Extensions; +using OpenIddict.Server; +using OpenIddict.Server.AspNetCore; +using StellaOps.Auth.Abstractions; +using StellaOps.Authority.OpenIddict; +using StellaOps.Authority.Plugins.Abstractions; +using StellaOps.Authority.RateLimiting; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Stores; +using StellaOps.Cryptography.Audit; namespace StellaOps.Authority.OpenIddict.Handlers; @@ -28,25 +28,25 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler logger; - - public ValidatePasswordGrantHandler( - IAuthorityIdentityProviderRegistry registry, - ActivitySource activitySource, - 
IAuthEventSink auditSink, - IAuthorityRateLimiterMetadataAccessor metadataAccessor, - IAuthorityClientStore clientStore, - TimeProvider timeProvider, - ILogger logger) - { - this.registry = registry ?? throw new ArgumentNullException(nameof(registry)); - this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource)); - this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink)); - this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor)); - this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore)); - this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } + private readonly ILogger logger; + + public ValidatePasswordGrantHandler( + IAuthorityIdentityProviderRegistry registry, + ActivitySource activitySource, + IAuthEventSink auditSink, + IAuthorityRateLimiterMetadataAccessor metadataAccessor, + IAuthorityClientStore clientStore, + TimeProvider timeProvider, + ILogger logger) + { + this.registry = registry ?? throw new ArgumentNullException(nameof(registry)); + this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource)); + this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink)); + this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor)); + this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore)); + this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } public async ValueTask HandleAsync(OpenIddictServerEvents.ValidateTokenRequestContext context) { @@ -194,14 +194,318 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler 0) - { - var providerHint = context.Request.GetParameter(AuthorityOpenIddictConstants.ProviderParameterName)?.Value?.ToString(); - var tamperRecord = PasswordGrantAuditHelper.CreateTamperRecord( + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditGrantedScopesProperty] = resolvedScopes.Scopes; + context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty] = resolvedScopes.Scopes; + + var grantedScopesArray = resolvedScopes.Scopes; + static bool ContainsScope(string[] scopes, string scope) + => scopes.Length > 0 && Array.IndexOf(scopes, scope) >= 0; + static string? Normalize(string? value) => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); + + const int IncidentReasonMaxLength = 512; + + var hasAdvisoryIngest = ContainsScope(grantedScopesArray, StellaOpsScopes.AdvisoryIngest); + var hasAdvisoryRead = ContainsScope(grantedScopesArray, StellaOpsScopes.AdvisoryRead); + var hasAdvisoryAiView = ContainsScope(grantedScopesArray, StellaOpsScopes.AdvisoryAiView); + var hasAdvisoryAiOperate = ContainsScope(grantedScopesArray, StellaOpsScopes.AdvisoryAiOperate); + var hasAdvisoryAiAdmin = ContainsScope(grantedScopesArray, StellaOpsScopes.AdvisoryAiAdmin); + var advisoryAiScopesRequested = hasAdvisoryAiView || hasAdvisoryAiOperate || hasAdvisoryAiAdmin; + var hasVexIngest = ContainsScope(grantedScopesArray, StellaOpsScopes.VexIngest); + var hasVexRead = ContainsScope(grantedScopesArray, StellaOpsScopes.VexRead); + var hasAocVerify = ContainsScope(grantedScopesArray, StellaOpsScopes.AocVerify); + var hasObservabilityIncident = ContainsScope(grantedScopesArray, StellaOpsScopes.ObservabilityIncident); + var hasSignalsRead = ContainsScope(grantedScopesArray, 
StellaOpsScopes.SignalsRead); + var hasSignalsWrite = ContainsScope(grantedScopesArray, StellaOpsScopes.SignalsWrite); + var hasSignalsAdmin = ContainsScope(grantedScopesArray, StellaOpsScopes.SignalsAdmin); + var signalsScopesRequested = hasSignalsRead || hasSignalsWrite || hasSignalsAdmin; + var hasPolicyAuthor = ContainsScope(grantedScopesArray, StellaOpsScopes.PolicyAuthor); + var hasPolicyReview = ContainsScope(grantedScopesArray, StellaOpsScopes.PolicyReview); + var hasPolicyOperate = ContainsScope(grantedScopesArray, StellaOpsScopes.PolicyOperate); + var hasPolicyAudit = ContainsScope(grantedScopesArray, StellaOpsScopes.PolicyAudit); + var hasPolicyApprove = ContainsScope(grantedScopesArray, StellaOpsScopes.PolicyApprove); + var hasPolicyRun = ContainsScope(grantedScopesArray, StellaOpsScopes.PolicyRun); + var hasPolicyActivate = ContainsScope(grantedScopesArray, StellaOpsScopes.PolicyActivate); + var hasPolicySimulate = ContainsScope(grantedScopesArray, StellaOpsScopes.PolicySimulate); + var hasPolicyRead = ContainsScope(grantedScopesArray, StellaOpsScopes.PolicyRead); + var policyStudioScopesRequested = hasPolicyAuthor + || hasPolicyReview + || hasPolicyOperate + || hasPolicyAudit + || hasPolicyApprove + || hasPolicyRun + || hasPolicyActivate + || hasPolicySimulate + || hasPolicyRead; + var hasExceptionsApprove = ContainsScope(grantedScopesArray, StellaOpsScopes.ExceptionsApprove); + + if ((hasAdvisoryIngest || hasAdvisoryRead || advisoryAiScopesRequested) && + string.IsNullOrWhiteSpace(tenant)) + { + var advisoryScope = hasAdvisoryIngest + ? StellaOpsScopes.AdvisoryIngest + : hasAdvisoryRead + ? StellaOpsScopes.AdvisoryRead + : hasAdvisoryAiAdmin + ? StellaOpsScopes.AdvisoryAiAdmin + : hasAdvisoryAiOperate + ? 
StellaOpsScopes.AdvisoryAiOperate + : StellaOpsScopes.AdvisoryAiView; + + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = advisoryScope; + + var requiresAdvisoryAiTenant = advisoryAiScopesRequested && !hasAdvisoryIngest && !hasAdvisoryRead; + var reason = requiresAdvisoryAiTenant + ? "Advisory AI scopes require a tenant assignment." + : "Advisory scopes require a tenant assignment."; + + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + reason, + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: grantedScopesArray, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidClient, reason); + logger.LogWarning( + "Password grant validation failed for client {ClientId}: {ScopeType} scopes require tenant assignment.", + clientId, + requiresAdvisoryAiTenant ? "advisory AI" : "advisory"); + return; + } + + if ((hasVexIngest || hasVexRead) && string.IsNullOrWhiteSpace(tenant)) + { + var vexScope = hasVexIngest ? 
StellaOpsScopes.VexIngest : StellaOpsScopes.VexRead; + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = vexScope; + + const string vexReason = "VEX scopes require a tenant assignment."; + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + vexReason, + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: grantedScopesArray, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidClient, vexReason); + logger.LogWarning( + "Password grant validation failed for client {ClientId}: vex scopes require tenant assignment.", + clientId); + return; + } + + if ((hasAdvisoryRead || hasVexRead || advisoryAiScopesRequested) && !hasAocVerify) + { + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = StellaOpsScopes.AocVerify; + var violationTag = advisoryAiScopesRequested && !hasAdvisoryRead && !hasVexRead + ? 
"advisory_ai_requires_aoc" + : "advisory_vex_requires_aoc"; + activity?.SetTag("authority.aoc_scope_violation", violationTag); + + const string reason = "Scope 'aoc:verify' is required when requesting advisory/advisory-ai/vex read scopes."; + + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + reason, + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: grantedScopesArray, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidScope, reason); + logger.LogWarning( + "Password grant validation failed for client {ClientId}: advisory and VEX scopes require aoc:verify.", + clientId); + return; + } + + if (hasObservabilityIncident) + { + var reasonRaw = context.Request.GetParameter(AuthorityOpenIddictConstants.IncidentReasonParameterName)?.Value?.ToString(); + var incidentReason = Normalize(reasonRaw); + if (string.IsNullOrWhiteSpace(incidentReason)) + { + const string message = "Incident mode activation requires 'incident_reason'."; + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + message, + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: grantedScopesArray, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = StellaOpsScopes.ObservabilityIncident; + context.Reject(OpenIddictConstants.Errors.InvalidRequest, message); + 
logger.LogWarning("Password grant validation failed for {Username}: incident_reason missing for obs:incident.", context.Request.Username); + return; + } + + if (incidentReason.Length > IncidentReasonMaxLength) + { + var message = $"incident_reason must not exceed {IncidentReasonMaxLength} characters."; + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + message, + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: grantedScopesArray, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = StellaOpsScopes.ObservabilityIncident; + context.Reject(OpenIddictConstants.Errors.InvalidRequest, message); + logger.LogWarning("Password grant validation failed for {Username}: incident_reason exceeded length limit.", context.Request.Username); + return; + } + + context.Transaction.Properties[AuthorityOpenIddictConstants.IncidentReasonProperty] = incidentReason; + } + + if ((signalsScopesRequested || policyStudioScopesRequested) && string.IsNullOrWhiteSpace(tenant)) + { + var scopeForAudit = signalsScopesRequested + ? (hasSignalsAdmin + ? StellaOpsScopes.SignalsAdmin + : hasSignalsWrite + ? StellaOpsScopes.SignalsWrite + : StellaOpsScopes.SignalsRead) + : hasPolicyActivate + ? StellaOpsScopes.PolicyActivate + : hasPolicyRun + ? StellaOpsScopes.PolicyRun + : hasPolicyApprove + ? StellaOpsScopes.PolicyApprove + : hasPolicyOperate + ? StellaOpsScopes.PolicyOperate + : hasPolicyReview + ? StellaOpsScopes.PolicyReview + : hasPolicyAudit + ? StellaOpsScopes.PolicyAudit + : hasPolicySimulate + ? StellaOpsScopes.PolicySimulate + : hasPolicyRead + ? 
StellaOpsScopes.PolicyRead + : StellaOpsScopes.PolicyAuthor; + + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = scopeForAudit; + + var reason = signalsScopesRequested + ? "Signals scopes require a tenant assignment." + : "Policy Studio scopes require a tenant assignment."; + + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + reason, + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: grantedScopesArray, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidClient, reason); + logger.LogWarning( + "Password grant validation failed for client {ClientId}: {ScopeType} scopes require tenant assignment.", + clientId, + signalsScopesRequested ? 
"signals" : "policy"); + return; + } + + if (signalsScopesRequested && !hasAocVerify) + { + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = StellaOpsScopes.AocVerify; + activity?.SetTag("authority.aoc_scope_violation", "signals_requires_aoc"); + + const string reason = "Scope 'aoc:verify' is required when requesting signals scopes."; + + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + reason, + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: grantedScopesArray, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidScope, reason); + logger.LogWarning( + "Password grant validation failed for client {ClientId}: signals scopes require aoc:verify.", + clientId); + return; + } + + var unexpectedParameters = TokenRequestTamperInspector.GetUnexpectedPasswordGrantParameters(context.Request); + if (unexpectedParameters.Count > 0) + { + var providerHint = context.Request.GetParameter(AuthorityOpenIddictConstants.ProviderParameterName)?.Value?.ToString(); + var tamperRecord = PasswordGrantAuditHelper.CreateTamperRecord( timeProvider, context.Transaction, metadata, @@ -239,11 +543,11 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler? 
extraProperties = null; + if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.IncidentReasonProperty, out var incidentReasonObj) && + incidentReasonObj is string incidentReasonValue && + !string.IsNullOrWhiteSpace(incidentReasonValue)) + { + extraProperties = new List + { + new() + { + Name = "incident.reason", + Value = ClassifiedString.Sensitive(incidentReasonValue) + } + }; + } + + var validationSuccess = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Success, + "Password grant validation succeeded.", + clientId, + providerName: selectedProvider.Name, + tenant, + user: null, + username: context.Request.Username, + scopes: grantedScopesArray, + retryAfter: null, + failureCode: null, + extraProperties: extraProperties); + + await auditSink.WriteAsync(validationSuccess, context.CancellationToken).ConfigureAwait(false); + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditSuccessRecordedProperty] = true; + } + + logger.LogInformation("Password grant validation succeeded for {Username} using provider {Provider}.", context.Request.Username, selectedProvider.Name); } } @@ -282,25 +653,25 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler logger; - - public HandlePasswordGrantHandler( - IAuthorityIdentityProviderRegistry registry, - IAuthorityClientStore clientStore, - ActivitySource activitySource, - IAuthEventSink auditSink, - IAuthorityRateLimiterMetadataAccessor metadataAccessor, - TimeProvider timeProvider, - ILogger logger) - { - this.registry = registry ?? throw new ArgumentNullException(nameof(registry)); - this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore)); - this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource)); - this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink)); - this.metadataAccessor = metadataAccessor ?? 
throw new ArgumentNullException(nameof(metadataAccessor)); - this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } + private readonly ILogger logger; + + public HandlePasswordGrantHandler( + IAuthorityIdentityProviderRegistry registry, + IAuthorityClientStore clientStore, + ActivitySource activitySource, + IAuthEventSink auditSink, + IAuthorityRateLimiterMetadataAccessor metadataAccessor, + TimeProvider timeProvider, + ILogger logger) + { + this.registry = registry ?? throw new ArgumentNullException(nameof(registry)); + this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore)); + this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource)); + this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink)); + this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor)); + this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } public async ValueTask HandleAsync(OpenIddictServerEvents.HandleTokenRequestContext context) { @@ -542,16 +913,27 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler claim.Type switch - { - OpenIddictConstants.Claims.Subject => new[] { OpenIddictConstants.Destinations.AccessToken, OpenIddictConstants.Destinations.IdentityToken }, - OpenIddictConstants.Claims.Name => new[] { OpenIddictConstants.Destinations.AccessToken, OpenIddictConstants.Destinations.IdentityToken }, - OpenIddictConstants.Claims.PreferredUsername => new[] { OpenIddictConstants.Destinations.AccessToken }, + if (!string.IsNullOrWhiteSpace(tenant)) + { + identity.SetClaim(StellaOpsClaimTypes.Tenant, tenant); + } + + if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.IncidentReasonProperty, out var incidentReasonValueObj) && + incidentReasonValueObj is string incidentReasonValue && + !string.IsNullOrWhiteSpace(incidentReasonValue)) + { + identity.SetClaim(StellaOpsClaimTypes.IncidentReason, incidentReasonValue); + activity?.SetTag("authority.incident_reason_present", true); + } + + var issuedAt = timeProvider.GetUtcNow(); + identity.SetClaim(OpenIddictConstants.Claims.AuthenticationTime, issuedAt.ToUnixTimeSeconds().ToString(CultureInfo.InvariantCulture)); + + identity.SetDestinations(static claim => claim.Type switch + { + OpenIddictConstants.Claims.Subject => new[] { OpenIddictConstants.Destinations.AccessToken, OpenIddictConstants.Destinations.IdentityToken }, + OpenIddictConstants.Claims.Name => new[] { OpenIddictConstants.Destinations.AccessToken, OpenIddictConstants.Destinations.IdentityToken }, + OpenIddictConstants.Claims.PreferredUsername => new[] { OpenIddictConstants.Destinations.AccessToken }, OpenIddictConstants.Claims.Role => new[] { OpenIddictConstants.Destinations.AccessToken }, _ => new[] { OpenIddictConstants.Destinations.AccessToken } }); @@ -562,28 +944,53 @@ internal sealed 
class HandlePasswordGrantHandler : IOpenIddictServerHandler? successProperties = null; + if (verification.AuditProperties is { } existingProperties) + { + successProperties = new List(existingProperties); + } + + if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.IncidentReasonProperty, out var successIncidentReasonObj) && + successIncidentReasonObj is string successIncidentReason && + !string.IsNullOrWhiteSpace(successIncidentReason)) + { + successProperties ??= new List(); + successProperties.Add(new AuthEventProperty + { + Name = "incident.reason", + Value = ClassifiedString.Sensitive(successIncidentReason) + }); + } + + var successRecord = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Success, + verification.Message, + clientId, + providerMetadata.Name, + tenant, + verification.User, + username, + scopes: grantedScopes, + retryAfter: null, + failureCode: null, + extraProperties: successProperties); + + await auditSink.WriteAsync(successRecord, context.CancellationToken).ConfigureAwait(false); + } + + context.Principal = principal; + context.HandleRequest(); + activity?.SetTag("authority.subject_id", verification.User.SubjectId); + logger.LogInformation("Password grant issued for {Username} with subject {SubjectId}.", verification.User.Username, verification.User.SubjectId); } } diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RefreshTokenHandlers.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RefreshTokenHandlers.cs new file mode 100644 index 00000000..2aa0b694 --- /dev/null +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RefreshTokenHandlers.cs @@ -0,0 +1,40 @@ +using System; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using OpenIddict.Abstractions; +using OpenIddict.Extensions; +using OpenIddict.Server; +using 
StellaOps.Auth.Abstractions; + +namespace StellaOps.Authority.OpenIddict.Handlers; + +internal sealed class ValidateRefreshTokenGrantHandler : IOpenIddictServerHandler +{ + private readonly ILogger logger; + + public ValidateRefreshTokenGrantHandler(ILogger logger) + { + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public ValueTask HandleAsync(OpenIddictServerEvents.ValidateTokenRequestContext context) + { + ArgumentNullException.ThrowIfNull(context); + + if (!context.Request.IsRefreshTokenGrantType()) + { + return ValueTask.CompletedTask; + } + + var requestedScopes = context.Request.GetScopes(); + var refreshPrincipal = context.Principal; + if ((requestedScopes.Contains(StellaOpsScopes.ObservabilityIncident) || refreshPrincipal?.HasScope(StellaOpsScopes.ObservabilityIncident) == true)) + { + context.Reject(OpenIddictConstants.Errors.InvalidGrant, "obs:incident tokens require fresh authentication; refresh is not permitted."); + logger.LogWarning("Refresh token validation failed for client {ClientId}: obs:incident scope requires fresh authentication.", context.ClientId ?? context.Request.ClientId ?? 
""); + } + + return ValueTask.CompletedTask; + } +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs index 050c2d1c..7d458cb5 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs @@ -110,13 +110,19 @@ internal sealed class PersistTokensHandler : IOpenIddictServerHandler -{ - private readonly IAuthorityTokenStore tokenStore; - private readonly IAuthorityMongoSessionAccessor sessionAccessor; - private readonly IAuthorityClientStore clientStore; +internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler +{ + private readonly IAuthorityTokenStore tokenStore; + private readonly IAuthorityMongoSessionAccessor sessionAccessor; + private readonly IAuthorityClientStore clientStore; private readonly IAuthorityIdentityProviderRegistry registry; private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor; private readonly IAuthEventSink auditSink; - private readonly TimeProvider clock; - private readonly ActivitySource activitySource; - private readonly ILogger logger; + private readonly TimeProvider clock; + private readonly ActivitySource activitySource; + private readonly ILogger logger; + private static readonly TimeSpan IncidentFreshAuthWindow = TimeSpan.FromMinutes(5); public ValidateAccessTokenHandler( IAuthorityTokenStore tokenStore, @@ -339,12 +340,35 @@ internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler"); + return; + } + + var authTime = DateTimeOffset.FromUnixTimeSeconds(authTimeSeconds); + var now = clock.GetUtcNow(); + if (now - authTime > IncidentFreshAuthWindow) + { + context.Reject(OpenIddictConstants.Errors.InvalidToken, "obs:incident tokens require fresh 
authentication."); + logger.LogWarning("Access token validation failed: obs:incident token stale. ClientId={ClientId}; AuthTime={AuthTime:o}; Now={Now:o}; Window={Window}", clientId ?? "", authTime, now, IncidentFreshAuthWindow); + return; + } + + metadataAccessor.SetTag("authority.incident_scope_validated", "true"); + } + + var enrichmentContext = new AuthorityClaimsEnrichmentContext(provider.Context, user, client); + await provider.ClaimsEnricher.EnrichAsync(identity, enrichmentContext, context.CancellationToken).ConfigureAwait(false); + logger.LogInformation("Access token validated for subject {Subject} and client {ClientId}.", + identity.GetClaim(OpenIddictConstants.Claims.Subject), + identity.GetClaim(OpenIddictConstants.Claims.ClientId)); + } private async ValueTask TrackTokenUsageAsync( OpenIddictServerEvents.ValidateTokenContext context, diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/TokenRequestTamperInspector.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/TokenRequestTamperInspector.cs index 1e496565..d8dcb7ec 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/TokenRequestTamperInspector.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/TokenRequestTamperInspector.cs @@ -43,12 +43,16 @@ internal static class TokenRequestTamperInspector AuthorityOpenIddictConstants.ProviderParameterName }; - private static readonly HashSet ClientCredentialsParameters = new(StringComparer.OrdinalIgnoreCase) - { - AuthorityOpenIddictConstants.ProviderParameterName, - AuthorityOpenIddictConstants.OperatorReasonParameterName, - AuthorityOpenIddictConstants.OperatorTicketParameterName - }; + private static readonly HashSet ClientCredentialsParameters = new(StringComparer.OrdinalIgnoreCase) + { + AuthorityOpenIddictConstants.ProviderParameterName, + AuthorityOpenIddictConstants.OperatorReasonParameterName, + AuthorityOpenIddictConstants.OperatorTicketParameterName, + 
AuthorityOpenIddictConstants.ExportAdminReasonParameterName, + AuthorityOpenIddictConstants.ExportAdminTicketParameterName, + AuthorityOpenIddictConstants.QuotaReasonParameterName, + AuthorityOpenIddictConstants.QuotaTicketParameterName + }; internal static IReadOnlyList GetUnexpectedPasswordGrantParameters(OpenIddictRequest request) => DetectUnexpectedParameters(request, PasswordGrantParameters); diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/Program.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Program.cs index 15570eb7..06c39398 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority/Program.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Program.cs @@ -1,19 +1,21 @@ using System; +using System.Collections.Generic; using System.Diagnostics; -using System.Globalization; -using System.IO; +using System.Security.Claims; +using System.Globalization; +using System.IO; using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; using Microsoft.AspNetCore.RateLimiting; -using Microsoft.AspNetCore.Server.Kestrel.Https; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Net.Http.Headers; +using Microsoft.AspNetCore.Server.Kestrel.Https; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Net.Http.Headers; using OpenIddict.Abstractions; using OpenIddict.Server; using OpenIddict.Server.AspNetCore; @@ -21,7 +23,11 @@ using MongoDB.Driver; using Serilog; using 
Serilog.Events; using StellaOps.Authority; +using StellaOps.Authority.Airgap; using StellaOps.Authority.Audit; +using StellaOps.Authority.AdvisoryAi; +using StellaOps.Authority.Notifications; +using StellaOps.Authority.Notifications.Ack; using StellaOps.Authority.Plugins.Abstractions; using StellaOps.Authority.Plugins; using StellaOps.Authority.Bootstrap; @@ -30,19 +36,23 @@ using StellaOps.Authority.Storage.Mongo.Initialization; using StellaOps.Authority.Storage.Mongo.Stores; using StellaOps.Authority.RateLimiting; using StellaOps.Configuration; -using StellaOps.Plugin.DependencyInjection; -using StellaOps.Plugin.Hosting; -using StellaOps.Authority.OpenIddict.Handlers; -using System.Linq; -using StellaOps.Cryptography.Audit; -using StellaOps.Cryptography.DependencyInjection; -using StellaOps.Authority.Permalinks; -using StellaOps.Authority.Revocation; -using StellaOps.Authority.Signing; -using StellaOps.Cryptography; -using StellaOps.Cryptography.Kms; +using StellaOps.Plugin.DependencyInjection; +using StellaOps.Plugin.Hosting; +using StellaOps.Authority.OpenIddict.Handlers; +using StellaOps.Authority.Observability; +using System.Linq; +using StellaOps.Cryptography.Audit; +using StellaOps.Cryptography.DependencyInjection; +using StellaOps.Authority.Permalinks; +using StellaOps.Authority.Revocation; +using System.Security.Cryptography; +using System.Text; +using StellaOps.Authority.Signing; +using StellaOps.Cryptography; +using StellaOps.Cryptography.Kms; using StellaOps.Authority.Storage.Mongo.Documents; using StellaOps.Authority.Security; +using StellaOps.Authority.OpenApi; using StellaOps.Auth.Abstractions; using StellaOps.Auth.ServerIntegration; #if STELLAOPS_AUTH_SECURITY @@ -112,14 +122,16 @@ if (issuerUri is null) } authorityOptions.Issuer = issuerUri; -builder.Services.AddSingleton(authorityOptions); -builder.Services.AddSingleton>(Options.Create(authorityOptions)); -builder.Services.AddHttpContextAccessor(); -builder.Services.TryAddSingleton(_ => 
TimeProvider.System); -builder.Services.AddMemoryCache(); -builder.Services.TryAddSingleton(); +builder.Services.AddSingleton(authorityOptions); +builder.Services.AddSingleton>(Options.Create(authorityOptions)); +builder.Services.AddHttpContextAccessor(); +builder.Services.TryAddSingleton(_ => TimeProvider.System); +builder.Services.AddMemoryCache(); +builder.Services.TryAddSingleton(); builder.Services.TryAddSingleton(); builder.Services.AddSingleton(); +builder.Services.TryAddSingleton(); +builder.Services.AddSingleton(); #if STELLAOPS_AUTH_SECURITY var senderConstraints = authorityOptions.Security.SenderConstraints; @@ -166,36 +178,40 @@ else builder.Services.AddScoped(); #endif -builder.Services.AddRateLimiter(rateLimiterOptions => -{ - AuthorityRateLimiter.Configure(rateLimiterOptions, authorityOptions); -}); - -var requiresKms = string.Equals(authorityOptions.Signing.KeySource, "kms", StringComparison.OrdinalIgnoreCase) - || authorityOptions.Signing.AdditionalKeys.Any(k => string.Equals(k.Source, "kms", StringComparison.OrdinalIgnoreCase)); - -if (requiresKms) -{ - if (string.IsNullOrWhiteSpace(authorityOptions.Signing.KeyPassphrase)) - { - throw new InvalidOperationException("Authority signing with source 'kms' requires signing.keyPassphrase to be configured."); - } - - var kmsRoot = Path.Combine(builder.Environment.ContentRootPath, "kms"); - builder.Services.AddFileKms(options => - { - options.RootPath = kmsRoot; - options.Password = authorityOptions.Signing.KeyPassphrase!; - options.Algorithm = authorityOptions.Signing.Algorithm; - }); - - builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton()); -} - -builder.Services.AddStellaOpsCrypto(); -builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton()); -builder.Services.AddSingleton(); -builder.Services.AddSingleton(); +builder.Services.AddRateLimiter(rateLimiterOptions => +{ + AuthorityRateLimiter.Configure(rateLimiterOptions, authorityOptions); +}); + +var requiresKms = 
string.Equals(authorityOptions.Signing.KeySource, "kms", StringComparison.OrdinalIgnoreCase) + || authorityOptions.Signing.AdditionalKeys.Any(k => string.Equals(k.Source, "kms", StringComparison.OrdinalIgnoreCase)); + +if (requiresKms) +{ + if (string.IsNullOrWhiteSpace(authorityOptions.Signing.KeyPassphrase)) + { + throw new InvalidOperationException("Authority signing with source 'kms' requires signing.keyPassphrase to be configured."); + } + + var kmsRoot = Path.Combine(builder.Environment.ContentRootPath, "kms"); + builder.Services.AddFileKms(options => + { + options.RootPath = kmsRoot; + options.Password = authorityOptions.Signing.KeyPassphrase!; + options.Algorithm = authorityOptions.Signing.Algorithm; + }); + + builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton()); +} + +builder.Services.AddStellaOpsCrypto(); +builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton()); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); AuthorityPluginContext[] pluginContexts = AuthorityPluginConfigurationLoader .Load(authorityOptions, builder.Environment.ContentRootPath) @@ -224,10 +240,12 @@ builder.Services.AddScoped(); builder.Services.AddScoped(); builder.Services.AddScoped(); builder.Services.AddScoped(); +builder.Services.AddScoped(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); +builder.Services.AddSingleton(); builder.Services.AddHostedService(); var pluginRegistrationSummary = AuthorityPluginLoader.RegisterPlugins( @@ -244,6 +262,18 @@ builder.Services.AddProblemDetails(); builder.Services.AddAuthentication(); builder.Services.AddAuthorization(); +builder.Services.AddStellaOpsScopeHandler(); + +builder.Services.TryAddSingleton(); + +builder.Services.AddOptions() + .Configure(options => + { + options.Authority = 
issuerUri.ToString(); + options.RequireHttpsMetadata = !issuerUri.IsLoopback; + }) + .PostConfigure(static options => options.Validate()); + builder.Services.AddOpenIddict() .AddServer(options => { @@ -314,6 +344,11 @@ builder.Services.AddOpenIddict() descriptor.UseScopedHandler(); }); + options.AddEventHandler(descriptor => + { + descriptor.UseScopedHandler(); + }); + options.AddEventHandler(descriptor => { descriptor.UseScopedHandler(); @@ -333,6 +368,11 @@ builder.Services.AddOpenIddict() { descriptor.UseScopedHandler(); }); + + options.AddEventHandler(descriptor => + { + descriptor.UseScopedHandler(); + }); }); builder.Services.Configure(options => @@ -1211,6 +1251,44 @@ if (authorityOptions.Bootstrap.Enabled) return Results.Problem("Failed to rotate signing key."); } }); + + bootstrapGroup.MapPost("/notifications/ack-tokens/rotate", ( + SigningRotationRequest? request, + AuthorityAckTokenKeyManager ackManager, + ILogger ackLogger) => + { + if (request is null) + { + ackLogger.LogWarning("Ack token rotation request payload missing."); + return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." }); + } + + try + { + var result = ackManager.Rotate(request); + ackLogger.LogInformation("Ack token key rotation completed. 
Active key {KeyId}.", result.ActiveKeyId); + + return Results.Ok(new + { + activeKeyId = result.ActiveKeyId, + provider = result.ActiveProvider, + source = result.ActiveSource, + location = result.ActiveLocation, + previousKeyId = result.PreviousKeyId, + retiredKeyIds = result.RetiredKeyIds + }); + } + catch (InvalidOperationException ex) + { + ackLogger.LogWarning(ex, "Ack token rotation failed due to invalid input."); + return Results.BadRequest(new { error = "rotation_failed", message = ex.Message }); + } + catch (Exception ex) + { + ackLogger.LogError(ex, "Unexpected failure rotating ack token key."); + return Results.Problem("Failed to rotate ack token key."); + } + }); } app.UseSerilogRequestLogging(options => @@ -1236,6 +1314,7 @@ app.UseExceptionHandler(static errorApp => }); }); +app.UseLegacyAuthDeprecation(); app.UseRouting(); app.UseAuthorityRateLimiterContext(); app.UseRateLimiter(); @@ -1291,32 +1370,971 @@ app.MapPost("/permalinks/vuln", async ( { return Results.Problem(ex.Message); } -}) +}) .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.VulnRead)) .WithName("CreateVulnPermalink"); -app.MapGet("/jwks", (AuthorityJwksService jwksService, HttpContext context) => +app.MapPost("/notify/ack-tokens/rotate", async ( + HttpContext context, + SigningRotationRequest? 
request, + AuthorityAckTokenKeyManager ackManager, + IOptions optionsAccessor, + IAuthEventSink auditSink, + TimeProvider timeProvider, + ILogger logger, + CancellationToken cancellationToken) => { - var result = jwksService.Get(); + var scopes = ExtractScopes(context.User); - if (context.Request.Headers.TryGetValue(HeaderNames.IfNoneMatch, out var etagValues) && - etagValues.Contains(result.ETag, StringComparer.Ordinal)) + if (request is null) { - context.Response.Headers.CacheControl = result.CacheControl; - context.Response.Headers.ETag = result.ETag; - context.Response.Headers.Expires = result.ExpiresAt.ToString("R", CultureInfo.InvariantCulture); - return Results.StatusCode(StatusCodes.Status304NotModified); + const string message = "Request payload is required."; + logger.LogWarning("Ack token rotation request payload missing."); + await WriteAckRotationAuditAsync( + context, + auditSink, + timeProvider, + AuthEventOutcome.Failure, + null, + null, + null, + null, + null, + message, + scopes, + cancellationToken).ConfigureAwait(false); + + return Results.BadRequest(new { error = "invalid_request", message }); } - context.Response.Headers.CacheControl = result.CacheControl; - context.Response.Headers.ETag = result.ETag; - context.Response.Headers.Expires = result.ExpiresAt.ToString("R", CultureInfo.InvariantCulture); + var notifications = optionsAccessor.Value.Notifications ?? throw new InvalidOperationException("Authority notifications configuration is missing."); + var ackOptions = notifications.AckTokens ?? throw new InvalidOperationException("Ack token configuration is missing."); - return Results.Json(result.Response); + if (!ackOptions.Enabled) + { + const string message = "Ack tokens are disabled. 
Enable notifications.ackTokens before rotating keys."; + logger.LogWarning("Ack token rotation attempted while ack tokens are disabled."); + await WriteAckRotationAuditAsync( + context, + auditSink, + timeProvider, + AuthEventOutcome.Failure, + request.KeyId, + null, + null, + null, + null, + message, + scopes, + cancellationToken).ConfigureAwait(false); + + return Results.BadRequest(new { error = "ack_tokens_disabled", message }); + } + + try + { + var result = ackManager.Rotate(request); + logger.LogInformation("Ack token key rotation completed. Active key {KeyId}.", result.ActiveKeyId); + + await WriteAckRotationAuditAsync( + context, + auditSink, + timeProvider, + AuthEventOutcome.Success, + result.ActiveKeyId, + result.PreviousKeyId, + result.RetiredKeyIds, + result.ActiveProvider, + result.ActiveSource, + reason: null, + scopes, + cancellationToken).ConfigureAwait(false); + + return Results.Ok(new + { + activeKeyId = result.ActiveKeyId, + provider = result.ActiveProvider, + source = result.ActiveSource, + location = result.ActiveLocation, + previousKeyId = result.PreviousKeyId, + retiredKeyIds = result.RetiredKeyIds + }); + } + catch (InvalidOperationException ex) + { + logger.LogWarning(ex, "Ack token rotation failed due to invalid input."); + + await WriteAckRotationAuditAsync( + context, + auditSink, + timeProvider, + AuthEventOutcome.Failure, + request.KeyId, + null, + null, + null, + null, + ex.Message, + scopes, + cancellationToken).ConfigureAwait(false); + + return Results.BadRequest(new { error = "rotation_failed", message = ex.Message }); + } + catch (Exception ex) + { + logger.LogError(ex, "Unexpected failure rotating ack token key."); + + const string message = "Unexpected failure rotating ack token key."; + await WriteAckRotationAuditAsync( + context, + auditSink, + timeProvider, + AuthEventOutcome.Failure, + request.KeyId, + null, + null, + null, + null, + message, + scopes, + cancellationToken).ConfigureAwait(false); + + return 
Results.Problem("Failed to rotate ack token key."); + } }) - .WithName("JsonWebKeySet"); + .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.NotifyAdmin)) + .WithName("RotateNotifyAckTokenKey"); + +app.MapPost("/notify/ack-tokens/issue", async ( + HttpContext httpContext, + AckTokenIssueRequest request, + AuthorityAckTokenIssuer issuer, + IAuthEventSink auditSink, + IOptions optionsAccessor, + TimeProvider timeProvider, + CancellationToken cancellationToken) => +{ + if (request is null) + { + return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." }); + } + + var escalationOptions = optionsAccessor.Value.Notifications.Escalation; + var escalationScope = escalationOptions.Scope; + var hasEscalateScope = !string.IsNullOrWhiteSpace(escalationScope) && PrincipalHasScope(httpContext.User, escalationScope); + var hasAdminScope = PrincipalHasScope(httpContext.User, StellaOpsScopes.NotifyAdmin); + + if (request.AllowEscalation) + { + if (!hasEscalateScope) + { + return Results.Json(new + { + error = "insufficient_scope", + message = $"Scope '{escalationScope}' is required to issue escalation-enabled ack tokens." + }, statusCode: StatusCodes.Status403Forbidden); + } + + if (escalationOptions.RequireAdminScope && !hasAdminScope) + { + return Results.Json(new + { + error = "insufficient_scope", + message = "Escalation-enabled ack tokens require notify.admin." 
+ }, statusCode: StatusCodes.Status403Forbidden); + } + } + + AckTokenIssueResult result; + try + { + result = await issuer.IssueAsync(request, hasEscalateScope, cancellationToken).ConfigureAwait(false); + } + catch (InvalidOperationException ex) + { + return Results.BadRequest(new { error = "invalid_request", message = ex.Message }); + } + + var signatures = new List(); + foreach (var signature in result.Envelope.Signatures) + { + if (string.IsNullOrWhiteSpace(signature.KeyId) || string.IsNullOrWhiteSpace(signature.Signature)) + { + continue; + } + + signatures.Add(new AckTokenSignatureResponse( + signature.KeyId, + signature.Signature, + signature.Algorithm ?? string.Empty)); + } + + var response = new AckTokenIssueResponse( + result.Envelope.PayloadType ?? string.Empty, + result.Envelope.Payload ?? string.Empty, + signatures, + result.Payload.IssuedAt, + result.Payload.ExpiresAt, + result.Payload.Nonce); + + await WriteAckAuditAsync( + httpContext, + auditSink, + timeProvider, + "notify.ack.issued", + AuthEventOutcome.Success, + result.Payload, + request.AllowEscalation ? "escalate" : "ack", + ExtractScopes(httpContext.User), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(response); +}) + .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.NotifyOperator)) + .WithName("IssueNotifyAckToken"); + +app.MapPost("/notify/ack-tokens/verify", async ( + HttpContext httpContext, + AckTokenVerifyRequest request, + AuthorityAckTokenVerifier verifier, + IAuthEventSink auditSink, + IOptions optionsAccessor, + TimeProvider timeProvider, + CancellationToken cancellationToken) => +{ + if (request?.Envelope is null) + { + return Results.BadRequest(new { error = "invalid_request", message = "Envelope is required." }); + } + + var normalizedAction = string.IsNullOrWhiteSpace(request.Action) + ? 
"ack" + : request.Action!.Trim().ToLowerInvariant(); + + AckTokenVerificationResult verification; + try + { + verification = await verifier.VerifyAsync(request.Envelope, normalizedAction, request.ExpectedTenant, cancellationToken).ConfigureAwait(false); + } + catch (InvalidOperationException ex) + { + return Results.BadRequest(new { error = "invalid_token", message = ex.Message }); + } + + if (normalizedAction == "escalate") + { + var escalationOptions = optionsAccessor.Value.Notifications.Escalation; + if (!verification.Payload.EscalationAllowed) + { + return Results.Json(new { error = "escalation_not_permitted", message = "Ack token does not permit escalation." }, statusCode: StatusCodes.Status403Forbidden); + } + + var escalationScope = escalationOptions.Scope; + if (string.IsNullOrWhiteSpace(escalationScope) || !PrincipalHasScope(httpContext.User, escalationScope)) + { + return Results.Json(new { error = "insufficient_scope", message = $"Scope '{escalationScope}' is required to perform escalation acknowledgement." }, statusCode: StatusCodes.Status403Forbidden); + } + + if (escalationOptions.RequireAdminScope && !PrincipalHasScope(httpContext.User, StellaOpsScopes.NotifyAdmin)) + { + return Results.Json(new { error = "insufficient_scope", message = "Escalation acknowledgement requires notify.admin." }, statusCode: StatusCodes.Status403Forbidden); + } + } + + var response = new AckTokenVerifyResponse( + verification.Payload.Tenant, + verification.Payload.NotificationId, + verification.Payload.DeliveryId, + verification.Payload.Channel, + verification.Payload.Actions, + verification.Payload.EscalationAllowed, + verification.Payload.ExpiresAt, + verification.Payload.Nonce); + + await WriteAckAuditAsync( + httpContext, + auditSink, + timeProvider, + normalizedAction == "escalate" ? 
"notify.ack.escalated" : "notify.ack.verified", + AuthEventOutcome.Success, + verification.Payload, + normalizedAction, + ExtractScopes(httpContext.User), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(response); +}) + .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.NotifyOperator)) + .WithName("VerifyNotifyAckToken"); + +static IReadOnlyList ExtractScopes(ClaimsPrincipal principal) +{ + if (principal?.Identity?.IsAuthenticated != true) + { + return Array.Empty(); + } + + var scopes = new HashSet(StringComparer.Ordinal); + + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope)) + { + foreach (var segment in claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) + { + var normalized = StellaOpsScopes.Normalize(segment); + if (normalized is not null) + { + scopes.Add(normalized); + } + } + } + + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem)) + { + var normalized = StellaOpsScopes.Normalize(claim.Value); + if (normalized is not null) + { + scopes.Add(normalized); + } + } + + return scopes.Count == 0 + ? 
Array.Empty() + : scopes.OrderBy(static value => value, StringComparer.Ordinal).ToArray(); +} + +static bool PrincipalHasScope(ClaimsPrincipal principal, string scope) +{ + if (principal?.Identity?.IsAuthenticated != true) + { + return false; + } + + var normalizedTarget = StellaOpsScopes.Normalize(scope); + if (normalizedTarget is null) + { + return false; + } + + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope)) + { + foreach (var segment in claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) + { + if (string.Equals(StellaOpsScopes.Normalize(segment), normalizedTarget, StringComparison.Ordinal)) + { + return true; + } + } + } + + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem)) + { + if (string.Equals(StellaOpsScopes.Normalize(claim.Value), normalizedTarget, StringComparison.Ordinal)) + { + return true; + } + } + + return false; +} + +static AuthEventNetwork BuildNetwork(HttpContext context) +{ + context.Request.Headers.TryGetValue("X-Forwarded-For", out var forwarded); + context.Request.Headers.TryGetValue(HeaderNames.UserAgent, out var userAgent); + + return new AuthEventNetwork + { + RemoteAddress = ClassifiedString.Public(context.Connection.RemoteIpAddress?.ToString()), + ForwardedFor = ClassifiedString.Public(forwarded.ToString()), + UserAgent = ClassifiedString.Public(userAgent.ToString()) + }; +} + +static AuthEventClient? BuildClientContext(ClaimsPrincipal principal) +{ + if (principal?.Identity?.IsAuthenticated != true) + { + return null; + } + + var clientId = principal.FindFirst(StellaOpsClaimTypes.ClientId)?.Value; + if (string.IsNullOrWhiteSpace(clientId)) + { + return null; + } + + var provider = principal.FindFirst(StellaOpsClaimTypes.IdentityProvider)?.Value; + + return new AuthEventClient + { + ClientId = ClassifiedString.Personal(clientId), + Provider = string.IsNullOrWhiteSpace(provider) + ? 
ClassifiedString.Empty + : ClassifiedString.Public(provider), + Name = ClassifiedString.Empty + }; +} + +static async Task WriteAckRotationAuditAsync( + HttpContext context, + IAuthEventSink auditSink, + TimeProvider timeProvider, + AuthEventOutcome outcome, + string? activeKeyId, + string? previousKeyId, + IReadOnlyCollection? retiredKeyIds, + string? provider, + string? source, + string? reason, + IReadOnlyList scopes, + CancellationToken cancellationToken) +{ + var eventType = outcome == AuthEventOutcome.Success + ? "notify.ack.key_rotated" + : "notify.ack.key_rotation_failed"; + + var properties = new List(); + + if (!string.IsNullOrWhiteSpace(activeKeyId)) + { + properties.Add(new AuthEventProperty + { + Name = "notify.ack.key_id", + Value = ClassifiedString.Public(activeKeyId) + }); + } + + if (!string.IsNullOrWhiteSpace(previousKeyId)) + { + properties.Add(new AuthEventProperty + { + Name = "notify.ack.previous_key_id", + Value = ClassifiedString.Public(previousKeyId) + }); + } + + if (!string.IsNullOrWhiteSpace(provider)) + { + properties.Add(new AuthEventProperty + { + Name = "notify.ack.provider", + Value = ClassifiedString.Public(provider) + }); + } + + if (!string.IsNullOrWhiteSpace(source)) + { + properties.Add(new AuthEventProperty + { + Name = "notify.ack.source", + Value = ClassifiedString.Public(source) + }); + } + + if (retiredKeyIds is { Count: > 0 }) + { + properties.Add(new AuthEventProperty + { + Name = "notify.ack.retired_key_ids", + Value = ClassifiedString.Public(string.Join(",", retiredKeyIds)) + }); + } + + var record = new AuthEventRecord + { + EventType = eventType, + OccurredAt = timeProvider.GetUtcNow(), + CorrelationId = Activity.Current?.TraceId.ToString() ?? 
context.TraceIdentifier, + Outcome = outcome, + Reason = reason, + Client = BuildClientContext(context.User), + Tenant = ClassifiedString.Empty, + Scopes = scopes, + Network = BuildNetwork(context), + Properties = properties + }; + + await auditSink.WriteAsync(record, cancellationToken).ConfigureAwait(false); +} + +static async Task WriteAckAuditAsync( + HttpContext context, + IAuthEventSink auditSink, + TimeProvider timeProvider, + string eventType, + AuthEventOutcome outcome, + AckTokenPayload payload, + string action, + IReadOnlyList scopes, + CancellationToken cancellationToken) +{ + var properties = new List + { + new() { Name = "notify.notification_id", Value = ClassifiedString.Public(payload.NotificationId) }, + new() { Name = "notify.delivery_id", Value = ClassifiedString.Public(payload.DeliveryId) }, + new() { Name = "notify.channel", Value = ClassifiedString.Public(payload.Channel) }, + new() { Name = "notify.webhook", Value = ClassifiedString.Public(payload.Webhook) }, + new() { Name = "notify.actions", Value = ClassifiedString.Public(string.Join(",", payload.Actions)) }, + new() { Name = "notify.action", Value = ClassifiedString.Public(action) }, + new() { Name = "notify.nonce", Value = ClassifiedString.Public(payload.Nonce) } + }; + + if (payload.EscalationAllowed) + { + properties.Add(new AuthEventProperty + { + Name = "notify.escalation_allowed", + Value = ClassifiedString.Public("true") + }); + } + + var record = new AuthEventRecord + { + EventType = eventType, + OccurredAt = timeProvider.GetUtcNow(), + CorrelationId = Activity.Current?.TraceId.ToString() ?? 
context.TraceIdentifier, + Outcome = outcome, + Client = BuildClientContext(context.User), + Tenant = ClassifiedString.Public(payload.Tenant), + Scopes = scopes, + Network = BuildNetwork(context), + Properties = properties + }; + + await auditSink.WriteAsync(record, cancellationToken).ConfigureAwait(false); +} + +var advisoryAiGroup = app.MapGroup("/advisory-ai"); + +advisoryAiGroup.MapPost("/remote-inference/logs", async ( + HttpContext httpContext, + AdvisoryAiRemoteInferenceLogRequest request, + IAuthorityAdvisoryAiConsentEvaluator consentEvaluator, + IAuthEventSink auditSink, + TimeProvider timeProvider, + IAuthorityRateLimiterMetadataAccessor metadataAccessor, + ILoggerFactory loggerFactory, + CancellationToken cancellationToken) => +{ + const int MaxPromptLength = 16_384; + const int MaxMetadataEntries = 16; + const int MaxMetadataValueLength = 256; + + if (request is null) + { + return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." }); + } + + var snapshot = consentEvaluator.GetSnapshot(); + if (!snapshot.Enabled) + { + return Results.Json( + new { error = "remote_inference_disabled", message = "Remote inference is disabled by configuration." }, + statusCode: StatusCodes.Status403Forbidden); + } + + var principal = httpContext.User; + if (principal?.Identity?.IsAuthenticated != true) + { + return Results.Json( + new { error = "unauthorized", message = "Authentication is required." 
}, + statusCode: StatusCodes.Status401Unauthorized); + } + + var tenantClaim = principal.FindFirst(StellaOpsClaimTypes.Tenant)?.Value; + var clientId = principal.FindFirst(StellaOpsClaimTypes.ClientId)?.Value; + var projectClaim = principal.FindFirst(StellaOpsClaimTypes.Project)?.Value; + + var consentResult = consentEvaluator.EvaluateTenant(tenantClaim); + if (!consentResult.Allowed) + { + return Results.Json( + new { error = consentResult.ErrorCode, message = consentResult.ErrorMessage }, + statusCode: StatusCodes.Status403Forbidden); + } + + if (!consentEvaluator.TryNormalizeProfile(request.Profile, out var profile)) + { + return Results.Json( + new { error = "profile_not_allowed", message = "Requested remote inference profile is not allowed." }, + statusCode: StatusCodes.Status400BadRequest); + } + + var taskType = NormalizeTaskType(request.TaskType); + if (taskType is null) + { + return Results.Json( + new { error = "task_type_invalid", message = "Task type must contain only lowercase letters, digits, dot, underscore, or hyphen (1-64 characters)." }, + statusCode: StatusCodes.Status400BadRequest); + } + + var prompt = NormalizePrompt(request.Prompt); + if (prompt is null) + { + return Results.Json( + new { error = "prompt_required", message = "Prompt content is required." }, + statusCode: StatusCodes.Status400BadRequest); + } + + if (prompt.Length > MaxPromptLength) + { + return Results.Json( + new { error = "prompt_too_long", message = $"Prompt exceeds maximum length of {MaxPromptLength} characters." 
}, + statusCode: StatusCodes.Status400BadRequest); + } + + var promptHash = ComputeSha256(prompt); + var metadata = metadataAccessor.GetMetadata(); + metadataAccessor.SetClientId(clientId); + metadataAccessor.SetTenant(tenantClaim); + metadataAccessor.SetProject(projectClaim); + metadataAccessor.SetTag("authority.advisory_ai.profile", profile); + metadataAccessor.SetTag("authority.advisory_ai.task_type", taskType); + + var correlationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier; + var network = BuildNetwork(metadata); + var scopes = ExtractScopes(principal); + + var auditProperties = BuildProperties( + request, + taskType, + profile, + promptHash, + prompt.Length, + consentResult); + + var tenantNormalized = NormalizeTenant(tenantClaim); + var projectNormalized = NormalizeProject(projectClaim); + + var record = new AuthEventRecord + { + EventType = "authority.advisory_ai.remote_inference", + OccurredAt = timeProvider.GetUtcNow(), + CorrelationId = correlationId, + Outcome = AuthEventOutcome.Success, + Client = BuildClient(clientId), + Tenant = ClassifiedString.Public(tenantNormalized), + Project = ClassifiedString.Public(projectNormalized), + Scopes = scopes, + Network = network, + Properties = auditProperties + }; + + await auditSink.WriteAsync(record, cancellationToken).ConfigureAwait(false); + + var logger = loggerFactory.CreateLogger("StellaOps.Authority.AdvisoryAi"); + if (logger.IsEnabled(LogLevel.Information)) + { + logger.LogInformation( + "Recorded advisory AI remote inference task {TaskType} for tenant {Tenant} with profile {Profile} (hash={PromptHash}).", + taskType, + tenantNormalized, + profile, + promptHash); + } + + return Results.Json(new + { + status = "logged", + prompt_hash = promptHash, + correlation_id = correlationId, + consent_version = consentResult.ConsentVersion + }); + + static string? NormalizeTaskType(string? 
value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + var trimmed = value.Trim().ToLowerInvariant(); + if (trimmed.Length is < 1 or > 64) + { + return null; + } + + foreach (var ch in trimmed) + { + var allowed = char.IsLetterOrDigit(ch) || ch is '-' or '_' or '.'; + if (!allowed) + { + return null; + } + } + + return trimmed; + } + + static string? NormalizePrompt(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + var normalized = value.Trim(); + return normalized.Replace("\r\n", "\n", StringComparison.Ordinal); + } + + static string NormalizeTenant(string? value) + { + return string.IsNullOrWhiteSpace(value) + ? string.Empty + : value.Trim().ToLowerInvariant(); + } + + static string NormalizeProject(string? value) + { + return string.IsNullOrWhiteSpace(value) + ? StellaOpsTenancyDefaults.AnyProject + : value.Trim().ToLowerInvariant(); + } + + static AuthEventClient? BuildClient(string? clientId) + { + if (string.IsNullOrWhiteSpace(clientId)) + { + return null; + } + + return new AuthEventClient + { + ClientId = ClassifiedString.Personal(clientId.Trim()), + Name = ClassifiedString.Empty, + Provider = ClassifiedString.Empty + }; + } + + static AuthEventNetwork? BuildNetwork(AuthorityRateLimiterMetadata? metadata) + { + if (metadata is null) + { + return null; + } + + var remote = string.IsNullOrWhiteSpace(metadata.RemoteIp) ? null : metadata.RemoteIp; + var forwarded = string.IsNullOrWhiteSpace(metadata.ForwardedFor) ? null : metadata.ForwardedFor; + var userAgent = string.IsNullOrWhiteSpace(metadata.UserAgent) ? 
null : metadata.UserAgent; + + if (remote is null && forwarded is null && userAgent is null) + { + return null; + } + + return new AuthEventNetwork + { + RemoteAddress = ClassifiedString.Personal(remote), + ForwardedFor = ClassifiedString.Personal(forwarded), + UserAgent = ClassifiedString.Personal(userAgent) + }; + } + + static IReadOnlyList ExtractScopes(ClaimsPrincipal principal) + { + var set = new HashSet(StringComparer.Ordinal); + + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem)) + { + if (string.IsNullOrWhiteSpace(claim.Value)) + { + continue; + } + + set.Add(claim.Value.Trim()); + } + + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope)) + { + if (string.IsNullOrWhiteSpace(claim.Value)) + { + continue; + } + + var parts = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + foreach (var part in parts) + { + var normalized = StellaOpsScopes.Normalize(part); + if (!string.IsNullOrWhiteSpace(normalized)) + { + set.Add(normalized); + } + } + } + + return set.Count == 0 + ? 
Array.Empty() + : set.OrderBy(static scope => scope, StringComparer.Ordinal).ToArray(); + } + + static IReadOnlyList BuildProperties( + AdvisoryAiRemoteInferenceLogRequest request, + string taskType, + string profile, + string promptHash, + int promptLength, + AuthorityTenantRemoteInferenceConsentResult consentResult) + { + var properties = new List + { + CreateProperty("advisory_ai.task_type", taskType), + CreateProperty("advisory_ai.profile", profile), + CreateProperty("advisory_ai.prompt.hash", promptHash), + CreateProperty("advisory_ai.prompt.length", promptLength.ToString(CultureInfo.InvariantCulture)), + CreateProperty("advisory_ai.prompt.algorithm", "sha256") + }; + + if (!string.IsNullOrWhiteSpace(request.ModelId)) + { + properties.Add(CreateProperty("advisory_ai.model_id", request.ModelId.Trim())); + } + + if (!string.IsNullOrWhiteSpace(request.ContextDigest)) + { + properties.Add(CreateProperty("advisory_ai.context.digest", request.ContextDigest.Trim())); + } + + if (!string.IsNullOrWhiteSpace(request.OutputHash)) + { + properties.Add(CreateProperty("advisory_ai.output.hash", request.OutputHash.Trim())); + } + + if (!string.IsNullOrWhiteSpace(request.TaskId)) + { + properties.Add(CreateProperty("advisory_ai.task_id", request.TaskId.Trim())); + } + + if (!string.IsNullOrWhiteSpace(consentResult.ConsentVersion)) + { + properties.Add(CreateProperty("advisory_ai.consent.version", consentResult.ConsentVersion.Trim())); + } + + if (consentResult.ConsentedAt.HasValue) + { + properties.Add(CreateProperty( + "advisory_ai.consent.timestamp", + consentResult.ConsentedAt.Value.ToString("O", CultureInfo.InvariantCulture))); + } + + if (!string.IsNullOrWhiteSpace(consentResult.ConsentedBy)) + { + properties.Add(CreateProperty("advisory_ai.consent.actor", consentResult.ConsentedBy.Trim())); + } + + if (request.Metadata is { Count: > 0 }) + { + var appended = 0; + foreach (var (key, value) in request.Metadata) + { + if (appended >= MaxMetadataEntries) + { + break; + } + 
+ var normalizedKey = NormalizeMetadataKey(key); + if (normalizedKey is null) + { + continue; + } + + var normalizedValue = NormalizeMetadataValue(value); + properties.Add(CreateProperty($"advisory_ai.metadata.{normalizedKey}", normalizedValue)); + appended++; + } + } + + return properties; + } + + static AuthEventProperty CreateProperty(string name, string? value) + { + return new AuthEventProperty + { + Name = name, + Value = ClassifiedString.Public(string.IsNullOrWhiteSpace(value) ? null : value.Trim()) + }; + } + + static string NormalizeMetadataValue(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return string.Empty; + } + + var trimmed = value.Trim(); + return trimmed.Length <= MaxMetadataValueLength + ? trimmed + : trimmed[..MaxMetadataValueLength]; + } + + static string? NormalizeMetadataKey(string? key) + { + if (string.IsNullOrWhiteSpace(key)) + { + return null; + } + + var trimmed = key.Trim().ToLowerInvariant(); + if (trimmed.Length is < 1 or > 32) + { + return null; + } + + foreach (var ch in trimmed) + { + var allowed = char.IsLetterOrDigit(ch) || ch is '-' or '_' or '.'; + if (!allowed) + { + return null; + } + } + + return trimmed; + } + + static string ComputeSha256(string value) + { + var bytes = Encoding.UTF8.GetBytes(value); + var hash = SHA256.HashData(bytes); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + +}) + .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.AdvisoryAiOperate)) + .WithName("LogAdvisoryAiRemoteInference"); + + +app.MapAirgapAuditEndpoints(); +app.MapIncidentAuditEndpoints(); +app.MapAuthorityOpenApiDiscovery(); + + + + +app.MapGet("/jwks", (AuthorityJwksService jwksService, HttpContext context) => +{ + var result = jwksService.Get(); + + if (context.Request.Headers.TryGetValue(HeaderNames.IfNoneMatch, out var etagValues) && + etagValues.Contains(result.ETag, StringComparer.Ordinal)) + { + context.Response.Headers.CacheControl = result.CacheControl; + 
context.Response.Headers.ETag = result.ETag; + context.Response.Headers.Expires = result.ExpiresAt.ToString("R", CultureInfo.InvariantCulture); + return Results.StatusCode(StatusCodes.Status304NotModified); + } + + context.Response.Headers.CacheControl = result.CacheControl; + context.Response.Headers.ETag = result.ETag; + context.Response.Headers.Expires = result.ExpiresAt.ToString("R", CultureInfo.InvariantCulture); + + return Results.Json(result.Response); +}) + .WithName("JsonWebKeySet"); // Ensure signing key manager initialises key material on startup. +app.Services.GetRequiredService(); app.Services.GetRequiredService(); app.Run(); diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/AuthorityJwksService.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/AuthorityJwksService.cs index 7ab2c5a1..7b846b07 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/AuthorityJwksService.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/AuthorityJwksService.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; using System.Linq; using System.Security.Cryptography; +using System.Globalization; using System.Text; using System.Text.Json; using System.Text.Json.Serialization; @@ -57,18 +58,21 @@ internal sealed class AuthorityJwksService } var response = new AuthorityJwksResponse(BuildKeys()); - var etag = ComputeEtag(response); var signingOptions = authorityOptions.Signing; var lifetime = signingOptions.JwksCacheLifetime > TimeSpan.Zero ? 
signingOptions.JwksCacheLifetime : TimeSpan.FromMinutes(5); var expires = timeProvider.GetUtcNow().Add(lifetime); + var etag = ComputeEtag(response, expires); var cacheControl = $"public, max-age={(int)lifetime.TotalSeconds}"; var result = new AuthorityJwksResult(response, etag, expires, cacheControl); var entry = new AuthorityJwksCacheEntry(result, expires); - cache.Set(CacheKey, entry, expires); + cache.Set(CacheKey, entry, new MemoryCacheEntryOptions + { + AbsoluteExpirationRelativeToNow = lifetime + }); return result; } @@ -96,11 +100,20 @@ internal sealed class AuthorityJwksService { var signer = provider.GetSigner(signingKey.AlgorithmId, signingKey.Reference); var jwk = signer.ExportPublicJsonWebKey(); + var keyUse = signingKey.Metadata.TryGetValue("use", out var metadataUse) && !string.IsNullOrWhiteSpace(metadataUse) + ? metadataUse + : jwk.Use; + + if (string.IsNullOrWhiteSpace(keyUse)) + { + keyUse = "sig"; + } + var entry = new JwksKeyEntry { Kid = jwk.Kid, Kty = jwk.Kty, - Use = string.IsNullOrWhiteSpace(jwk.Use) ? 
"sig" : jwk.Use, + Use = keyUse, Alg = jwk.Alg, Crv = jwk.Crv, X = jwk.X, @@ -120,10 +133,11 @@ internal sealed class AuthorityJwksService return keys; } - private static string ComputeEtag(AuthorityJwksResponse response) + private static string ComputeEtag(AuthorityJwksResponse response, DateTimeOffset expiresAt) { var payload = JsonSerializer.Serialize(response, SerializerOptions); - var hash = SHA256.HashData(Encoding.UTF8.GetBytes(payload)); + var buffer = Encoding.UTF8.GetBytes(payload + "|" + expiresAt.ToUnixTimeSeconds().ToString(CultureInfo.InvariantCulture)); + var hash = SHA256.HashData(buffer); return $"\"{Convert.ToHexString(hash)}\""; } diff --git a/src/Authority/StellaOps.Authority/TASKS.md b/src/Authority/StellaOps.Authority/TASKS.md index 48ac0554..20440093 100644 --- a/src/Authority/StellaOps.Authority/TASKS.md +++ b/src/Authority/StellaOps.Authority/TASKS.md @@ -51,7 +51,7 @@ > 2025-10-27: Added `orch:operate` scope, enforced `operator_reason`/`operator_ticket` on token issuance, updated Authority configs/docs, and captured audit metadata for control actions. > 2025-10-28: Policy gateway + scanner now pass the expanded token client signature (`null` metadata by default), test stubs capture the optional parameters, and Policy Gateway/Scanner suites are green after fixing the Concelier storage build break. > 2025-10-28: Authority password-grant tests now hit the new constructors but still need updates to drop obsolete `IOptions` arguments before the suite can pass. -| AUTH-ORCH-34-001 | TODO | Authority Core & Security Guild | AUTH-ORCH-33-001 | Introduce `Orch.Admin` role with quota/backfill scopes, enforce audit reason on quota changes, and update offline defaults/docs. | Admin role available; quotas/backfills require scope + reason; tests confirm tenant isolation; documentation updated. 
| +| AUTH-ORCH-34-001 | DOING (2025-11-02) | Authority Core & Security Guild | AUTH-ORCH-33-001 | Introduce `Orch.Admin` role with quota/backfill scopes, enforce audit reason on quota changes, and update offline defaults/docs. | Admin role available; quotas/backfills require scope + reason; tests confirm tenant isolation; documentation updated. | ## StellaOps Console (Sprint 23) @@ -97,8 +97,8 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| AUTH-AIAI-31-001 | TODO | Authority Core & Security Guild | AUTH-VULN-29-001 | Define Advisory AI scopes (`advisory-ai:view`, `advisory-ai:operate`, `advisory-ai:admin`) and remote inference toggles; update discovery metadata/offline defaults. | Scopes/flags published; integration tests cover RBAC + opt-in settings; docs updated. | -| AUTH-AIAI-31-002 | TODO | Authority Core & Security Guild | AUTH-AIAI-31-001, AIAI-31-006 | Enforce anonymized prompt logging, tenant consent for remote inference, and audit logging of assistant tasks. | Logging/audit flows verified; privacy review passed; docs updated. | +| AUTH-AIAI-31-001 | DONE (2025-11-01) | Authority Core & Security Guild | AUTH-VULN-29-001 | Define Advisory AI scopes (`advisory-ai:view`, `advisory-ai:operate`, `advisory-ai:admin`) and remote inference toggles; update discovery metadata/offline defaults. | Scopes/flags published; integration tests cover RBAC + opt-in settings; docs updated. | +| AUTH-AIAI-31-002 | DONE (2025-11-01) | Authority Core & Security Guild | AUTH-AIAI-31-001, AIAI-31-006 | Enforce anonymized prompt logging, tenant consent for remote inference, and audit logging of assistant tasks. | Logging/audit flows verified; privacy review passed; docs updated. 
| ## Export Center | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | @@ -107,8 +107,9 @@ ## Notifications Studio | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| AUTH-NOTIFY-38-001 | TODO | Authority Core & Security Guild | — | Define `Notify.Viewer`, `Notify.Operator`, `Notify.Admin` scopes/roles, update discovery metadata, offline defaults, and issuer templates. | Scopes available; metadata updated; tests ensure enforcement; offline kit defaults refreshed. | -| AUTH-NOTIFY-40-001 | TODO | Authority Core & Security Guild | AUTH-NOTIFY-38-001, WEB-NOTIFY-40-001 | Implement signed ack token key rotation, webhook allowlists, admin-only escalation settings, and audit logging of ack actions. | Ack tokens signed/rotated; webhook allowlists enforced; admin enforcement validated; audit logs capture ack/resolution. | +| AUTH-NOTIFY-38-001 | DONE (2025-11-01) | Authority Core & Security Guild | — | Define `Notify.Viewer`, `Notify.Operator`, `Notify.Admin` scopes/roles, update discovery metadata, offline defaults, and issuer templates. | Scopes available; metadata updated; tests ensure enforcement; offline kit defaults refreshed. | +| AUTH-NOTIFY-40-001 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-NOTIFY-38-001, WEB-NOTIFY-40-001 | Implement signed ack token key rotation, webhook allowlists, admin-only escalation settings, and audit logging of ack actions. | Ack tokens signed/rotated; webhook allowlists enforced; admin enforcement validated; audit logs capture ack/resolution. | +> 2025-11-02: `/notify/ack-tokens/rotate` exposed (notify.admin), emits `notify.ack.key_rotated|notify.ack.key_rotation_failed`, and DSSE rotation tests cover allowlist + scope enforcement. 
## CLI Parity & Task Packs | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | @@ -127,22 +128,25 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| AUTH-OBS-50-001 | TODO | Authority Core & Security Guild | AUTH-AOC-19-001 | Introduce scopes `obs:read`, `timeline:read`, `timeline:write`, `evidence:create`, `evidence:read`, `evidence:hold`, `attest:read`, and `obs:incident` (all tenant-scoped). Update discovery metadata, offline defaults, and scope grammar docs. | Scopes exposed via metadata; issuer templates updated; offline kit seeded; integration tests cover new scopes. | -| AUTH-OBS-52-001 | TODO | Authority Core & Security Guild | AUTH-OBS-50-001, TIMELINE-OBS-52-003, EVID-OBS-53-003 | Configure resource server policies for Timeline Indexer, Evidence Locker, Exporter, and Observability APIs enforcing new scopes + tenant claims. Emit audit events including scope usage and trace IDs. | Policies deployed; unauthorized access blocked; audit logs prove scope usage; contract tests updated. | -| AUTH-OBS-55-001 | TODO | Authority Core & Security Guild, Ops Guild | AUTH-OBS-50-001, WEB-OBS-55-001 | Harden incident mode authorization: require `obs:incident` scope + fresh auth, log activation reason, and expose verification endpoint for auditors. Update docs/runbooks. | Incident activate/deactivate requires scope; audit entries logged; docs updated with imposed rule reminder. | +| AUTH-OBS-50-001 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-AOC-19-001 | Introduce scopes `obs:read`, `timeline:read`, `timeline:write`, `evidence:create`, `evidence:read`, `evidence:hold`, `attest:read`, and `obs:incident` (all tenant-scoped). Update discovery metadata, offline defaults, and scope grammar docs. | Scopes exposed via metadata; issuer templates updated; offline kit seeded; integration tests cover new scopes. 
| +| AUTH-OBS-52-001 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-OBS-50-001, TIMELINE-OBS-52-003, EVID-OBS-53-003 | Configure resource server policies for Timeline Indexer, Evidence Locker, Exporter, and Observability APIs enforcing new scopes + tenant claims. Emit audit events including scope usage and trace IDs. | Policies deployed; unauthorized access blocked; audit logs prove scope usage; contract tests updated. | +| AUTH-OBS-55-001 | DONE (2025-11-02) | Authority Core & Security Guild, Ops Guild | AUTH-OBS-50-001, WEB-OBS-55-001 | Harden incident mode authorization: require `obs:incident` scope + fresh auth, log activation reason, and expose verification endpoint for auditors. Update docs/runbooks. | Incident activate/deactivate requires scope; audit entries logged; docs updated with imposed rule reminder. | ## Air-Gapped Mode (Epic 16) | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| AUTH-AIRGAP-56-001 | TODO | Authority Core & Security Guild | AIRGAP-CTL-56-001 | Provision new scopes (`airgap:seal`, `airgap:import`, `airgap:status:read`) in configuration metadata, offline kit defaults, and issuer templates. | Scopes exposed in discovery docs; offline kit updated; integration tests cover issuance. | -| AUTH-AIRGAP-56-002 | TODO | Authority Core & Security Guild | AUTH-AIRGAP-56-001, AIRGAP-IMP-58-001 | Audit import actions with actor, tenant, bundle ID, and trace ID; expose `/authority/audit/airgap` endpoint. | Audit records persisted; endpoint paginates results; tests cover RBAC + filtering. | -| AUTH-AIRGAP-57-001 | TODO | Authority Core & Security Guild, DevOps Guild | AUTH-AIRGAP-56-001, DEVOPS-AIRGAP-57-002 | Enforce sealed-mode CI gating by refusing token issuance when declared sealed install lacks sealing confirmation. | CI scenario validated; error surfaces remediation; docs updated. 
| +| AUTH-AIRGAP-56-001 | DOING (2025-11-01) | Authority Core & Security Guild | AIRGAP-CTL-56-001 | Provision new scopes (`airgap:seal`, `airgap:import`, `airgap:status:read`) in configuration metadata, offline kit defaults, and issuer templates. | Scopes exposed in discovery docs; offline kit updated; integration tests cover issuance. | +| AUTH-AIRGAP-56-002 | DOING | Authority Core & Security Guild | AUTH-AIRGAP-56-001, AIRGAP-IMP-58-001 | Audit import actions with actor, tenant, bundle ID, and trace ID; expose `/authority/audit/airgap` endpoint. | Audit records persisted; endpoint paginates results; tests cover RBAC + filtering. | +| AUTH-AIRGAP-57-001 | BLOCKED (2025-11-01) | Authority Core & Security Guild, DevOps Guild | AUTH-AIRGAP-56-001, DEVOPS-AIRGAP-57-002 | Enforce sealed-mode CI gating by refusing token issuance when declared sealed install lacks sealing confirmation. | Awaiting clarified sealed-confirmation contract and configuration structure before implementation. | +> 2025-11-01: AUTH-AIRGAP-57-001 blocked pending guidance on sealed-confirmation contract and configuration expectations before gating changes (Authority Core & Security Guild, DevOps Guild). ## SDKs & OpenAPI (Epic 17) | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| > 2025-10-28: Auth OpenAPI authored at `src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml` covering `/token`, `/introspect`, `/revoke`, `/jwks`, scope catalog, and error envelopes; parsed via PyYAML sanity check and referenced in Epic 17 docs. > 2025-10-28: Added `/.well-known/openapi` endpoint wiring cached spec metadata, YAML/JSON negotiation, HTTP cache headers, and tests verifying ETag + Accept handling. Authority spec (`src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml`) now includes grant/scope extensions. 
-| AUTH-OAS-62-001 | TODO | Authority Core & Security Guild, SDK Generator Guild | AUTH-OAS-61-001, SDKGEN-63-001 | Provide SDK helpers for OAuth2/PAT flows, tenancy override header; add integration tests. | SDKs expose auth helpers; tests cover token issuance; docs updated. | -| AUTH-OAS-63-001 | TODO | Authority Core & Security Guild, API Governance Guild | APIGOV-63-001 | Emit deprecation headers and notifications for legacy auth endpoints. | Headers emitted; notifications verified; migration guide published. | +| AUTH-OAS-62-001 | DONE (2025-11-02) | Authority Core & Security Guild, SDK Generator Guild | AUTH-OAS-61-001, SDKGEN-63-001 | Provide SDK helpers for OAuth2/PAT flows, tenancy override header; add integration tests. | SDKs expose auth helpers; tests cover token issuance; docs updated. | +> 2025-11-02: `AddStellaOpsApiAuthentication` shipped (OAuth2 + PAT), tenant header injection added, and client tests updated for caching behaviour. +| AUTH-OAS-63-001 | DONE (2025-11-02) | Authority Core & Security Guild, API Governance Guild | APIGOV-63-001 | Emit deprecation headers and notifications for legacy auth endpoints. | Headers emitted; notifications verified; migration guide published. | +> 2025-11-02: AUTH-OAS-63-001 completed — legacy OAuth shims emit Deprecation/Sunset/Warning headers, audit events captured, and migration guide published (Authority Core & Security Guild, API Governance Guild). 
diff --git a/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs b/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs index 0653bce1..fb2f4b80 100644 --- a/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs +++ b/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs @@ -1,13 +1,13 @@ -using System; -using System.CommandLine; +using System; +using System.CommandLine; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Logging; using StellaOps.Cli.Configuration; using StellaOps.Cli.Plugins; - -namespace StellaOps.Cli.Commands; - + +namespace StellaOps.Cli.Commands; + internal static class CommandFactory { public static RootCommand Create( @@ -22,11 +22,11 @@ internal static class CommandFactory { Description = "Enable verbose logging output." }; - - var root = new RootCommand("StellaOps command-line interface") - { - TreatUnmatchedTokensAsErrors = true - }; + + var root = new RootCommand("StellaOps command-line interface") + { + TreatUnmatchedTokensAsErrors = true + }; root.Add(verboseOption); root.Add(BuildScannerCommand(services, verboseOption, cancellationToken)); @@ -47,108 +47,132 @@ internal static class CommandFactory return root; } - - private static Command BuildScannerCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) - { - var scanner = new Command("scanner", "Manage scanner artifacts and lifecycle."); - - var download = new Command("download", "Download the latest scanner bundle."); - var channelOption = new Option("--channel", new[] { "-c" }) - { - Description = "Scanner channel (stable, beta, nightly)." - }; - - var outputOption = new Option("--output") - { - Description = "Optional output path for the downloaded bundle." - }; - - var overwriteOption = new Option("--overwrite") - { - Description = "Overwrite existing bundle if present." - }; - - var noInstallOption = new Option("--no-install") - { - Description = "Skip installing the scanner container after download." 
- }; - - download.Add(channelOption); - download.Add(outputOption); - download.Add(overwriteOption); - download.Add(noInstallOption); - - download.SetAction((parseResult, _) => - { - var channel = parseResult.GetValue(channelOption) ?? "stable"; - var output = parseResult.GetValue(outputOption); - var overwrite = parseResult.GetValue(overwriteOption); - var install = !parseResult.GetValue(noInstallOption); - var verbose = parseResult.GetValue(verboseOption); - - return CommandHandlers.HandleScannerDownloadAsync(services, channel, output, overwrite, install, verbose, cancellationToken); - }); - - scanner.Add(download); - return scanner; - } - + + private static Command BuildScannerCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) + { + var scanner = new Command("scanner", "Manage scanner artifacts and lifecycle."); + + var download = new Command("download", "Download the latest scanner bundle."); + var channelOption = new Option("--channel", new[] { "-c" }) + { + Description = "Scanner channel (stable, beta, nightly)." + }; + + var outputOption = new Option("--output") + { + Description = "Optional output path for the downloaded bundle." + }; + + var overwriteOption = new Option("--overwrite") + { + Description = "Overwrite existing bundle if present." + }; + + var noInstallOption = new Option("--no-install") + { + Description = "Skip installing the scanner container after download." + }; + + download.Add(channelOption); + download.Add(outputOption); + download.Add(overwriteOption); + download.Add(noInstallOption); + + download.SetAction((parseResult, _) => + { + var channel = parseResult.GetValue(channelOption) ?? 
"stable"; + var output = parseResult.GetValue(outputOption); + var overwrite = parseResult.GetValue(overwriteOption); + var install = !parseResult.GetValue(noInstallOption); + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleScannerDownloadAsync(services, channel, output, overwrite, install, verbose, cancellationToken); + }); + + scanner.Add(download); + return scanner; + } + private static Command BuildScanCommand(IServiceProvider services, StellaOpsCliOptions options, Option verboseOption, CancellationToken cancellationToken) { var scan = new Command("scan", "Execute scanners and manage scan outputs."); - - var run = new Command("run", "Execute a scanner bundle with the configured runner."); - var runnerOption = new Option("--runner") - { - Description = "Execution runtime (dotnet, self, docker)." - }; - var entryOption = new Option("--entry") - { - Description = "Path to the scanner entrypoint or Docker image.", - Required = true - }; - var targetOption = new Option("--target") - { - Description = "Directory to scan.", - Required = true - }; - - var argsArgument = new Argument("scanner-args") - { - Arity = ArgumentArity.ZeroOrMore - }; - - run.Add(runnerOption); - run.Add(entryOption); - run.Add(targetOption); - run.Add(argsArgument); - - run.SetAction((parseResult, _) => - { - var runner = parseResult.GetValue(runnerOption) ?? options.DefaultRunner; - var entry = parseResult.GetValue(entryOption) ?? string.Empty; - var target = parseResult.GetValue(targetOption) ?? string.Empty; - var forwardedArgs = parseResult.GetValue(argsArgument) ?? 
Array.Empty(); - var verbose = parseResult.GetValue(verboseOption); - - return CommandHandlers.HandleScannerRunAsync(services, runner, entry, target, forwardedArgs, verbose, cancellationToken); - }); - - var upload = new Command("upload", "Upload completed scan results to the backend."); - var fileOption = new Option("--file") - { - Description = "Path to the scan result artifact.", - Required = true - }; - upload.Add(fileOption); - upload.SetAction((parseResult, _) => - { - var file = parseResult.GetValue(fileOption) ?? string.Empty; - var verbose = parseResult.GetValue(verboseOption); - return CommandHandlers.HandleScanUploadAsync(services, file, verbose, cancellationToken); - }); - - scan.Add(run); + + var run = new Command("run", "Execute a scanner bundle with the configured runner."); + var runnerOption = new Option("--runner") + { + Description = "Execution runtime (dotnet, self, docker)." + }; + var entryOption = new Option("--entry") + { + Description = "Path to the scanner entrypoint or Docker image.", + Required = true + }; + var targetOption = new Option("--target") + { + Description = "Directory to scan.", + Required = true + }; + + var argsArgument = new Argument("scanner-args") + { + Arity = ArgumentArity.ZeroOrMore + }; + + run.Add(runnerOption); + run.Add(entryOption); + run.Add(targetOption); + run.Add(argsArgument); + + run.SetAction((parseResult, _) => + { + var runner = parseResult.GetValue(runnerOption) ?? options.DefaultRunner; + var entry = parseResult.GetValue(entryOption) ?? string.Empty; + var target = parseResult.GetValue(targetOption) ?? string.Empty; + var forwardedArgs = parseResult.GetValue(argsArgument) ?? 
Array.Empty(); + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleScannerRunAsync(services, runner, entry, target, forwardedArgs, verbose, cancellationToken); + }); + + var upload = new Command("upload", "Upload completed scan results to the backend."); + var fileOption = new Option("--file") + { + Description = "Path to the scan result artifact.", + Required = true + }; + upload.Add(fileOption); + upload.SetAction((parseResult, _) => + { + var file = parseResult.GetValue(fileOption) ?? string.Empty; + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleScanUploadAsync(services, file, verbose, cancellationToken); + }); + + var entryTrace = new Command("entrytrace", "Show entry trace summary for a scan."); + var scanIdOption = new Option("--scan-id") + { + Description = "Scan identifier.", + Required = true + }; + var includeNdjsonOption = new Option("--include-ndjson") + { + Description = "Include raw NDJSON output." + }; + + entryTrace.Add(scanIdOption); + entryTrace.Add(includeNdjsonOption); + + entryTrace.SetAction((parseResult, _) => + { + var id = parseResult.GetValue(scanIdOption) ?? string.Empty; + var includeNdjson = parseResult.GetValue(includeNdjsonOption); + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleScanEntryTraceAsync(services, id, includeNdjson, verbose, cancellationToken); + }); + + scan.Add(entryTrace); + + scan.Add(run); scan.Add(upload); return scan; } @@ -272,43 +296,43 @@ internal static class CommandFactory private static Command BuildDatabaseCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) { var db = new Command("db", "Trigger Concelier database operations via backend jobs."); - - var fetch = new Command("fetch", "Trigger connector fetch/parse/map stages."); - var sourceOption = new Option("--source") - { - Description = "Connector source identifier (e.g. 
redhat, osv, vmware).", - Required = true - }; - var stageOption = new Option("--stage") - { - Description = "Stage to trigger: fetch, parse, or map." - }; - var modeOption = new Option("--mode") - { - Description = "Optional connector-specific mode (init, resume, cursor)." - }; - - fetch.Add(sourceOption); - fetch.Add(stageOption); - fetch.Add(modeOption); - fetch.SetAction((parseResult, _) => - { - var source = parseResult.GetValue(sourceOption) ?? string.Empty; - var stage = parseResult.GetValue(stageOption) ?? "fetch"; - var mode = parseResult.GetValue(modeOption); - var verbose = parseResult.GetValue(verboseOption); - - return CommandHandlers.HandleConnectorJobAsync(services, source, stage, mode, verbose, cancellationToken); - }); - - var merge = new Command("merge", "Run canonical merge reconciliation."); - merge.SetAction((parseResult, _) => - { - var verbose = parseResult.GetValue(verboseOption); - return CommandHandlers.HandleMergeJobAsync(services, verbose, cancellationToken); - }); - - var export = new Command("export", "Run Concelier export jobs."); + + var fetch = new Command("fetch", "Trigger connector fetch/parse/map stages."); + var sourceOption = new Option("--source") + { + Description = "Connector source identifier (e.g. redhat, osv, vmware).", + Required = true + }; + var stageOption = new Option("--stage") + { + Description = "Stage to trigger: fetch, parse, or map." + }; + var modeOption = new Option("--mode") + { + Description = "Optional connector-specific mode (init, resume, cursor)." + }; + + fetch.Add(sourceOption); + fetch.Add(stageOption); + fetch.Add(modeOption); + fetch.SetAction((parseResult, _) => + { + var source = parseResult.GetValue(sourceOption) ?? string.Empty; + var stage = parseResult.GetValue(stageOption) ?? 
"fetch"; + var mode = parseResult.GetValue(modeOption); + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleConnectorJobAsync(services, source, stage, mode, verbose, cancellationToken); + }); + + var merge = new Command("merge", "Run canonical merge reconciliation."); + merge.SetAction((parseResult, _) => + { + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleMergeJobAsync(services, verbose, cancellationToken); + }); + + var export = new Command("export", "Run Concelier export jobs."); var formatOption = new Option("--format") { Description = "Export format: json or trivy-db." @@ -351,7 +375,7 @@ internal static class CommandFactory var verbose = parseResult.GetValue(verboseOption); return CommandHandlers.HandleExportJobAsync(services, format, delta, publishFull, publishDelta, includeFull, includeDelta, verbose, cancellationToken); }); - + db.Add(fetch); db.Add(merge); db.Add(export); @@ -1087,25 +1111,25 @@ internal static class CommandFactory return Task.CompletedTask; }); - - config.Add(show); - return config; - } - - private static string MaskIfEmpty(string value) - => string.IsNullOrWhiteSpace(value) ? "" : value; - - private static string DescribeSecret(string value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return ""; - } - - return value.Length switch - { - <= 4 => "****", - _ => $"{value[..2]}***{value[^2..]}" - }; - } -} + + config.Add(show); + return config; + } + + private static string MaskIfEmpty(string value) + => string.IsNullOrWhiteSpace(value) ? 
"" : value; + + private static string DescribeSecret(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return ""; + } + + return value.Length switch + { + <= 4 => "****", + _ => $"{value[..2]}***{value[^2..]}" + }; + } +} diff --git a/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs b/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs index 457c22b9..f9311cf1 100644 --- a/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs +++ b/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs @@ -1,35 +1,35 @@ -using System; -using System.Buffers; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.Diagnostics; -using System.Globalization; -using System.IO; -using System.IO.Compression; -using System.Linq; -using System.Net; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Security.Cryptography; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Spectre.Console; -using StellaOps.Auth.Client; -using StellaOps.Cli.Configuration; -using StellaOps.Cli.Prompts; +using System; +using System.Buffers; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Diagnostics; +using System.Globalization; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Security.Cryptography; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Spectre.Console; +using StellaOps.Auth.Client; +using StellaOps.Cli.Configuration; +using StellaOps.Cli.Prompts; using StellaOps.Cli.Services; using StellaOps.Cli.Services.Models; using StellaOps.Cli.Telemetry; 
using StellaOps.Cryptography; using StellaOps.Cryptography.Kms; - -namespace StellaOps.Cli.Commands; - + +namespace StellaOps.Cli.Commands; + internal static class CommandHandlers { private const string KmsPassphraseEnvironmentVariable = "STELLAOPS_KMS_PASSPHRASE"; @@ -42,5507 +42,5613 @@ internal static class CommandHandlers IServiceProvider services, string channel, string? output, - bool overwrite, - bool install, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-download"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.scanner.download", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "scanner download"); - activity?.SetTag("stellaops.cli.channel", channel); - using var duration = CliMetrics.MeasureCommandDuration("scanner download"); - - try - { - var result = await client.DownloadScannerAsync(channel, output ?? 
string.Empty, overwrite, verbose, cancellationToken).ConfigureAwait(false); - - if (result.FromCache) - { - logger.LogInformation("Using cached scanner at {Path}.", result.Path); - } - else - { - logger.LogInformation("Scanner downloaded to {Path} ({Size} bytes).", result.Path, result.SizeBytes); - } - - CliMetrics.RecordScannerDownload(channel, result.FromCache); - - if (install) - { - var installer = scope.ServiceProvider.GetRequiredService(); - await installer.InstallAsync(result.Path, verbose, cancellationToken).ConfigureAwait(false); - CliMetrics.RecordScannerInstall(channel); - } - - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to download scanner bundle."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleScannerRunAsync( - IServiceProvider services, - string runner, - string entry, - string targetDirectory, - IReadOnlyList arguments, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var executor = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-run"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.scan.run", ActivityKind.Internal); - activity?.SetTag("stellaops.cli.command", "scan run"); - activity?.SetTag("stellaops.cli.runner", runner); - activity?.SetTag("stellaops.cli.entry", entry); - activity?.SetTag("stellaops.cli.target", targetDirectory); - using var duration = CliMetrics.MeasureCommandDuration("scan run"); - - try - { - var options = scope.ServiceProvider.GetRequiredService(); - var resultsDirectory = options.ResultsDirectory; - - var executionResult = await executor.RunAsync( - runner, - entry, - targetDirectory, - resultsDirectory, - arguments, - verbose, - cancellationToken).ConfigureAwait(false); - - Environment.ExitCode = executionResult.ExitCode; - CliMetrics.RecordScanRun(runner, executionResult.ExitCode); - - if (executionResult.ExitCode == 0) - { - var backend = scope.ServiceProvider.GetRequiredService(); - logger.LogInformation("Uploading scan artefact {Path}...", executionResult.ResultsPath); - await backend.UploadScanResultsAsync(executionResult.ResultsPath, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Scan artefact uploaded."); - activity?.SetTag("stellaops.cli.results", executionResult.ResultsPath); - } - else - { - logger.LogWarning("Skipping automatic upload because scan exited with code {Code}.", executionResult.ExitCode); - } - - logger.LogInformation("Run metadata written to {Path}.", executionResult.RunMetadataPath); - activity?.SetTag("stellaops.cli.run_metadata", executionResult.RunMetadataPath); - } - catch (Exception ex) - { - logger.LogError(ex, "Scanner execution failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleScanUploadAsync( - IServiceProvider services, - string file, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = 
scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-upload"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.scan.upload", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "scan upload"); - activity?.SetTag("stellaops.cli.file", file); - using var duration = CliMetrics.MeasureCommandDuration("scan upload"); - - try - { - var path = Path.GetFullPath(file); - await client.UploadScanResultsAsync(path, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Scan results uploaded successfully."); - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to upload scan results."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleSourcesIngestAsync( - IServiceProvider services, - bool dryRun, - string source, - string input, - string? tenantOverride, - string format, - bool disableColor, - string? output, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("sources-ingest"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - - using var activity = CliActivitySource.Instance.StartActivity("cli.sources.ingest.dry_run", ActivityKind.Client); - var statusMetric = "unknown"; - using var duration = CliMetrics.MeasureCommandDuration("sources ingest dry-run"); - - try - { - if (!dryRun) - { - statusMetric = "unsupported"; - logger.LogError("Only --dry-run mode is supported for 'stella sources ingest' at this time."); - Environment.ExitCode = 1; - return; - } - - source = source?.Trim() ?? string.Empty; - if (string.IsNullOrWhiteSpace(source)) - { - throw new InvalidOperationException("Source identifier must be provided."); - } - - var formatNormalized = string.IsNullOrWhiteSpace(format) - ? "table" - : format.Trim().ToLowerInvariant(); - - if (formatNormalized is not ("table" or "json")) - { - throw new InvalidOperationException("Format must be either 'table' or 'json'."); - } - - var tenant = ResolveTenant(tenantOverride); - if (string.IsNullOrWhiteSpace(tenant)) - { - throw new InvalidOperationException("Tenant must be provided via --tenant or STELLA_TENANT."); - } - - var payload = await LoadIngestInputAsync(input, cancellationToken).ConfigureAwait(false); - - logger.LogInformation("Executing ingestion dry-run for source {Source} using input {Input}.", source, payload.Name); - - activity?.SetTag("stellaops.cli.command", "sources ingest dry-run"); - activity?.SetTag("stellaops.cli.source", source); - activity?.SetTag("stellaops.cli.tenant", tenant); - activity?.SetTag("stellaops.cli.format", formatNormalized); - activity?.SetTag("stellaops.cli.input_kind", payload.Kind); - - var request = new AocIngestDryRunRequest - { - Tenant = tenant, - Source = source, - Document = new AocIngestDryRunDocument - { - Name = payload.Name, - Content = payload.Content, - ContentType = payload.ContentType, - ContentEncoding = payload.ContentEncoding - } - }; - - var response = await client.ExecuteAocIngestDryRunAsync(request, cancellationToken).ConfigureAwait(false); - 
activity?.SetTag("stellaops.cli.status", response.Status ?? "unknown"); - - if (!string.IsNullOrWhiteSpace(output)) - { - var reportPath = await WriteJsonReportAsync(response, output, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Dry-run report written to {Path}.", reportPath); - } - - if (formatNormalized == "json") - { - var json = JsonSerializer.Serialize(response, new JsonSerializerOptions - { - WriteIndented = true - }); - Console.WriteLine(json); - } - else - { - RenderDryRunTable(response, !disableColor); - } - - var exitCode = DetermineDryRunExitCode(response); - Environment.ExitCode = exitCode; - statusMetric = exitCode == 0 ? "ok" : "violation"; - activity?.SetTag("stellaops.cli.exit_code", exitCode); - } - catch (Exception ex) - { - statusMetric = "transport_error"; - logger.LogError(ex, "Dry-run ingestion failed."); - Environment.ExitCode = 70; - } - finally - { - verbosity.MinimumLevel = previousLevel; - CliMetrics.RecordSourcesDryRun(statusMetric); - } - } - - public static async Task HandleAocVerifyAsync( - IServiceProvider services, - string? sinceOption, - int? limitOption, - string? sourcesOption, - string? codesOption, - string format, - string? exportPath, - string? tenantOverride, - bool disableColor, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("aoc-verify"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - - using var activity = CliActivitySource.Instance.StartActivity("cli.aoc.verify", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("aoc verify"); - var outcome = "unknown"; - - try - { - var tenant = ResolveTenant(tenantOverride); - if (string.IsNullOrWhiteSpace(tenant)) - { - throw new InvalidOperationException("Tenant must be provided via --tenant or STELLA_TENANT."); - } - - var normalizedFormat = string.IsNullOrWhiteSpace(format) - ? "table" - : format.Trim().ToLowerInvariant(); - - if (normalizedFormat is not ("table" or "json")) - { - throw new InvalidOperationException("Format must be either 'table' or 'json'."); - } - - var since = DetermineVerificationSince(sinceOption); - var sinceIso = since.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); - var limit = NormalizeLimit(limitOption); - var sources = ParseCommaSeparatedList(sourcesOption); - var codes = ParseCommaSeparatedList(codesOption); - - var normalizedSources = sources.Count == 0 - ? Array.Empty() - : sources.Select(item => item.ToLowerInvariant()).ToArray(); - - var normalizedCodes = codes.Count == 0 - ? Array.Empty() - : codes.Select(item => item.ToUpperInvariant()).ToArray(); - - activity?.SetTag("stellaops.cli.command", "aoc verify"); - activity?.SetTag("stellaops.cli.tenant", tenant); - activity?.SetTag("stellaops.cli.since", sinceIso); - activity?.SetTag("stellaops.cli.limit", limit); - activity?.SetTag("stellaops.cli.format", normalizedFormat); - if (normalizedSources.Length > 0) - { - activity?.SetTag("stellaops.cli.sources", string.Join(",", normalizedSources)); - } - - if (normalizedCodes.Length > 0) - { - activity?.SetTag("stellaops.cli.codes", string.Join(",", normalizedCodes)); - } - - var request = new AocVerifyRequest - { - Tenant = tenant, - Since = sinceIso, - Limit = limit, - Sources = normalizedSources.Length == 0 ? null : normalizedSources, - Codes = normalizedCodes.Length == 0 ? 
null : normalizedCodes - }; - - var response = await client.ExecuteAocVerifyAsync(request, cancellationToken).ConfigureAwait(false); - - if (!string.IsNullOrWhiteSpace(exportPath)) - { - var reportPath = await WriteJsonReportAsync(response, exportPath, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Verification report written to {Path}.", reportPath); - } - - if (normalizedFormat == "json") - { - var json = JsonSerializer.Serialize(response, new JsonSerializerOptions - { - WriteIndented = true - }); - Console.WriteLine(json); - } - else - { - RenderAocVerifyTable(response, !disableColor, limit); - } - - var exitCode = DetermineVerifyExitCode(response); - Environment.ExitCode = exitCode; - activity?.SetTag("stellaops.cli.exit_code", exitCode); - outcome = exitCode switch - { - 0 => "ok", - >= 11 and <= 17 => "violations", - 18 => "truncated", - _ => "unknown" - }; - } - catch (InvalidOperationException ex) - { - outcome = "usage_error"; - logger.LogError(ex, "Verification failed: {Message}", ex.Message); - Console.Error.WriteLine(ex.Message); - Environment.ExitCode = 71; - activity?.SetStatus(ActivityStatusCode.Error, ex.Message); - } - catch (Exception ex) - { - outcome = "transport_error"; - logger.LogError(ex, "Verification request failed."); - Console.Error.WriteLine(ex.Message); - Environment.ExitCode = 70; - activity?.SetStatus(ActivityStatusCode.Error, ex.Message); - } - finally - { - verbosity.MinimumLevel = previousLevel; - CliMetrics.RecordAocVerify(outcome); - } - } - - public static async Task HandleConnectorJobAsync( - IServiceProvider services, - string source, - string stage, - string? 
mode, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-connector"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.db.fetch", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "db fetch"); - activity?.SetTag("stellaops.cli.source", source); - activity?.SetTag("stellaops.cli.stage", stage); - if (!string.IsNullOrWhiteSpace(mode)) - { - activity?.SetTag("stellaops.cli.mode", mode); - } - using var duration = CliMetrics.MeasureCommandDuration("db fetch"); - - try - { - var jobKind = $"source:{source}:{stage}"; - var parameters = new Dictionary(StringComparer.Ordinal); - if (!string.IsNullOrWhiteSpace(mode)) - { - parameters["mode"] = mode; - } - - await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - logger.LogError(ex, "Connector job failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleMergeJobAsync( - IServiceProvider services, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-merge"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.db.merge", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "db merge"); - using var duration = CliMetrics.MeasureCommandDuration("db merge"); - - try - { - await TriggerJobAsync(client, logger, "merge:reconcile", new Dictionary(StringComparer.Ordinal), cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - logger.LogError(ex, "Merge job failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleExportJobAsync( - IServiceProvider services, - string format, - bool delta, - bool? publishFull, - bool? publishDelta, - bool? includeFull, - bool? includeDelta, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-export"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.db.export", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "db export"); - activity?.SetTag("stellaops.cli.format", format); - activity?.SetTag("stellaops.cli.delta", delta); - using var duration = CliMetrics.MeasureCommandDuration("db export"); - activity?.SetTag("stellaops.cli.publish_full", publishFull); - activity?.SetTag("stellaops.cli.publish_delta", publishDelta); - activity?.SetTag("stellaops.cli.include_full", includeFull); - activity?.SetTag("stellaops.cli.include_delta", includeDelta); - - try - { - var jobKind = format switch - { - "trivy-db" or "trivy" => "export:trivy-db", - _ => "export:json" - }; - - var isTrivy = jobKind == "export:trivy-db"; - if (isTrivy - && !publishFull.HasValue - && !publishDelta.HasValue - && !includeFull.HasValue - && !includeDelta.HasValue - && AnsiConsole.Profile.Capabilities.Interactive) - { - var overrides = TrivyDbExportPrompt.PromptOverrides(); - publishFull = overrides.publishFull; - publishDelta = overrides.publishDelta; - includeFull = overrides.includeFull; - includeDelta = overrides.includeDelta; - } - - var parameters = new Dictionary(StringComparer.Ordinal) - { - ["delta"] = delta - }; - if (publishFull.HasValue) - { - parameters["publishFull"] = publishFull.Value; - } - if (publishDelta.HasValue) - { - parameters["publishDelta"] = publishDelta.Value; - } - if (includeFull.HasValue) - { - parameters["includeFull"] = includeFull.Value; - } - if (includeDelta.HasValue) - { - parameters["includeDelta"] = includeDelta.Value; - } - - await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - logger.LogError(ex, "Export job failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static Task HandleExcititorInitAsync( - IServiceProvider services, - 
IReadOnlyList providers, - bool resume, - bool verbose, - CancellationToken cancellationToken) - { - var normalizedProviders = NormalizeProviders(providers); - var payload = new Dictionary(StringComparer.Ordinal); - if (normalizedProviders.Count > 0) - { - payload["providers"] = normalizedProviders; - } - if (resume) - { - payload["resume"] = true; - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor init", - verbose, - new Dictionary - { - ["providers"] = normalizedProviders.Count, - ["resume"] = resume - }, - client => client.ExecuteExcititorOperationAsync("init", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static Task HandleExcititorPullAsync( - IServiceProvider services, - IReadOnlyList providers, - DateTimeOffset? since, - TimeSpan? window, - bool force, - bool verbose, - CancellationToken cancellationToken) - { - var normalizedProviders = NormalizeProviders(providers); - var payload = new Dictionary(StringComparer.Ordinal); - if (normalizedProviders.Count > 0) - { - payload["providers"] = normalizedProviders; - } - if (since.HasValue) - { - payload["since"] = since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); - } - if (window.HasValue) - { - payload["window"] = window.Value.ToString("c", CultureInfo.InvariantCulture); - } - if (force) - { - payload["force"] = true; - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor pull", - verbose, - new Dictionary - { - ["providers"] = normalizedProviders.Count, - ["force"] = force, - ["since"] = since?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture), - ["window"] = window?.ToString("c", CultureInfo.InvariantCulture) - }, - client => client.ExecuteExcititorOperationAsync("ingest/run", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static Task HandleExcititorResumeAsync( - IServiceProvider services, - IReadOnlyList 
providers, - string? checkpoint, - bool verbose, - CancellationToken cancellationToken) - { - var normalizedProviders = NormalizeProviders(providers); - var payload = new Dictionary(StringComparer.Ordinal); - if (normalizedProviders.Count > 0) - { - payload["providers"] = normalizedProviders; - } - if (!string.IsNullOrWhiteSpace(checkpoint)) - { - payload["checkpoint"] = checkpoint.Trim(); - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor resume", - verbose, - new Dictionary - { - ["providers"] = normalizedProviders.Count, - ["checkpoint"] = checkpoint - }, - client => client.ExecuteExcititorOperationAsync("ingest/resume", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static async Task HandleExcititorListProvidersAsync( - IServiceProvider services, - bool includeDisabled, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("excititor-list-providers"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.excititor.list-providers", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "excititor list-providers"); - activity?.SetTag("stellaops.cli.include_disabled", includeDisabled); - using var duration = CliMetrics.MeasureCommandDuration("excititor list-providers"); - - try - { - var providers = await client.GetExcititorProvidersAsync(includeDisabled, cancellationToken).ConfigureAwait(false); - Environment.ExitCode = 0; - logger.LogInformation("Providers returned: {Count}", providers.Count); - - if (providers.Count > 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table().Border(TableBorder.Rounded).AddColumns("Provider", "Kind", "Trust", "Enabled", "Last Ingested"); - foreach (var provider in providers) - { - table.AddRow( - provider.Id, - provider.Kind, - string.IsNullOrWhiteSpace(provider.TrustTier) ? "-" : provider.TrustTier, - provider.Enabled ? "yes" : "no", - provider.LastIngestedAt?.ToString("yyyy-MM-dd HH:mm:ss 'UTC'", CultureInfo.InvariantCulture) ?? "unknown"); - } - - AnsiConsole.Write(table); - } - else - { - foreach (var provider in providers) - { - logger.LogInformation("{ProviderId} [{Kind}] Enabled={Enabled} Trust={Trust} LastIngested={LastIngested}", - provider.Id, - provider.Kind, - provider.Enabled ? "yes" : "no", - string.IsNullOrWhiteSpace(provider.TrustTier) ? "-" : provider.TrustTier, - provider.LastIngestedAt?.ToString("O", CultureInfo.InvariantCulture) ?? "unknown"); - } - } - } - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to list Excititor providers."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleExcititorExportAsync( - IServiceProvider services, - string format, - bool delta, - string? scope, - DateTimeOffset? since, - string? provider, - string? 
outputPath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scopeHandle = services.CreateAsyncScope(); - var client = scopeHandle.ServiceProvider.GetRequiredService(); - var logger = scopeHandle.ServiceProvider.GetRequiredService().CreateLogger("excititor-export"); - var options = scopeHandle.ServiceProvider.GetRequiredService(); - var verbosity = scopeHandle.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.excititor.export", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "excititor export"); - activity?.SetTag("stellaops.cli.format", format); - activity?.SetTag("stellaops.cli.delta", delta); - if (!string.IsNullOrWhiteSpace(scope)) - { - activity?.SetTag("stellaops.cli.scope", scope); - } - if (since.HasValue) - { - activity?.SetTag("stellaops.cli.since", since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture)); - } - if (!string.IsNullOrWhiteSpace(provider)) - { - activity?.SetTag("stellaops.cli.provider", provider); - } - if (!string.IsNullOrWhiteSpace(outputPath)) - { - activity?.SetTag("stellaops.cli.output", outputPath); - } - using var duration = CliMetrics.MeasureCommandDuration("excititor export"); - - try - { - var payload = new Dictionary(StringComparer.Ordinal) - { - ["format"] = string.IsNullOrWhiteSpace(format) ? 
"openvex" : format.Trim(), - ["delta"] = delta - }; - - if (!string.IsNullOrWhiteSpace(scope)) - { - payload["scope"] = scope.Trim(); - } - if (since.HasValue) - { - payload["since"] = since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); - } - if (!string.IsNullOrWhiteSpace(provider)) - { - payload["provider"] = provider.Trim(); - } - - var result = await client.ExecuteExcititorOperationAsync( - "export", - HttpMethod.Post, - RemoveNullValues(payload), - cancellationToken).ConfigureAwait(false); - - if (!result.Success) - { - logger.LogError(string.IsNullOrWhiteSpace(result.Message) ? "Excititor export failed." : result.Message); - Environment.ExitCode = 1; - return; - } - - Environment.ExitCode = 0; - - var manifest = TryParseExportManifest(result.Payload); - if (!string.IsNullOrWhiteSpace(result.Message) - && (manifest is null || !string.Equals(result.Message, "ok", StringComparison.OrdinalIgnoreCase))) - { - logger.LogInformation(result.Message); - } - - if (manifest is not null) - { - activity?.SetTag("stellaops.cli.export_id", manifest.ExportId); - if (!string.IsNullOrWhiteSpace(manifest.Format)) - { - activity?.SetTag("stellaops.cli.export_format", manifest.Format); - } - if (manifest.FromCache.HasValue) - { - activity?.SetTag("stellaops.cli.export_cached", manifest.FromCache.Value); - } - if (manifest.SizeBytes.HasValue) - { - activity?.SetTag("stellaops.cli.export_size", manifest.SizeBytes.Value); - } - - if (manifest.FromCache == true) - { - logger.LogInformation("Reusing cached export {ExportId} ({Format}).", manifest.ExportId, manifest.Format ?? "unknown"); - } - else - { - logger.LogInformation("Export ready: {ExportId} ({Format}).", manifest.ExportId, manifest.Format ?? 
"unknown"); - } - - if (manifest.CreatedAt.HasValue) - { - logger.LogInformation("Created at {CreatedAt}.", manifest.CreatedAt.Value.ToString("u", CultureInfo.InvariantCulture)); - } - - if (!string.IsNullOrWhiteSpace(manifest.Digest)) - { - var digestDisplay = BuildDigestDisplay(manifest.Algorithm, manifest.Digest); - if (manifest.SizeBytes.HasValue) - { - logger.LogInformation("Digest {Digest} ({Size}).", digestDisplay, FormatSize(manifest.SizeBytes.Value)); - } - else - { - logger.LogInformation("Digest {Digest}.", digestDisplay); - } - } - - if (!string.IsNullOrWhiteSpace(manifest.RekorLocation)) - { - if (!string.IsNullOrWhiteSpace(manifest.RekorIndex)) - { - logger.LogInformation("Rekor entry: {Location} (index {Index}).", manifest.RekorLocation, manifest.RekorIndex); - } - else - { - logger.LogInformation("Rekor entry: {Location}.", manifest.RekorLocation); - } - } - - if (!string.IsNullOrWhiteSpace(manifest.RekorInclusionUrl) - && !string.Equals(manifest.RekorInclusionUrl, manifest.RekorLocation, StringComparison.OrdinalIgnoreCase)) - { - logger.LogInformation("Rekor inclusion proof: {Url}.", manifest.RekorInclusionUrl); - } - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - var resolvedPath = ResolveExportOutputPath(outputPath!, manifest); - var download = await client.DownloadExcititorExportAsync( - manifest.ExportId, - resolvedPath, - manifest.Algorithm, - manifest.Digest, - cancellationToken).ConfigureAwait(false); - - activity?.SetTag("stellaops.cli.export_path", download.Path); - - if (download.FromCache) - { - logger.LogInformation("Export already cached at {Path} ({Size}).", download.Path, FormatSize(download.SizeBytes)); - } - else - { - logger.LogInformation("Export saved to {Path} ({Size}).", download.Path, FormatSize(download.SizeBytes)); - } - } - else if (!string.IsNullOrWhiteSpace(result.Location)) - { - var downloadUrl = ResolveLocationUrl(options, result.Location); - if (!string.IsNullOrWhiteSpace(downloadUrl)) - { - 
logger.LogInformation("Download URL: {Url}", downloadUrl); - } - else - { - logger.LogInformation("Download location: {Location}", result.Location); - } - } - } - else - { - if (!string.IsNullOrWhiteSpace(result.Location)) - { - var downloadUrl = ResolveLocationUrl(options, result.Location); - if (!string.IsNullOrWhiteSpace(downloadUrl)) - { - logger.LogInformation("Download URL: {Url}", downloadUrl); - } - else - { - logger.LogInformation("Location: {Location}", result.Location); - } - } - else if (string.IsNullOrWhiteSpace(result.Message)) - { - logger.LogInformation("Export request accepted."); - } - } - } - catch (Exception ex) - { - logger.LogError(ex, "Excititor export failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static Task HandleExcititorBackfillStatementsAsync( - IServiceProvider services, - DateTimeOffset? retrievedSince, - bool force, - int batchSize, - int? maxDocuments, - bool verbose, - CancellationToken cancellationToken) - { - if (batchSize <= 0) - { - throw new ArgumentOutOfRangeException(nameof(batchSize), "Batch size must be greater than zero."); - } - - if (maxDocuments.HasValue && maxDocuments.Value <= 0) - { - throw new ArgumentOutOfRangeException(nameof(maxDocuments), "Max documents must be greater than zero when specified."); - } - - var payload = new Dictionary(StringComparer.Ordinal) - { - ["force"] = force, - ["batchSize"] = batchSize, - ["maxDocuments"] = maxDocuments - }; - - if (retrievedSince.HasValue) - { - payload["retrievedSince"] = retrievedSince.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); - } - - var activityTags = new Dictionary(StringComparer.Ordinal) - { - ["stellaops.cli.force"] = force, - ["stellaops.cli.batch_size"] = batchSize, - ["stellaops.cli.max_documents"] = maxDocuments - }; - - if (retrievedSince.HasValue) - { - activityTags["stellaops.cli.retrieved_since"] = retrievedSince.Value.ToUniversalTime().ToString("O", 
CultureInfo.InvariantCulture); - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor backfill-statements", - verbose, - activityTags, - client => client.ExecuteExcititorOperationAsync( - "admin/backfill-statements", - HttpMethod.Post, - RemoveNullValues(payload), - cancellationToken), - cancellationToken); - } - - public static Task HandleExcititorVerifyAsync( - IServiceProvider services, - string? exportId, - string? digest, - string? attestationPath, - bool verbose, - CancellationToken cancellationToken) - { - if (string.IsNullOrWhiteSpace(exportId) && string.IsNullOrWhiteSpace(digest) && string.IsNullOrWhiteSpace(attestationPath)) - { - var logger = services.GetRequiredService().CreateLogger("excititor-verify"); - logger.LogError("At least one of --export-id, --digest, or --attestation must be provided."); - Environment.ExitCode = 1; - return Task.CompletedTask; - } - - var payload = new Dictionary(StringComparer.Ordinal); - if (!string.IsNullOrWhiteSpace(exportId)) - { - payload["exportId"] = exportId.Trim(); - } - if (!string.IsNullOrWhiteSpace(digest)) - { - payload["digest"] = digest.Trim(); - } - if (!string.IsNullOrWhiteSpace(attestationPath)) - { - var fullPath = Path.GetFullPath(attestationPath); - if (!File.Exists(fullPath)) - { - var logger = services.GetRequiredService().CreateLogger("excititor-verify"); - logger.LogError("Attestation file not found at {Path}.", fullPath); - Environment.ExitCode = 1; - return Task.CompletedTask; - } - - var bytes = File.ReadAllBytes(fullPath); - payload["attestation"] = new Dictionary(StringComparer.Ordinal) - { - ["fileName"] = Path.GetFileName(fullPath), - ["base64"] = Convert.ToBase64String(bytes) - }; - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor verify", - verbose, - new Dictionary - { - ["export_id"] = exportId, - ["digest"] = digest, - ["attestation_path"] = attestationPath - }, - client => client.ExecuteExcititorOperationAsync("verify", 
HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static Task HandleExcititorReconcileAsync( - IServiceProvider services, - IReadOnlyList providers, - TimeSpan? maxAge, - bool verbose, - CancellationToken cancellationToken) - { - var normalizedProviders = NormalizeProviders(providers); - var payload = new Dictionary(StringComparer.Ordinal); - if (normalizedProviders.Count > 0) - { - payload["providers"] = normalizedProviders; - } - if (maxAge.HasValue) - { - payload["maxAge"] = maxAge.Value.ToString("c", CultureInfo.InvariantCulture); - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor reconcile", - verbose, - new Dictionary - { - ["providers"] = normalizedProviders.Count, - ["max_age"] = maxAge?.ToString("c", CultureInfo.InvariantCulture) - }, - client => client.ExecuteExcititorOperationAsync("reconcile", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static async Task HandleRuntimePolicyTestAsync( - IServiceProvider services, - string? namespaceValue, - IReadOnlyList imageArguments, - string? filePath, - IReadOnlyList labelArguments, - bool outputJson, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("runtime-policy-test"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.runtime.policy.test", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "runtime policy test"); - if (!string.IsNullOrWhiteSpace(namespaceValue)) - { - activity?.SetTag("stellaops.cli.namespace", namespaceValue); - } - using var duration = CliMetrics.MeasureCommandDuration("runtime policy test"); - - try - { - IReadOnlyList images; - try - { - images = await GatherImageDigestsAsync(imageArguments, filePath, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or ArgumentException or FileNotFoundException) - { - logger.LogError(ex, "Failed to gather image digests: {Message}", ex.Message); - Environment.ExitCode = 9; - return; - } - - if (images.Count == 0) - { - logger.LogError("No image digests provided. Use --image, --file, or pipe digests via stdin."); - Environment.ExitCode = 9; - return; - } - - IReadOnlyDictionary labels; - try - { - labels = ParseLabelSelectors(labelArguments); - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - Environment.ExitCode = 9; - return; - } - - activity?.SetTag("stellaops.cli.images", images.Count); - activity?.SetTag("stellaops.cli.labels", labels.Count); - - var request = new RuntimePolicyEvaluationRequest(namespaceValue, labels, images); - var result = await client.EvaluateRuntimePolicyAsync(request, cancellationToken).ConfigureAwait(false); - - activity?.SetTag("stellaops.cli.ttl_seconds", result.TtlSeconds); - Environment.ExitCode = 0; - - if (outputJson) - { - var json = BuildRuntimePolicyJson(result, images); - Console.WriteLine(json); - return; - } - - if (result.ExpiresAtUtc.HasValue) - { - logger.LogInformation("Decision TTL: {TtlSeconds}s (expires {ExpiresAt})", result.TtlSeconds, result.ExpiresAtUtc.Value.ToString("u", CultureInfo.InvariantCulture)); - } - else - { - logger.LogInformation("Decision TTL: 
{TtlSeconds}s", result.TtlSeconds); - } - - if (!string.IsNullOrWhiteSpace(result.PolicyRevision)) - { - logger.LogInformation("Policy revision: {Revision}", result.PolicyRevision); - } - - DisplayRuntimePolicyResults(logger, result, images); - } - catch (Exception ex) - { - logger.LogError(ex, "Runtime policy evaluation failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleAuthLoginAsync( - IServiceProvider services, - StellaOpsCliOptions options, - bool verbose, - bool force, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-login"); - Environment.ExitCode = 0; - - if (string.IsNullOrWhiteSpace(options.Authority?.Url)) - { - logger.LogError("Authority URL is not configured. Set STELLAOPS_AUTHORITY_URL or update your configuration."); - Environment.ExitCode = 1; - return; - } - - var tokenClient = scope.ServiceProvider.GetService(); - if (tokenClient is null) - { - logger.LogError("Authority client is not available. 
Ensure AddStellaOpsAuthClient is registered in Program.cs."); - Environment.ExitCode = 1; - return; - } - - var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); - if (string.IsNullOrWhiteSpace(cacheKey)) - { - logger.LogError("Authority configuration is incomplete; unable to determine cache key."); - Environment.ExitCode = 1; - return; - } - - try - { - if (force) - { - await tokenClient.ClearCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - } - - var scopeName = AuthorityTokenUtilities.ResolveScope(options); - StellaOpsTokenResult token; - - if (!string.IsNullOrWhiteSpace(options.Authority.Username)) - { - if (string.IsNullOrWhiteSpace(options.Authority.Password)) - { - logger.LogError("Authority password must be provided when username is configured."); - Environment.ExitCode = 1; - return; - } - - token = await tokenClient.RequestPasswordTokenAsync( - options.Authority.Username, - options.Authority.Password!, - scopeName, - null, - cancellationToken).ConfigureAwait(false); - } - else - { - token = await tokenClient.RequestClientCredentialsTokenAsync(scopeName, null, cancellationToken).ConfigureAwait(false); - } - - await tokenClient.CacheTokenAsync(cacheKey, token.ToCacheEntry(), cancellationToken).ConfigureAwait(false); - - if (verbose) - { - logger.LogInformation("Authenticated with {Authority} (scopes: {Scopes}).", options.Authority.Url, string.Join(", ", token.Scopes)); - } - - logger.LogInformation("Login successful. 
Access token expires at {Expires}.", token.ExpiresAtUtc.ToString("u")); - } - catch (Exception ex) - { - logger.LogError(ex, "Authentication failed: {Message}", ex.Message); - Environment.ExitCode = 1; - } - } - - public static async Task HandleAuthLogoutAsync( - IServiceProvider services, - StellaOpsCliOptions options, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-logout"); - Environment.ExitCode = 0; - - var tokenClient = scope.ServiceProvider.GetService(); - if (tokenClient is null) - { - logger.LogInformation("No authority client registered; nothing to remove."); - return; - } - - var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); - if (string.IsNullOrWhiteSpace(cacheKey)) - { - logger.LogInformation("Authority configuration missing; no cached tokens to remove."); - return; - } - - await tokenClient.ClearCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - if (verbose) - { - logger.LogInformation("Cleared cached token for {Authority}.", options.Authority?.Url ?? "authority"); - } - } - - public static async Task HandleAuthStatusAsync( - IServiceProvider services, - StellaOpsCliOptions options, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-status"); - Environment.ExitCode = 0; - - if (string.IsNullOrWhiteSpace(options.Authority?.Url)) - { - logger.LogInformation("Authority URL not configured. 
Set STELLAOPS_AUTHORITY_URL and run 'auth login'."); - Environment.ExitCode = 1; - return; - } - - var tokenClient = scope.ServiceProvider.GetService(); - if (tokenClient is null) - { - logger.LogInformation("Authority client not registered; no cached tokens available."); - Environment.ExitCode = 1; - return; - } - - var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); - if (string.IsNullOrWhiteSpace(cacheKey)) - { - logger.LogInformation("Authority configuration incomplete; no cached tokens available."); - Environment.ExitCode = 1; - return; - } - - var entry = await tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - if (entry is null) - { - logger.LogInformation("No cached token for {Authority}. Run 'auth login' to authenticate.", options.Authority.Url); - Environment.ExitCode = 1; - return; - } - - logger.LogInformation("Cached token for {Authority} expires at {Expires}.", options.Authority.Url, entry.ExpiresAtUtc.ToString("u")); - if (verbose) - { - logger.LogInformation("Scopes: {Scopes}", string.Join(", ", entry.Scopes)); - } - } - - public static async Task HandleAuthWhoAmIAsync( - IServiceProvider services, - StellaOpsCliOptions options, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-whoami"); - Environment.ExitCode = 0; - - if (string.IsNullOrWhiteSpace(options.Authority?.Url)) - { - logger.LogInformation("Authority URL not configured. 
Set STELLAOPS_AUTHORITY_URL and run 'auth login'."); - Environment.ExitCode = 1; - return; - } - - var tokenClient = scope.ServiceProvider.GetService(); - if (tokenClient is null) - { - logger.LogInformation("Authority client not registered; no cached tokens available."); - Environment.ExitCode = 1; - return; - } - - var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); - if (string.IsNullOrWhiteSpace(cacheKey)) - { - logger.LogInformation("Authority configuration incomplete; no cached tokens available."); - Environment.ExitCode = 1; - return; - } - - var entry = await tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - if (entry is null) - { - logger.LogInformation("No cached token for {Authority}. Run 'auth login' to authenticate.", options.Authority.Url); - Environment.ExitCode = 1; - return; - } - - var grantType = string.IsNullOrWhiteSpace(options.Authority.Username) ? "client_credentials" : "password"; - var now = DateTimeOffset.UtcNow; - var remaining = entry.ExpiresAtUtc - now; - if (remaining < TimeSpan.Zero) - { - remaining = TimeSpan.Zero; - } - - logger.LogInformation("Authority: {Authority}", options.Authority.Url); - logger.LogInformation("Grant type: {GrantType}", grantType); - logger.LogInformation("Token type: {TokenType}", entry.TokenType); - logger.LogInformation("Expires: {Expires} ({Remaining})", entry.ExpiresAtUtc.ToString("u"), FormatDuration(remaining)); - - if (entry.Scopes.Count > 0) - { - logger.LogInformation("Scopes: {Scopes}", string.Join(", ", entry.Scopes)); - } - - if (TryExtractJwtClaims(entry.AccessToken, out var claims, out var issuedAt, out var notBefore)) - { - if (claims.TryGetValue("sub", out var subject) && !string.IsNullOrWhiteSpace(subject)) - { - logger.LogInformation("Subject: {Subject}", subject); - } - - if (claims.TryGetValue("client_id", out var clientId) && !string.IsNullOrWhiteSpace(clientId)) - { - logger.LogInformation("Client ID (token): {ClientId}", clientId); - } - - 
if (claims.TryGetValue("aud", out var audience) && !string.IsNullOrWhiteSpace(audience)) - { - logger.LogInformation("Audience: {Audience}", audience); - } - - if (claims.TryGetValue("iss", out var issuer) && !string.IsNullOrWhiteSpace(issuer)) - { - logger.LogInformation("Issuer: {Issuer}", issuer); - } - - if (issuedAt is not null) - { - logger.LogInformation("Issued at: {IssuedAt}", issuedAt.Value.ToString("u")); - } - - if (notBefore is not null) - { - logger.LogInformation("Not before: {NotBefore}", notBefore.Value.ToString("u")); - } - - var extraClaims = CollectAdditionalClaims(claims); - if (extraClaims.Count > 0 && verbose) - { - logger.LogInformation("Additional claims: {Claims}", string.Join(", ", extraClaims)); - } - } - else - { - logger.LogInformation("Access token appears opaque; claims are unavailable."); - } - } - - public static async Task HandleAuthRevokeExportAsync( - IServiceProvider services, - StellaOpsCliOptions options, - string? outputDirectory, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-revoke-export"); - Environment.ExitCode = 0; - - try - { - var client = scope.ServiceProvider.GetRequiredService(); - var result = await client.ExportAsync(verbose, cancellationToken).ConfigureAwait(false); - - var directory = string.IsNullOrWhiteSpace(outputDirectory) - ? 
Directory.GetCurrentDirectory() - : Path.GetFullPath(outputDirectory); - - Directory.CreateDirectory(directory); - - var bundlePath = Path.Combine(directory, "revocation-bundle.json"); - var signaturePath = Path.Combine(directory, "revocation-bundle.json.jws"); - var digestPath = Path.Combine(directory, "revocation-bundle.json.sha256"); - - await File.WriteAllBytesAsync(bundlePath, result.BundleBytes, cancellationToken).ConfigureAwait(false); - await File.WriteAllTextAsync(signaturePath, result.Signature, cancellationToken).ConfigureAwait(false); - await File.WriteAllTextAsync(digestPath, $"sha256:{result.Digest}", cancellationToken).ConfigureAwait(false); - - var computedDigest = Convert.ToHexString(SHA256.HashData(result.BundleBytes)).ToLowerInvariant(); - if (!string.Equals(computedDigest, result.Digest, StringComparison.OrdinalIgnoreCase)) - { - logger.LogError("Digest mismatch. Expected {Expected} but computed {Actual}.", result.Digest, computedDigest); - Environment.ExitCode = 1; - return; - } - - logger.LogInformation( - "Revocation bundle exported to {Directory} (sequence {Sequence}, issued {Issued:u}, signing key {KeyId}, provider {Provider}).", - directory, - result.Sequence, - result.IssuedAt, - string.IsNullOrWhiteSpace(result.SigningKeyId) ? "" : result.SigningKeyId, - string.IsNullOrWhiteSpace(result.SigningProvider) ? 
"default" : result.SigningProvider); - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to export revocation bundle."); - Environment.ExitCode = 1; - } - } - - public static async Task HandleAuthRevokeVerifyAsync( - string bundlePath, - string signaturePath, - string keyPath, - bool verbose, - CancellationToken cancellationToken) - { - var loggerFactory = LoggerFactory.Create(builder => builder.AddSimpleConsole(options => - { - options.SingleLine = true; - options.TimestampFormat = "HH:mm:ss "; - })); - var logger = loggerFactory.CreateLogger("auth-revoke-verify"); - Environment.ExitCode = 0; - - try - { - if (string.IsNullOrWhiteSpace(bundlePath) || string.IsNullOrWhiteSpace(signaturePath) || string.IsNullOrWhiteSpace(keyPath)) - { - logger.LogError("Arguments --bundle, --signature, and --key are required."); - Environment.ExitCode = 1; - return; - } - - var bundleBytes = await File.ReadAllBytesAsync(bundlePath, cancellationToken).ConfigureAwait(false); - var signatureContent = (await File.ReadAllTextAsync(signaturePath, cancellationToken).ConfigureAwait(false)).Trim(); - var keyPem = await File.ReadAllTextAsync(keyPath, cancellationToken).ConfigureAwait(false); - - var digest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant(); - logger.LogInformation("Bundle digest sha256:{Digest}", digest); - - if (!TryParseDetachedJws(signatureContent, out var encodedHeader, out var encodedSignature)) - { - logger.LogError("Signature is not in detached JWS format."); - Environment.ExitCode = 1; - return; - } - - var headerJson = Encoding.UTF8.GetString(Base64UrlDecode(encodedHeader)); - using var headerDocument = JsonDocument.Parse(headerJson); - var header = headerDocument.RootElement; - - if (!header.TryGetProperty("b64", out var b64Element) || b64Element.GetBoolean()) - { - logger.LogError("Detached JWS header must include '\"b64\": false'."); - Environment.ExitCode = 1; - return; - } - - var algorithm = header.TryGetProperty("alg", out var 
algElement) ? algElement.GetString() : SignatureAlgorithms.Es256; - if (string.IsNullOrWhiteSpace(algorithm)) - { - algorithm = SignatureAlgorithms.Es256; - } - - var providerHint = header.TryGetProperty("provider", out var providerElement) - ? providerElement.GetString() - : null; - - var keyId = header.TryGetProperty("kid", out var kidElement) ? kidElement.GetString() : null; - if (string.IsNullOrWhiteSpace(keyId)) - { - keyId = Path.GetFileNameWithoutExtension(keyPath); - logger.LogWarning("JWS header missing 'kid'; using fallback key id {KeyId}.", keyId); - } - - CryptoSigningKey signingKey; - try - { - signingKey = CreateVerificationSigningKey(keyId!, algorithm!, providerHint, keyPem, keyPath); - } - catch (Exception ex) when (ex is InvalidOperationException or CryptographicException) - { - logger.LogError(ex, "Failed to load verification key material."); - Environment.ExitCode = 1; - return; - } - - var providers = new List - { - new DefaultCryptoProvider() - }; - -#if STELLAOPS_CRYPTO_SODIUM - providers.Add(new LibsodiumCryptoProvider()); -#endif - - foreach (var provider in providers) - { - if (provider.Supports(CryptoCapability.Verification, algorithm!)) - { - provider.UpsertSigningKey(signingKey); - } - } - - var preferredOrder = !string.IsNullOrWhiteSpace(providerHint) - ? new[] { providerHint! 
} - : Array.Empty(); - var registry = new CryptoProviderRegistry(providers, preferredOrder); - CryptoSignerResolution resolution; - try - { - resolution = registry.ResolveSigner( - CryptoCapability.Verification, - algorithm!, - signingKey.Reference, - providerHint); - } - catch (Exception ex) - { - logger.LogError(ex, "No crypto provider available for verification (algorithm {Algorithm}).", algorithm); - Environment.ExitCode = 1; - return; - } - - var signingInputLength = encodedHeader.Length + 1 + bundleBytes.Length; - var buffer = ArrayPool.Shared.Rent(signingInputLength); - try - { - var headerBytes = Encoding.ASCII.GetBytes(encodedHeader); - Buffer.BlockCopy(headerBytes, 0, buffer, 0, headerBytes.Length); - buffer[headerBytes.Length] = (byte)'.'; - Buffer.BlockCopy(bundleBytes, 0, buffer, headerBytes.Length + 1, bundleBytes.Length); - - var signatureBytes = Base64UrlDecode(encodedSignature); - var verified = await resolution.Signer.VerifyAsync( - new ReadOnlyMemory(buffer, 0, signingInputLength), - signatureBytes, - cancellationToken).ConfigureAwait(false); - - if (!verified) - { - logger.LogError("Signature verification failed."); - Environment.ExitCode = 1; - return; - } - } - finally - { - ArrayPool.Shared.Return(buffer); - } - - if (!string.IsNullOrWhiteSpace(providerHint) && !string.Equals(providerHint, resolution.ProviderName, StringComparison.OrdinalIgnoreCase)) - { - logger.LogWarning( - "Preferred provider '{Preferred}' unavailable; verification used '{Provider}'.", - providerHint, - resolution.ProviderName); - } - - logger.LogInformation( - "Signature verified using algorithm {Algorithm} via provider {Provider} (kid {KeyId}).", - algorithm, - resolution.ProviderName, - signingKey.Reference.KeyId); - - if (verbose) - { - logger.LogInformation("JWS header: {Header}", headerJson); - } - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to verify revocation bundle."); - Environment.ExitCode = 1; - } - finally - { - loggerFactory.Dispose(); - } - 
} - - public static async Task HandleVulnObservationsAsync( - IServiceProvider services, - string tenant, - IReadOnlyList observationIds, - IReadOnlyList aliases, - IReadOnlyList purls, - IReadOnlyList cpes, - int? limit, - string? cursor, - bool emitJson, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vuln-observations"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.vuln.observations", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "vuln observations"); - activity?.SetTag("stellaops.cli.tenant", tenant); - using var duration = CliMetrics.MeasureCommandDuration("vuln observations"); - - try - { - tenant = tenant?.Trim().ToLowerInvariant() ?? string.Empty; - if (string.IsNullOrWhiteSpace(tenant)) - { - throw new InvalidOperationException("Tenant must be provided."); - } - - var query = new AdvisoryObservationsQuery( - tenant, - NormalizeSet(observationIds, toLower: false), - NormalizeSet(aliases, toLower: true), - NormalizeSet(purls, toLower: false), - NormalizeSet(cpes, toLower: false), - limit, - cursor); - - var response = await client.GetObservationsAsync(query, cancellationToken).ConfigureAwait(false); - - if (emitJson) - { - var json = JsonSerializer.Serialize(response, new JsonSerializerOptions - { - WriteIndented = true - }); - Console.WriteLine(json); - Environment.ExitCode = 0; - return; - } - - RenderObservationTable(response); - if (!emitJson && response.HasMore && !string.IsNullOrWhiteSpace(response.NextCursor)) - { - var escapedCursor = Markup.Escape(response.NextCursor); - AnsiConsole.MarkupLine($"[yellow]More observations available. 
Continue with[/] [cyan]--cursor[/] [grey]{escapedCursor}[/]"); - } - Environment.ExitCode = 0; - } - catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) - { - logger.LogWarning("Operation cancelled by user."); - Environment.ExitCode = 130; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to fetch observations from Concelier."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - - static IReadOnlyList NormalizeSet(IReadOnlyList values, bool toLower) - { - if (values is null || values.Count == 0) - { - return Array.Empty(); - } - - var set = new HashSet(StringComparer.Ordinal); - foreach (var raw in values) - { - if (string.IsNullOrWhiteSpace(raw)) - { - continue; - } - - var normalized = raw.Trim(); - if (toLower) - { - normalized = normalized.ToLowerInvariant(); - } - - set.Add(normalized); - } - - return set.Count == 0 ? Array.Empty() : set.ToArray(); - } - - static void RenderObservationTable(AdvisoryObservationsResponse response) - { - var observations = response.Observations ?? Array.Empty(); - if (observations.Count == 0) - { - AnsiConsole.MarkupLine("[yellow]No observations matched the provided filters.[/]"); - return; - } - - var table = new Table() - .Centered() - .Border(TableBorder.Rounded); - - table.AddColumn("Observation"); - table.AddColumn("Source"); - table.AddColumn("Upstream Id"); - table.AddColumn("Aliases"); - table.AddColumn("PURLs"); - table.AddColumn("CPEs"); - table.AddColumn("Created (UTC)"); - - foreach (var observation in observations) - { - var sourceVendor = observation.Source?.Vendor ?? "(unknown)"; - var upstreamId = observation.Upstream?.UpstreamId ?? 
"(unknown)"; - var aliasesText = FormatList(observation.Linkset?.Aliases); - var purlsText = FormatList(observation.Linkset?.Purls); - var cpesText = FormatList(observation.Linkset?.Cpes); - - table.AddRow( - Markup.Escape(observation.ObservationId), - Markup.Escape(sourceVendor), - Markup.Escape(upstreamId), - Markup.Escape(aliasesText), - Markup.Escape(purlsText), - Markup.Escape(cpesText), - observation.CreatedAt.ToUniversalTime().ToString("u", CultureInfo.InvariantCulture)); - } - - AnsiConsole.Write(table); - AnsiConsole.MarkupLine( - "[green]{0}[/] observation(s). Aliases: [green]{1}[/], PURLs: [green]{2}[/], CPEs: [green]{3}[/].", - observations.Count, - response.Linkset?.Aliases?.Count ?? 0, - response.Linkset?.Purls?.Count ?? 0, - response.Linkset?.Cpes?.Count ?? 0); - } - - static string FormatList(IReadOnlyList? values) - { - if (values is null || values.Count == 0) - { - return "(none)"; - } - - const int MaxItems = 3; - if (values.Count <= MaxItems) - { - return string.Join(", ", values); - } - - var preview = values.Take(MaxItems); - return $"{string.Join(", ", preview)} (+{values.Count - MaxItems})"; - } - } - - public static async Task HandleOfflineKitPullAsync( - IServiceProvider services, - string? bundleId, - string? destinationDirectory, - bool overwrite, - bool resume, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var options = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-pull"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.pull", ActivityKind.Client); - activity?.SetTag("stellaops.cli.bundle_id", string.IsNullOrWhiteSpace(bundleId) ? "latest" : bundleId); - using var duration = CliMetrics.MeasureCommandDuration("offline kit pull"); - - try - { - var targetDirectory = string.IsNullOrWhiteSpace(destinationDirectory) - ? options.Offline?.KitsDirectory ?? Path.Combine(Environment.CurrentDirectory, "offline-kits") - : destinationDirectory; - - targetDirectory = Path.GetFullPath(targetDirectory); - Directory.CreateDirectory(targetDirectory); - - var result = await client.DownloadOfflineKitAsync(bundleId, targetDirectory, overwrite, resume, cancellationToken).ConfigureAwait(false); - - logger.LogInformation( - "Bundle {BundleId} stored at {Path} (captured {Captured:u}, sha256:{Digest}).", - result.Descriptor.BundleId, - result.BundlePath, - result.Descriptor.CapturedAt, - result.Descriptor.BundleSha256); - - logger.LogInformation("Manifest saved to {Manifest}.", result.ManifestPath); - - if (!string.IsNullOrWhiteSpace(result.MetadataPath)) - { - logger.LogDebug("Metadata recorded at {Metadata}.", result.MetadataPath); - } - - if (result.BundleSignaturePath is not null) - { - logger.LogInformation("Bundle signature saved to {Signature}.", result.BundleSignaturePath); - } - - if (result.ManifestSignaturePath is not null) - { - logger.LogInformation("Manifest signature saved to {Signature}.", result.ManifestSignaturePath); - } - - CliMetrics.RecordOfflineKitDownload(result.Descriptor.Kind ?? 
"unknown", result.FromCache); - activity?.SetTag("stellaops.cli.bundle_cache", result.FromCache); - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to download offline kit bundle."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicyFindingsListAsync( - IServiceProvider services, - string policyId, - string[] sbomFilters, - string[] statusFilters, - string[] severityFilters, - string? since, - string? cursor, - int? page, - int? pageSize, - string? format, - string? outputPath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-ls"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.list", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("policy findings list"); - - try - { - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (page.HasValue && page.Value < 1) - { - throw new ArgumentException("--page must be greater than or equal to 1.", nameof(page)); - } - - if (pageSize.HasValue && (pageSize.Value < 1 || pageSize.Value > 500)) - { - throw new ArgumentException("--page-size must be between 1 and 500.", nameof(pageSize)); - } - - var normalizedPolicyId = policyId.Trim(); - var sboms = NormalizePolicyFilterValues(sbomFilters); - var statuses = NormalizePolicyFilterValues(statusFilters, toLower: true); - var severities = NormalizePolicyFilterValues(severityFilters); - var sinceValue = ParsePolicySince(since); - var cursorValue = string.IsNullOrWhiteSpace(cursor) ? 
null : cursor.Trim(); - - var query = new PolicyFindingsQuery( - normalizedPolicyId, - sboms, - statuses, - severities, - cursorValue, - page, - pageSize, - sinceValue); - - activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); - if (sboms.Count > 0) - { - activity?.SetTag("stellaops.cli.findings.sbom_filters", string.Join(",", sboms)); - } - - if (statuses.Count > 0) - { - activity?.SetTag("stellaops.cli.findings.status_filters", string.Join(",", statuses)); - } - - if (severities.Count > 0) - { - activity?.SetTag("stellaops.cli.findings.severity_filters", string.Join(",", severities)); - } - - if (!string.IsNullOrWhiteSpace(cursorValue)) - { - activity?.SetTag("stellaops.cli.findings.cursor", cursorValue); - } - - if (page.HasValue) - { - activity?.SetTag("stellaops.cli.findings.page", page.Value); - } - - if (pageSize.HasValue) - { - activity?.SetTag("stellaops.cli.findings.page_size", pageSize.Value); - } - - if (sinceValue.HasValue) - { - activity?.SetTag("stellaops.cli.findings.since", sinceValue.Value.ToString("o", CultureInfo.InvariantCulture)); - } - - var result = await client.GetPolicyFindingsAsync(query, cancellationToken).ConfigureAwait(false); - activity?.SetTag("stellaops.cli.findings.count", result.Items.Count); - if (!string.IsNullOrWhiteSpace(result.NextCursor)) - { - activity?.SetTag("stellaops.cli.findings.next_cursor", result.NextCursor); - } - - var payload = BuildPolicyFindingsPayload(normalizedPolicyId, query, result); - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Results written to {Path}.", Path.GetFullPath(outputPath!)); - } - - var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); - if (outputFormat == PolicyFindingsOutputFormat.Json) - { - var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); - Console.WriteLine(json); - } - else - { - RenderPolicyFindingsTable(logger, 
result); - } - - CliMetrics.RecordPolicyFindingsList(result.Items.Count == 0 ? "empty" : "ok"); - Environment.ExitCode = 0; - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordPolicyFindingsList("error"); - Environment.ExitCode = 64; - } - catch (PolicyApiException ex) - { - HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsList); - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to list policy findings."); - CliMetrics.RecordPolicyFindingsList("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicyFindingsGetAsync( - IServiceProvider services, - string policyId, - string findingId, - string? format, - string? outputPath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-get"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.get", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("policy findings get"); - - try - { - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (string.IsNullOrWhiteSpace(findingId)) - { - throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); - } - - var normalizedPolicyId = policyId.Trim(); - var normalizedFindingId = findingId.Trim(); - activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); - activity?.SetTag("stellaops.cli.finding_id", normalizedFindingId); - - var result = await client.GetPolicyFindingAsync(normalizedPolicyId, normalizedFindingId, cancellationToken).ConfigureAwait(false); - var payload = BuildPolicyFindingPayload(normalizedPolicyId, result); - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Finding written to {Path}.", Path.GetFullPath(outputPath!)); - } - - var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); - if (outputFormat == PolicyFindingsOutputFormat.Json) - { - Console.WriteLine(JsonSerializer.Serialize(payload, SimulationJsonOptions)); - } - else - { - RenderPolicyFindingDetails(logger, result); - } - - var outcome = string.IsNullOrWhiteSpace(result.Status) ? 
"unknown" : result.Status.ToLowerInvariant(); - CliMetrics.RecordPolicyFindingsGet(outcome); - Environment.ExitCode = 0; - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordPolicyFindingsGet("error"); - Environment.ExitCode = 64; - } - catch (PolicyApiException ex) - { - HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsGet); - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to retrieve policy finding."); - CliMetrics.RecordPolicyFindingsGet("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicyFindingsExplainAsync( - IServiceProvider services, - string policyId, - string findingId, - string? mode, - string? format, - string? outputPath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-explain"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.explain", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("policy findings explain"); - - try - { - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (string.IsNullOrWhiteSpace(findingId)) - { - throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); - } - - var normalizedPolicyId = policyId.Trim(); - var normalizedFindingId = findingId.Trim(); - var normalizedMode = NormalizeExplainMode(mode); - - activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); - activity?.SetTag("stellaops.cli.finding_id", normalizedFindingId); - if (!string.IsNullOrWhiteSpace(normalizedMode)) - { - activity?.SetTag("stellaops.cli.findings.mode", normalizedMode); - } - - var result = await client.GetPolicyFindingExplainAsync(normalizedPolicyId, normalizedFindingId, normalizedMode, cancellationToken).ConfigureAwait(false); - activity?.SetTag("stellaops.cli.findings.step_count", result.Steps.Count); - - var payload = BuildPolicyFindingExplainPayload(normalizedPolicyId, normalizedFindingId, normalizedMode, result); - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Explain trace written to {Path}.", Path.GetFullPath(outputPath!)); - } - - var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); - if (outputFormat == PolicyFindingsOutputFormat.Json) - { - Console.WriteLine(JsonSerializer.Serialize(payload, SimulationJsonOptions)); - } - else - { - RenderPolicyFindingExplain(logger, result); - } - - CliMetrics.RecordPolicyFindingsExplain(result.Steps.Count == 0 ? 
"empty" : "ok"); - Environment.ExitCode = 0; - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordPolicyFindingsExplain("error"); - Environment.ExitCode = 64; - } - catch (PolicyApiException ex) - { - HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsExplain); - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to fetch policy explain trace."); - CliMetrics.RecordPolicyFindingsExplain("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicyActivateAsync( - IServiceProvider services, - string policyId, - int version, - string? note, - bool runNow, - string? scheduledAt, - string? priority, - bool rollback, - string? incidentId, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-activate"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.policy.activate", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "policy activate"); - using var duration = CliMetrics.MeasureCommandDuration("policy activate"); - - try - { - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (version <= 0) - { - throw new ArgumentOutOfRangeException(nameof(version), "Version must be greater than zero."); - } - - var normalizedPolicyId = policyId.Trim(); - DateTimeOffset? 
scheduled = null; - if (!string.IsNullOrWhiteSpace(scheduledAt)) - { - if (!DateTimeOffset.TryParse(scheduledAt, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed)) - { - throw new ArgumentException("Scheduled timestamp must be a valid ISO-8601 value.", nameof(scheduledAt)); - } - - scheduled = parsed; - } - - var request = new PolicyActivationRequest( - runNow, - scheduled, - NormalizePolicyPriority(priority), - rollback, - string.IsNullOrWhiteSpace(incidentId) ? null : incidentId.Trim(), - string.IsNullOrWhiteSpace(note) ? null : note.Trim()); - - activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); - activity?.SetTag("stellaops.cli.policy_version", version); - if (request.RunNow) - { - activity?.SetTag("stellaops.cli.policy_run_now", true); - } - - if (request.ScheduledAt.HasValue) - { - activity?.SetTag("stellaops.cli.policy_scheduled_at", request.ScheduledAt.Value.ToString("o", CultureInfo.InvariantCulture)); - } - - if (!string.IsNullOrWhiteSpace(request.Priority)) - { - activity?.SetTag("stellaops.cli.policy_priority", request.Priority); - } - - if (request.Rollback) - { - activity?.SetTag("stellaops.cli.policy_rollback", true); - } - - var result = await client.ActivatePolicyRevisionAsync(normalizedPolicyId, version, request, cancellationToken).ConfigureAwait(false); - - var outcome = NormalizePolicyActivationOutcome(result.Status); - CliMetrics.RecordPolicyActivation(outcome); - RenderPolicyActivationResult(result, request); - - var exitCode = DeterminePolicyActivationExitCode(outcome); - Environment.ExitCode = exitCode; - - if (exitCode == 0) - { - logger.LogInformation("Policy {PolicyId} v{Version} activation status: {Status}.", result.Revision.PolicyId, result.Revision.Version, outcome); - } - else - { - logger.LogWarning("Policy {PolicyId} v{Version} requires additional approval (status: {Status}).", result.Revision.PolicyId, result.Revision.Version, outcome); - } - } - catch 
(ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordPolicyActivation("error"); - Environment.ExitCode = 64; - } - catch (PolicyApiException ex) - { - HandlePolicyActivationFailure(ex, logger); - } - catch (Exception ex) - { - logger.LogError(ex, "Policy activation failed."); - CliMetrics.RecordPolicyActivation("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicySimulateAsync( - IServiceProvider services, - string policyId, - int? baseVersion, - int? candidateVersion, - IReadOnlyList sbomArguments, - IReadOnlyList environmentArguments, - string? format, - string? outputPath, - bool explain, - bool failOnDiff, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-simulate"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.policy.simulate", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "policy simulate"); - activity?.SetTag("stellaops.cli.policy_id", policyId); - if (baseVersion.HasValue) - { - activity?.SetTag("stellaops.cli.base_version", baseVersion.Value); - } - if (candidateVersion.HasValue) - { - activity?.SetTag("stellaops.cli.candidate_version", candidateVersion.Value); - } - using var duration = CliMetrics.MeasureCommandDuration("policy simulate"); - - try - { - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - var normalizedPolicyId = policyId.Trim(); - var sbomSet = NormalizePolicySbomSet(sbomArguments); - var environment = ParsePolicyEnvironment(environmentArguments); - - var input = new PolicySimulationInput( - baseVersion, - candidateVersion, - sbomSet, - environment, - explain); - - var result = await client.SimulatePolicyAsync(normalizedPolicyId, input, cancellationToken).ConfigureAwait(false); - - activity?.SetTag("stellaops.cli.diff_added", result.Diff.Added); - activity?.SetTag("stellaops.cli.diff_removed", result.Diff.Removed); - if (result.Diff.BySeverity.Count > 0) - { - activity?.SetTag("stellaops.cli.severity_buckets", result.Diff.BySeverity.Count); - } - - var outputFormat = DeterminePolicySimulationFormat(format, outputPath); - var payload = BuildPolicySimulationPayload(normalizedPolicyId, baseVersion, candidateVersion, sbomSet, environment, result); - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - await WriteSimulationOutputAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Simulation results written to {Path}.", Path.GetFullPath(outputPath!)); - } - - RenderPolicySimulationResult(logger, payload, result, outputFormat); - - var exitCode = DetermineSimulationExitCode(result, failOnDiff); - 
Environment.ExitCode = exitCode; - - var outcome = exitCode == 20 - ? "diff_blocked" - : (result.Diff.Added + result.Diff.Removed) > 0 ? "diff" : "clean"; - CliMetrics.RecordPolicySimulation(outcome); - - if (exitCode == 20) - { - logger.LogWarning("Differences detected; exiting with code 20 due to --fail-on-diff."); - } - - if (!string.IsNullOrWhiteSpace(result.ExplainUri)) - { - activity?.SetTag("stellaops.cli.explain_uri", result.ExplainUri); - } - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordPolicySimulation("error"); - Environment.ExitCode = 64; - } - catch (PolicyApiException ex) - { - HandlePolicySimulationFailure(ex, logger); - } - catch (Exception ex) - { - logger.LogError(ex, "Policy simulation failed."); - CliMetrics.RecordPolicySimulation("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleOfflineKitImportAsync( - IServiceProvider services, - string bundlePath, - string? manifestPath, - string? bundleSignaturePath, - string? manifestSignaturePath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var options = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-import"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.import", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("offline kit import"); - - try - { - if (string.IsNullOrWhiteSpace(bundlePath)) - { - logger.LogError("Bundle path is required."); - Environment.ExitCode = 1; - return; - } - - bundlePath = Path.GetFullPath(bundlePath); - if (!File.Exists(bundlePath)) - { - logger.LogError("Bundle file {Path} not found.", bundlePath); - Environment.ExitCode = 1; - return; - } - - var metadata = await LoadOfflineKitMetadataAsync(bundlePath, cancellationToken).ConfigureAwait(false); - if (metadata is not null) - { - manifestPath ??= metadata.ManifestPath; - bundleSignaturePath ??= metadata.BundleSignaturePath; - manifestSignaturePath ??= metadata.ManifestSignaturePath; - } - - manifestPath = NormalizeFilePath(manifestPath); - bundleSignaturePath = NormalizeFilePath(bundleSignaturePath); - manifestSignaturePath = NormalizeFilePath(manifestSignaturePath); - - if (manifestPath is null) - { - manifestPath = TryInferManifestPath(bundlePath); - if (manifestPath is not null) - { - logger.LogDebug("Using inferred manifest path {Path}.", manifestPath); - } - } - - if (manifestPath is not null && !File.Exists(manifestPath)) - { - logger.LogError("Manifest file {Path} not found.", manifestPath); - Environment.ExitCode = 1; - return; - } - - if (bundleSignaturePath is not null && !File.Exists(bundleSignaturePath)) - { - logger.LogWarning("Bundle signature {Path} not found; skipping.", bundleSignaturePath); - bundleSignaturePath = null; - } - - if (manifestSignaturePath is not null && !File.Exists(manifestSignaturePath)) - { - logger.LogWarning("Manifest signature {Path} not found; skipping.", manifestSignaturePath); - manifestSignaturePath = null; - } - - if (metadata is not null) - { - var computedBundleDigest = await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false); 
- if (!DigestsEqual(computedBundleDigest, metadata.BundleSha256)) - { - logger.LogError("Bundle digest mismatch. Expected sha256:{Expected} but computed sha256:{Actual}.", metadata.BundleSha256, computedBundleDigest); - Environment.ExitCode = 1; - return; - } - - if (manifestPath is not null) - { - var computedManifestDigest = await ComputeSha256Async(manifestPath, cancellationToken).ConfigureAwait(false); - if (!DigestsEqual(computedManifestDigest, metadata.ManifestSha256)) - { - logger.LogError("Manifest digest mismatch. Expected sha256:{Expected} but computed sha256:{Actual}.", metadata.ManifestSha256, computedManifestDigest); - Environment.ExitCode = 1; - return; - } - } - } - - var request = new OfflineKitImportRequest( - bundlePath, - manifestPath, - bundleSignaturePath, - manifestSignaturePath, - metadata?.BundleId, - metadata?.BundleSha256, - metadata?.BundleSize, - metadata?.CapturedAt, - metadata?.Channel, - metadata?.Kind, - metadata?.IsDelta, - metadata?.BaseBundleId, - metadata?.ManifestSha256, - metadata?.ManifestSize); - - var result = await client.ImportOfflineKitAsync(request, cancellationToken).ConfigureAwait(false); - CliMetrics.RecordOfflineKitImport(result.Status); - - logger.LogInformation( - "Import {ImportId} submitted at {Submitted:u} with status {Status}.", - string.IsNullOrWhiteSpace(result.ImportId) ? "" : result.ImportId, - result.SubmittedAt, - string.IsNullOrWhiteSpace(result.Status) ? 
"queued" : result.Status); - - if (!string.IsNullOrWhiteSpace(result.Message)) - { - logger.LogInformation(result.Message); - } - - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Offline kit import failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleOfflineKitStatusAsync( - IServiceProvider services, - bool asJson, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-status"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.status", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("offline kit status"); - - try - { - var status = await client.GetOfflineKitStatusAsync(cancellationToken).ConfigureAwait(false); - - if (asJson) - { - var payload = new - { - bundleId = status.BundleId, - channel = status.Channel, - kind = status.Kind, - isDelta = status.IsDelta, - baseBundleId = status.BaseBundleId, - capturedAt = status.CapturedAt, - importedAt = status.ImportedAt, - sha256 = status.BundleSha256, - sizeBytes = status.BundleSize, - components = status.Components.Select(component => new - { - component.Name, - component.Version, - component.Digest, - component.CapturedAt, - component.SizeBytes - }) - }; - - var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true }); - Console.WriteLine(json); - } - else - { - if (string.IsNullOrWhiteSpace(status.BundleId)) - { - logger.LogInformation("No offline kit bundle has been imported yet."); - } - else 
- { - logger.LogInformation( - "Current bundle {BundleId} ({Kind}) captured {Captured:u}, imported {Imported:u}, sha256:{Digest}, size {Size}.", - status.BundleId, - status.Kind ?? "unknown", - status.CapturedAt ?? default, - status.ImportedAt ?? default, - status.BundleSha256 ?? "", - status.BundleSize.HasValue ? status.BundleSize.Value.ToString("N0", CultureInfo.InvariantCulture) : ""); - } - - if (status.Components.Count > 0) - { - var table = new Table().AddColumns("Component", "Version", "Digest", "Captured", "Size (bytes)"); - foreach (var component in status.Components) - { - table.AddRow( - component.Name, - string.IsNullOrWhiteSpace(component.Version) ? "-" : component.Version!, - string.IsNullOrWhiteSpace(component.Digest) ? "-" : $"sha256:{component.Digest}", - component.CapturedAt?.ToString("u", CultureInfo.InvariantCulture) ?? "-", - component.SizeBytes.HasValue ? component.SizeBytes.Value.ToString("N0", CultureInfo.InvariantCulture) : "-"); - } - - AnsiConsole.Write(table); - } - } - - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to read offline kit status."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - private static async Task LoadOfflineKitMetadataAsync(string bundlePath, CancellationToken cancellationToken) - { - var metadataPath = bundlePath + ".metadata.json"; - if (!File.Exists(metadataPath)) - { - return null; - } - - try - { - await using var stream = File.OpenRead(metadataPath); - return await JsonSerializer.DeserializeAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); - } - catch - { - return null; - } - } - - private static string? NormalizeFilePath(string? path) - { - if (string.IsNullOrWhiteSpace(path)) - { - return null; - } - - return Path.GetFullPath(path); - } - - private static string? 
TryInferManifestPath(string bundlePath) - { - var directory = Path.GetDirectoryName(bundlePath); - if (string.IsNullOrWhiteSpace(directory)) - { - return null; - } - - var baseName = Path.GetFileName(bundlePath); - if (string.IsNullOrWhiteSpace(baseName)) - { - return null; - } - - baseName = Path.GetFileNameWithoutExtension(baseName); - if (baseName.EndsWith(".tar", StringComparison.OrdinalIgnoreCase)) - { - baseName = Path.GetFileNameWithoutExtension(baseName); - } - - var candidates = new[] - { - Path.Combine(directory, $"offline-manifest-{baseName}.json"), - Path.Combine(directory, "offline-manifest.json") - }; - - foreach (var candidate in candidates) - { - if (File.Exists(candidate)) - { - return Path.GetFullPath(candidate); - } - } - - return Directory.EnumerateFiles(directory, "offline-manifest*.json").FirstOrDefault(); - } - - private static bool DigestsEqual(string computed, string? expected) - { - if (string.IsNullOrWhiteSpace(expected)) - { - return true; - } - - return string.Equals(NormalizeDigest(computed), NormalizeDigest(expected), StringComparison.OrdinalIgnoreCase); - } - - private static string NormalizeDigest(string digest) - { - var value = digest.Trim(); - if (value.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) - { - value = value.Substring("sha256:".Length); - } - - return value.ToLowerInvariant(); - } - - private static async Task ComputeSha256Async(string path, CancellationToken cancellationToken) - { - await using var stream = File.OpenRead(path); - var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); - return Convert.ToHexString(hash).ToLowerInvariant(); - } - - private static bool TryParseDetachedJws(string value, out string encodedHeader, out string encodedSignature) - { - encodedHeader = string.Empty; - encodedSignature = string.Empty; - - if (string.IsNullOrWhiteSpace(value)) - { - return false; - } - - var parts = value.Split('.'); - if (parts.Length != 3) - { - return false; - } - - 
encodedHeader = parts[0]; - encodedSignature = parts[2]; - return parts[1].Length == 0; - } - - private static byte[] Base64UrlDecode(string value) - { - var normalized = value.Replace('-', '+').Replace('_', '/'); - var padding = normalized.Length % 4; - if (padding == 2) - { - normalized += "=="; - } - else if (padding == 3) - { - normalized += "="; - } - else if (padding == 1) - { - throw new FormatException("Invalid Base64Url value."); - } - - return Convert.FromBase64String(normalized); - } - - private static CryptoSigningKey CreateVerificationSigningKey( - string keyId, - string algorithm, - string? providerHint, - string keyPem, - string keyPath) - { - if (string.IsNullOrWhiteSpace(keyPem)) - { - throw new InvalidOperationException("Verification key PEM content is empty."); - } - - using var ecdsa = ECDsa.Create(); - ecdsa.ImportFromPem(keyPem); - - var parameters = ecdsa.ExportParameters(includePrivateParameters: false); - if (parameters.D is null || parameters.D.Length == 0) - { - parameters.D = new byte[] { 0x01 }; - } - - var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase) - { - ["source"] = Path.GetFullPath(keyPath), - ["verificationOnly"] = "true" - }; - - return new CryptoSigningKey( - new CryptoKeyReference(keyId, providerHint), - algorithm, - in parameters, - DateTimeOffset.UtcNow, - metadata: metadata); - } - - private static string FormatDuration(TimeSpan duration) - { - if (duration <= TimeSpan.Zero) - { - return "expired"; - } - - if (duration.TotalDays >= 1) - { - var days = (int)duration.TotalDays; - var hours = duration.Hours; - return hours > 0 - ? 
FormattableString.Invariant($"{days}d {hours}h") - : FormattableString.Invariant($"{days}d"); - } - - if (duration.TotalHours >= 1) - { - return FormattableString.Invariant($"{(int)duration.TotalHours}h {duration.Minutes}m"); - } - - if (duration.TotalMinutes >= 1) - { - return FormattableString.Invariant($"{(int)duration.TotalMinutes}m {duration.Seconds}s"); - } - - return FormattableString.Invariant($"{duration.Seconds}s"); - } - - private static bool TryExtractJwtClaims( - string accessToken, - out Dictionary claims, - out DateTimeOffset? issuedAt, - out DateTimeOffset? notBefore) - { - claims = new Dictionary(StringComparer.OrdinalIgnoreCase); - issuedAt = null; - notBefore = null; - - if (string.IsNullOrWhiteSpace(accessToken)) - { - return false; - } - - var parts = accessToken.Split('.'); - if (parts.Length < 2) - { - return false; - } - - if (!TryDecodeBase64Url(parts[1], out var payloadBytes)) - { - return false; - } - - try - { - using var document = JsonDocument.Parse(payloadBytes); - foreach (var property in document.RootElement.EnumerateObject()) - { - var value = FormatJsonValue(property.Value); - claims[property.Name] = value; - - if (issuedAt is null && property.NameEquals("iat") && TryParseUnixSeconds(property.Value, out var parsedIat)) - { - issuedAt = parsedIat; - } - - if (notBefore is null && property.NameEquals("nbf") && TryParseUnixSeconds(property.Value, out var parsedNbf)) - { - notBefore = parsedNbf; - } - } - - return true; - } - catch (JsonException) - { - claims.Clear(); - issuedAt = null; - notBefore = null; - return false; - } - } - - private static bool TryDecodeBase64Url(string value, out byte[] bytes) - { - bytes = Array.Empty(); - - if (string.IsNullOrWhiteSpace(value)) - { - return false; - } - - var normalized = value.Replace('-', '+').Replace('_', '/'); - var padding = normalized.Length % 4; - if (padding is 2 or 3) - { - normalized = normalized.PadRight(normalized.Length + (4 - padding), '='); - } - else if (padding == 1) - { 
- return false; - } - - try - { - bytes = Convert.FromBase64String(normalized); - return true; - } - catch (FormatException) - { - return false; - } - } - - private static string FormatJsonValue(JsonElement element) - { - return element.ValueKind switch - { - JsonValueKind.String => element.GetString() ?? string.Empty, - JsonValueKind.Number => element.TryGetInt64(out var longValue) - ? longValue.ToString(CultureInfo.InvariantCulture) - : element.GetDouble().ToString(CultureInfo.InvariantCulture), - JsonValueKind.True => "true", - JsonValueKind.False => "false", - JsonValueKind.Null => "null", - JsonValueKind.Array => FormatArray(element), - JsonValueKind.Object => element.GetRawText(), - _ => element.GetRawText() - }; - } - - private static string FormatArray(JsonElement array) - { - var values = new List(); - foreach (var item in array.EnumerateArray()) - { - values.Add(FormatJsonValue(item)); - } - - return string.Join(", ", values); - } - - private static bool TryParseUnixSeconds(JsonElement element, out DateTimeOffset value) - { - value = default; - - if (element.ValueKind == JsonValueKind.Number) - { - if (element.TryGetInt64(out var seconds)) - { - value = DateTimeOffset.FromUnixTimeSeconds(seconds); - return true; - } - - if (element.TryGetDouble(out var doubleValue)) - { - value = DateTimeOffset.FromUnixTimeSeconds((long)doubleValue); - return true; - } - } - - if (element.ValueKind == JsonValueKind.String) - { - var text = element.GetString(); - if (!string.IsNullOrWhiteSpace(text) && long.TryParse(text, NumberStyles.Integer, CultureInfo.InvariantCulture, out var seconds)) - { - value = DateTimeOffset.FromUnixTimeSeconds(seconds); - return true; - } - } - - return false; - } - - private static List CollectAdditionalClaims(Dictionary claims) - { - var result = new List(); - foreach (var pair in claims) - { - if (CommonClaimNames.Contains(pair.Key)) - { - continue; - } - - result.Add(FormattableString.Invariant($"{pair.Key}={pair.Value}")); - } - - 
result.Sort(StringComparer.OrdinalIgnoreCase); - return result; - } - - private static readonly HashSet CommonClaimNames = new(StringComparer.OrdinalIgnoreCase) - { - "aud", - "client_id", - "exp", - "iat", - "iss", - "nbf", - "scope", - "scopes", - "sub", - "token_type", - "jti" - }; - - private static async Task ExecuteExcititorCommandAsync( - IServiceProvider services, - string commandName, - bool verbose, - IDictionary? activityTags, - Func> operation, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger(commandName.Replace(' ', '-')); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity($"cli.{commandName.Replace(' ', '.')}" , ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", commandName); - if (activityTags is not null) - { - foreach (var tag in activityTags) - { - activity?.SetTag(tag.Key, tag.Value); - } - } - using var duration = CliMetrics.MeasureCommandDuration(commandName); - - try - { - var result = await operation(client).ConfigureAwait(false); - if (result.Success) - { - if (!string.IsNullOrWhiteSpace(result.Message)) - { - logger.LogInformation(result.Message); - } - else - { - logger.LogInformation("Operation completed successfully."); - } - - if (!string.IsNullOrWhiteSpace(result.Location)) - { - logger.LogInformation("Location: {Location}", result.Location); - } - - if (result.Payload is JsonElement payload && payload.ValueKind is not JsonValueKind.Undefined and not JsonValueKind.Null) - { - logger.LogDebug("Response payload: {Payload}", payload.ToString()); - } - - Environment.ExitCode = 0; - } - else - { - logger.LogError(string.IsNullOrWhiteSpace(result.Message) ? 
"Operation failed." : result.Message); - Environment.ExitCode = 1; - } - } - catch (Exception ex) - { - logger.LogError(ex, "Excititor operation failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - private static async Task> GatherImageDigestsAsync( - IReadOnlyList inline, - string? filePath, - CancellationToken cancellationToken) - { - var results = new List(); - var seen = new HashSet(StringComparer.Ordinal); - - void AddCandidates(string? candidate) - { - foreach (var image in SplitImageCandidates(candidate)) - { - if (seen.Add(image)) - { - results.Add(image); - } - } - } - - if (inline is not null) - { - foreach (var entry in inline) - { - AddCandidates(entry); - } - } - - if (!string.IsNullOrWhiteSpace(filePath)) - { - var path = Path.GetFullPath(filePath); - if (!File.Exists(path)) - { - throw new FileNotFoundException("Input file not found.", path); - } - - foreach (var line in File.ReadLines(path)) - { - cancellationToken.ThrowIfCancellationRequested(); - AddCandidates(line); - } - } - - if (Console.IsInputRedirected) - { - while (!cancellationToken.IsCancellationRequested) - { - var line = await Console.In.ReadLineAsync().ConfigureAwait(false); - if (line is null) - { - break; - } - - AddCandidates(line); - } - } - - return new ReadOnlyCollection(results); - } - - private static IEnumerable SplitImageCandidates(string? 
raw) - { - if (string.IsNullOrWhiteSpace(raw)) - { - yield break; - } - - var candidate = raw.Trim(); - var commentIndex = candidate.IndexOf('#'); - if (commentIndex >= 0) - { - candidate = candidate[..commentIndex].Trim(); - } - - if (candidate.Length == 0) - { - yield break; - } - - var tokens = candidate.Split(new[] { ',', ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries); - foreach (var token in tokens) - { - var trimmed = token.Trim(); - if (trimmed.Length > 0) - { - yield return trimmed; - } - } - } - - private static IReadOnlyDictionary ParseLabelSelectors(IReadOnlyList labelArguments) - { - if (labelArguments is null || labelArguments.Count == 0) - { - return EmptyLabelSelectors; - } - - var labels = new Dictionary(StringComparer.OrdinalIgnoreCase); - foreach (var raw in labelArguments) - { - if (string.IsNullOrWhiteSpace(raw)) - { - continue; - } - - var trimmed = raw.Trim(); - var delimiter = trimmed.IndexOf('='); - if (delimiter <= 0 || delimiter == trimmed.Length - 1) - { - throw new ArgumentException($"Invalid label '{raw}'. Expected key=value format."); - } - - var key = trimmed[..delimiter].Trim(); - var value = trimmed[(delimiter + 1)..].Trim(); - if (key.Length == 0) - { - throw new ArgumentException($"Invalid label '{raw}'. Label key cannot be empty."); - } - - labels[key] = value; - } - - return labels.Count == 0 ? EmptyLabelSelectors : new ReadOnlyDictionary(labels); - } - - private sealed record ExcititorExportManifestSummary( - string ExportId, - string? Format, - string? Algorithm, - string? Digest, - long? SizeBytes, - bool? FromCache, - DateTimeOffset? CreatedAt, - string? RekorLocation, - string? RekorIndex, - string? RekorInclusionUrl); - - private static ExcititorExportManifestSummary? TryParseExportManifest(JsonElement? 
payload) - { - if (payload is null || payload.Value.ValueKind is JsonValueKind.Undefined or JsonValueKind.Null) - { - return null; - } - - var element = payload.Value; - var exportId = GetStringProperty(element, "exportId"); - if (string.IsNullOrWhiteSpace(exportId)) - { - return null; - } - - var format = GetStringProperty(element, "format"); - var algorithm = default(string?); - var digest = default(string?); - - if (TryGetPropertyCaseInsensitive(element, "artifact", out var artifact) && artifact.ValueKind == JsonValueKind.Object) - { - algorithm = GetStringProperty(artifact, "algorithm"); - digest = GetStringProperty(artifact, "digest"); - } - - var sizeBytes = GetInt64Property(element, "sizeBytes"); - var fromCache = GetBooleanProperty(element, "fromCache"); - var createdAt = GetDateTimeOffsetProperty(element, "createdAt"); - - string? rekorLocation = null; - string? rekorIndex = null; - string? rekorInclusion = null; - - if (TryGetPropertyCaseInsensitive(element, "attestation", out var attestation) && attestation.ValueKind == JsonValueKind.Object) - { - if (TryGetPropertyCaseInsensitive(attestation, "rekor", out var rekor) && rekor.ValueKind == JsonValueKind.Object) - { - rekorLocation = GetStringProperty(rekor, "location"); - rekorIndex = GetStringProperty(rekor, "logIndex"); - var inclusion = GetStringProperty(rekor, "inclusionProofUri"); - if (!string.IsNullOrWhiteSpace(inclusion)) - { - rekorInclusion = inclusion; - } - } - } - - return new ExcititorExportManifestSummary( - exportId.Trim(), - format, - algorithm, - digest, - sizeBytes, - fromCache, - createdAt, - rekorLocation, - rekorIndex, - rekorInclusion); - } - - private static bool TryGetPropertyCaseInsensitive(JsonElement element, string propertyName, out JsonElement property) - { - if (element.ValueKind == JsonValueKind.Object && element.TryGetProperty(propertyName, out property)) - { - return true; - } - - if (element.ValueKind == JsonValueKind.Object) - { - foreach (var candidate in 
element.EnumerateObject()) - { - if (string.Equals(candidate.Name, propertyName, StringComparison.OrdinalIgnoreCase)) - { - property = candidate.Value; - return true; - } - } - } - - property = default; - return false; - } - - private static string? GetStringProperty(JsonElement element, string propertyName) - { - if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) - { - return property.ValueKind switch - { - JsonValueKind.String => property.GetString(), - JsonValueKind.Number => property.ToString(), - _ => null - }; - } - - return null; - } - - private static bool? GetBooleanProperty(JsonElement element, string propertyName) - { - if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) - { - return property.ValueKind switch - { - JsonValueKind.True => true, - JsonValueKind.False => false, - JsonValueKind.String when bool.TryParse(property.GetString(), out var parsed) => parsed, - _ => null - }; - } - - return null; - } - - private static long? GetInt64Property(JsonElement element, string propertyName) - { - if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) - { - if (property.ValueKind == JsonValueKind.Number && property.TryGetInt64(out var value)) - { - return value; - } - - if (property.ValueKind == JsonValueKind.String - && long.TryParse(property.GetString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) - { - return parsed; - } - } - - return null; - } - - private static DateTimeOffset? GetDateTimeOffsetProperty(JsonElement element, string propertyName) - { - if (TryGetPropertyCaseInsensitive(element, propertyName, out var property) - && property.ValueKind == JsonValueKind.String - && DateTimeOffset.TryParse(property.GetString(), CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var value)) - { - return value.ToUniversalTime(); - } - - return null; - } - - private static string BuildDigestDisplay(string? 
algorithm, string digest) - { - if (string.IsNullOrWhiteSpace(digest)) - { - return string.Empty; - } - - if (digest.Contains(':', StringComparison.Ordinal)) - { - return digest; - } - - if (string.IsNullOrWhiteSpace(algorithm) || algorithm.Equals("sha256", StringComparison.OrdinalIgnoreCase)) - { - return $"sha256:{digest}"; - } - - return $"{algorithm}:{digest}"; - } - - private static string FormatSize(long sizeBytes) - { - if (sizeBytes < 0) - { - return $"{sizeBytes} bytes"; - } - - string[] units = { "bytes", "KB", "MB", "GB", "TB" }; - double size = sizeBytes; - var unit = 0; - - while (size >= 1024 && unit < units.Length - 1) - { - size /= 1024; - unit++; - } - - return unit == 0 ? $"{sizeBytes} bytes" : $"{size:0.##} {units[unit]}"; - } - - private static string ResolveExportOutputPath(string outputPath, ExcititorExportManifestSummary manifest) - { - if (string.IsNullOrWhiteSpace(outputPath)) - { - throw new ArgumentException("Output path must be provided.", nameof(outputPath)); - } - - var fullPath = Path.GetFullPath(outputPath); - if (Directory.Exists(fullPath) - || outputPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal) - || outputPath.EndsWith(Path.AltDirectorySeparatorChar.ToString(), StringComparison.Ordinal)) - { - return Path.Combine(fullPath, BuildExportFileName(manifest)); - } - - var directory = Path.GetDirectoryName(fullPath); - if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory)) - { - Directory.CreateDirectory(directory); - } - - return fullPath; - } - - private static string BuildExportFileName(ExcititorExportManifestSummary manifest) - { - var token = !string.IsNullOrWhiteSpace(manifest.Digest) - ? manifest.Digest! 
- : manifest.ExportId; - - token = SanitizeToken(token); - if (token.Length > 40) - { - token = token[..40]; - } - - var extension = DetermineExportExtension(manifest.Format); - return $"stellaops-excititor-{token}{extension}"; - } - - private static string DetermineExportExtension(string? format) - { - if (string.IsNullOrWhiteSpace(format)) - { - return ".bin"; - } - - return format switch - { - not null when format.Equals("jsonl", StringComparison.OrdinalIgnoreCase) => ".jsonl", - not null when format.Equals("json", StringComparison.OrdinalIgnoreCase) => ".json", - not null when format.Equals("openvex", StringComparison.OrdinalIgnoreCase) => ".json", - not null when format.Equals("csaf", StringComparison.OrdinalIgnoreCase) => ".json", - _ => ".bin" - }; - } - - private static string SanitizeToken(string token) - { - var builder = new StringBuilder(token.Length); - foreach (var ch in token) - { - if (char.IsLetterOrDigit(ch)) - { - builder.Append(char.ToLowerInvariant(ch)); - } - } - - if (builder.Length == 0) - { - builder.Append("export"); - } - - return builder.ToString(); - } - - private static string? 
ResolveLocationUrl(StellaOpsCliOptions options, string location) - { - if (string.IsNullOrWhiteSpace(location)) - { - return null; - } - - if (Uri.TryCreate(location, UriKind.Absolute, out var absolute)) - { - return absolute.ToString(); - } - - if (!string.IsNullOrWhiteSpace(options?.BackendUrl) && Uri.TryCreate(options.BackendUrl, UriKind.Absolute, out var baseUri)) - { - if (!location.StartsWith("/", StringComparison.Ordinal)) - { - location = "/" + location; - } - - return new Uri(baseUri, location).ToString(); - } - - return location; - } - - private static string BuildRuntimePolicyJson(RuntimePolicyEvaluationResult result, IReadOnlyList requestedImages) - { - var orderedImages = BuildImageOrder(requestedImages, result.Decisions.Keys); - var results = new Dictionary(StringComparer.Ordinal); - - foreach (var image in orderedImages) - { - if (result.Decisions.TryGetValue(image, out var decision)) - { - results[image] = BuildDecisionMap(decision); - } - } - - var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) - { - WriteIndented = true, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - var payload = new Dictionary(StringComparer.Ordinal) - { - ["ttlSeconds"] = result.TtlSeconds, - ["expiresAtUtc"] = result.ExpiresAtUtc?.ToString("O", CultureInfo.InvariantCulture), - ["policyRevision"] = result.PolicyRevision, - ["results"] = results - }; - - return JsonSerializer.Serialize(payload, options); - } - - private static IDictionary BuildDecisionMap(RuntimePolicyImageDecision decision) - { - var map = new Dictionary(StringComparer.Ordinal) - { - ["policyVerdict"] = decision.PolicyVerdict, - ["signed"] = decision.Signed, - ["hasSbomReferrers"] = decision.HasSbomReferrers - }; - - if (decision.Reasons.Count > 0) - { - map["reasons"] = decision.Reasons; - } - - if (decision.Rekor is not null) - { - var rekorMap = new Dictionary(StringComparer.Ordinal); - if (!string.IsNullOrWhiteSpace(decision.Rekor.Uuid)) - { - rekorMap["uuid"] = 
decision.Rekor.Uuid; - } - - if (!string.IsNullOrWhiteSpace(decision.Rekor.Url)) - { - rekorMap["url"] = decision.Rekor.Url; - } - - if (decision.Rekor.Verified.HasValue) - { - rekorMap["verified"] = decision.Rekor.Verified; - } - - if (rekorMap.Count > 0) - { - map["rekor"] = rekorMap; - } - } - - foreach (var kvp in decision.AdditionalProperties) - { - map[kvp.Key] = kvp.Value; - } - - return map; - } - - private static void DisplayRuntimePolicyResults(ILogger logger, RuntimePolicyEvaluationResult result, IReadOnlyList requestedImages) - { - var orderedImages = BuildImageOrder(requestedImages, result.Decisions.Keys); - var summary = new Dictionary(StringComparer.OrdinalIgnoreCase); - - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table().Border(TableBorder.Rounded) - .AddColumns("Image", "Verdict", "Signed", "SBOM Ref", "Quieted", "Confidence", "Reasons", "Attestation"); - - foreach (var image in orderedImages) - { - if (result.Decisions.TryGetValue(image, out var decision)) - { - table.AddRow( - image, - decision.PolicyVerdict, - FormatBoolean(decision.Signed), - FormatBoolean(decision.HasSbomReferrers), - FormatQuietedDisplay(decision.AdditionalProperties), - FormatConfidenceDisplay(decision.AdditionalProperties), - decision.Reasons.Count > 0 ? string.Join(Environment.NewLine, decision.Reasons) : "-", - FormatAttestation(decision.Rekor)); - - summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? 
count + 1 : 1; - - if (decision.AdditionalProperties.Count > 0) - { - var metadata = string.Join(", ", decision.AdditionalProperties.Select(kvp => $"{kvp.Key}={FormatAdditionalValue(kvp.Value)}")); - logger.LogDebug("Metadata for {Image}: {Metadata}", image, metadata); - } - } - else - { - table.AddRow(image, "", "-", "-", "-", "-", "-", "-"); - } - } - - AnsiConsole.Write(table); - } - else - { - foreach (var image in orderedImages) - { - if (result.Decisions.TryGetValue(image, out var decision)) - { - var reasons = decision.Reasons.Count > 0 ? string.Join(", ", decision.Reasons) : "none"; - logger.LogInformation( - "{Image} -> verdict={Verdict} signed={Signed} sbomRef={Sbom} quieted={Quieted} confidence={Confidence} attestation={Attestation} reasons={Reasons}", - image, - decision.PolicyVerdict, - FormatBoolean(decision.Signed), - FormatBoolean(decision.HasSbomReferrers), - FormatQuietedDisplay(decision.AdditionalProperties), - FormatConfidenceDisplay(decision.AdditionalProperties), - FormatAttestation(decision.Rekor), - reasons); - - summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? 
count + 1 : 1; - - if (decision.AdditionalProperties.Count > 0) - { - var metadata = string.Join(", ", decision.AdditionalProperties.Select(kvp => $"{kvp.Key}={FormatAdditionalValue(kvp.Value)}")); - logger.LogDebug("Metadata for {Image}: {Metadata}", image, metadata); - } - } - else - { - logger.LogWarning("{Image} -> no decision returned by backend.", image); - } - } - } - - if (summary.Count > 0) - { - var summaryText = string.Join(", ", summary.Select(kvp => $"{kvp.Key}:{kvp.Value}")); - logger.LogInformation("Verdict summary: {Summary}", summaryText); - } - } - - private static IReadOnlyList BuildImageOrder(IReadOnlyList requestedImages, IEnumerable actual) - { - var order = new List(); - var seen = new HashSet(StringComparer.Ordinal); - - if (requestedImages is not null) - { - foreach (var image in requestedImages) - { - if (!string.IsNullOrWhiteSpace(image)) - { - var trimmed = image.Trim(); - if (seen.Add(trimmed)) - { - order.Add(trimmed); - } - } - } - } - - foreach (var image in actual) - { - if (!string.IsNullOrWhiteSpace(image)) - { - var trimmed = image.Trim(); - if (seen.Add(trimmed)) - { - order.Add(trimmed); - } - } - } - - return new ReadOnlyCollection(order); - } - - private static string FormatBoolean(bool? value) - => value is null ? "unknown" : value.Value ? "yes" : "no"; - - private static string FormatQuietedDisplay(IReadOnlyDictionary metadata) - { - var quieted = GetMetadataBoolean(metadata, "quieted", "quiet"); - var quietedBy = GetMetadataString(metadata, "quietedBy", "quietedReason"); - - if (quieted is true) - { - return string.IsNullOrWhiteSpace(quietedBy) ? "yes" : $"yes ({quietedBy})"; - } - - if (quieted is false) - { - return "no"; - } - - return string.IsNullOrWhiteSpace(quietedBy) ? "-" : $"? 
({quietedBy})"; - } - - private static string FormatConfidenceDisplay(IReadOnlyDictionary metadata) - { - var confidence = GetMetadataDouble(metadata, "confidence"); - var confidenceBand = GetMetadataString(metadata, "confidenceBand", "confidenceTier"); - - if (confidence.HasValue && !string.IsNullOrWhiteSpace(confidenceBand)) - { - return string.Format(CultureInfo.InvariantCulture, "{0:0.###} ({1})", confidence.Value, confidenceBand); - } - - if (confidence.HasValue) - { - return confidence.Value.ToString("0.###", CultureInfo.InvariantCulture); - } - - if (!string.IsNullOrWhiteSpace(confidenceBand)) - { - return confidenceBand!; - } - - return "-"; - } - - private static string FormatAttestation(RuntimePolicyRekorReference? rekor) - { - if (rekor is null) - { - return "-"; - } - - var uuid = string.IsNullOrWhiteSpace(rekor.Uuid) ? null : rekor.Uuid; - var url = string.IsNullOrWhiteSpace(rekor.Url) ? null : rekor.Url; - var verified = rekor.Verified; - - var core = uuid ?? url; - if (!string.IsNullOrEmpty(core)) - { - if (verified.HasValue) - { - var suffix = verified.Value ? " (verified)" : " (unverified)"; - return core + suffix; - } - - return core!; - } - - if (verified.HasValue) - { - return verified.Value ? "verified" : "unverified"; - } - - return "-"; - } - - private static bool? GetMetadataBoolean(IReadOnlyDictionary metadata, params string[] keys) - { - foreach (var key in keys) - { - if (metadata.TryGetValue(key, out var value) && value is not null) - { - switch (value) - { - case bool b: - return b; - case string s when bool.TryParse(s, out var parsed): - return parsed; - } - } - } - - return null; - } - - private static string? GetMetadataString(IReadOnlyDictionary metadata, params string[] keys) - { - foreach (var key in keys) - { - if (metadata.TryGetValue(key, out var value) && value is not null) - { - if (value is string s) - { - return string.IsNullOrWhiteSpace(s) ? null : s; - } - } - } - - return null; - } - - private static double? 
GetMetadataDouble(IReadOnlyDictionary metadata, params string[] keys) - { - foreach (var key in keys) - { - if (metadata.TryGetValue(key, out var value) && value is not null) - { - switch (value) - { - case double d: - return d; - case float f: - return f; - case decimal m: - return (double)m; - case long l: - return l; - case int i: - return i; - case string s when double.TryParse(s, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var parsed): - return parsed; - } - } - } - - return null; - } - - private static PolicySimulationOutputFormat DeterminePolicySimulationFormat(string? value, string? outputPath) - { - if (!string.IsNullOrWhiteSpace(value)) - { - return value.Trim().ToLowerInvariant() switch - { - "table" => PolicySimulationOutputFormat.Table, - "json" => PolicySimulationOutputFormat.Json, - _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.") - }; - } - - if (!string.IsNullOrWhiteSpace(outputPath) || Console.IsOutputRedirected) - { - return PolicySimulationOutputFormat.Json; - } - - return PolicySimulationOutputFormat.Table; - } - - private static object BuildPolicySimulationPayload( - string policyId, - int? baseVersion, - int? candidateVersion, - IReadOnlyList sbomSet, - IReadOnlyDictionary environment, - PolicySimulationResult result) - => new - { - policyId, - baseVersion, - candidateVersion, - sbomSet = sbomSet.Count == 0 ? Array.Empty() : sbomSet, - environment = environment.Count == 0 ? 
null : environment, - diff = result.Diff, - explainUri = result.ExplainUri - }; - - private static void RenderPolicySimulationResult( - ILogger logger, - object payload, - PolicySimulationResult result, - PolicySimulationOutputFormat format) - { - if (format == PolicySimulationOutputFormat.Json) - { - var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); - Console.WriteLine(json); - return; - } - - logger.LogInformation( - "Policy diff summary — Added: {Added}, Removed: {Removed}, Unchanged: {Unchanged}.", - result.Diff.Added, - result.Diff.Removed, - result.Diff.Unchanged); - - if (result.Diff.BySeverity.Count > 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table().AddColumns("Severity", "Up", "Down"); - foreach (var entry in result.Diff.BySeverity.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)) - { - table.AddRow( - entry.Key, - FormatDelta(entry.Value.Up), - FormatDelta(entry.Value.Down)); - } - - AnsiConsole.Write(table); - } - else - { - foreach (var entry in result.Diff.BySeverity.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)) - { - logger.LogInformation("Severity {Severity}: up={Up}, down={Down}", entry.Key, entry.Value.Up ?? 0, entry.Value.Down ?? 0); - } - } - } - - if (result.Diff.RuleHits.Count > 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table().AddColumns("Rule", "Up", "Down"); - foreach (var hit in result.Diff.RuleHits) - { - table.AddRow( - string.IsNullOrWhiteSpace(hit.RuleName) ? hit.RuleId : $"{hit.RuleName} ({hit.RuleId})", - FormatDelta(hit.Up), - FormatDelta(hit.Down)); - } - - AnsiConsole.Write(table); - } - else - { - foreach (var hit in result.Diff.RuleHits) - { - logger.LogInformation("Rule {RuleId}: up={Up}, down={Down}", hit.RuleId, hit.Up ?? 0, hit.Down ?? 
0, hit.Down ?? 0); // fix: supply the {Down} argument (was missing, unlike the severity loop)
                }
            }
        }

        if (!string.IsNullOrWhiteSpace(result.ExplainUri))
        {
            logger.LogInformation("Explain trace available at {ExplainUri}.", result.ExplainUri);
        }
    }

    // Normalizes the --sbom arguments into a sorted, de-duplicated, read-only set.
    // Returns the shared empty instance when nothing usable was supplied.
    private static IReadOnlyList<string> NormalizePolicySbomSet(IReadOnlyList<string> arguments)
    {
        if (arguments is null || arguments.Count == 0)
        {
            return EmptyPolicySbomSet;
        }

        // SortedSet gives deterministic (ordinal) ordering and de-duplication in one pass.
        var set = new SortedSet<string>(StringComparer.Ordinal);
        foreach (var raw in arguments)
        {
            if (string.IsNullOrWhiteSpace(raw))
            {
                continue;
            }

            var trimmed = raw.Trim();
            if (trimmed.Length > 0)
            {
                set.Add(trimmed);
            }
        }

        if (set.Count == 0)
        {
            return EmptyPolicySbomSet;
        }

        var list = set.ToList();
        return new ReadOnlyCollection<string>(list);
    }

    // Parses --env key=value assignments into an ordinal-sorted dictionary with
    // lower-cased keys. Throws ArgumentException for malformed assignments.
    private static IReadOnlyDictionary<string, object?> ParsePolicyEnvironment(IReadOnlyList<string> arguments)
    {
        if (arguments is null || arguments.Count == 0)
        {
            return EmptyPolicyEnvironment;
        }

        var env = new SortedDictionary<string, object?>(StringComparer.Ordinal);
        foreach (var raw in arguments)
        {
            if (string.IsNullOrWhiteSpace(raw))
            {
                continue;
            }

            var trimmed = raw.Trim();
            var separator = trimmed.IndexOf('=');
            // Rejects a missing '=', an empty key ("=v"), and an empty value ("k=").
            if (separator <= 0 || separator == trimmed.Length - 1)
            {
                throw new ArgumentException($"Invalid environment assignment '{raw}'. Expected key=value.");
            }

            var key = trimmed[..separator].Trim().ToLowerInvariant();
            if (string.IsNullOrWhiteSpace(key))
            {
                throw new ArgumentException($"Invalid environment assignment '{raw}'. Expected key=value.");
            }

            var valueToken = trimmed[(separator + 1)..].Trim();
            env[key] = ParsePolicyEnvironmentValue(valueToken);
        }

        return env.Count == 0 ? EmptyPolicyEnvironment : new ReadOnlyDictionary<string, object?>(env);
    }

    // Coerces an environment value token into null/bool/long/double, falling back
    // to the raw string. Surrounding single or double quotes are stripped first.
    private static object? ParsePolicyEnvironmentValue(string token)
    {
        if (string.IsNullOrWhiteSpace(token))
        {
            return string.Empty;
        }

        var value = token;
        if ((value.Length >= 2 && value.StartsWith("\"", StringComparison.Ordinal) && value.EndsWith("\"", StringComparison.Ordinal)) ||
            (value.Length >= 2 && value.StartsWith("'", StringComparison.Ordinal) && value.EndsWith("'", StringComparison.Ordinal)))
        {
            value = value[1..^1];
        }

        if (string.Equals(value, "null", StringComparison.OrdinalIgnoreCase))
        {
            return null;
        }

        if (bool.TryParse(value, out var boolResult))
        {
            return boolResult;
        }

        if (long.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var longResult))
        {
            return longResult;
        }

        if (double.TryParse(value, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var doubleResult))
        {
            return doubleResult;
        }

        return value;
    }

    // Thin alias kept for call-site readability; simulation output is plain JSON.
    private static Task WriteSimulationOutputAsync(string outputPath, object payload, CancellationToken cancellationToken)
        => WriteJsonPayloadAsync(outputPath, payload, cancellationToken);

    // Serializes the payload with the shared simulation JSON options and writes it
    // (with a trailing newline) to outputPath, creating parent directories as needed.
    private static async Task WriteJsonPayloadAsync(string outputPath, object payload, CancellationToken cancellationToken)
    {
        var fullPath = Path.GetFullPath(outputPath);
        var directory = Path.GetDirectoryName(fullPath);
        if (!string.IsNullOrWhiteSpace(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var json = JsonSerializer.Serialize(payload, SimulationJsonOptions);
        await File.WriteAllTextAsync(fullPath, json + Environment.NewLine, cancellationToken).ConfigureAwait(false);
    }

    // Exit code for `policy simulate`: 0 unless --fail-on-diff is requested and
    // the diff actually added or removed findings.
    private static int DetermineSimulationExitCode(PolicySimulationResult result, bool failOnDiff)
    {
        if (!failOnDiff)
        {
            return 0;
        }

        return (result.Diff.Added + result.Diff.Removed) > 0 ?
20 : 0; - } - - private static void HandlePolicySimulationFailure(PolicyApiException exception, ILogger logger) - { - var exitCode = exception.ErrorCode switch - { - "ERR_POL_001" => 10, - "ERR_POL_002" or "ERR_POL_005" => 12, - "ERR_POL_003" => 21, - "ERR_POL_004" => 22, - "ERR_POL_006" => 23, - _ when exception.StatusCode == HttpStatusCode.Forbidden || exception.StatusCode == HttpStatusCode.Unauthorized => 12, - _ => 1 - }; - - if (string.IsNullOrWhiteSpace(exception.ErrorCode)) - { - logger.LogError("Policy simulation failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); - } - else - { - logger.LogError("Policy simulation failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); - } - - CliMetrics.RecordPolicySimulation("error"); - Environment.ExitCode = exitCode; - } - - private static void HandlePolicyActivationFailure(PolicyApiException exception, ILogger logger) - { - var exitCode = exception.ErrorCode switch - { - "ERR_POL_002" => 70, - "ERR_POL_003" => 71, - "ERR_POL_004" => 72, - _ when exception.StatusCode == HttpStatusCode.Forbidden || exception.StatusCode == HttpStatusCode.Unauthorized => 12, - _ => 1 - }; - - if (string.IsNullOrWhiteSpace(exception.ErrorCode)) - { - logger.LogError("Policy activation failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); - } - else - { - logger.LogError("Policy activation failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); - } - - CliMetrics.RecordPolicyActivation("error"); - Environment.ExitCode = exitCode; - } - - private static IReadOnlyList NormalizePolicyFilterValues(string[] values, bool toLower = false) - { - if (values is null || values.Length == 0) - { - return Array.Empty(); - } - - var set = new HashSet(StringComparer.OrdinalIgnoreCase); - var list = new List(); - foreach (var raw in values) - { - var candidate = raw?.Trim(); - if 
(string.IsNullOrWhiteSpace(candidate)) - { - continue; - } - - var normalized = toLower ? candidate.ToLowerInvariant() : candidate; - if (set.Add(normalized)) - { - list.Add(normalized); - } - } - - return list.Count == 0 ? Array.Empty() : list; - } - - private static string? NormalizePolicyPriority(string? priority) - { - if (string.IsNullOrWhiteSpace(priority)) - { - return null; - } - - var normalized = priority.Trim(); - return string.IsNullOrWhiteSpace(normalized) ? null : normalized.ToLowerInvariant(); - } - - private static string NormalizePolicyActivationOutcome(string status) - { - if (string.IsNullOrWhiteSpace(status)) - { - return "unknown"; - } - - return status.Trim().ToLowerInvariant(); - } - - private static int DeterminePolicyActivationExitCode(string outcome) - => string.Equals(outcome, "pending_second_approval", StringComparison.Ordinal) ? 75 : 0; - - private static void RenderPolicyActivationResult(PolicyActivationResult result, PolicyActivationRequest request) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var summary = new Table().Expand(); - summary.Border(TableBorder.Rounded); - summary.AddColumn(new TableColumn("[grey]Field[/]").LeftAligned()); - summary.AddColumn(new TableColumn("[grey]Value[/]").LeftAligned()); - summary.AddRow("Policy", Markup.Escape($"{result.Revision.PolicyId} v{result.Revision.Version}")); - summary.AddRow("Status", FormatActivationStatus(result.Status)); - summary.AddRow("Requires 2 approvals", result.Revision.RequiresTwoPersonApproval ? "[yellow]yes[/]" : "[green]no[/]"); - summary.AddRow("Created (UTC)", Markup.Escape(FormatUpdatedAt(result.Revision.CreatedAt))); - summary.AddRow("Activated (UTC)", result.Revision.ActivatedAt.HasValue - ? 
Markup.Escape(FormatUpdatedAt(result.Revision.ActivatedAt.Value)) - : "[grey](not yet active)[/]"); - - if (request.RunNow) - { - summary.AddRow("Run", "[green]immediate[/]"); - } - else if (request.ScheduledAt.HasValue) - { - summary.AddRow("Scheduled at", Markup.Escape(FormatUpdatedAt(request.ScheduledAt.Value))); - } - - if (!string.IsNullOrWhiteSpace(request.Priority)) - { - summary.AddRow("Priority", Markup.Escape(request.Priority!)); - } - - if (request.Rollback) - { - summary.AddRow("Rollback", "[yellow]yes[/]"); - } - - if (!string.IsNullOrWhiteSpace(request.IncidentId)) - { - summary.AddRow("Incident", Markup.Escape(request.IncidentId!)); - } - - if (!string.IsNullOrWhiteSpace(request.Comment)) - { - summary.AddRow("Note", Markup.Escape(request.Comment!)); - } - - AnsiConsole.Write(summary); - - if (result.Revision.Approvals.Count > 0) - { - var approvalTable = new Table().Title("[grey]Approvals[/]"); - approvalTable.Border(TableBorder.Minimal); - approvalTable.AddColumn(new TableColumn("Actor").LeftAligned()); - approvalTable.AddColumn(new TableColumn("Approved (UTC)").LeftAligned()); - approvalTable.AddColumn(new TableColumn("Comment").LeftAligned()); - - foreach (var approval in result.Revision.Approvals) - { - var comment = string.IsNullOrWhiteSpace(approval.Comment) ? "-" : approval.Comment!; - approvalTable.AddRow( - Markup.Escape(approval.ActorId), - Markup.Escape(FormatUpdatedAt(approval.ApprovedAt)), - Markup.Escape(comment)); - } - - AnsiConsole.Write(approvalTable); - } - else - { - AnsiConsole.MarkupLine("[grey]No activation approvals recorded yet.[/]"); - } - } - else - { - Console.WriteLine(FormattableString.Invariant($"Policy: {result.Revision.PolicyId} v{result.Revision.Version}")); - Console.WriteLine(FormattableString.Invariant($"Status: {NormalizePolicyActivationOutcome(result.Status)}")); - Console.WriteLine(FormattableString.Invariant($"Requires 2 approvals: {(result.Revision.RequiresTwoPersonApproval ? 
"yes" : "no")}")); - Console.WriteLine(FormattableString.Invariant($"Created (UTC): {FormatUpdatedAt(result.Revision.CreatedAt)}")); - Console.WriteLine(FormattableString.Invariant($"Activated (UTC): {(result.Revision.ActivatedAt.HasValue ? FormatUpdatedAt(result.Revision.ActivatedAt.Value) : "(not yet active)")}")); - - if (request.RunNow) - { - Console.WriteLine("Run: immediate"); - } - else if (request.ScheduledAt.HasValue) - { - Console.WriteLine(FormattableString.Invariant($"Scheduled at: {FormatUpdatedAt(request.ScheduledAt.Value)}")); - } - - if (!string.IsNullOrWhiteSpace(request.Priority)) - { - Console.WriteLine(FormattableString.Invariant($"Priority: {request.Priority}")); - } - - if (request.Rollback) - { - Console.WriteLine("Rollback: yes"); - } - - if (!string.IsNullOrWhiteSpace(request.IncidentId)) - { - Console.WriteLine(FormattableString.Invariant($"Incident: {request.IncidentId}")); - } - - if (!string.IsNullOrWhiteSpace(request.Comment)) - { - Console.WriteLine(FormattableString.Invariant($"Note: {request.Comment}")); - } - - if (result.Revision.Approvals.Count == 0) - { - Console.WriteLine("Approvals: none"); - } - else - { - foreach (var approval in result.Revision.Approvals) - { - var comment = string.IsNullOrWhiteSpace(approval.Comment) ? "-" : approval.Comment; - Console.WriteLine(FormattableString.Invariant($"Approval: {approval.ActorId} at {FormatUpdatedAt(approval.ApprovedAt)} ({comment})")); - } - } - } - } - - private static string FormatActivationStatus(string status) - { - var normalized = NormalizePolicyActivationOutcome(status); - return normalized switch - { - "activated" => "[green]activated[/]", - "already_active" => "[yellow]already_active[/]", - "pending_second_approval" => "[yellow]pending_second_approval[/]", - _ => "[red]" + Markup.Escape(string.IsNullOrWhiteSpace(status) ? "unknown" : status) + "[/]" - }; - } - - private static DateTimeOffset? ParsePolicySince(string? 
value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return null; - } - - if (DateTimeOffset.TryParse( - value.Trim(), - CultureInfo.InvariantCulture, - DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, - out var parsed)) - { - return parsed.ToUniversalTime(); - } - - throw new ArgumentException("Invalid --since value. Use an ISO-8601 timestamp."); - } - - private static string? NormalizeExplainMode(string? mode) - => string.IsNullOrWhiteSpace(mode) ? null : mode.Trim().ToLowerInvariant(); - - private static PolicyFindingsOutputFormat DeterminePolicyFindingsFormat(string? value, string? outputPath) - { - if (!string.IsNullOrWhiteSpace(value)) - { - return value.Trim().ToLowerInvariant() switch - { - "table" => PolicyFindingsOutputFormat.Table, - "json" => PolicyFindingsOutputFormat.Json, - _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.") - }; - } - - if (!string.IsNullOrWhiteSpace(outputPath) || Console.IsOutputRedirected) - { - return PolicyFindingsOutputFormat.Json; - } - - return PolicyFindingsOutputFormat.Table; - } - - private static object BuildPolicyFindingsPayload( - string policyId, - PolicyFindingsQuery query, - PolicyFindingsPage page) - => new - { - policyId, - filters = new - { - sbom = query.SbomIds, - status = query.Statuses, - severity = query.Severities, - cursor = query.Cursor, - page = query.Page, - pageSize = query.PageSize, - since = query.Since?.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture) - }, - items = page.Items.Select(item => new - { - findingId = item.FindingId, - status = item.Status, - severity = new - { - normalized = item.Severity.Normalized, - score = item.Severity.Score - }, - sbomId = item.SbomId, - advisoryIds = item.AdvisoryIds, - vex = item.Vex is null ? 
null : new - { - winningStatementId = item.Vex.WinningStatementId, - source = item.Vex.Source, - status = item.Vex.Status - }, - policyVersion = item.PolicyVersion, - updatedAt = item.UpdatedAt == DateTimeOffset.MinValue ? null : item.UpdatedAt.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture), - runId = item.RunId - }), - nextCursor = page.NextCursor, - totalCount = page.TotalCount - }; - - private static object BuildPolicyFindingPayload(string policyId, PolicyFindingDocument finding) - => new - { - policyId, - finding = new - { - findingId = finding.FindingId, - status = finding.Status, - severity = new - { - normalized = finding.Severity.Normalized, - score = finding.Severity.Score - }, - sbomId = finding.SbomId, - advisoryIds = finding.AdvisoryIds, - vex = finding.Vex is null ? null : new - { - winningStatementId = finding.Vex.WinningStatementId, - source = finding.Vex.Source, - status = finding.Vex.Status - }, - policyVersion = finding.PolicyVersion, - updatedAt = finding.UpdatedAt == DateTimeOffset.MinValue ? null : finding.UpdatedAt.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture), - runId = finding.RunId - } - }; - - private static object BuildPolicyFindingExplainPayload( - string policyId, - string findingId, - string? 
mode, - PolicyFindingExplainResult explain) - => new - { - policyId, - findingId, - mode, - explain = new - { - policyVersion = explain.PolicyVersion, - steps = explain.Steps.Select(step => new - { - rule = step.Rule, - status = step.Status, - action = step.Action, - score = step.Score, - inputs = step.Inputs, - evidence = step.Evidence - }), - sealedHints = explain.SealedHints.Select(hint => hint.Message) - } - }; - - private static void RenderPolicyFindingsTable(ILogger logger, PolicyFindingsPage page) - { - var items = page.Items; - if (items.Count == 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - AnsiConsole.MarkupLine("[yellow]No findings matched the provided filters.[/]"); - } - else - { - logger.LogWarning("No findings matched the provided filters."); - } - return; - } - - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table() - .Border(TableBorder.Rounded) - .Centered(); - - table.AddColumn("Finding"); - table.AddColumn("Status"); - table.AddColumn("Severity"); - table.AddColumn("Score"); - table.AddColumn("SBOM"); - table.AddColumn("Advisories"); - table.AddColumn("Updated (UTC)"); - - foreach (var item in items) - { - table.AddRow( - Markup.Escape(item.FindingId), - Markup.Escape(item.Status), - Markup.Escape(item.Severity.Normalized), - Markup.Escape(FormatScore(item.Severity.Score)), - Markup.Escape(item.SbomId), - Markup.Escape(FormatListPreview(item.AdvisoryIds)), - Markup.Escape(FormatUpdatedAt(item.UpdatedAt))); - } - - AnsiConsole.Write(table); - } - else - { - foreach (var item in items) - { - logger.LogInformation( - "{Finding} — Status {Status}, Severity {Severity} ({Score}), SBOM {Sbom}, Updated {Updated}", - item.FindingId, - item.Status, - item.Severity.Normalized, - item.Severity.Score?.ToString("0.00", CultureInfo.InvariantCulture) ?? 
"n/a", - item.SbomId, - FormatUpdatedAt(item.UpdatedAt)); - } - } - - logger.LogInformation("{Count} finding(s).", items.Count); - - if (page.TotalCount.HasValue) - { - logger.LogInformation("Total available: {Total}", page.TotalCount.Value); - } - - if (!string.IsNullOrWhiteSpace(page.NextCursor)) - { - logger.LogInformation("Next cursor: {Cursor}", page.NextCursor); - } - } - - private static void RenderPolicyFindingDetails(ILogger logger, PolicyFindingDocument finding) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table() - .Border(TableBorder.Rounded) - .AddColumn("Field") - .AddColumn("Value"); - - table.AddRow("Finding", Markup.Escape(finding.FindingId)); - table.AddRow("Status", Markup.Escape(finding.Status)); - table.AddRow("Severity", Markup.Escape(FormatSeverity(finding.Severity))); - table.AddRow("SBOM", Markup.Escape(finding.SbomId)); - table.AddRow("Policy Version", Markup.Escape(finding.PolicyVersion.ToString(CultureInfo.InvariantCulture))); - table.AddRow("Updated (UTC)", Markup.Escape(FormatUpdatedAt(finding.UpdatedAt))); - table.AddRow("Run Id", Markup.Escape(string.IsNullOrWhiteSpace(finding.RunId) ? 
"(none)" : finding.RunId)); - table.AddRow("Advisories", Markup.Escape(FormatListPreview(finding.AdvisoryIds))); - table.AddRow("VEX", Markup.Escape(FormatVexMetadata(finding.Vex))); - - AnsiConsole.Write(table); - } - else - { - logger.LogInformation("Finding {Finding}", finding.FindingId); - logger.LogInformation(" Status: {Status}", finding.Status); - logger.LogInformation(" Severity: {Severity}", FormatSeverity(finding.Severity)); - logger.LogInformation(" SBOM: {Sbom}", finding.SbomId); - logger.LogInformation(" Policy version: {Version}", finding.PolicyVersion); - logger.LogInformation(" Updated (UTC): {Updated}", FormatUpdatedAt(finding.UpdatedAt)); - if (!string.IsNullOrWhiteSpace(finding.RunId)) - { - logger.LogInformation(" Run Id: {Run}", finding.RunId); - } - if (finding.AdvisoryIds.Count > 0) - { - logger.LogInformation(" Advisories: {Advisories}", string.Join(", ", finding.AdvisoryIds)); - } - if (!string.IsNullOrWhiteSpace(FormatVexMetadata(finding.Vex))) - { - logger.LogInformation(" VEX: {Vex}", FormatVexMetadata(finding.Vex)); - } - } - } - - private static void RenderPolicyFindingExplain(ILogger logger, PolicyFindingExplainResult explain) - { - if (explain.Steps.Count == 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - AnsiConsole.MarkupLine("[yellow]No explain steps were returned.[/]"); - } - else - { - logger.LogWarning("No explain steps were returned."); - } - } - else if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table() - .Border(TableBorder.Rounded) - .AddColumn("Rule") - .AddColumn("Status") - .AddColumn("Action") - .AddColumn("Score") - .AddColumn("Inputs") - .AddColumn("Evidence"); - - foreach (var step in explain.Steps) - { - table.AddRow( - Markup.Escape(step.Rule), - Markup.Escape(step.Status ?? "(n/a)"), - Markup.Escape(step.Action ?? "(n/a)"), - Markup.Escape(step.Score.HasValue ? 
step.Score.Value.ToString("0.00", CultureInfo.InvariantCulture) : "-"), - Markup.Escape(FormatKeyValuePairs(step.Inputs)), - Markup.Escape(FormatKeyValuePairs(step.Evidence))); - } - - AnsiConsole.Write(table); - } - else - { - logger.LogInformation("{Count} explain step(s).", explain.Steps.Count); - foreach (var step in explain.Steps) - { - logger.LogInformation( - "Rule {Rule} — Status {Status}, Action {Action}, Score {Score}, Inputs {Inputs}", - step.Rule, - step.Status ?? "n/a", - step.Action ?? "n/a", - step.Score?.ToString("0.00", CultureInfo.InvariantCulture) ?? "n/a", - FormatKeyValuePairs(step.Inputs)); - - if (step.Evidence is not null && step.Evidence.Count > 0) - { - logger.LogInformation(" Evidence: {Evidence}", FormatKeyValuePairs(step.Evidence)); - } - } - } - - if (explain.SealedHints.Count > 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - AnsiConsole.MarkupLine("[grey]Hints:[/]"); - foreach (var hint in explain.SealedHints) - { - AnsiConsole.MarkupLine($" • {Markup.Escape(hint.Message)}"); - } - } - else - { - foreach (var hint in explain.SealedHints) - { - logger.LogInformation("Hint: {Hint}", hint.Message); - } - } - } - } - - private static string FormatSeverity(PolicyFindingSeverity severity) - { - if (severity.Score.HasValue) - { - return FormattableString.Invariant($"{severity.Normalized} ({severity.Score.Value:0.00})"); - } - - return severity.Normalized; - } - - private static string FormatListPreview(IReadOnlyList values) - { - if (values is null || values.Count == 0) - { - return "(none)"; - } - - const int MaxItems = 3; - if (values.Count <= MaxItems) - { - return string.Join(", ", values); - } - - var preview = string.Join(", ", values.Take(MaxItems)); - return FormattableString.Invariant($"{preview} (+{values.Count - MaxItems})"); - } - - private static string FormatUpdatedAt(DateTimeOffset timestamp) - { - if (timestamp == DateTimeOffset.MinValue) - { - return "(unknown)"; - } - - return 
timestamp.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss'Z'", CultureInfo.InvariantCulture); - } - - private static string FormatScore(double? score) - => score.HasValue ? score.Value.ToString("0.00", CultureInfo.InvariantCulture) : "-"; - - private static string FormatKeyValuePairs(IReadOnlyDictionary? values) - { - if (values is null || values.Count == 0) - { - return "(none)"; - } - - return string.Join(", ", values.Select(pair => $"{pair.Key}={pair.Value}")); - } - - private static string FormatVexMetadata(PolicyFindingVexMetadata? value) - { - if (value is null) - { - return "(none)"; - } - - var parts = new List(3); - if (!string.IsNullOrWhiteSpace(value.WinningStatementId)) - { - parts.Add($"winning={value.WinningStatementId}"); - } - - if (!string.IsNullOrWhiteSpace(value.Source)) - { - parts.Add($"source={value.Source}"); - } - - if (!string.IsNullOrWhiteSpace(value.Status)) - { - parts.Add($"status={value.Status}"); - } - - return parts.Count == 0 ? "(none)" : string.Join(", ", parts); - } - - private static void HandlePolicyFindingsFailure(PolicyApiException exception, ILogger logger, Action recordMetric) - { - var exitCode = exception.StatusCode switch - { - HttpStatusCode.Unauthorized or HttpStatusCode.Forbidden => 12, - HttpStatusCode.NotFound => 1, - _ => 1 - }; - - if (string.IsNullOrWhiteSpace(exception.ErrorCode)) - { - logger.LogError("Policy API request failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); - } - else - { - logger.LogError("Policy API request failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); - } - - recordMetric("error"); - Environment.ExitCode = exitCode; - } - - private static string FormatDelta(int? value) - => value.HasValue ? 
value.Value.ToString("N0", CultureInfo.InvariantCulture) : "-"; - - private static readonly JsonSerializerOptions SimulationJsonOptions = - new(JsonSerializerDefaults.Web) { WriteIndented = true }; - - private static readonly IReadOnlyDictionary EmptyPolicyEnvironment = - new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)); - - private static readonly IReadOnlyList EmptyPolicySbomSet = - new ReadOnlyCollection(Array.Empty()); - - private static readonly IReadOnlyDictionary EmptyLabelSelectors = - new ReadOnlyDictionary(new Dictionary(0, StringComparer.OrdinalIgnoreCase)); - - private enum PolicySimulationOutputFormat - { - Table, - Json - } - - private enum PolicyFindingsOutputFormat - { - Table, - Json - } - - - private static string FormatAdditionalValue(object? value) - { - return value switch - { - null => "null", - bool b => b ? "true" : "false", - double d => d.ToString("G17", CultureInfo.InvariantCulture), - float f => f.ToString("G9", CultureInfo.InvariantCulture), - IFormattable formattable => formattable.ToString(null, CultureInfo.InvariantCulture), - _ => value.ToString() ?? string.Empty - }; - } - - - private static IReadOnlyList NormalizeProviders(IReadOnlyList providers) - { - if (providers is null || providers.Count == 0) - { - return Array.Empty(); - } - - var list = new List(); - foreach (var provider in providers) - { - if (!string.IsNullOrWhiteSpace(provider)) - { - list.Add(provider.Trim()); - } - } - - return list.Count == 0 ? Array.Empty() : list; - } - - private static string ResolveTenant(string? tenantOption) - { - if (!string.IsNullOrWhiteSpace(tenantOption)) - { - return tenantOption.Trim(); - } - - var fromEnvironment = Environment.GetEnvironmentVariable("STELLA_TENANT"); - return string.IsNullOrWhiteSpace(fromEnvironment) ? 
string.Empty : fromEnvironment.Trim(); - } - - private static async Task LoadIngestInputAsync(string input, CancellationToken cancellationToken) - { - if (Uri.TryCreate(input, UriKind.Absolute, out var uri) && - (uri.Scheme.Equals(Uri.UriSchemeHttp, StringComparison.OrdinalIgnoreCase) || - uri.Scheme.Equals(Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase))) - { - return await LoadIngestInputFromHttpAsync(uri, cancellationToken).ConfigureAwait(false); - } - - return await LoadIngestInputFromFileAsync(input, cancellationToken).ConfigureAwait(false); - } - - private static async Task LoadIngestInputFromHttpAsync(Uri uri, CancellationToken cancellationToken) - { - using var handler = new HttpClientHandler { AutomaticDecompression = DecompressionMethods.All }; - using var httpClient = new HttpClient(handler); - using var response = await httpClient.GetAsync(uri, cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - throw new InvalidOperationException($"Failed to download document from {uri} (HTTP {(int)response.StatusCode})."); - } - - var contentType = response.Content.Headers.ContentType?.MediaType ?? "application/json"; - var contentEncoding = response.Content.Headers.ContentEncoding is { Count: > 0 } - ? 
string.Join(",", response.Content.Headers.ContentEncoding) - : null; - - var bytes = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); - var normalized = NormalizeDocument(bytes, contentType, contentEncoding); - - return new IngestInputPayload( - "uri", - uri.ToString(), - normalized.Content, - normalized.ContentType, - normalized.ContentEncoding); - } - - private static async Task LoadIngestInputFromFileAsync(string path, CancellationToken cancellationToken) - { - var fullPath = Path.GetFullPath(path); - if (!File.Exists(fullPath)) - { - throw new FileNotFoundException("Input document not found.", fullPath); - } - - var bytes = await File.ReadAllBytesAsync(fullPath, cancellationToken).ConfigureAwait(false); - var normalized = NormalizeDocument(bytes, GuessContentTypeFromExtension(fullPath), null); - - return new IngestInputPayload( - "file", - Path.GetFileName(fullPath), - normalized.Content, - normalized.ContentType, - normalized.ContentEncoding); - } - - private static DocumentNormalizationResult NormalizeDocument(byte[] bytes, string? contentType, string? 
encodingHint) - { - if (bytes is null || bytes.Length == 0) - { - throw new InvalidOperationException("Input document is empty."); - } - - var working = bytes; - var encodings = new List(); - if (!string.IsNullOrWhiteSpace(encodingHint)) - { - encodings.Add(encodingHint); - } - - if (IsGzip(working)) - { - working = DecompressGzip(working); - encodings.Add("gzip"); - } - - var text = DecodeText(working); - var trimmed = text.TrimStart(); - - if (!string.IsNullOrWhiteSpace(trimmed) && trimmed[0] != '{' && trimmed[0] != '[') - { - if (TryDecodeBase64(text, out var decodedBytes)) - { - working = decodedBytes; - encodings.Add("base64"); - - if (IsGzip(working)) - { - working = DecompressGzip(working); - encodings.Add("gzip"); - } - - text = DecodeText(working); - } - } - - text = text.Trim(); - if (string.IsNullOrWhiteSpace(text)) - { - throw new InvalidOperationException("Input document contained no data after decoding."); - } - - var encodingLabel = encodings.Count == 0 ? null : string.Join("+", encodings); - var finalContentType = string.IsNullOrWhiteSpace(contentType) ? "application/json" : contentType; - - return new DocumentNormalizationResult(text, finalContentType, encodingLabel); - } - - private static string GuessContentTypeFromExtension(string path) - { - var extension = Path.GetExtension(path); - if (string.IsNullOrWhiteSpace(extension)) - { - return "application/json"; - } - - return extension.ToLowerInvariant() switch - { - ".json" or ".csaf" => "application/json", - ".xml" => "application/xml", - _ => "application/json" - }; - } - - private static DateTimeOffset DetermineVerificationSince(string? 
sinceOption) - { - if (string.IsNullOrWhiteSpace(sinceOption)) - { - return DateTimeOffset.UtcNow.AddHours(-24); - } - - var trimmed = sinceOption.Trim(); - - if (DateTimeOffset.TryParse( - trimmed, - CultureInfo.InvariantCulture, - DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, - out var parsedTimestamp)) - { - return parsedTimestamp.ToUniversalTime(); - } - - if (TryParseRelativeDuration(trimmed, out var duration)) - { - return DateTimeOffset.UtcNow.Subtract(duration); - } - - throw new InvalidOperationException("Invalid --since value. Use ISO-8601 timestamp or duration (e.g. 24h, 7d)."); - } - - private static bool TryParseRelativeDuration(string value, out TimeSpan duration) - { - duration = TimeSpan.Zero; - if (string.IsNullOrWhiteSpace(value)) - { - return false; - } - - var normalized = value.Trim().ToLowerInvariant(); - if (normalized.Length < 2) - { - return false; - } - - var suffix = normalized[^1]; - var magnitudeText = normalized[..^1]; - - double multiplier = suffix switch - { - 's' => 1, - 'm' => 60, - 'h' => 3600, - 'd' => 86400, - 'w' => 604800, - _ => 0 - }; - - if (multiplier == 0) - { - return false; - } - - if (!double.TryParse(magnitudeText, NumberStyles.Float, CultureInfo.InvariantCulture, out var magnitude)) - { - return false; - } - - if (double.IsNaN(magnitude) || double.IsInfinity(magnitude) || magnitude <= 0) - { - return false; - } - - var seconds = magnitude * multiplier; - if (double.IsNaN(seconds) || double.IsInfinity(seconds) || seconds <= 0) - { - return false; - } - - duration = TimeSpan.FromSeconds(seconds); - return true; - } - - private static int NormalizeLimit(int? limitOption) - { - if (!limitOption.HasValue) - { - return 20; - } - - if (limitOption.Value < 0) - { - throw new InvalidOperationException("Limit cannot be negative."); - } - - return limitOption.Value; - } - - private static IReadOnlyList ParseCommaSeparatedList(string? 
raw) - { - if (string.IsNullOrWhiteSpace(raw)) - { - return Array.Empty(); - } - - var tokens = raw - .Split(',', StringSplitOptions.RemoveEmptyEntries) - .Select(token => token.Trim()) - .Where(token => token.Length > 0) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToArray(); - - return tokens.Length == 0 ? Array.Empty() : tokens; - } - - private static string FormatWindowRange(AocVerifyWindow? window) - { - if (window is null) - { - return "(unspecified)"; - } - - var fromText = window.From?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture) ?? "(unknown)"; - var toText = window.To?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture) ?? "(unknown)"; - return $"{fromText} -> {toText}"; - } - - private static string FormatCheckedCounts(AocVerifyChecked? checkedCounts) - { - if (checkedCounts is null) - { - return "(unspecified)"; - } - - return $"advisories: {checkedCounts.Advisories.ToString("N0", CultureInfo.InvariantCulture)}, vex: {checkedCounts.Vex.ToString("N0", CultureInfo.InvariantCulture)}"; - } - - private static string DetermineVerifyStatus(AocVerifyResponse? response) - { - if (response is null) - { - return "unknown"; - } - - if (response.Truncated == true && (response.Violations is null || response.Violations.Count == 0)) - { - return "truncated"; - } - - var total = response.Violations?.Sum(violation => Math.Max(0, violation?.Count ?? 0)) ?? 0; - return total > 0 ? "violations" : "ok"; - } - - private static string FormatBoolean(bool value, bool useColor) - { - var text = value ? "yes" : "no"; - if (!useColor) - { - return text; - } - - return value - ? $"[yellow]{text}[/]" - : $"[green]{text}[/]"; - } - - private static string FormatVerifyStatus(string? status, bool useColor) - { - var normalized = string.IsNullOrWhiteSpace(status) ? 
"unknown" : status.Trim(); - var escaped = Markup.Escape(normalized); - if (!useColor) - { - return escaped; - } - - return normalized switch - { - "ok" => $"[green]{escaped}[/]", - "violations" => $"[red]{escaped}[/]", - "truncated" => $"[yellow]{escaped}[/]", - _ => $"[grey]{escaped}[/]" - }; - } - - private static string FormatViolationExample(AocVerifyViolationExample? example) - { - if (example is null) - { - return "(n/a)"; - } - - var parts = new List(); - if (!string.IsNullOrWhiteSpace(example.Source)) - { - parts.Add(example.Source.Trim()); - } - - if (!string.IsNullOrWhiteSpace(example.DocumentId)) - { - parts.Add(example.DocumentId.Trim()); - } - - var label = parts.Count == 0 ? "(n/a)" : string.Join(" | ", parts); - if (!string.IsNullOrWhiteSpace(example.ContentHash)) - { - label = $"{label} [{example.ContentHash.Trim()}]"; - } - - return label; - } - - private static void RenderAocVerifyTable(AocVerifyResponse response, bool useColor, int limit) - { - var summary = new Table().Border(TableBorder.Rounded); - summary.AddColumn("Field"); - summary.AddColumn("Value"); - - summary.AddRow("Tenant", Markup.Escape(string.IsNullOrWhiteSpace(response?.Tenant) ? "(unknown)" : response.Tenant!)); - summary.AddRow("Window", Markup.Escape(FormatWindowRange(response?.Window))); - summary.AddRow("Checked", Markup.Escape(FormatCheckedCounts(response?.Checked))); - - summary.AddRow("Limit", Markup.Escape(limit <= 0 ? 
"unbounded" : limit.ToString(CultureInfo.InvariantCulture))); - summary.AddRow("Status", FormatVerifyStatus(DetermineVerifyStatus(response), useColor)); - - if (response?.Metrics?.IngestionWriteTotal is int writes) - { - summary.AddRow("Ingestion Writes", Markup.Escape(writes.ToString("N0", CultureInfo.InvariantCulture))); - } - - if (response?.Metrics?.AocViolationTotal is int totalViolations) - { - summary.AddRow("Violations (total)", Markup.Escape(totalViolations.ToString("N0", CultureInfo.InvariantCulture))); - } - else - { - var computedViolations = response?.Violations?.Sum(violation => Math.Max(0, violation?.Count ?? 0)) ?? 0; - summary.AddRow("Violations (total)", Markup.Escape(computedViolations.ToString("N0", CultureInfo.InvariantCulture))); - } - - summary.AddRow("Truncated", FormatBoolean(response?.Truncated == true, useColor)); - - AnsiConsole.Write(summary); - - if (response?.Violations is null || response.Violations.Count == 0) - { - var message = response?.Truncated == true - ? "No violations reported, but results were truncated. Increase --limit to review full output." - : "No AOC violations detected in the requested window."; - - if (useColor) - { - var color = response?.Truncated == true ? 
"yellow" : "green"; - AnsiConsole.MarkupLine($"[{color}]{Markup.Escape(message)}[/]"); - } - else - { - Console.WriteLine(message); - } - - return; - } - - var violationTable = new Table().Border(TableBorder.Rounded); - violationTable.AddColumn("Code"); - violationTable.AddColumn("Count"); - violationTable.AddColumn("Sample Document"); - violationTable.AddColumn("Path"); - - foreach (var violation in response.Violations) - { - var codeDisplay = FormatViolationCode(violation.Code, useColor); - var countDisplay = violation.Count.ToString("N0", CultureInfo.InvariantCulture); - var example = violation.Examples?.FirstOrDefault(); - var documentDisplay = Markup.Escape(FormatViolationExample(example)); - var pathDisplay = example is null || string.IsNullOrWhiteSpace(example.Path) - ? "(none)" - : example.Path!; - - violationTable.AddRow(codeDisplay, countDisplay, documentDisplay, Markup.Escape(pathDisplay)); - } - - AnsiConsole.Write(violationTable); -} - - private static int DetermineVerifyExitCode(AocVerifyResponse response) - { - ArgumentNullException.ThrowIfNull(response); - - if (response.Violations is not null && response.Violations.Count > 0) - { - var exitCodes = new List(); - foreach (var violation in response.Violations) - { - if (string.IsNullOrWhiteSpace(violation.Code)) - { - continue; - } - - if (AocViolationExitCodeMap.TryGetValue(violation.Code, out var mapped)) - { - exitCodes.Add(mapped); - } - } - - if (exitCodes.Count > 0) - { - return exitCodes.Min(); - } - - return response.Truncated == true ? 
18 : 17; - } - - if (response.Truncated == true) - { - return 18; - } - - return 0; - } - - private static async Task WriteJsonReportAsync(T payload, string destination, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(payload); - - if (string.IsNullOrWhiteSpace(destination)) - { - throw new InvalidOperationException("Output path must be provided."); - } - - var outputPath = Path.GetFullPath(destination); - var directory = Path.GetDirectoryName(outputPath); - if (!string.IsNullOrWhiteSpace(directory)) - { - Directory.CreateDirectory(directory); - } - - var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions - { - WriteIndented = true - }); - - await File.WriteAllTextAsync(outputPath, json, cancellationToken).ConfigureAwait(false); - return outputPath; - } - - private static void RenderDryRunTable(AocIngestDryRunResponse response, bool useColor) - { - var summary = new Table().Border(TableBorder.Rounded); - summary.AddColumn("Field"); - summary.AddColumn("Value"); - - summary.AddRow("Source", Markup.Escape(response?.Source ?? "(unknown)")); - summary.AddRow("Tenant", Markup.Escape(response?.Tenant ?? "(unknown)")); - summary.AddRow("Guard Version", Markup.Escape(response?.GuardVersion ?? "(unknown)")); - summary.AddRow("Status", FormatStatusMarkup(response?.Status, useColor)); - - var violationCount = response?.Violations?.Count ?? 0; - summary.AddRow("Violations", violationCount.ToString(CultureInfo.InvariantCulture)); - - if (!string.IsNullOrWhiteSpace(response?.Document?.ContentHash)) - { - summary.AddRow("Content Hash", Markup.Escape(response.Document.ContentHash!)); - } - - if (!string.IsNullOrWhiteSpace(response?.Document?.Supersedes)) - { - summary.AddRow("Supersedes", Markup.Escape(response.Document.Supersedes!)); - } - - if (!string.IsNullOrWhiteSpace(response?.Document?.Provenance?.Signature?.Format)) - { - var signature = response.Document.Provenance.Signature; - var summaryText = signature!.Present - ? 
signature.Format ?? "present" - : "missing"; - summary.AddRow("Signature", Markup.Escape(summaryText)); - } - - AnsiConsole.Write(summary); - - if (violationCount == 0) - { - if (useColor) - { - AnsiConsole.MarkupLine("[green]No AOC violations detected.[/]"); - } - else - { - Console.WriteLine("No AOC violations detected."); - } - - return; - } - - var violationTable = new Table().Border(TableBorder.Rounded); - violationTable.AddColumn("Code"); - violationTable.AddColumn("Path"); - violationTable.AddColumn("Message"); - - foreach (var violation in response!.Violations!) - { - var codeDisplay = FormatViolationCode(violation.Code, useColor); - var pathDisplay = string.IsNullOrWhiteSpace(violation.Path) ? "(root)" : violation.Path!; - var messageDisplay = string.IsNullOrWhiteSpace(violation.Message) ? "(unspecified)" : violation.Message!; - violationTable.AddRow(codeDisplay, Markup.Escape(pathDisplay), Markup.Escape(messageDisplay)); - } - - AnsiConsole.Write(violationTable); - } - - private static int DetermineDryRunExitCode(AocIngestDryRunResponse response) - { - if (response?.Violations is null || response.Violations.Count == 0) - { - return 0; - } - - var exitCodes = new List(); - foreach (var violation in response.Violations) - { - if (string.IsNullOrWhiteSpace(violation.Code)) - { - continue; - } - - if (AocViolationExitCodeMap.TryGetValue(violation.Code, out var mapped)) - { - exitCodes.Add(mapped); - } - } - - if (exitCodes.Count == 0) - { - return 17; - } - - return exitCodes.Min(); - } - - private static string FormatStatusMarkup(string? status, bool useColor) - { - var normalized = string.IsNullOrWhiteSpace(status) ? "unknown" : status.Trim(); - if (!useColor) - { - return Markup.Escape(normalized); - } - - return normalized.Equals("ok", StringComparison.OrdinalIgnoreCase) - ? 
$"[green]{Markup.Escape(normalized)}[/]" - : $"[red]{Markup.Escape(normalized)}[/]"; - } - - private static string FormatViolationCode(string code, bool useColor) - { - var sanitized = string.IsNullOrWhiteSpace(code) ? "(unknown)" : code.Trim(); - if (!useColor) - { - return Markup.Escape(sanitized); - } - - return $"[red]{Markup.Escape(sanitized)}[/]"; - } - - private static bool IsGzip(ReadOnlySpan data) - { - return data.Length >= 2 && data[0] == 0x1F && data[1] == 0x8B; - } - - private static byte[] DecompressGzip(byte[] payload) - { - using var input = new MemoryStream(payload); - using var gzip = new GZipStream(input, CompressionMode.Decompress); - using var output = new MemoryStream(); - gzip.CopyTo(output); - return output.ToArray(); - } - - private static string DecodeText(byte[] payload) - { - var encoding = DetectEncoding(payload); - return encoding.GetString(payload); - } - - private static Encoding DetectEncoding(ReadOnlySpan data) - { - if (data.Length >= 4) - { - if (data[0] == 0x00 && data[1] == 0x00 && data[2] == 0xFE && data[3] == 0xFF) - { - return new UTF32Encoding(bigEndian: true, byteOrderMark: true); - } - - if (data[0] == 0xFF && data[1] == 0xFE && data[2] == 0x00 && data[3] == 0x00) - { - return new UTF32Encoding(bigEndian: false, byteOrderMark: true); - } - } - - if (data.Length >= 2) - { - if (data[0] == 0xFE && data[1] == 0xFF) - { - return Encoding.BigEndianUnicode; - } - - if (data[0] == 0xFF && data[1] == 0xFE) - { - return Encoding.Unicode; - } - } - - if (data.Length >= 3 && data[0] == 0xEF && data[1] == 0xBB && data[2] == 0xBF) - { - return Encoding.UTF8; - } - - return Encoding.UTF8; - } - + bool overwrite, + bool install, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-download"); + var verbosity = 
scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.scanner.download", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "scanner download"); + activity?.SetTag("stellaops.cli.channel", channel); + using var duration = CliMetrics.MeasureCommandDuration("scanner download"); + + try + { + var result = await client.DownloadScannerAsync(channel, output ?? string.Empty, overwrite, verbose, cancellationToken).ConfigureAwait(false); + + if (result.FromCache) + { + logger.LogInformation("Using cached scanner at {Path}.", result.Path); + } + else + { + logger.LogInformation("Scanner downloaded to {Path} ({Size} bytes).", result.Path, result.SizeBytes); + } + + CliMetrics.RecordScannerDownload(channel, result.FromCache); + + if (install) + { + var installer = scope.ServiceProvider.GetRequiredService(); + await installer.InstallAsync(result.Path, verbose, cancellationToken).ConfigureAwait(false); + CliMetrics.RecordScannerInstall(channel); + } + + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to download scanner bundle."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static void RenderEntryTrace(EntryTraceResponseModel result, bool includeNdjson) + { + AnsiConsole.MarkupLine($"[bold]Scan[/]: {result.ScanId}"); + AnsiConsole.MarkupLine($"Image: {result.ImageDigest}"); + AnsiConsole.MarkupLine($"Generated: {result.GeneratedAt:O}"); + AnsiConsole.MarkupLine($"Outcome: {result.Graph.Outcome}"); + + var planTable = new Table() + .AddColumn("Terminal") + .AddColumn("Runtime") + .AddColumn("Type") + .AddColumn("Confidence") + .AddColumn("User") + .AddColumn("Workdir"); + + foreach (var plan in result.Graph.Plans.OrderByDescending(p => p.Confidence)) + { + planTable.AddRow( + 
plan.TerminalPath, + plan.Runtime ?? "-", + plan.Type.ToString(), + plan.Confidence.ToString("F1", CultureInfo.InvariantCulture), + plan.User, + plan.WorkingDirectory); + } + + if (planTable.Rows.Count > 0) + { + AnsiConsole.Write(planTable); + } + else + { + AnsiConsole.MarkupLine("[italic]No entry trace plans recorded.[/]"); + } + + if (result.Graph.Diagnostics.Length > 0) + { + var diagTable = new Table() + .AddColumn("Severity") + .AddColumn("Reason") + .AddColumn("Message"); + + foreach (var diagnostic in result.Graph.Diagnostics) + { + diagTable.AddRow( + diagnostic.Severity.ToString(), + diagnostic.Reason.ToString(), + diagnostic.Message); + } + + AnsiConsole.Write(diagTable); + } + + if (includeNdjson && result.Ndjson.Count > 0) + { + AnsiConsole.MarkupLine("[bold]NDJSON Output[/]"); + foreach (var line in result.Ndjson) + { + AnsiConsole.WriteLine(line); + } + } + } + + public static async Task HandleScannerRunAsync( + IServiceProvider services, + string runner, + string entry, + string targetDirectory, + IReadOnlyList arguments, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var executor = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-run"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.scan.run", ActivityKind.Internal); + activity?.SetTag("stellaops.cli.command", "scan run"); + activity?.SetTag("stellaops.cli.runner", runner); + activity?.SetTag("stellaops.cli.entry", entry); + activity?.SetTag("stellaops.cli.target", targetDirectory); + using var duration = CliMetrics.MeasureCommandDuration("scan run"); + + try + { + var options = scope.ServiceProvider.GetRequiredService(); + var resultsDirectory = options.ResultsDirectory; + + var executionResult = await executor.RunAsync( + runner, + entry, + targetDirectory, + resultsDirectory, + arguments, + verbose, + cancellationToken).ConfigureAwait(false); + + Environment.ExitCode = executionResult.ExitCode; + CliMetrics.RecordScanRun(runner, executionResult.ExitCode); + + if (executionResult.ExitCode == 0) + { + var backend = scope.ServiceProvider.GetRequiredService(); + logger.LogInformation("Uploading scan artefact {Path}...", executionResult.ResultsPath); + await backend.UploadScanResultsAsync(executionResult.ResultsPath, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Scan artefact uploaded."); + activity?.SetTag("stellaops.cli.results", executionResult.ResultsPath); + } + else + { + logger.LogWarning("Skipping automatic upload because scan exited with code {Code}.", executionResult.ExitCode); + } + + logger.LogInformation("Run metadata written to {Path}.", executionResult.RunMetadataPath); + activity?.SetTag("stellaops.cli.run_metadata", executionResult.RunMetadataPath); + } + catch (Exception ex) + { + logger.LogError(ex, "Scanner execution failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleScanUploadAsync( + IServiceProvider services, + string file, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = 
scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-upload"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.scan.upload", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "scan upload"); + activity?.SetTag("stellaops.cli.file", file); + using var duration = CliMetrics.MeasureCommandDuration("scan upload"); + + try + { + var pathFull = Path.GetFullPath(file); + await client.UploadScanResultsAsync(pathFull, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Scan results uploaded successfully."); + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to upload scan results."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleScanEntryTraceAsync( + IServiceProvider services, + string scanId, + bool includeNdjson, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scan-entrytrace"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.scan.entrytrace", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "scan entrytrace"); + activity?.SetTag("stellaops.cli.scan_id", scanId); + using var duration = CliMetrics.MeasureCommandDuration("scan entrytrace"); + + try + { + var result = await client.GetEntryTraceAsync(scanId, cancellationToken).ConfigureAwait(false); + if (result is null) + { + logger.LogWarning("No EntryTrace data available for scan {ScanId}.", scanId); + Environment.ExitCode = 1; + return; + } + + RenderEntryTrace(result, includeNdjson); + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to fetch EntryTrace for scan {ScanId}.", scanId); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + + public static async Task HandleSourcesIngestAsync( + IServiceProvider services, + bool dryRun, + string source, + string input, + string? tenantOverride, + string format, + bool disableColor, + string? output, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("sources-ingest"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + + using var activity = CliActivitySource.Instance.StartActivity("cli.sources.ingest.dry_run", ActivityKind.Client); + var statusMetric = "unknown"; + using var duration = CliMetrics.MeasureCommandDuration("sources ingest dry-run"); + + try + { + if (!dryRun) + { + statusMetric = "unsupported"; + logger.LogError("Only --dry-run mode is supported for 'stella sources ingest' at this time."); + Environment.ExitCode = 1; + return; + } + + source = source?.Trim() ?? string.Empty; + if (string.IsNullOrWhiteSpace(source)) + { + throw new InvalidOperationException("Source identifier must be provided."); + } + + var formatNormalized = string.IsNullOrWhiteSpace(format) + ? "table" + : format.Trim().ToLowerInvariant(); + + if (formatNormalized is not ("table" or "json")) + { + throw new InvalidOperationException("Format must be either 'table' or 'json'."); + } + + var tenant = ResolveTenant(tenantOverride); + if (string.IsNullOrWhiteSpace(tenant)) + { + throw new InvalidOperationException("Tenant must be provided via --tenant or STELLA_TENANT."); + } + + var payload = await LoadIngestInputAsync(input, cancellationToken).ConfigureAwait(false); + + logger.LogInformation("Executing ingestion dry-run for source {Source} using input {Input}.", source, payload.Name); + + activity?.SetTag("stellaops.cli.command", "sources ingest dry-run"); + activity?.SetTag("stellaops.cli.source", source); + activity?.SetTag("stellaops.cli.tenant", tenant); + activity?.SetTag("stellaops.cli.format", formatNormalized); + activity?.SetTag("stellaops.cli.input_kind", payload.Kind); + + var request = new AocIngestDryRunRequest + { + Tenant = tenant, + Source = source, + Document = new AocIngestDryRunDocument + { + Name = payload.Name, + Content = payload.Content, + ContentType = payload.ContentType, + ContentEncoding = payload.ContentEncoding + } + }; + + var response = await client.ExecuteAocIngestDryRunAsync(request, cancellationToken).ConfigureAwait(false); + 
activity?.SetTag("stellaops.cli.status", response.Status ?? "unknown"); + + if (!string.IsNullOrWhiteSpace(output)) + { + var reportPath = await WriteJsonReportAsync(response, output, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Dry-run report written to {Path}.", reportPath); + } + + if (formatNormalized == "json") + { + var json = JsonSerializer.Serialize(response, new JsonSerializerOptions + { + WriteIndented = true + }); + Console.WriteLine(json); + } + else + { + RenderDryRunTable(response, !disableColor); + } + + var exitCode = DetermineDryRunExitCode(response); + Environment.ExitCode = exitCode; + statusMetric = exitCode == 0 ? "ok" : "violation"; + activity?.SetTag("stellaops.cli.exit_code", exitCode); + } + catch (Exception ex) + { + statusMetric = "transport_error"; + logger.LogError(ex, "Dry-run ingestion failed."); + Environment.ExitCode = 70; + } + finally + { + verbosity.MinimumLevel = previousLevel; + CliMetrics.RecordSourcesDryRun(statusMetric); + } + } + + public static async Task HandleAocVerifyAsync( + IServiceProvider services, + string? sinceOption, + int? limitOption, + string? sourcesOption, + string? codesOption, + string format, + string? exportPath, + string? tenantOverride, + bool disableColor, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("aoc-verify"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + + using var activity = CliActivitySource.Instance.StartActivity("cli.aoc.verify", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("aoc verify"); + var outcome = "unknown"; + + try + { + var tenant = ResolveTenant(tenantOverride); + if (string.IsNullOrWhiteSpace(tenant)) + { + throw new InvalidOperationException("Tenant must be provided via --tenant or STELLA_TENANT."); + } + + var normalizedFormat = string.IsNullOrWhiteSpace(format) + ? "table" + : format.Trim().ToLowerInvariant(); + + if (normalizedFormat is not ("table" or "json")) + { + throw new InvalidOperationException("Format must be either 'table' or 'json'."); + } + + var since = DetermineVerificationSince(sinceOption); + var sinceIso = since.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); + var limit = NormalizeLimit(limitOption); + var sources = ParseCommaSeparatedList(sourcesOption); + var codes = ParseCommaSeparatedList(codesOption); + + var normalizedSources = sources.Count == 0 + ? Array.Empty() + : sources.Select(item => item.ToLowerInvariant()).ToArray(); + + var normalizedCodes = codes.Count == 0 + ? Array.Empty() + : codes.Select(item => item.ToUpperInvariant()).ToArray(); + + activity?.SetTag("stellaops.cli.command", "aoc verify"); + activity?.SetTag("stellaops.cli.tenant", tenant); + activity?.SetTag("stellaops.cli.since", sinceIso); + activity?.SetTag("stellaops.cli.limit", limit); + activity?.SetTag("stellaops.cli.format", normalizedFormat); + if (normalizedSources.Length > 0) + { + activity?.SetTag("stellaops.cli.sources", string.Join(",", normalizedSources)); + } + + if (normalizedCodes.Length > 0) + { + activity?.SetTag("stellaops.cli.codes", string.Join(",", normalizedCodes)); + } + + var request = new AocVerifyRequest + { + Tenant = tenant, + Since = sinceIso, + Limit = limit, + Sources = normalizedSources.Length == 0 ? null : normalizedSources, + Codes = normalizedCodes.Length == 0 ? 
null : normalizedCodes + }; + + var response = await client.ExecuteAocVerifyAsync(request, cancellationToken).ConfigureAwait(false); + + if (!string.IsNullOrWhiteSpace(exportPath)) + { + var reportPath = await WriteJsonReportAsync(response, exportPath, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Verification report written to {Path}.", reportPath); + } + + if (normalizedFormat == "json") + { + var json = JsonSerializer.Serialize(response, new JsonSerializerOptions + { + WriteIndented = true + }); + Console.WriteLine(json); + } + else + { + RenderAocVerifyTable(response, !disableColor, limit); + } + + var exitCode = DetermineVerifyExitCode(response); + Environment.ExitCode = exitCode; + activity?.SetTag("stellaops.cli.exit_code", exitCode); + outcome = exitCode switch + { + 0 => "ok", + >= 11 and <= 17 => "violations", + 18 => "truncated", + _ => "unknown" + }; + } + catch (InvalidOperationException ex) + { + outcome = "usage_error"; + logger.LogError(ex, "Verification failed: {Message}", ex.Message); + Console.Error.WriteLine(ex.Message); + Environment.ExitCode = 71; + activity?.SetStatus(ActivityStatusCode.Error, ex.Message); + } + catch (Exception ex) + { + outcome = "transport_error"; + logger.LogError(ex, "Verification request failed."); + Console.Error.WriteLine(ex.Message); + Environment.ExitCode = 70; + activity?.SetStatus(ActivityStatusCode.Error, ex.Message); + } + finally + { + verbosity.MinimumLevel = previousLevel; + CliMetrics.RecordAocVerify(outcome); + } + } + + public static async Task HandleConnectorJobAsync( + IServiceProvider services, + string source, + string stage, + string? 
mode, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-connector"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.db.fetch", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "db fetch"); + activity?.SetTag("stellaops.cli.source", source); + activity?.SetTag("stellaops.cli.stage", stage); + if (!string.IsNullOrWhiteSpace(mode)) + { + activity?.SetTag("stellaops.cli.mode", mode); + } + using var duration = CliMetrics.MeasureCommandDuration("db fetch"); + + try + { + var jobKind = $"source:{source}:{stage}"; + var parameters = new Dictionary(StringComparer.Ordinal); + if (!string.IsNullOrWhiteSpace(mode)) + { + parameters["mode"] = mode; + } + + await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + logger.LogError(ex, "Connector job failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleMergeJobAsync( + IServiceProvider services, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-merge"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.db.merge", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "db merge"); + using var duration = CliMetrics.MeasureCommandDuration("db merge"); + + try + { + await TriggerJobAsync(client, logger, "merge:reconcile", new Dictionary(StringComparer.Ordinal), cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + logger.LogError(ex, "Merge job failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleExportJobAsync( + IServiceProvider services, + string format, + bool delta, + bool? publishFull, + bool? publishDelta, + bool? includeFull, + bool? includeDelta, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-export"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.db.export", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "db export"); + activity?.SetTag("stellaops.cli.format", format); + activity?.SetTag("stellaops.cli.delta", delta); + using var duration = CliMetrics.MeasureCommandDuration("db export"); + activity?.SetTag("stellaops.cli.publish_full", publishFull); + activity?.SetTag("stellaops.cli.publish_delta", publishDelta); + activity?.SetTag("stellaops.cli.include_full", includeFull); + activity?.SetTag("stellaops.cli.include_delta", includeDelta); + + try + { + var jobKind = format switch + { + "trivy-db" or "trivy" => "export:trivy-db", + _ => "export:json" + }; + + var isTrivy = jobKind == "export:trivy-db"; + if (isTrivy + && !publishFull.HasValue + && !publishDelta.HasValue + && !includeFull.HasValue + && !includeDelta.HasValue + && AnsiConsole.Profile.Capabilities.Interactive) + { + var overrides = TrivyDbExportPrompt.PromptOverrides(); + publishFull = overrides.publishFull; + publishDelta = overrides.publishDelta; + includeFull = overrides.includeFull; + includeDelta = overrides.includeDelta; + } + + var parameters = new Dictionary(StringComparer.Ordinal) + { + ["delta"] = delta + }; + if (publishFull.HasValue) + { + parameters["publishFull"] = publishFull.Value; + } + if (publishDelta.HasValue) + { + parameters["publishDelta"] = publishDelta.Value; + } + if (includeFull.HasValue) + { + parameters["includeFull"] = includeFull.Value; + } + if (includeDelta.HasValue) + { + parameters["includeDelta"] = includeDelta.Value; + } + + await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + logger.LogError(ex, "Export job failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static Task HandleExcititorInitAsync( + IServiceProvider services, + 
// NOTE(review): This region was recovered from a git patch in which the '+' diff
// markers, line breaks, and ALL generic type arguments (angle-bracket content) were
// destroyed by extraction. The code below is a cleaned reconstruction. Every generic
// argument (Dictionary<string, object?>, IReadOnlyList<string>, GetRequiredService<T>,
// Array.Empty<T>, …) and every service/DTO type name marked "TODO confirm" is INFERRED
// from usage — verify each against the original source file before merging.

/// <summary>Triggers an Excititor "init" operation for the given providers.</summary>
// NOTE(review): the opening line of this signature falls before this chunk; the
// parameter list is reconstructed from the visible fragment — TODO confirm.
public static Task HandleExcititorInitAsync(
    IServiceProvider services,
    IReadOnlyList<string> providers,
    bool resume,
    bool verbose,
    CancellationToken cancellationToken)
{
    var normalizedProviders = NormalizeProviders(providers);
    var payload = new Dictionary<string, object?>(StringComparer.Ordinal);
    if (normalizedProviders.Count > 0)
    {
        payload["providers"] = normalizedProviders;
    }

    if (resume)
    {
        payload["resume"] = true;
    }

    return ExecuteExcititorCommandAsync(
        services,
        commandName: "excititor init",
        verbose,
        new Dictionary<string, object?>
        {
            ["providers"] = normalizedProviders.Count,
            ["resume"] = resume
        },
        client => client.ExecuteExcititorOperationAsync("init", HttpMethod.Post, RemoveNullValues(payload), cancellationToken),
        cancellationToken);
}

/// <summary>Starts an Excititor ingest run, optionally bounded by time window and forced.</summary>
public static Task HandleExcititorPullAsync(
    IServiceProvider services,
    IReadOnlyList<string> providers,
    DateTimeOffset? since,
    TimeSpan? window,
    bool force,
    bool verbose,
    CancellationToken cancellationToken)
{
    var normalizedProviders = NormalizeProviders(providers);
    var payload = new Dictionary<string, object?>(StringComparer.Ordinal);
    if (normalizedProviders.Count > 0)
    {
        payload["providers"] = normalizedProviders;
    }

    if (since.HasValue)
    {
        // Round-trip ("O") format keeps the timestamp machine-parseable and UTC-normalized.
        payload["since"] = since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture);
    }

    if (window.HasValue)
    {
        payload["window"] = window.Value.ToString("c", CultureInfo.InvariantCulture);
    }

    if (force)
    {
        payload["force"] = true;
    }

    return ExecuteExcititorCommandAsync(
        services,
        commandName: "excititor pull",
        verbose,
        new Dictionary<string, object?>
        {
            ["providers"] = normalizedProviders.Count,
            ["force"] = force,
            ["since"] = since?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture),
            ["window"] = window?.ToString("c", CultureInfo.InvariantCulture)
        },
        client => client.ExecuteExcititorOperationAsync("ingest/run", HttpMethod.Post, RemoveNullValues(payload), cancellationToken),
        cancellationToken);
}

/// <summary>Resumes an Excititor ingest from an optional checkpoint.</summary>
public static Task HandleExcititorResumeAsync(
    IServiceProvider services,
    IReadOnlyList<string> providers,
    string? checkpoint,
    bool verbose,
    CancellationToken cancellationToken)
{
    var normalizedProviders = NormalizeProviders(providers);
    var payload = new Dictionary<string, object?>(StringComparer.Ordinal);
    if (normalizedProviders.Count > 0)
    {
        payload["providers"] = normalizedProviders;
    }

    if (!string.IsNullOrWhiteSpace(checkpoint))
    {
        payload["checkpoint"] = checkpoint.Trim();
    }

    return ExecuteExcititorCommandAsync(
        services,
        commandName: "excititor resume",
        verbose,
        new Dictionary<string, object?>
        {
            ["providers"] = normalizedProviders.Count,
            ["checkpoint"] = checkpoint
        },
        client => client.ExecuteExcititorOperationAsync("ingest/resume", HttpMethod.Post, RemoveNullValues(payload), cancellationToken),
        cancellationToken);
}

/// <summary>Lists Excititor providers, rendering a table when the console is interactive.</summary>
public static async Task HandleExcititorListProvidersAsync(
    IServiceProvider services,
    bool includeDisabled,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    // TODO confirm service type names: generic arguments were stripped in the patch.
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("excititor-list-providers");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.excititor.list-providers", ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "excititor list-providers");
    activity?.SetTag("stellaops.cli.include_disabled", includeDisabled);
    using var duration = CliMetrics.MeasureCommandDuration("excititor list-providers");

    try
    {
        var providers = await client.GetExcititorProvidersAsync(includeDisabled, cancellationToken).ConfigureAwait(false);
        Environment.ExitCode = 0;
        logger.LogInformation("Providers returned: {Count}", providers.Count);

        if (providers.Count > 0)
        {
            if (AnsiConsole.Profile.Capabilities.Interactive)
            {
                var table = new Table().Border(TableBorder.Rounded).AddColumns("Provider", "Kind", "Trust", "Enabled", "Last Ingested");
                foreach (var provider in providers)
                {
                    table.AddRow(
                        provider.Id,
                        provider.Kind,
                        string.IsNullOrWhiteSpace(provider.TrustTier) ? "-" : provider.TrustTier,
                        provider.Enabled ? "yes" : "no",
                        provider.LastIngestedAt?.ToString("yyyy-MM-dd HH:mm:ss 'UTC'", CultureInfo.InvariantCulture) ?? "unknown");
                }

                AnsiConsole.Write(table);
            }
            else
            {
                // Non-interactive consoles (pipes, CI) get plain structured log lines instead.
                foreach (var provider in providers)
                {
                    logger.LogInformation("{ProviderId} [{Kind}] Enabled={Enabled} Trust={Trust} LastIngested={LastIngested}",
                        provider.Id,
                        provider.Kind,
                        provider.Enabled ? "yes" : "no",
                        string.IsNullOrWhiteSpace(provider.TrustTier) ? "-" : provider.TrustTier,
                        provider.LastIngestedAt?.ToString("O", CultureInfo.InvariantCulture) ?? "unknown");
                }
            }
        }
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Failed to list Excititor providers.");
        Environment.ExitCode = 1;
    }
    finally
    {
        verbosity.MinimumLevel = previousLevel;
    }
}

/// <summary>
/// Requests an Excititor export, reports manifest/Rekor details, and optionally
/// downloads the artifact to <paramref name="outputPath"/>.
/// </summary>
public static async Task HandleExcititorExportAsync(
    IServiceProvider services,
    string format,
    bool delta,
    string? scope,
    DateTimeOffset? since,
    string? provider,
    string? outputPath,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scopeHandle = services.CreateAsyncScope();
    var client = scopeHandle.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var logger = scopeHandle.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("excititor-export");
    var options = scopeHandle.ServiceProvider.GetRequiredService<StellaOpsCliOptions>();
    var verbosity = scopeHandle.ServiceProvider.GetRequiredService<VerbosityState>();
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.excititor.export", ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "excititor export");
    activity?.SetTag("stellaops.cli.format", format);
    activity?.SetTag("stellaops.cli.delta", delta);
    if (!string.IsNullOrWhiteSpace(scope))
    {
        activity?.SetTag("stellaops.cli.scope", scope);
    }
    if (since.HasValue)
    {
        activity?.SetTag("stellaops.cli.since", since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture));
    }
    if (!string.IsNullOrWhiteSpace(provider))
    {
        activity?.SetTag("stellaops.cli.provider", provider);
    }
    if (!string.IsNullOrWhiteSpace(outputPath))
    {
        activity?.SetTag("stellaops.cli.output", outputPath);
    }
    using var duration = CliMetrics.MeasureCommandDuration("excititor export");

    try
    {
        var payload = new Dictionary<string, object?>(StringComparer.Ordinal)
        {
            ["format"] = string.IsNullOrWhiteSpace(format) ? "openvex" : format.Trim(),
            ["delta"] = delta
        };

        if (!string.IsNullOrWhiteSpace(scope))
        {
            payload["scope"] = scope.Trim();
        }
        if (since.HasValue)
        {
            payload["since"] = since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture);
        }
        if (!string.IsNullOrWhiteSpace(provider))
        {
            payload["provider"] = provider.Trim();
        }

        var result = await client.ExecuteExcititorOperationAsync(
            "export",
            HttpMethod.Post,
            RemoveNullValues(payload),
            cancellationToken).ConfigureAwait(false);

        if (!result.Success)
        {
            // FIX: the server-supplied message was previously passed as the log
            // template; a literal '{' or '}' in it would break structured logging.
            logger.LogError("{Message}", string.IsNullOrWhiteSpace(result.Message) ? "Excititor export failed." : result.Message);
            Environment.ExitCode = 1;
            return;
        }

        Environment.ExitCode = 0;

        var manifest = TryParseExportManifest(result.Payload);
        // Suppress the bare "ok" acknowledgement when a manifest is available.
        if (!string.IsNullOrWhiteSpace(result.Message)
            && (manifest is null || !string.Equals(result.Message, "ok", StringComparison.OrdinalIgnoreCase)))
        {
            logger.LogInformation("{Message}", result.Message);
        }

        if (manifest is not null)
        {
            activity?.SetTag("stellaops.cli.export_id", manifest.ExportId);
            if (!string.IsNullOrWhiteSpace(manifest.Format))
            {
                activity?.SetTag("stellaops.cli.export_format", manifest.Format);
            }
            if (manifest.FromCache.HasValue)
            {
                activity?.SetTag("stellaops.cli.export_cached", manifest.FromCache.Value);
            }
            if (manifest.SizeBytes.HasValue)
            {
                activity?.SetTag("stellaops.cli.export_size", manifest.SizeBytes.Value);
            }

            if (manifest.FromCache == true)
            {
                logger.LogInformation("Reusing cached export {ExportId} ({Format}).", manifest.ExportId, manifest.Format ?? "unknown");
            }
            else
            {
                logger.LogInformation("Export ready: {ExportId} ({Format}).", manifest.ExportId, manifest.Format ?? "unknown");
            }

            if (manifest.CreatedAt.HasValue)
            {
                logger.LogInformation("Created at {CreatedAt}.", manifest.CreatedAt.Value.ToString("u", CultureInfo.InvariantCulture));
            }

            if (!string.IsNullOrWhiteSpace(manifest.Digest))
            {
                var digestDisplay = BuildDigestDisplay(manifest.Algorithm, manifest.Digest);
                if (manifest.SizeBytes.HasValue)
                {
                    logger.LogInformation("Digest {Digest} ({Size}).", digestDisplay, FormatSize(manifest.SizeBytes.Value));
                }
                else
                {
                    logger.LogInformation("Digest {Digest}.", digestDisplay);
                }
            }

            if (!string.IsNullOrWhiteSpace(manifest.RekorLocation))
            {
                if (!string.IsNullOrWhiteSpace(manifest.RekorIndex))
                {
                    logger.LogInformation("Rekor entry: {Location} (index {Index}).", manifest.RekorLocation, manifest.RekorIndex);
                }
                else
                {
                    logger.LogInformation("Rekor entry: {Location}.", manifest.RekorLocation);
                }
            }

            if (!string.IsNullOrWhiteSpace(manifest.RekorInclusionUrl)
                && !string.Equals(manifest.RekorInclusionUrl, manifest.RekorLocation, StringComparison.OrdinalIgnoreCase))
            {
                logger.LogInformation("Rekor inclusion proof: {Url}.", manifest.RekorInclusionUrl);
            }

            if (!string.IsNullOrWhiteSpace(outputPath))
            {
                var resolvedPath = ResolveExportOutputPath(outputPath!, manifest);
                var download = await client.DownloadExcititorExportAsync(
                    manifest.ExportId,
                    resolvedPath,
                    manifest.Algorithm,
                    manifest.Digest,
                    cancellationToken).ConfigureAwait(false);

                activity?.SetTag("stellaops.cli.export_path", download.Path);

                if (download.FromCache)
                {
                    logger.LogInformation("Export already cached at {Path} ({Size}).", download.Path, FormatSize(download.SizeBytes));
                }
                else
                {
                    logger.LogInformation("Export saved to {Path} ({Size}).", download.Path, FormatSize(download.SizeBytes));
                }
            }
            else if (!string.IsNullOrWhiteSpace(result.Location))
            {
                var downloadUrl = ResolveLocationUrl(options, result.Location);
                if (!string.IsNullOrWhiteSpace(downloadUrl))
                {
                    logger.LogInformation("Download URL: {Url}", downloadUrl);
                }
                else
                {
                    logger.LogInformation("Download location: {Location}", result.Location);
                }
            }
        }
        else
        {
            if (!string.IsNullOrWhiteSpace(result.Location))
            {
                var downloadUrl = ResolveLocationUrl(options, result.Location);
                if (!string.IsNullOrWhiteSpace(downloadUrl))
                {
                    logger.LogInformation("Download URL: {Url}", downloadUrl);
                }
                else
                {
                    logger.LogInformation("Location: {Location}", result.Location);
                }
            }
            else if (string.IsNullOrWhiteSpace(result.Message))
            {
                logger.LogInformation("Export request accepted.");
            }
        }
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Excititor export failed.");
        Environment.ExitCode = 1;
    }
    finally
    {
        verbosity.MinimumLevel = previousLevel;
    }
}

/// <summary>Kicks off the admin statement-backfill job with batching limits.</summary>
/// <exception cref="ArgumentOutOfRangeException">When batchSize or maxDocuments is non-positive.</exception>
public static Task HandleExcititorBackfillStatementsAsync(
    IServiceProvider services,
    DateTimeOffset? retrievedSince,
    bool force,
    int batchSize,
    int? maxDocuments,
    bool verbose,
    CancellationToken cancellationToken)
{
    if (batchSize <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(batchSize), "Batch size must be greater than zero.");
    }

    if (maxDocuments.HasValue && maxDocuments.Value <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(maxDocuments), "Max documents must be greater than zero when specified.");
    }

    var payload = new Dictionary<string, object?>(StringComparer.Ordinal)
    {
        ["force"] = force,
        ["batchSize"] = batchSize,
        ["maxDocuments"] = maxDocuments
    };

    if (retrievedSince.HasValue)
    {
        payload["retrievedSince"] = retrievedSince.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture);
    }

    var activityTags = new Dictionary<string, object?>(StringComparer.Ordinal)
    {
        ["stellaops.cli.force"] = force,
        ["stellaops.cli.batch_size"] = batchSize,
        ["stellaops.cli.max_documents"] = maxDocuments
    };

    if (retrievedSince.HasValue)
    {
        activityTags["stellaops.cli.retrieved_since"] = retrievedSince.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture);
    }

    return ExecuteExcititorCommandAsync(
        services,
        commandName: "excititor backfill-statements",
        verbose,
        activityTags,
        client => client.ExecuteExcititorOperationAsync(
            "admin/backfill-statements",
            HttpMethod.Post,
            RemoveNullValues(payload),
            cancellationToken),
        cancellationToken);
}

/// <summary>
/// Verifies an export by id, digest, and/or an attestation file (sent base64-encoded).
/// At least one selector is required.
/// </summary>
public static Task HandleExcititorVerifyAsync(
    IServiceProvider services,
    string? exportId,
    string? digest,
    string? attestationPath,
    bool verbose,
    CancellationToken cancellationToken)
{
    if (string.IsNullOrWhiteSpace(exportId) && string.IsNullOrWhiteSpace(digest) && string.IsNullOrWhiteSpace(attestationPath))
    {
        var logger = services.GetRequiredService<ILoggerFactory>().CreateLogger("excititor-verify");
        logger.LogError("At least one of --export-id, --digest, or --attestation must be provided.");
        Environment.ExitCode = 1;
        return Task.CompletedTask;
    }

    var payload = new Dictionary<string, object?>(StringComparer.Ordinal);
    if (!string.IsNullOrWhiteSpace(exportId))
    {
        payload["exportId"] = exportId.Trim();
    }
    if (!string.IsNullOrWhiteSpace(digest))
    {
        payload["digest"] = digest.Trim();
    }
    if (!string.IsNullOrWhiteSpace(attestationPath))
    {
        var fullPath = Path.GetFullPath(attestationPath);
        if (!File.Exists(fullPath))
        {
            var logger = services.GetRequiredService<ILoggerFactory>().CreateLogger("excititor-verify");
            logger.LogError("Attestation file not found at {Path}.", fullPath);
            Environment.ExitCode = 1;
            return Task.CompletedTask;
        }

        var bytes = File.ReadAllBytes(fullPath);
        payload["attestation"] = new Dictionary<string, object?>(StringComparer.Ordinal)
        {
            ["fileName"] = Path.GetFileName(fullPath),
            ["base64"] = Convert.ToBase64String(bytes)
        };
    }

    return ExecuteExcititorCommandAsync(
        services,
        commandName: "excititor verify",
        verbose,
        new Dictionary<string, object?>
        {
            ["export_id"] = exportId,
            ["digest"] = digest,
            ["attestation_path"] = attestationPath
        },
        client => client.ExecuteExcititorOperationAsync("verify", HttpMethod.Post, RemoveNullValues(payload), cancellationToken),
        cancellationToken);
}

/// <summary>Requests a reconcile pass over the given providers, optionally bounded by max age.</summary>
public static Task HandleExcititorReconcileAsync(
    IServiceProvider services,
    IReadOnlyList<string> providers,
    TimeSpan? maxAge,
    bool verbose,
    CancellationToken cancellationToken)
{
    var normalizedProviders = NormalizeProviders(providers);
    var payload = new Dictionary<string, object?>(StringComparer.Ordinal);
    if (normalizedProviders.Count > 0)
    {
        payload["providers"] = normalizedProviders;
    }
    if (maxAge.HasValue)
    {
        payload["maxAge"] = maxAge.Value.ToString("c", CultureInfo.InvariantCulture);
    }

    return ExecuteExcititorCommandAsync(
        services,
        commandName: "excititor reconcile",
        verbose,
        new Dictionary<string, object?>
        {
            ["providers"] = normalizedProviders.Count,
            ["max_age"] = maxAge?.ToString("c", CultureInfo.InvariantCulture)
        },
        client => client.ExecuteExcititorOperationAsync("reconcile", HttpMethod.Post, RemoveNullValues(payload), cancellationToken),
        cancellationToken);
}

/// <summary>
/// Evaluates runtime admission policy for a set of image digests gathered from
/// arguments, a file, or stdin. Exit code 9 signals bad input; 1 signals failure.
/// </summary>
public static async Task HandleRuntimePolicyTestAsync(
    IServiceProvider services,
    string? namespaceValue,
    IReadOnlyList<string> imageArguments,
    string? filePath,
    IReadOnlyList<string> labelArguments,
    bool outputJson,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("runtime-policy-test");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.runtime.policy.test", ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "runtime policy test");
    if (!string.IsNullOrWhiteSpace(namespaceValue))
    {
        activity?.SetTag("stellaops.cli.namespace", namespaceValue);
    }
    using var duration = CliMetrics.MeasureCommandDuration("runtime policy test");

    try
    {
        IReadOnlyList<string> images;
        try
        {
            images = await GatherImageDigestsAsync(imageArguments, filePath, cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or ArgumentException or FileNotFoundException)
        {
            logger.LogError(ex, "Failed to gather image digests: {Message}", ex.Message);
            Environment.ExitCode = 9;
            return;
        }

        if (images.Count == 0)
        {
            logger.LogError("No image digests provided. Use --image, --file, or pipe digests via stdin.");
            Environment.ExitCode = 9;
            return;
        }

        IReadOnlyDictionary<string, string> labels; // TODO confirm value type of label selectors
        try
        {
            labels = ParseLabelSelectors(labelArguments);
        }
        catch (ArgumentException ex)
        {
            // FIX: previously logged ex.Message as the template itself, which breaks
            // on '{' characters; parameterize instead.
            logger.LogError("{Message}", ex.Message);
            Environment.ExitCode = 9;
            return;
        }

        activity?.SetTag("stellaops.cli.images", images.Count);
        activity?.SetTag("stellaops.cli.labels", labels.Count);

        var request = new RuntimePolicyEvaluationRequest(namespaceValue, labels, images);
        var result = await client.EvaluateRuntimePolicyAsync(request, cancellationToken).ConfigureAwait(false);

        activity?.SetTag("stellaops.cli.ttl_seconds", result.TtlSeconds);
        Environment.ExitCode = 0;

        if (outputJson)
        {
            var json = BuildRuntimePolicyJson(result, images);
            Console.WriteLine(json);
            return;
        }

        if (result.ExpiresAtUtc.HasValue)
        {
            logger.LogInformation("Decision TTL: {TtlSeconds}s (expires {ExpiresAt})", result.TtlSeconds, result.ExpiresAtUtc.Value.ToString("u", CultureInfo.InvariantCulture));
        }
        else
        {
            logger.LogInformation("Decision TTL: {TtlSeconds}s", result.TtlSeconds);
        }

        if (!string.IsNullOrWhiteSpace(result.PolicyRevision))
        {
            logger.LogInformation("Policy revision: {Revision}", result.PolicyRevision);
        }

        DisplayRuntimePolicyResults(logger, result, images);
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Runtime policy evaluation failed.");
        Environment.ExitCode = 1;
    }
    finally
    {
        verbosity.MinimumLevel = previousLevel;
    }
}

/// <summary>
/// Authenticates against the configured Authority (password grant when a username is
/// configured, client credentials otherwise) and caches the resulting token.
/// </summary>
public static async Task HandleAuthLoginAsync(
    IServiceProvider services,
    StellaOpsCliOptions options,
    bool verbose,
    bool force,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("auth-login");
    Environment.ExitCode = 0;

    if (string.IsNullOrWhiteSpace(options.Authority?.Url))
    {
        logger.LogError("Authority URL is not configured. Set STELLAOPS_AUTHORITY_URL or update your configuration.");
        Environment.ExitCode = 1;
        return;
    }

    // TODO confirm token-client interface name (generic argument stripped in patch).
    var tokenClient = scope.ServiceProvider.GetService<IStellaOpsTokenClient>();
    if (tokenClient is null)
    {
        logger.LogError("Authority client is not available. Ensure AddStellaOpsAuthClient is registered in Program.cs.");
        Environment.ExitCode = 1;
        return;
    }

    var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options);
    if (string.IsNullOrWhiteSpace(cacheKey))
    {
        logger.LogError("Authority configuration is incomplete; unable to determine cache key.");
        Environment.ExitCode = 1;
        return;
    }

    try
    {
        if (force)
        {
            // --force drops the cached token so a fresh grant is always requested.
            await tokenClient.ClearCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false);
        }

        var scopeName = AuthorityTokenUtilities.ResolveScope(options);
        StellaOpsTokenResult token;

        if (!string.IsNullOrWhiteSpace(options.Authority.Username))
        {
            if (string.IsNullOrWhiteSpace(options.Authority.Password))
            {
                logger.LogError("Authority password must be provided when username is configured.");
                Environment.ExitCode = 1;
                return;
            }

            token = await tokenClient.RequestPasswordTokenAsync(
                options.Authority.Username,
                options.Authority.Password!,
                scopeName,
                null,
                cancellationToken).ConfigureAwait(false);
        }
        else
        {
            token = await tokenClient.RequestClientCredentialsTokenAsync(scopeName, null, cancellationToken).ConfigureAwait(false);
        }

        await tokenClient.CacheTokenAsync(cacheKey, token.ToCacheEntry(), cancellationToken).ConfigureAwait(false);

        if (verbose)
        {
            logger.LogInformation("Authenticated with {Authority} (scopes: {Scopes}).", options.Authority.Url, string.Join(", ", token.Scopes));
        }

        logger.LogInformation("Login successful. Access token expires at {Expires}.", token.ExpiresAtUtc.ToString("u"));
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Authentication failed: {Message}", ex.Message);
        Environment.ExitCode = 1;
    }
}

/// <summary>Removes the cached Authority token, if any. Never fails the command.</summary>
public static async Task HandleAuthLogoutAsync(
    IServiceProvider services,
    StellaOpsCliOptions options,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("auth-logout");
    Environment.ExitCode = 0;

    var tokenClient = scope.ServiceProvider.GetService<IStellaOpsTokenClient>();
    if (tokenClient is null)
    {
        logger.LogInformation("No authority client registered; nothing to remove.");
        return;
    }

    var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options);
    if (string.IsNullOrWhiteSpace(cacheKey))
    {
        logger.LogInformation("Authority configuration missing; no cached tokens to remove.");
        return;
    }

    await tokenClient.ClearCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false);
    if (verbose)
    {
        logger.LogInformation("Cleared cached token for {Authority}.", options.Authority?.Url ?? "authority");
    }
}

/// <summary>Reports whether a cached Authority token exists and when it expires. Exit 1 when absent.</summary>
public static async Task HandleAuthStatusAsync(
    IServiceProvider services,
    StellaOpsCliOptions options,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("auth-status");
    Environment.ExitCode = 0;

    if (string.IsNullOrWhiteSpace(options.Authority?.Url))
    {
        logger.LogInformation("Authority URL not configured. Set STELLAOPS_AUTHORITY_URL and run 'auth login'.");
        Environment.ExitCode = 1;
        return;
    }

    var tokenClient = scope.ServiceProvider.GetService<IStellaOpsTokenClient>();
    if (tokenClient is null)
    {
        logger.LogInformation("Authority client not registered; no cached tokens available.");
        Environment.ExitCode = 1;
        return;
    }

    var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options);
    if (string.IsNullOrWhiteSpace(cacheKey))
    {
        logger.LogInformation("Authority configuration incomplete; no cached tokens available.");
        Environment.ExitCode = 1;
        return;
    }

    var entry = await tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false);
    if (entry is null)
    {
        logger.LogInformation("No cached token for {Authority}. Run 'auth login' to authenticate.", options.Authority.Url);
        Environment.ExitCode = 1;
        return;
    }

    logger.LogInformation("Cached token for {Authority} expires at {Expires}.", options.Authority.Url, entry.ExpiresAtUtc.ToString("u"));
    if (verbose)
    {
        logger.LogInformation("Scopes: {Scopes}", string.Join(", ", entry.Scopes));
    }
}

/// <summary>
/// Describes the cached token: grant type, expiry, and (when the token is a JWT)
/// its standard claims. Opaque tokens report claims as unavailable.
/// </summary>
public static async Task HandleAuthWhoAmIAsync(
    IServiceProvider services,
    StellaOpsCliOptions options,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("auth-whoami");
    Environment.ExitCode = 0;

    if (string.IsNullOrWhiteSpace(options.Authority?.Url))
    {
        logger.LogInformation("Authority URL not configured. Set STELLAOPS_AUTHORITY_URL and run 'auth login'.");
        Environment.ExitCode = 1;
        return;
    }

    var tokenClient = scope.ServiceProvider.GetService<IStellaOpsTokenClient>();
    if (tokenClient is null)
    {
        logger.LogInformation("Authority client not registered; no cached tokens available.");
        Environment.ExitCode = 1;
        return;
    }

    var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options);
    if (string.IsNullOrWhiteSpace(cacheKey))
    {
        logger.LogInformation("Authority configuration incomplete; no cached tokens available.");
        Environment.ExitCode = 1;
        return;
    }

    var entry = await tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false);
    if (entry is null)
    {
        logger.LogInformation("No cached token for {Authority}. Run 'auth login' to authenticate.", options.Authority.Url);
        Environment.ExitCode = 1;
        return;
    }

    // Grant type is inferred from configuration, not from the token itself.
    var grantType = string.IsNullOrWhiteSpace(options.Authority.Username) ? "client_credentials" : "password";
    var now = DateTimeOffset.UtcNow;
    var remaining = entry.ExpiresAtUtc - now;
    if (remaining < TimeSpan.Zero)
    {
        remaining = TimeSpan.Zero; // already expired; show zero rather than a negative span
    }

    logger.LogInformation("Authority: {Authority}", options.Authority.Url);
    logger.LogInformation("Grant type: {GrantType}", grantType);
    logger.LogInformation("Token type: {TokenType}", entry.TokenType);
    logger.LogInformation("Expires: {Expires} ({Remaining})", entry.ExpiresAtUtc.ToString("u"), FormatDuration(remaining));

    if (entry.Scopes.Count > 0)
    {
        logger.LogInformation("Scopes: {Scopes}", string.Join(", ", entry.Scopes));
    }

    if (TryExtractJwtClaims(entry.AccessToken, out var claims, out var issuedAt, out var notBefore))
    {
        if (claims.TryGetValue("sub", out var subject) && !string.IsNullOrWhiteSpace(subject))
        {
            logger.LogInformation("Subject: {Subject}", subject);
        }

        if (claims.TryGetValue("client_id", out var clientId) && !string.IsNullOrWhiteSpace(clientId))
        {
            logger.LogInformation("Client ID (token): {ClientId}", clientId);
        }

        if (claims.TryGetValue("aud", out var audience) && !string.IsNullOrWhiteSpace(audience))
        {
            logger.LogInformation("Audience: {Audience}", audience);
        }

        if (claims.TryGetValue("iss", out var issuer) && !string.IsNullOrWhiteSpace(issuer))
        {
            logger.LogInformation("Issuer: {Issuer}", issuer);
        }

        if (issuedAt is not null)
        {
            logger.LogInformation("Issued at: {IssuedAt}", issuedAt.Value.ToString("u"));
        }

        if (notBefore is not null)
        {
            logger.LogInformation("Not before: {NotBefore}", notBefore.Value.ToString("u"));
        }

        var extraClaims = CollectAdditionalClaims(claims);
        if (extraClaims.Count > 0 && verbose)
        {
            logger.LogInformation("Additional claims: {Claims}", string.Join(", ", extraClaims));
        }
    }
    else
    {
        logger.LogInformation("Access token appears opaque; claims are unavailable.");
    }
}

/// <summary>
/// Exports the revocation bundle plus its detached JWS signature and sha256 digest
/// to three files, verifying the digest locally before declaring success.
/// </summary>
public static async Task HandleAuthRevokeExportAsync(
    IServiceProvider services,
    StellaOpsCliOptions options,
    string? outputDirectory,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("auth-revoke-export");
    Environment.ExitCode = 0;

    try
    {
        // TODO confirm the revocation-export client type (generic argument stripped).
        var client = scope.ServiceProvider.GetRequiredService<IAuthorityRevocationClient>();
        var result = await client.ExportAsync(verbose, cancellationToken).ConfigureAwait(false);

        var directory = string.IsNullOrWhiteSpace(outputDirectory)
            ? Directory.GetCurrentDirectory()
            : Path.GetFullPath(outputDirectory);

        Directory.CreateDirectory(directory);

        var bundlePath = Path.Combine(directory, "revocation-bundle.json");
        var signaturePath = Path.Combine(directory, "revocation-bundle.json.jws");
        var digestPath = Path.Combine(directory, "revocation-bundle.json.sha256");

        await File.WriteAllBytesAsync(bundlePath, result.BundleBytes, cancellationToken).ConfigureAwait(false);
        await File.WriteAllTextAsync(signaturePath, result.Signature, cancellationToken).ConfigureAwait(false);
        await File.WriteAllTextAsync(digestPath, $"sha256:{result.Digest}", cancellationToken).ConfigureAwait(false);

        // Recompute locally so a corrupted download never passes silently.
        var computedDigest = Convert.ToHexString(SHA256.HashData(result.BundleBytes)).ToLowerInvariant();
        if (!string.Equals(computedDigest, result.Digest, StringComparison.OrdinalIgnoreCase))
        {
            logger.LogError("Digest mismatch. Expected {Expected} but computed {Actual}.", result.Digest, computedDigest);
            Environment.ExitCode = 1;
            return;
        }

        logger.LogInformation(
            "Revocation bundle exported to {Directory} (sequence {Sequence}, issued {Issued:u}, signing key {KeyId}, provider {Provider}).",
            directory,
            result.Sequence,
            result.IssuedAt,
            // NOTE(review): this fallback literal appeared empty in the patch — the
            // extraction strips angle brackets, so it was likely "<unknown>"; confirm.
            string.IsNullOrWhiteSpace(result.SigningKeyId) ? "" : result.SigningKeyId,
            string.IsNullOrWhiteSpace(result.SigningProvider) ? "default" : result.SigningProvider);
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Failed to export revocation bundle.");
        Environment.ExitCode = 1;
    }
}

/// <summary>
/// Verifies a revocation bundle against a detached JWS (RFC 7515 Appendix F /
/// RFC 7797 "b64": false) using a locally supplied public key.
/// </summary>
public static async Task HandleAuthRevokeVerifyAsync(
    string bundlePath,
    string signaturePath,
    string keyPath,
    bool verbose,
    CancellationToken cancellationToken)
{
    // Standalone command: builds its own console logger instead of using DI.
    var loggerFactory = LoggerFactory.Create(builder => builder.AddSimpleConsole(options =>
    {
        options.SingleLine = true;
        options.TimestampFormat = "HH:mm:ss ";
    }));
    var logger = loggerFactory.CreateLogger("auth-revoke-verify");
    Environment.ExitCode = 0;

    try
    {
        if (string.IsNullOrWhiteSpace(bundlePath) || string.IsNullOrWhiteSpace(signaturePath) || string.IsNullOrWhiteSpace(keyPath))
        {
            logger.LogError("Arguments --bundle, --signature, and --key are required.");
            Environment.ExitCode = 1;
            return;
        }

        var bundleBytes = await File.ReadAllBytesAsync(bundlePath, cancellationToken).ConfigureAwait(false);
        var signatureContent = (await File.ReadAllTextAsync(signaturePath, cancellationToken).ConfigureAwait(false)).Trim();
        var keyPem = await File.ReadAllTextAsync(keyPath, cancellationToken).ConfigureAwait(false);

        var digest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant();
        logger.LogInformation("Bundle digest sha256:{Digest}", digest);

        if (!TryParseDetachedJws(signatureContent, out var encodedHeader, out var encodedSignature))
        {
            logger.LogError("Signature is not in detached JWS format.");
            Environment.ExitCode = 1;
            return;
        }

        var headerJson = Encoding.UTF8.GetString(Base64UrlDecode(encodedHeader));
        using var headerDocument = JsonDocument.Parse(headerJson);
        var header = headerDocument.RootElement;

        // Detached payload requires the unencoded-payload option (RFC 7797).
        if (!header.TryGetProperty("b64", out var b64Element) || b64Element.GetBoolean())
        {
            logger.LogError("Detached JWS header must include '\"b64\": false'.");
            Environment.ExitCode = 1;
            return;
        }

        var algorithm = header.TryGetProperty("alg", out var algElement) ? algElement.GetString() : SignatureAlgorithms.Es256;
        if (string.IsNullOrWhiteSpace(algorithm))
        {
            algorithm = SignatureAlgorithms.Es256;
        }

        var providerHint = header.TryGetProperty("provider", out var providerElement)
            ? providerElement.GetString()
            : null;

        var keyId = header.TryGetProperty("kid", out var kidElement) ? kidElement.GetString() : null;
        if (string.IsNullOrWhiteSpace(keyId))
        {
            keyId = Path.GetFileNameWithoutExtension(keyPath);
            logger.LogWarning("JWS header missing 'kid'; using fallback key id {KeyId}.", keyId);
        }

        CryptoSigningKey signingKey;
        try
        {
            signingKey = CreateVerificationSigningKey(keyId!, algorithm!, providerHint, keyPem, keyPath);
        }
        catch (Exception ex) when (ex is InvalidOperationException or CryptographicException)
        {
            logger.LogError(ex, "Failed to load verification key material.");
            Environment.ExitCode = 1;
            return;
        }

        // TODO confirm provider interface name (generic argument stripped in patch).
        var providers = new List<ICryptoProvider>
        {
            new DefaultCryptoProvider()
        };

#if STELLAOPS_CRYPTO_SODIUM
        providers.Add(new LibsodiumCryptoProvider());
#endif

        foreach (var provider in providers)
        {
            if (provider.Supports(CryptoCapability.Verification, algorithm!))
            {
                provider.UpsertSigningKey(signingKey);
            }
        }

        var preferredOrder = !string.IsNullOrWhiteSpace(providerHint)
            ? new[] { providerHint! }
            : Array.Empty<string>();
        var registry = new CryptoProviderRegistry(providers, preferredOrder);
        CryptoSignerResolution resolution;
        try
        {
            resolution = registry.ResolveSigner(
                CryptoCapability.Verification,
                algorithm!,
                signingKey.Reference,
                providerHint);
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "No crypto provider available for verification (algorithm {Algorithm}).", algorithm);
            Environment.ExitCode = 1;
            return;
        }

        // Signing input for detached JWS: ASCII(BASE64URL(header)) || '.' || payload bytes.
        var signingInputLength = encodedHeader.Length + 1 + bundleBytes.Length;
        var buffer = ArrayPool<byte>.Shared.Rent(signingInputLength);
        try
        {
            var headerBytes = Encoding.ASCII.GetBytes(encodedHeader);
            Buffer.BlockCopy(headerBytes, 0, buffer, 0, headerBytes.Length);
            buffer[headerBytes.Length] = (byte)'.';
            Buffer.BlockCopy(bundleBytes, 0, buffer, headerBytes.Length + 1, bundleBytes.Length);

            var signatureBytes = Base64UrlDecode(encodedSignature);
            var verified = await resolution.Signer.VerifyAsync(
                new ReadOnlyMemory<byte>(buffer, 0, signingInputLength),
                signatureBytes,
                cancellationToken).ConfigureAwait(false);

            if (!verified)
            {
                logger.LogError("Signature verification failed.");
                Environment.ExitCode = 1;
                return;
            }
        }
        finally
        {
            ArrayPool<byte>.Shared.Return(buffer);
        }

        if (!string.IsNullOrWhiteSpace(providerHint) && !string.Equals(providerHint, resolution.ProviderName, StringComparison.OrdinalIgnoreCase))
        {
            logger.LogWarning(
                "Preferred provider '{Preferred}' unavailable; verification used '{Provider}'.",
                providerHint,
                resolution.ProviderName);
        }

        logger.LogInformation(
            "Signature verified using algorithm {Algorithm} via provider {Provider} (kid {KeyId}).",
            algorithm,
            resolution.ProviderName,
            signingKey.Reference.KeyId);

        if (verbose)
        {
            logger.LogInformation("JWS header: {Header}", headerJson);
        }
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Failed to verify revocation bundle.");
        Environment.ExitCode = 1;
    }
    finally
    {
        loggerFactory.Dispose();
    }
}

/// <summary>
/// Queries Concelier advisory observations for a tenant, rendering a table or JSON.
/// Exit 130 on user cancellation, 1 on failure.
/// </summary>
public static async Task HandleVulnObservationsAsync(
    IServiceProvider services,
    string tenant,
    IReadOnlyList<string> observationIds,
    IReadOnlyList<string> aliases,
    IReadOnlyList<string> purls,
    IReadOnlyList<string> cpes,
    int? limit,
    string? cursor,
    bool emitJson,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("vuln-observations");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.vuln.observations", ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "vuln observations");
    activity?.SetTag("stellaops.cli.tenant", tenant);
    using var duration = CliMetrics.MeasureCommandDuration("vuln observations");

    try
    {
        tenant = tenant?.Trim().ToLowerInvariant() ?? string.Empty;
        if (string.IsNullOrWhiteSpace(tenant))
        {
            throw new InvalidOperationException("Tenant must be provided.");
        }

        var query = new AdvisoryObservationsQuery(
            tenant,
            NormalizeSet(observationIds, toLower: false),
            NormalizeSet(aliases, toLower: true), // aliases are case-insensitive upstream
            NormalizeSet(purls, toLower: false),
            NormalizeSet(cpes, toLower: false),
            limit,
            cursor);

        var response = await client.GetObservationsAsync(query, cancellationToken).ConfigureAwait(false);

        if (emitJson)
        {
            var json = JsonSerializer.Serialize(response, new JsonSerializerOptions
            {
                WriteIndented = true
            });
            Console.WriteLine(json);
            Environment.ExitCode = 0;
            return;
        }

        RenderObservationTable(response);
        if (!emitJson && response.HasMore && !string.IsNullOrWhiteSpace(response.NextCursor))
        {
            var escapedCursor = Markup.Escape(response.NextCursor);
            AnsiConsole.MarkupLine($"[yellow]More observations available. Continue with[/] [cyan]--cursor[/] [grey]{escapedCursor}[/]");
        }
        Environment.ExitCode = 0;
    }
    catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
    {
        logger.LogWarning("Operation cancelled by user.");
        Environment.ExitCode = 130; // conventional SIGINT exit code
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Failed to fetch observations from Concelier.");
        Environment.ExitCode = 1;
    }
    finally
    {
        verbosity.MinimumLevel = previousLevel;
    }

    // De-duplicates, trims, and optionally lower-cases filter values.
    static IReadOnlyList<string> NormalizeSet(IReadOnlyList<string> values, bool toLower)
    {
        if (values is null || values.Count == 0)
        {
            return Array.Empty<string>();
        }

        var set = new HashSet<string>(StringComparer.Ordinal);
        foreach (var raw in values)
        {
            if (string.IsNullOrWhiteSpace(raw))
            {
                continue;
            }

            var normalized = raw.Trim();
            if (toLower)
            {
                normalized = normalized.ToLowerInvariant();
            }

            set.Add(normalized);
        }

        return set.Count == 0 ? Array.Empty<string>() : set.ToArray();
    }

    // Renders the observation list plus linkset summary as a Spectre.Console table.
    static void RenderObservationTable(AdvisoryObservationsResponse response)
    {
        // TODO confirm element type (generic argument stripped in patch).
        var observations = response.Observations ?? Array.Empty<AdvisoryObservation>();
        if (observations.Count == 0)
        {
            AnsiConsole.MarkupLine("[yellow]No observations matched the provided filters.[/]");
            return;
        }

        var table = new Table()
            .Centered()
            .Border(TableBorder.Rounded);

        table.AddColumn("Observation");
        table.AddColumn("Source");
        table.AddColumn("Upstream Id");
        table.AddColumn("Aliases");
        table.AddColumn("PURLs");
        table.AddColumn("CPEs");
        table.AddColumn("Created (UTC)");

        foreach (var observation in observations)
        {
            var sourceVendor = observation.Source?.Vendor ?? "(unknown)";
            var upstreamId = observation.Upstream?.UpstreamId ?? "(unknown)";
            var aliasesText = FormatList(observation.Linkset?.Aliases);
            var purlsText = FormatList(observation.Linkset?.Purls);
            var cpesText = FormatList(observation.Linkset?.Cpes);

            table.AddRow(
                Markup.Escape(observation.ObservationId),
                Markup.Escape(sourceVendor),
                Markup.Escape(upstreamId),
                Markup.Escape(aliasesText),
                Markup.Escape(purlsText),
                Markup.Escape(cpesText),
                observation.CreatedAt.ToUniversalTime().ToString("u", CultureInfo.InvariantCulture));
        }

        AnsiConsole.Write(table);
        AnsiConsole.MarkupLine(
            "[green]{0}[/] observation(s). Aliases: [green]{1}[/], PURLs: [green]{2}[/], CPEs: [green]{3}[/].",
            observations.Count,
            response.Linkset?.Aliases?.Count ?? 0,
            response.Linkset?.Purls?.Count ?? 0,
            response.Linkset?.Cpes?.Count ?? 0);
    }

    // Shows up to three items, then a "(+N)" overflow suffix.
    static string FormatList(IReadOnlyList<string>? values)
    {
        if (values is null || values.Count == 0)
        {
            return "(none)";
        }

        const int MaxItems = 3;
        if (values.Count <= MaxItems)
        {
            return string.Join(", ", values);
        }

        var preview = values.Take(MaxItems);
        return $"{string.Join(", ", preview)} (+{values.Count - MaxItems})";
    }
}

/// <summary>Downloads an offline kit bundle.</summary>
public static async Task HandleOfflineKitPullAsync(
    IServiceProvider services,
    string? bundleId,
    string? destinationDirectory,
    bool overwrite,
    bool resume,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var options = scope.ServiceProvider.GetRequiredService<StellaOpsCliOptions>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("offline-kit-pull");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ?
    // NOTE(review): this method continues past the end of this patch chunk; the
    // remainder of its body is not visible here and is intentionally not reconstructed.
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.pull", ActivityKind.Client); + activity?.SetTag("stellaops.cli.bundle_id", string.IsNullOrWhiteSpace(bundleId) ? "latest" : bundleId); + using var duration = CliMetrics.MeasureCommandDuration("offline kit pull"); + + try + { + var targetDirectory = string.IsNullOrWhiteSpace(destinationDirectory) + ? options.Offline?.KitsDirectory ?? Path.Combine(Environment.CurrentDirectory, "offline-kits") + : destinationDirectory; + + targetDirectory = Path.GetFullPath(targetDirectory); + Directory.CreateDirectory(targetDirectory); + + var result = await client.DownloadOfflineKitAsync(bundleId, targetDirectory, overwrite, resume, cancellationToken).ConfigureAwait(false); + + logger.LogInformation( + "Bundle {BundleId} stored at {Path} (captured {Captured:u}, sha256:{Digest}).", + result.Descriptor.BundleId, + result.BundlePath, + result.Descriptor.CapturedAt, + result.Descriptor.BundleSha256); + + logger.LogInformation("Manifest saved to {Manifest}.", result.ManifestPath); + + if (!string.IsNullOrWhiteSpace(result.MetadataPath)) + { + logger.LogDebug("Metadata recorded at {Metadata}.", result.MetadataPath); + } + + if (result.BundleSignaturePath is not null) + { + logger.LogInformation("Bundle signature saved to {Signature}.", result.BundleSignaturePath); + } + + if (result.ManifestSignaturePath is not null) + { + logger.LogInformation("Manifest signature saved to {Signature}.", result.ManifestSignaturePath); + } + + CliMetrics.RecordOfflineKitDownload(result.Descriptor.Kind ?? 
"unknown", result.FromCache); + activity?.SetTag("stellaops.cli.bundle_cache", result.FromCache); + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to download offline kit bundle."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicyFindingsListAsync( + IServiceProvider services, + string policyId, + string[] sbomFilters, + string[] statusFilters, + string[] severityFilters, + string? since, + string? cursor, + int? page, + int? pageSize, + string? format, + string? outputPath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-ls"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.list", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("policy findings list"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (page.HasValue && page.Value < 1) + { + throw new ArgumentException("--page must be greater than or equal to 1.", nameof(page)); + } + + if (pageSize.HasValue && (pageSize.Value < 1 || pageSize.Value > 500)) + { + throw new ArgumentException("--page-size must be between 1 and 500.", nameof(pageSize)); + } + + var normalizedPolicyId = policyId.Trim(); + var sboms = NormalizePolicyFilterValues(sbomFilters); + var statuses = NormalizePolicyFilterValues(statusFilters, toLower: true); + var severities = NormalizePolicyFilterValues(severityFilters); + var sinceValue = ParsePolicySince(since); + var cursorValue = string.IsNullOrWhiteSpace(cursor) ? 
null : cursor.Trim(); + + var query = new PolicyFindingsQuery( + normalizedPolicyId, + sboms, + statuses, + severities, + cursorValue, + page, + pageSize, + sinceValue); + + activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); + if (sboms.Count > 0) + { + activity?.SetTag("stellaops.cli.findings.sbom_filters", string.Join(",", sboms)); + } + + if (statuses.Count > 0) + { + activity?.SetTag("stellaops.cli.findings.status_filters", string.Join(",", statuses)); + } + + if (severities.Count > 0) + { + activity?.SetTag("stellaops.cli.findings.severity_filters", string.Join(",", severities)); + } + + if (!string.IsNullOrWhiteSpace(cursorValue)) + { + activity?.SetTag("stellaops.cli.findings.cursor", cursorValue); + } + + if (page.HasValue) + { + activity?.SetTag("stellaops.cli.findings.page", page.Value); + } + + if (pageSize.HasValue) + { + activity?.SetTag("stellaops.cli.findings.page_size", pageSize.Value); + } + + if (sinceValue.HasValue) + { + activity?.SetTag("stellaops.cli.findings.since", sinceValue.Value.ToString("o", CultureInfo.InvariantCulture)); + } + + var result = await client.GetPolicyFindingsAsync(query, cancellationToken).ConfigureAwait(false); + activity?.SetTag("stellaops.cli.findings.count", result.Items.Count); + if (!string.IsNullOrWhiteSpace(result.NextCursor)) + { + activity?.SetTag("stellaops.cli.findings.next_cursor", result.NextCursor); + } + + var payload = BuildPolicyFindingsPayload(normalizedPolicyId, query, result); + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Results written to {Path}.", Path.GetFullPath(outputPath!)); + } + + var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); + if (outputFormat == PolicyFindingsOutputFormat.Json) + { + var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); + Console.WriteLine(json); + } + else + { + RenderPolicyFindingsTable(logger, 
result); + } + + CliMetrics.RecordPolicyFindingsList(result.Items.Count == 0 ? "empty" : "ok"); + Environment.ExitCode = 0; + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicyFindingsList("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsList); + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to list policy findings."); + CliMetrics.RecordPolicyFindingsList("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicyFindingsGetAsync( + IServiceProvider services, + string policyId, + string findingId, + string? format, + string? outputPath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-get"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.get", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("policy findings get"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (string.IsNullOrWhiteSpace(findingId)) + { + throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); + } + + var normalizedPolicyId = policyId.Trim(); + var normalizedFindingId = findingId.Trim(); + activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); + activity?.SetTag("stellaops.cli.finding_id", normalizedFindingId); + + var result = await client.GetPolicyFindingAsync(normalizedPolicyId, normalizedFindingId, cancellationToken).ConfigureAwait(false); + var payload = BuildPolicyFindingPayload(normalizedPolicyId, result); + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Finding written to {Path}.", Path.GetFullPath(outputPath!)); + } + + var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); + if (outputFormat == PolicyFindingsOutputFormat.Json) + { + Console.WriteLine(JsonSerializer.Serialize(payload, SimulationJsonOptions)); + } + else + { + RenderPolicyFindingDetails(logger, result); + } + + var outcome = string.IsNullOrWhiteSpace(result.Status) ? 
"unknown" : result.Status.ToLowerInvariant(); + CliMetrics.RecordPolicyFindingsGet(outcome); + Environment.ExitCode = 0; + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicyFindingsGet("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsGet); + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to retrieve policy finding."); + CliMetrics.RecordPolicyFindingsGet("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicyFindingsExplainAsync( + IServiceProvider services, + string policyId, + string findingId, + string? mode, + string? format, + string? outputPath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-explain"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.explain", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("policy findings explain"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (string.IsNullOrWhiteSpace(findingId)) + { + throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); + } + + var normalizedPolicyId = policyId.Trim(); + var normalizedFindingId = findingId.Trim(); + var normalizedMode = NormalizeExplainMode(mode); + + activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); + activity?.SetTag("stellaops.cli.finding_id", normalizedFindingId); + if (!string.IsNullOrWhiteSpace(normalizedMode)) + { + activity?.SetTag("stellaops.cli.findings.mode", normalizedMode); + } + + var result = await client.GetPolicyFindingExplainAsync(normalizedPolicyId, normalizedFindingId, normalizedMode, cancellationToken).ConfigureAwait(false); + activity?.SetTag("stellaops.cli.findings.step_count", result.Steps.Count); + + var payload = BuildPolicyFindingExplainPayload(normalizedPolicyId, normalizedFindingId, normalizedMode, result); + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Explain trace written to {Path}.", Path.GetFullPath(outputPath!)); + } + + var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); + if (outputFormat == PolicyFindingsOutputFormat.Json) + { + Console.WriteLine(JsonSerializer.Serialize(payload, SimulationJsonOptions)); + } + else + { + RenderPolicyFindingExplain(logger, result); + } + + CliMetrics.RecordPolicyFindingsExplain(result.Steps.Count == 0 ? 
"empty" : "ok"); + Environment.ExitCode = 0; + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicyFindingsExplain("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsExplain); + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to fetch policy explain trace."); + CliMetrics.RecordPolicyFindingsExplain("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicyActivateAsync( + IServiceProvider services, + string policyId, + int version, + string? note, + bool runNow, + string? scheduledAt, + string? priority, + bool rollback, + string? incidentId, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-activate"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.activate", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "policy activate"); + using var duration = CliMetrics.MeasureCommandDuration("policy activate"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (version <= 0) + { + throw new ArgumentOutOfRangeException(nameof(version), "Version must be greater than zero."); + } + + var normalizedPolicyId = policyId.Trim(); + DateTimeOffset? 
scheduled = null; + if (!string.IsNullOrWhiteSpace(scheduledAt)) + { + if (!DateTimeOffset.TryParse(scheduledAt, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed)) + { + throw new ArgumentException("Scheduled timestamp must be a valid ISO-8601 value.", nameof(scheduledAt)); + } + + scheduled = parsed; + } + + var request = new PolicyActivationRequest( + runNow, + scheduled, + NormalizePolicyPriority(priority), + rollback, + string.IsNullOrWhiteSpace(incidentId) ? null : incidentId.Trim(), + string.IsNullOrWhiteSpace(note) ? null : note.Trim()); + + activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); + activity?.SetTag("stellaops.cli.policy_version", version); + if (request.RunNow) + { + activity?.SetTag("stellaops.cli.policy_run_now", true); + } + + if (request.ScheduledAt.HasValue) + { + activity?.SetTag("stellaops.cli.policy_scheduled_at", request.ScheduledAt.Value.ToString("o", CultureInfo.InvariantCulture)); + } + + if (!string.IsNullOrWhiteSpace(request.Priority)) + { + activity?.SetTag("stellaops.cli.policy_priority", request.Priority); + } + + if (request.Rollback) + { + activity?.SetTag("stellaops.cli.policy_rollback", true); + } + + var result = await client.ActivatePolicyRevisionAsync(normalizedPolicyId, version, request, cancellationToken).ConfigureAwait(false); + + var outcome = NormalizePolicyActivationOutcome(result.Status); + CliMetrics.RecordPolicyActivation(outcome); + RenderPolicyActivationResult(result, request); + + var exitCode = DeterminePolicyActivationExitCode(outcome); + Environment.ExitCode = exitCode; + + if (exitCode == 0) + { + logger.LogInformation("Policy {PolicyId} v{Version} activation status: {Status}.", result.Revision.PolicyId, result.Revision.Version, outcome); + } + else + { + logger.LogWarning("Policy {PolicyId} v{Version} requires additional approval (status: {Status}).", result.Revision.PolicyId, result.Revision.Version, outcome); + } + } + catch 
(ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicyActivation("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicyActivationFailure(ex, logger); + } + catch (Exception ex) + { + logger.LogError(ex, "Policy activation failed."); + CliMetrics.RecordPolicyActivation("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicySimulateAsync( + IServiceProvider services, + string policyId, + int? baseVersion, + int? candidateVersion, + IReadOnlyList sbomArguments, + IReadOnlyList environmentArguments, + string? format, + string? outputPath, + bool explain, + bool failOnDiff, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-simulate"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.simulate", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "policy simulate"); + activity?.SetTag("stellaops.cli.policy_id", policyId); + if (baseVersion.HasValue) + { + activity?.SetTag("stellaops.cli.base_version", baseVersion.Value); + } + if (candidateVersion.HasValue) + { + activity?.SetTag("stellaops.cli.candidate_version", candidateVersion.Value); + } + using var duration = CliMetrics.MeasureCommandDuration("policy simulate"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + var normalizedPolicyId = policyId.Trim(); + var sbomSet = NormalizePolicySbomSet(sbomArguments); + var environment = ParsePolicyEnvironment(environmentArguments); + + var input = new PolicySimulationInput( + baseVersion, + candidateVersion, + sbomSet, + environment, + explain); + + var result = await client.SimulatePolicyAsync(normalizedPolicyId, input, cancellationToken).ConfigureAwait(false); + + activity?.SetTag("stellaops.cli.diff_added", result.Diff.Added); + activity?.SetTag("stellaops.cli.diff_removed", result.Diff.Removed); + if (result.Diff.BySeverity.Count > 0) + { + activity?.SetTag("stellaops.cli.severity_buckets", result.Diff.BySeverity.Count); + } + + var outputFormat = DeterminePolicySimulationFormat(format, outputPath); + var payload = BuildPolicySimulationPayload(normalizedPolicyId, baseVersion, candidateVersion, sbomSet, environment, result); + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + await WriteSimulationOutputAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Simulation results written to {Path}.", Path.GetFullPath(outputPath!)); + } + + RenderPolicySimulationResult(logger, payload, result, outputFormat); + + var exitCode = DetermineSimulationExitCode(result, failOnDiff); + 
Environment.ExitCode = exitCode; + + var outcome = exitCode == 20 + ? "diff_blocked" + : (result.Diff.Added + result.Diff.Removed) > 0 ? "diff" : "clean"; + CliMetrics.RecordPolicySimulation(outcome); + + if (exitCode == 20) + { + logger.LogWarning("Differences detected; exiting with code 20 due to --fail-on-diff."); + } + + if (!string.IsNullOrWhiteSpace(result.ExplainUri)) + { + activity?.SetTag("stellaops.cli.explain_uri", result.ExplainUri); + } + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicySimulation("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicySimulationFailure(ex, logger); + } + catch (Exception ex) + { + logger.LogError(ex, "Policy simulation failed."); + CliMetrics.RecordPolicySimulation("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleOfflineKitImportAsync( + IServiceProvider services, + string bundlePath, + string? manifestPath, + string? bundleSignaturePath, + string? manifestSignaturePath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var options = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-import"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.import", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("offline kit import"); + + try + { + if (string.IsNullOrWhiteSpace(bundlePath)) + { + logger.LogError("Bundle path is required."); + Environment.ExitCode = 1; + return; + } + + bundlePath = Path.GetFullPath(bundlePath); + if (!File.Exists(bundlePath)) + { + logger.LogError("Bundle file {Path} not found.", bundlePath); + Environment.ExitCode = 1; + return; + } + + var metadata = await LoadOfflineKitMetadataAsync(bundlePath, cancellationToken).ConfigureAwait(false); + if (metadata is not null) + { + manifestPath ??= metadata.ManifestPath; + bundleSignaturePath ??= metadata.BundleSignaturePath; + manifestSignaturePath ??= metadata.ManifestSignaturePath; + } + + manifestPath = NormalizeFilePath(manifestPath); + bundleSignaturePath = NormalizeFilePath(bundleSignaturePath); + manifestSignaturePath = NormalizeFilePath(manifestSignaturePath); + + if (manifestPath is null) + { + manifestPath = TryInferManifestPath(bundlePath); + if (manifestPath is not null) + { + logger.LogDebug("Using inferred manifest path {Path}.", manifestPath); + } + } + + if (manifestPath is not null && !File.Exists(manifestPath)) + { + logger.LogError("Manifest file {Path} not found.", manifestPath); + Environment.ExitCode = 1; + return; + } + + if (bundleSignaturePath is not null && !File.Exists(bundleSignaturePath)) + { + logger.LogWarning("Bundle signature {Path} not found; skipping.", bundleSignaturePath); + bundleSignaturePath = null; + } + + if (manifestSignaturePath is not null && !File.Exists(manifestSignaturePath)) + { + logger.LogWarning("Manifest signature {Path} not found; skipping.", manifestSignaturePath); + manifestSignaturePath = null; + } + + if (metadata is not null) + { + var computedBundleDigest = await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false); 
+ if (!DigestsEqual(computedBundleDigest, metadata.BundleSha256)) + { + logger.LogError("Bundle digest mismatch. Expected sha256:{Expected} but computed sha256:{Actual}.", metadata.BundleSha256, computedBundleDigest); + Environment.ExitCode = 1; + return; + } + + if (manifestPath is not null) + { + var computedManifestDigest = await ComputeSha256Async(manifestPath, cancellationToken).ConfigureAwait(false); + if (!DigestsEqual(computedManifestDigest, metadata.ManifestSha256)) + { + logger.LogError("Manifest digest mismatch. Expected sha256:{Expected} but computed sha256:{Actual}.", metadata.ManifestSha256, computedManifestDigest); + Environment.ExitCode = 1; + return; + } + } + } + + var request = new OfflineKitImportRequest( + bundlePath, + manifestPath, + bundleSignaturePath, + manifestSignaturePath, + metadata?.BundleId, + metadata?.BundleSha256, + metadata?.BundleSize, + metadata?.CapturedAt, + metadata?.Channel, + metadata?.Kind, + metadata?.IsDelta, + metadata?.BaseBundleId, + metadata?.ManifestSha256, + metadata?.ManifestSize); + + var result = await client.ImportOfflineKitAsync(request, cancellationToken).ConfigureAwait(false); + CliMetrics.RecordOfflineKitImport(result.Status); + + logger.LogInformation( + "Import {ImportId} submitted at {Submitted:u} with status {Status}.", + string.IsNullOrWhiteSpace(result.ImportId) ? "" : result.ImportId, + result.SubmittedAt, + string.IsNullOrWhiteSpace(result.Status) ? 
"queued" : result.Status); + + if (!string.IsNullOrWhiteSpace(result.Message)) + { + logger.LogInformation(result.Message); + } + + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Offline kit import failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleOfflineKitStatusAsync( + IServiceProvider services, + bool asJson, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-status"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.status", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("offline kit status"); + + try + { + var status = await client.GetOfflineKitStatusAsync(cancellationToken).ConfigureAwait(false); + + if (asJson) + { + var payload = new + { + bundleId = status.BundleId, + channel = status.Channel, + kind = status.Kind, + isDelta = status.IsDelta, + baseBundleId = status.BaseBundleId, + capturedAt = status.CapturedAt, + importedAt = status.ImportedAt, + sha256 = status.BundleSha256, + sizeBytes = status.BundleSize, + components = status.Components.Select(component => new + { + component.Name, + component.Version, + component.Digest, + component.CapturedAt, + component.SizeBytes + }) + }; + + var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true }); + Console.WriteLine(json); + } + else + { + if (string.IsNullOrWhiteSpace(status.BundleId)) + { + logger.LogInformation("No offline kit bundle has been imported yet."); + } + else 
+ { + logger.LogInformation( + "Current bundle {BundleId} ({Kind}) captured {Captured:u}, imported {Imported:u}, sha256:{Digest}, size {Size}.", + status.BundleId, + status.Kind ?? "unknown", + status.CapturedAt ?? default, + status.ImportedAt ?? default, + status.BundleSha256 ?? "", + status.BundleSize.HasValue ? status.BundleSize.Value.ToString("N0", CultureInfo.InvariantCulture) : ""); + } + + if (status.Components.Count > 0) + { + var table = new Table().AddColumns("Component", "Version", "Digest", "Captured", "Size (bytes)"); + foreach (var component in status.Components) + { + table.AddRow( + component.Name, + string.IsNullOrWhiteSpace(component.Version) ? "-" : component.Version!, + string.IsNullOrWhiteSpace(component.Digest) ? "-" : $"sha256:{component.Digest}", + component.CapturedAt?.ToString("u", CultureInfo.InvariantCulture) ?? "-", + component.SizeBytes.HasValue ? component.SizeBytes.Value.ToString("N0", CultureInfo.InvariantCulture) : "-"); + } + + AnsiConsole.Write(table); + } + } + + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to read offline kit status."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static async Task LoadOfflineKitMetadataAsync(string bundlePath, CancellationToken cancellationToken) + { + var metadataPath = bundlePath + ".metadata.json"; + if (!File.Exists(metadataPath)) + { + return null; + } + + try + { + await using var stream = File.OpenRead(metadataPath); + return await JsonSerializer.DeserializeAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch + { + return null; + } + } + + private static string? NormalizeFilePath(string? path) + { + if (string.IsNullOrWhiteSpace(path)) + { + return null; + } + + return Path.GetFullPath(path); + } + + private static string? 
TryInferManifestPath(string bundlePath) + { + var directory = Path.GetDirectoryName(bundlePath); + if (string.IsNullOrWhiteSpace(directory)) + { + return null; + } + + var baseName = Path.GetFileName(bundlePath); + if (string.IsNullOrWhiteSpace(baseName)) + { + return null; + } + + baseName = Path.GetFileNameWithoutExtension(baseName); + if (baseName.EndsWith(".tar", StringComparison.OrdinalIgnoreCase)) + { + baseName = Path.GetFileNameWithoutExtension(baseName); + } + + var candidates = new[] + { + Path.Combine(directory, $"offline-manifest-{baseName}.json"), + Path.Combine(directory, "offline-manifest.json") + }; + + foreach (var candidate in candidates) + { + if (File.Exists(candidate)) + { + return Path.GetFullPath(candidate); + } + } + + return Directory.EnumerateFiles(directory, "offline-manifest*.json").FirstOrDefault(); + } + + private static bool DigestsEqual(string computed, string? expected) + { + if (string.IsNullOrWhiteSpace(expected)) + { + return true; + } + + return string.Equals(NormalizeDigest(computed), NormalizeDigest(expected), StringComparison.OrdinalIgnoreCase); + } + + private static string NormalizeDigest(string digest) + { + var value = digest.Trim(); + if (value.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) + { + value = value.Substring("sha256:".Length); + } + + return value.ToLowerInvariant(); + } + + private static async Task ComputeSha256Async(string path, CancellationToken cancellationToken) + { + await using var stream = File.OpenRead(path); + var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + private static bool TryParseDetachedJws(string value, out string encodedHeader, out string encodedSignature) + { + encodedHeader = string.Empty; + encodedSignature = string.Empty; + + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + var parts = value.Split('.'); + if (parts.Length != 3) + { + return false; + } + + 
encodedHeader = parts[0]; + encodedSignature = parts[2]; + return parts[1].Length == 0; + } + + private static byte[] Base64UrlDecode(string value) + { + var normalized = value.Replace('-', '+').Replace('_', '/'); + var padding = normalized.Length % 4; + if (padding == 2) + { + normalized += "=="; + } + else if (padding == 3) + { + normalized += "="; + } + else if (padding == 1) + { + throw new FormatException("Invalid Base64Url value."); + } + + return Convert.FromBase64String(normalized); + } + + private static CryptoSigningKey CreateVerificationSigningKey( + string keyId, + string algorithm, + string? providerHint, + string keyPem, + string keyPath) + { + if (string.IsNullOrWhiteSpace(keyPem)) + { + throw new InvalidOperationException("Verification key PEM content is empty."); + } + + using var ecdsa = ECDsa.Create(); + ecdsa.ImportFromPem(keyPem); + + var parameters = ecdsa.ExportParameters(includePrivateParameters: false); + if (parameters.D is null || parameters.D.Length == 0) + { + parameters.D = new byte[] { 0x01 }; + } + + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["source"] = Path.GetFullPath(keyPath), + ["verificationOnly"] = "true" + }; + + return new CryptoSigningKey( + new CryptoKeyReference(keyId, providerHint), + algorithm, + in parameters, + DateTimeOffset.UtcNow, + metadata: metadata); + } + + private static string FormatDuration(TimeSpan duration) + { + if (duration <= TimeSpan.Zero) + { + return "expired"; + } + + if (duration.TotalDays >= 1) + { + var days = (int)duration.TotalDays; + var hours = duration.Hours; + return hours > 0 + ? 
FormattableString.Invariant($"{days}d {hours}h") + : FormattableString.Invariant($"{days}d"); + } + + if (duration.TotalHours >= 1) + { + return FormattableString.Invariant($"{(int)duration.TotalHours}h {duration.Minutes}m"); + } + + if (duration.TotalMinutes >= 1) + { + return FormattableString.Invariant($"{(int)duration.TotalMinutes}m {duration.Seconds}s"); + } + + return FormattableString.Invariant($"{duration.Seconds}s"); + } + + private static bool TryExtractJwtClaims( + string accessToken, + out Dictionary claims, + out DateTimeOffset? issuedAt, + out DateTimeOffset? notBefore) + { + claims = new Dictionary(StringComparer.OrdinalIgnoreCase); + issuedAt = null; + notBefore = null; + + if (string.IsNullOrWhiteSpace(accessToken)) + { + return false; + } + + var parts = accessToken.Split('.'); + if (parts.Length < 2) + { + return false; + } + + if (!TryDecodeBase64Url(parts[1], out var payloadBytes)) + { + return false; + } + + try + { + using var document = JsonDocument.Parse(payloadBytes); + foreach (var property in document.RootElement.EnumerateObject()) + { + var value = FormatJsonValue(property.Value); + claims[property.Name] = value; + + if (issuedAt is null && property.NameEquals("iat") && TryParseUnixSeconds(property.Value, out var parsedIat)) + { + issuedAt = parsedIat; + } + + if (notBefore is null && property.NameEquals("nbf") && TryParseUnixSeconds(property.Value, out var parsedNbf)) + { + notBefore = parsedNbf; + } + } + + return true; + } + catch (JsonException) + { + claims.Clear(); + issuedAt = null; + notBefore = null; + return false; + } + } + + private static bool TryDecodeBase64Url(string value, out byte[] bytes) + { + bytes = Array.Empty(); + + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + var normalized = value.Replace('-', '+').Replace('_', '/'); + var padding = normalized.Length % 4; + if (padding is 2 or 3) + { + normalized = normalized.PadRight(normalized.Length + (4 - padding), '='); + } + else if (padding == 1) + { 
+ return false; + } + + try + { + bytes = Convert.FromBase64String(normalized); + return true; + } + catch (FormatException) + { + return false; + } + } + + private static string FormatJsonValue(JsonElement element) + { + return element.ValueKind switch + { + JsonValueKind.String => element.GetString() ?? string.Empty, + JsonValueKind.Number => element.TryGetInt64(out var longValue) + ? longValue.ToString(CultureInfo.InvariantCulture) + : element.GetDouble().ToString(CultureInfo.InvariantCulture), + JsonValueKind.True => "true", + JsonValueKind.False => "false", + JsonValueKind.Null => "null", + JsonValueKind.Array => FormatArray(element), + JsonValueKind.Object => element.GetRawText(), + _ => element.GetRawText() + }; + } + + private static string FormatArray(JsonElement array) + { + var values = new List(); + foreach (var item in array.EnumerateArray()) + { + values.Add(FormatJsonValue(item)); + } + + return string.Join(", ", values); + } + + private static bool TryParseUnixSeconds(JsonElement element, out DateTimeOffset value) + { + value = default; + + if (element.ValueKind == JsonValueKind.Number) + { + if (element.TryGetInt64(out var seconds)) + { + value = DateTimeOffset.FromUnixTimeSeconds(seconds); + return true; + } + + if (element.TryGetDouble(out var doubleValue)) + { + value = DateTimeOffset.FromUnixTimeSeconds((long)doubleValue); + return true; + } + } + + if (element.ValueKind == JsonValueKind.String) + { + var text = element.GetString(); + if (!string.IsNullOrWhiteSpace(text) && long.TryParse(text, NumberStyles.Integer, CultureInfo.InvariantCulture, out var seconds)) + { + value = DateTimeOffset.FromUnixTimeSeconds(seconds); + return true; + } + } + + return false; + } + + private static List CollectAdditionalClaims(Dictionary claims) + { + var result = new List(); + foreach (var pair in claims) + { + if (CommonClaimNames.Contains(pair.Key)) + { + continue; + } + + result.Add(FormattableString.Invariant($"{pair.Key}={pair.Value}")); + } + + 
result.Sort(StringComparer.OrdinalIgnoreCase); + return result; + } + + private static readonly HashSet CommonClaimNames = new(StringComparer.OrdinalIgnoreCase) + { + "aud", + "client_id", + "exp", + "iat", + "iss", + "nbf", + "scope", + "scopes", + "sub", + "token_type", + "jti" + }; + + private static async Task ExecuteExcititorCommandAsync( + IServiceProvider services, + string commandName, + bool verbose, + IDictionary? activityTags, + Func> operation, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger(commandName.Replace(' ', '-')); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity($"cli.{commandName.Replace(' ', '.')}" , ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", commandName); + if (activityTags is not null) + { + foreach (var tag in activityTags) + { + activity?.SetTag(tag.Key, tag.Value); + } + } + using var duration = CliMetrics.MeasureCommandDuration(commandName); + + try + { + var result = await operation(client).ConfigureAwait(false); + if (result.Success) + { + if (!string.IsNullOrWhiteSpace(result.Message)) + { + logger.LogInformation(result.Message); + } + else + { + logger.LogInformation("Operation completed successfully."); + } + + if (!string.IsNullOrWhiteSpace(result.Location)) + { + logger.LogInformation("Location: {Location}", result.Location); + } + + if (result.Payload is JsonElement payload && payload.ValueKind is not JsonValueKind.Undefined and not JsonValueKind.Null) + { + logger.LogDebug("Response payload: {Payload}", payload.ToString()); + } + + Environment.ExitCode = 0; + } + else + { + logger.LogError(string.IsNullOrWhiteSpace(result.Message) ? 
"Operation failed." : result.Message); + Environment.ExitCode = 1; + } + } + catch (Exception ex) + { + logger.LogError(ex, "Excititor operation failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static async Task> GatherImageDigestsAsync( + IReadOnlyList inline, + string? filePath, + CancellationToken cancellationToken) + { + var results = new List(); + var seen = new HashSet(StringComparer.Ordinal); + + void AddCandidates(string? candidate) + { + foreach (var image in SplitImageCandidates(candidate)) + { + if (seen.Add(image)) + { + results.Add(image); + } + } + } + + if (inline is not null) + { + foreach (var entry in inline) + { + AddCandidates(entry); + } + } + + if (!string.IsNullOrWhiteSpace(filePath)) + { + var path = Path.GetFullPath(filePath); + if (!File.Exists(path)) + { + throw new FileNotFoundException("Input file not found.", path); + } + + foreach (var line in File.ReadLines(path)) + { + cancellationToken.ThrowIfCancellationRequested(); + AddCandidates(line); + } + } + + if (Console.IsInputRedirected) + { + while (!cancellationToken.IsCancellationRequested) + { + var line = await Console.In.ReadLineAsync().ConfigureAwait(false); + if (line is null) + { + break; + } + + AddCandidates(line); + } + } + + return new ReadOnlyCollection(results); + } + + private static IEnumerable SplitImageCandidates(string? 
raw) + { + if (string.IsNullOrWhiteSpace(raw)) + { + yield break; + } + + var candidate = raw.Trim(); + var commentIndex = candidate.IndexOf('#'); + if (commentIndex >= 0) + { + candidate = candidate[..commentIndex].Trim(); + } + + if (candidate.Length == 0) + { + yield break; + } + + var tokens = candidate.Split(new[] { ',', ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries); + foreach (var token in tokens) + { + var trimmed = token.Trim(); + if (trimmed.Length > 0) + { + yield return trimmed; + } + } + } + + private static IReadOnlyDictionary ParseLabelSelectors(IReadOnlyList labelArguments) + { + if (labelArguments is null || labelArguments.Count == 0) + { + return EmptyLabelSelectors; + } + + var labels = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var raw in labelArguments) + { + if (string.IsNullOrWhiteSpace(raw)) + { + continue; + } + + var trimmed = raw.Trim(); + var delimiter = trimmed.IndexOf('='); + if (delimiter <= 0 || delimiter == trimmed.Length - 1) + { + throw new ArgumentException($"Invalid label '{raw}'. Expected key=value format."); + } + + var key = trimmed[..delimiter].Trim(); + var value = trimmed[(delimiter + 1)..].Trim(); + if (key.Length == 0) + { + throw new ArgumentException($"Invalid label '{raw}'. Label key cannot be empty."); + } + + labels[key] = value; + } + + return labels.Count == 0 ? EmptyLabelSelectors : new ReadOnlyDictionary(labels); + } + + private sealed record ExcititorExportManifestSummary( + string ExportId, + string? Format, + string? Algorithm, + string? Digest, + long? SizeBytes, + bool? FromCache, + DateTimeOffset? CreatedAt, + string? RekorLocation, + string? RekorIndex, + string? RekorInclusionUrl); + + private static ExcititorExportManifestSummary? TryParseExportManifest(JsonElement? 
payload) + { + if (payload is null || payload.Value.ValueKind is JsonValueKind.Undefined or JsonValueKind.Null) + { + return null; + } + + var element = payload.Value; + var exportId = GetStringProperty(element, "exportId"); + if (string.IsNullOrWhiteSpace(exportId)) + { + return null; + } + + var format = GetStringProperty(element, "format"); + var algorithm = default(string?); + var digest = default(string?); + + if (TryGetPropertyCaseInsensitive(element, "artifact", out var artifact) && artifact.ValueKind == JsonValueKind.Object) + { + algorithm = GetStringProperty(artifact, "algorithm"); + digest = GetStringProperty(artifact, "digest"); + } + + var sizeBytes = GetInt64Property(element, "sizeBytes"); + var fromCache = GetBooleanProperty(element, "fromCache"); + var createdAt = GetDateTimeOffsetProperty(element, "createdAt"); + + string? rekorLocation = null; + string? rekorIndex = null; + string? rekorInclusion = null; + + if (TryGetPropertyCaseInsensitive(element, "attestation", out var attestation) && attestation.ValueKind == JsonValueKind.Object) + { + if (TryGetPropertyCaseInsensitive(attestation, "rekor", out var rekor) && rekor.ValueKind == JsonValueKind.Object) + { + rekorLocation = GetStringProperty(rekor, "location"); + rekorIndex = GetStringProperty(rekor, "logIndex"); + var inclusion = GetStringProperty(rekor, "inclusionProofUri"); + if (!string.IsNullOrWhiteSpace(inclusion)) + { + rekorInclusion = inclusion; + } + } + } + + return new ExcititorExportManifestSummary( + exportId.Trim(), + format, + algorithm, + digest, + sizeBytes, + fromCache, + createdAt, + rekorLocation, + rekorIndex, + rekorInclusion); + } + + private static bool TryGetPropertyCaseInsensitive(JsonElement element, string propertyName, out JsonElement property) + { + if (element.ValueKind == JsonValueKind.Object && element.TryGetProperty(propertyName, out property)) + { + return true; + } + + if (element.ValueKind == JsonValueKind.Object) + { + foreach (var candidate in 
element.EnumerateObject()) + { + if (string.Equals(candidate.Name, propertyName, StringComparison.OrdinalIgnoreCase)) + { + property = candidate.Value; + return true; + } + } + } + + property = default; + return false; + } + + private static string? GetStringProperty(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) + { + return property.ValueKind switch + { + JsonValueKind.String => property.GetString(), + JsonValueKind.Number => property.ToString(), + _ => null + }; + } + + return null; + } + + private static bool? GetBooleanProperty(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) + { + return property.ValueKind switch + { + JsonValueKind.True => true, + JsonValueKind.False => false, + JsonValueKind.String when bool.TryParse(property.GetString(), out var parsed) => parsed, + _ => null + }; + } + + return null; + } + + private static long? GetInt64Property(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) + { + if (property.ValueKind == JsonValueKind.Number && property.TryGetInt64(out var value)) + { + return value; + } + + if (property.ValueKind == JsonValueKind.String + && long.TryParse(property.GetString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) + { + return parsed; + } + } + + return null; + } + + private static DateTimeOffset? GetDateTimeOffsetProperty(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property) + && property.ValueKind == JsonValueKind.String + && DateTimeOffset.TryParse(property.GetString(), CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var value)) + { + return value.ToUniversalTime(); + } + + return null; + } + + private static string BuildDigestDisplay(string? 
algorithm, string digest) + { + if (string.IsNullOrWhiteSpace(digest)) + { + return string.Empty; + } + + if (digest.Contains(':', StringComparison.Ordinal)) + { + return digest; + } + + if (string.IsNullOrWhiteSpace(algorithm) || algorithm.Equals("sha256", StringComparison.OrdinalIgnoreCase)) + { + return $"sha256:{digest}"; + } + + return $"{algorithm}:{digest}"; + } + + private static string FormatSize(long sizeBytes) + { + if (sizeBytes < 0) + { + return $"{sizeBytes} bytes"; + } + + string[] units = { "bytes", "KB", "MB", "GB", "TB" }; + double size = sizeBytes; + var unit = 0; + + while (size >= 1024 && unit < units.Length - 1) + { + size /= 1024; + unit++; + } + + return unit == 0 ? $"{sizeBytes} bytes" : $"{size:0.##} {units[unit]}"; + } + + private static string ResolveExportOutputPath(string outputPath, ExcititorExportManifestSummary manifest) + { + if (string.IsNullOrWhiteSpace(outputPath)) + { + throw new ArgumentException("Output path must be provided.", nameof(outputPath)); + } + + var fullPath = Path.GetFullPath(outputPath); + if (Directory.Exists(fullPath) + || outputPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal) + || outputPath.EndsWith(Path.AltDirectorySeparatorChar.ToString(), StringComparison.Ordinal)) + { + return Path.Combine(fullPath, BuildExportFileName(manifest)); + } + + var directory = Path.GetDirectoryName(fullPath); + if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory)) + { + Directory.CreateDirectory(directory); + } + + return fullPath; + } + + private static string BuildExportFileName(ExcititorExportManifestSummary manifest) + { + var token = !string.IsNullOrWhiteSpace(manifest.Digest) + ? manifest.Digest! 
+ : manifest.ExportId; + + token = SanitizeToken(token); + if (token.Length > 40) + { + token = token[..40]; + } + + var extension = DetermineExportExtension(manifest.Format); + return $"stellaops-excititor-{token}{extension}"; + } + + private static string DetermineExportExtension(string? format) + { + if (string.IsNullOrWhiteSpace(format)) + { + return ".bin"; + } + + return format switch + { + not null when format.Equals("jsonl", StringComparison.OrdinalIgnoreCase) => ".jsonl", + not null when format.Equals("json", StringComparison.OrdinalIgnoreCase) => ".json", + not null when format.Equals("openvex", StringComparison.OrdinalIgnoreCase) => ".json", + not null when format.Equals("csaf", StringComparison.OrdinalIgnoreCase) => ".json", + _ => ".bin" + }; + } + + private static string SanitizeToken(string token) + { + var builder = new StringBuilder(token.Length); + foreach (var ch in token) + { + if (char.IsLetterOrDigit(ch)) + { + builder.Append(char.ToLowerInvariant(ch)); + } + } + + if (builder.Length == 0) + { + builder.Append("export"); + } + + return builder.ToString(); + } + + private static string? 
ResolveLocationUrl(StellaOpsCliOptions options, string location) + { + if (string.IsNullOrWhiteSpace(location)) + { + return null; + } + + if (Uri.TryCreate(location, UriKind.Absolute, out var absolute)) + { + return absolute.ToString(); + } + + if (!string.IsNullOrWhiteSpace(options?.BackendUrl) && Uri.TryCreate(options.BackendUrl, UriKind.Absolute, out var baseUri)) + { + if (!location.StartsWith("/", StringComparison.Ordinal)) + { + location = "/" + location; + } + + return new Uri(baseUri, location).ToString(); + } + + return location; + } + + private static string BuildRuntimePolicyJson(RuntimePolicyEvaluationResult result, IReadOnlyList requestedImages) + { + var orderedImages = BuildImageOrder(requestedImages, result.Decisions.Keys); + var results = new Dictionary(StringComparer.Ordinal); + + foreach (var image in orderedImages) + { + if (result.Decisions.TryGetValue(image, out var decision)) + { + results[image] = BuildDecisionMap(decision); + } + } + + var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + var payload = new Dictionary(StringComparer.Ordinal) + { + ["ttlSeconds"] = result.TtlSeconds, + ["expiresAtUtc"] = result.ExpiresAtUtc?.ToString("O", CultureInfo.InvariantCulture), + ["policyRevision"] = result.PolicyRevision, + ["results"] = results + }; + + return JsonSerializer.Serialize(payload, options); + } + + private static IDictionary BuildDecisionMap(RuntimePolicyImageDecision decision) + { + var map = new Dictionary(StringComparer.Ordinal) + { + ["policyVerdict"] = decision.PolicyVerdict, + ["signed"] = decision.Signed, + ["hasSbomReferrers"] = decision.HasSbomReferrers + }; + + if (decision.Reasons.Count > 0) + { + map["reasons"] = decision.Reasons; + } + + if (decision.Rekor is not null) + { + var rekorMap = new Dictionary(StringComparer.Ordinal); + if (!string.IsNullOrWhiteSpace(decision.Rekor.Uuid)) + { + rekorMap["uuid"] = 
decision.Rekor.Uuid; + } + + if (!string.IsNullOrWhiteSpace(decision.Rekor.Url)) + { + rekorMap["url"] = decision.Rekor.Url; + } + + if (decision.Rekor.Verified.HasValue) + { + rekorMap["verified"] = decision.Rekor.Verified; + } + + if (rekorMap.Count > 0) + { + map["rekor"] = rekorMap; + } + } + + foreach (var kvp in decision.AdditionalProperties) + { + map[kvp.Key] = kvp.Value; + } + + return map; + } + + private static void DisplayRuntimePolicyResults(ILogger logger, RuntimePolicyEvaluationResult result, IReadOnlyList requestedImages) + { + var orderedImages = BuildImageOrder(requestedImages, result.Decisions.Keys); + var summary = new Dictionary(StringComparer.OrdinalIgnoreCase); + + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table().Border(TableBorder.Rounded) + .AddColumns("Image", "Verdict", "Signed", "SBOM Ref", "Quieted", "Confidence", "Reasons", "Attestation"); + + foreach (var image in orderedImages) + { + if (result.Decisions.TryGetValue(image, out var decision)) + { + table.AddRow( + image, + decision.PolicyVerdict, + FormatBoolean(decision.Signed), + FormatBoolean(decision.HasSbomReferrers), + FormatQuietedDisplay(decision.AdditionalProperties), + FormatConfidenceDisplay(decision.AdditionalProperties), + decision.Reasons.Count > 0 ? string.Join(Environment.NewLine, decision.Reasons) : "-", + FormatAttestation(decision.Rekor)); + + summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? 
count + 1 : 1; + + if (decision.AdditionalProperties.Count > 0) + { + var metadata = string.Join(", ", decision.AdditionalProperties.Select(kvp => $"{kvp.Key}={FormatAdditionalValue(kvp.Value)}")); + logger.LogDebug("Metadata for {Image}: {Metadata}", image, metadata); + } + } + else + { + table.AddRow(image, "", "-", "-", "-", "-", "-", "-"); + } + } + + AnsiConsole.Write(table); + } + else + { + foreach (var image in orderedImages) + { + if (result.Decisions.TryGetValue(image, out var decision)) + { + var reasons = decision.Reasons.Count > 0 ? string.Join(", ", decision.Reasons) : "none"; + logger.LogInformation( + "{Image} -> verdict={Verdict} signed={Signed} sbomRef={Sbom} quieted={Quieted} confidence={Confidence} attestation={Attestation} reasons={Reasons}", + image, + decision.PolicyVerdict, + FormatBoolean(decision.Signed), + FormatBoolean(decision.HasSbomReferrers), + FormatQuietedDisplay(decision.AdditionalProperties), + FormatConfidenceDisplay(decision.AdditionalProperties), + FormatAttestation(decision.Rekor), + reasons); + + summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? 
count + 1 : 1; + + if (decision.AdditionalProperties.Count > 0) + { + var metadata = string.Join(", ", decision.AdditionalProperties.Select(kvp => $"{kvp.Key}={FormatAdditionalValue(kvp.Value)}")); + logger.LogDebug("Metadata for {Image}: {Metadata}", image, metadata); + } + } + else + { + logger.LogWarning("{Image} -> no decision returned by backend.", image); + } + } + } + + if (summary.Count > 0) + { + var summaryText = string.Join(", ", summary.Select(kvp => $"{kvp.Key}:{kvp.Value}")); + logger.LogInformation("Verdict summary: {Summary}", summaryText); + } + } + + private static IReadOnlyList BuildImageOrder(IReadOnlyList requestedImages, IEnumerable actual) + { + var order = new List(); + var seen = new HashSet(StringComparer.Ordinal); + + if (requestedImages is not null) + { + foreach (var image in requestedImages) + { + if (!string.IsNullOrWhiteSpace(image)) + { + var trimmed = image.Trim(); + if (seen.Add(trimmed)) + { + order.Add(trimmed); + } + } + } + } + + foreach (var image in actual) + { + if (!string.IsNullOrWhiteSpace(image)) + { + var trimmed = image.Trim(); + if (seen.Add(trimmed)) + { + order.Add(trimmed); + } + } + } + + return new ReadOnlyCollection(order); + } + + private static string FormatBoolean(bool? value) + => value is null ? "unknown" : value.Value ? "yes" : "no"; + + private static string FormatQuietedDisplay(IReadOnlyDictionary metadata) + { + var quieted = GetMetadataBoolean(metadata, "quieted", "quiet"); + var quietedBy = GetMetadataString(metadata, "quietedBy", "quietedReason"); + + if (quieted is true) + { + return string.IsNullOrWhiteSpace(quietedBy) ? "yes" : $"yes ({quietedBy})"; + } + + if (quieted is false) + { + return "no"; + } + + return string.IsNullOrWhiteSpace(quietedBy) ? "-" : $"? 
({quietedBy})"; + } + + private static string FormatConfidenceDisplay(IReadOnlyDictionary metadata) + { + var confidence = GetMetadataDouble(metadata, "confidence"); + var confidenceBand = GetMetadataString(metadata, "confidenceBand", "confidenceTier"); + + if (confidence.HasValue && !string.IsNullOrWhiteSpace(confidenceBand)) + { + return string.Format(CultureInfo.InvariantCulture, "{0:0.###} ({1})", confidence.Value, confidenceBand); + } + + if (confidence.HasValue) + { + return confidence.Value.ToString("0.###", CultureInfo.InvariantCulture); + } + + if (!string.IsNullOrWhiteSpace(confidenceBand)) + { + return confidenceBand!; + } + + return "-"; + } + + private static string FormatAttestation(RuntimePolicyRekorReference? rekor) + { + if (rekor is null) + { + return "-"; + } + + var uuid = string.IsNullOrWhiteSpace(rekor.Uuid) ? null : rekor.Uuid; + var url = string.IsNullOrWhiteSpace(rekor.Url) ? null : rekor.Url; + var verified = rekor.Verified; + + var core = uuid ?? url; + if (!string.IsNullOrEmpty(core)) + { + if (verified.HasValue) + { + var suffix = verified.Value ? " (verified)" : " (unverified)"; + return core + suffix; + } + + return core!; + } + + if (verified.HasValue) + { + return verified.Value ? "verified" : "unverified"; + } + + return "-"; + } + + private static bool? GetMetadataBoolean(IReadOnlyDictionary metadata, params string[] keys) + { + foreach (var key in keys) + { + if (metadata.TryGetValue(key, out var value) && value is not null) + { + switch (value) + { + case bool b: + return b; + case string s when bool.TryParse(s, out var parsed): + return parsed; + } + } + } + + return null; + } + + private static string? GetMetadataString(IReadOnlyDictionary metadata, params string[] keys) + { + foreach (var key in keys) + { + if (metadata.TryGetValue(key, out var value) && value is not null) + { + if (value is string s) + { + return string.IsNullOrWhiteSpace(s) ? null : s; + } + } + } + + return null; + } + + private static double? 
GetMetadataDouble(IReadOnlyDictionary metadata, params string[] keys) + { + foreach (var key in keys) + { + if (metadata.TryGetValue(key, out var value) && value is not null) + { + switch (value) + { + case double d: + return d; + case float f: + return f; + case decimal m: + return (double)m; + case long l: + return l; + case int i: + return i; + case string s when double.TryParse(s, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var parsed): + return parsed; + } + } + } + + return null; + } + + private static PolicySimulationOutputFormat DeterminePolicySimulationFormat(string? value, string? outputPath) + { + if (!string.IsNullOrWhiteSpace(value)) + { + return value.Trim().ToLowerInvariant() switch + { + "table" => PolicySimulationOutputFormat.Table, + "json" => PolicySimulationOutputFormat.Json, + _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.") + }; + } + + if (!string.IsNullOrWhiteSpace(outputPath) || Console.IsOutputRedirected) + { + return PolicySimulationOutputFormat.Json; + } + + return PolicySimulationOutputFormat.Table; + } + + private static object BuildPolicySimulationPayload( + string policyId, + int? baseVersion, + int? candidateVersion, + IReadOnlyList sbomSet, + IReadOnlyDictionary environment, + PolicySimulationResult result) + => new + { + policyId, + baseVersion, + candidateVersion, + sbomSet = sbomSet.Count == 0 ? Array.Empty() : sbomSet, + environment = environment.Count == 0 ? 
null : environment, + diff = result.Diff, + explainUri = result.ExplainUri + }; + + private static void RenderPolicySimulationResult( + ILogger logger, + object payload, + PolicySimulationResult result, + PolicySimulationOutputFormat format) + { + if (format == PolicySimulationOutputFormat.Json) + { + var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); + Console.WriteLine(json); + return; + } + + logger.LogInformation( + "Policy diff summary — Added: {Added}, Removed: {Removed}, Unchanged: {Unchanged}.", + result.Diff.Added, + result.Diff.Removed, + result.Diff.Unchanged); + + if (result.Diff.BySeverity.Count > 0) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table().AddColumns("Severity", "Up", "Down"); + foreach (var entry in result.Diff.BySeverity.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)) + { + table.AddRow( + entry.Key, + FormatDelta(entry.Value.Up), + FormatDelta(entry.Value.Down)); + } + + AnsiConsole.Write(table); + } + else + { + foreach (var entry in result.Diff.BySeverity.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)) + { + logger.LogInformation("Severity {Severity}: up={Up}, down={Down}", entry.Key, entry.Value.Up ?? 0, entry.Value.Down ?? 0); + } + } + } + + if (result.Diff.RuleHits.Count > 0) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table().AddColumns("Rule", "Up", "Down"); + foreach (var hit in result.Diff.RuleHits) + { + table.AddRow( + string.IsNullOrWhiteSpace(hit.RuleName) ? hit.RuleId : $"{hit.RuleName} ({hit.RuleId})", + FormatDelta(hit.Up), + FormatDelta(hit.Down)); + } + + AnsiConsole.Write(table); + } + else + { + foreach (var hit in result.Diff.RuleHits) + { + logger.LogInformation("Rule {RuleId}: up={Up}, down={Down}", hit.RuleId, hit.Up ?? 0, hit.Down ?? 
// NOTE(review): the first lines of this hunk are the tail of a rendering method whose
// head lies outside the visible range; reproduced unchanged apart from formatting.
                0);
            }
        }
    }

    if (!string.IsNullOrWhiteSpace(result.ExplainUri))
    {
        logger.LogInformation("Explain trace available at {ExplainUri}.", result.ExplainUri);
    }
}

/// <summary>
/// De-duplicates and sorts the SBOM identifiers supplied on the command line.
/// Blank entries are dropped; comparison is ordinal and case-sensitive.
/// </summary>
private static IReadOnlyList<string> NormalizePolicySbomSet(IReadOnlyList<string> arguments)
{
    if (arguments is null || arguments.Count == 0)
    {
        return EmptyPolicySbomSet;
    }

    var distinct = new SortedSet<string>(StringComparer.Ordinal);
    foreach (var candidate in arguments)
    {
        if (!string.IsNullOrWhiteSpace(candidate))
        {
            distinct.Add(candidate.Trim());
        }
    }

    return distinct.Count == 0
        ? EmptyPolicySbomSet
        : new ReadOnlyCollection<string>(distinct.ToList());
}

/// <summary>
/// Parses <c>key=value</c> environment assignments into a sorted, read-only map.
/// Keys are lower-cased; values are coerced via <see cref="ParsePolicyEnvironmentValue"/>.
/// </summary>
/// <exception cref="ArgumentException">Thrown when an assignment is not of the form key=value.</exception>
private static IReadOnlyDictionary<string, object?> ParsePolicyEnvironment(IReadOnlyList<string> arguments)
{
    if (arguments is null || arguments.Count == 0)
    {
        return EmptyPolicyEnvironment;
    }

    var environment = new SortedDictionary<string, object?>(StringComparer.Ordinal);
    foreach (var assignment in arguments)
    {
        if (string.IsNullOrWhiteSpace(assignment))
        {
            continue;
        }

        var trimmed = assignment.Trim();
        var separator = trimmed.IndexOf('=');

        // Reject missing key, missing '=', or missing value ("=v", "k", "k=").
        if (separator <= 0 || separator == trimmed.Length - 1)
        {
            throw new ArgumentException($"Invalid environment assignment '{assignment}'. Expected key=value.");
        }

        var key = trimmed[..separator].Trim().ToLowerInvariant();
        if (string.IsNullOrWhiteSpace(key))
        {
            throw new ArgumentException($"Invalid environment assignment '{assignment}'. Expected key=value.");
        }

        environment[key] = ParsePolicyEnvironmentValue(trimmed[(separator + 1)..].Trim());
    }

    return environment.Count == 0
        ? EmptyPolicyEnvironment
        : new ReadOnlyDictionary<string, object?>(environment);
}

/// <summary>
/// Coerces a raw environment value token: strips one matching pair of quotes,
/// then tries null / bool / long / double before falling back to the raw string.
/// </summary>
private static object? ParsePolicyEnvironmentValue(string token)
{
    if (string.IsNullOrWhiteSpace(token))
    {
        return string.Empty;
    }

    var value = token;

    // Strip a single pair of matching surrounding quotes ("..." or '...').
    if (value.Length >= 2 && (value[0] == '"' || value[0] == '\'') && value[^1] == value[0])
    {
        value = value[1..^1];
    }

    if (string.Equals(value, "null", StringComparison.OrdinalIgnoreCase))
    {
        return null;
    }

    if (bool.TryParse(value, out var boolValue))
    {
        return boolValue;
    }

    if (long.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var longValue))
    {
        return longValue;
    }

    if (double.TryParse(value, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var doubleValue))
    {
        return doubleValue;
    }

    return value;
}

// Simulation output is plain JSON; delegate to the shared JSON writer.
private static Task WriteSimulationOutputAsync(string outputPath, object payload, CancellationToken cancellationToken)
    => WriteJsonPayloadAsync(outputPath, payload, cancellationToken);

/// <summary>
/// Serializes <paramref name="payload"/> with the shared simulation options and writes
/// it (plus a trailing newline) to <paramref name="outputPath"/>, creating directories as needed.
/// </summary>
private static async Task WriteJsonPayloadAsync(string outputPath, object payload, CancellationToken cancellationToken)
{
    var fullPath = Path.GetFullPath(outputPath);
    var directory = Path.GetDirectoryName(fullPath);
    if (!string.IsNullOrWhiteSpace(directory))
    {
        Directory.CreateDirectory(directory);
    }

    var json = JsonSerializer.Serialize(payload, SimulationJsonOptions);
    await File.WriteAllTextAsync(fullPath, json + Environment.NewLine, cancellationToken).ConfigureAwait(false);
}

// Exit 20 signals "diff present" only when the caller opted in via --fail-on-diff.
private static int DetermineSimulationExitCode(PolicySimulationResult result, bool failOnDiff)
    => failOnDiff && result.Diff.Added + result.Diff.Removed > 0 ? 20 : 0;

/// <summary>
/// Logs a policy-simulation API failure and maps its error code / HTTP status to a
/// CLI exit code (auth failures → 12, unknown → 1).
/// </summary>
private static void HandlePolicySimulationFailure(PolicyApiException exception, ILogger logger)
{
    var exitCode = exception.ErrorCode switch
    {
        "ERR_POL_001" => 10,
        "ERR_POL_002" or "ERR_POL_005" => 12,
        "ERR_POL_003" => 21,
        "ERR_POL_004" => 22,
        "ERR_POL_006" => 23,
        _ when exception.StatusCode is HttpStatusCode.Forbidden or HttpStatusCode.Unauthorized => 12,
        _ => 1
    };

    if (string.IsNullOrWhiteSpace(exception.ErrorCode))
    {
        logger.LogError("Policy simulation failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message);
    }
    else
    {
        logger.LogError("Policy simulation failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message);
    }

    CliMetrics.RecordPolicySimulation("error");
    Environment.ExitCode = exitCode;
}

/// <summary>
/// Logs a policy-activation API failure and maps its error code / HTTP status to a
/// CLI exit code (activation-specific codes 70-72; auth failures → 12).
/// </summary>
private static void HandlePolicyActivationFailure(PolicyApiException exception, ILogger logger)
{
    var exitCode = exception.ErrorCode switch
    {
        "ERR_POL_002" => 70,
        "ERR_POL_003" => 71,
        "ERR_POL_004" => 72,
        _ when exception.StatusCode is HttpStatusCode.Forbidden or HttpStatusCode.Unauthorized => 12,
        _ => 1
    };

    if (string.IsNullOrWhiteSpace(exception.ErrorCode))
    {
        logger.LogError("Policy activation failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message);
    }
    else
    {
        logger.LogError("Policy activation failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message);
    }

    CliMetrics.RecordPolicyActivation("error");
    Environment.ExitCode = exitCode;
}

/// <summary>
/// Trims, optionally lower-cases, and de-duplicates (case-insensitively) filter values,
/// preserving first-seen order.
/// </summary>
private static IReadOnlyList<string> NormalizePolicyFilterValues(string[] values, bool toLower = false)
{
    if (values is null || values.Length == 0)
    {
        return Array.Empty<string>();
    }

    var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    var ordered = new List<string>();
    foreach (var value in values)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            continue;
        }

        var candidate = value.Trim();
        if (toLower)
        {
            candidate = candidate.ToLowerInvariant();
        }

        if (seen.Add(candidate))
        {
            ordered.Add(candidate);
        }
    }

    return ordered.Count == 0 ? Array.Empty<string>() : ordered;
}
// --- policy activation helpers -------------------------------------------

/// <summary>Trims and lower-cases a priority value; blank input yields null.</summary>
private static string? NormalizePolicyPriority(string? priority)
{
    if (string.IsNullOrWhiteSpace(priority))
    {
        return null;
    }

    var trimmed = priority.Trim();
    return string.IsNullOrWhiteSpace(trimmed) ? null : trimmed.ToLowerInvariant();
}

/// <summary>Canonicalizes an activation status string; blank input becomes "unknown".</summary>
private static string NormalizePolicyActivationOutcome(string status)
    => string.IsNullOrWhiteSpace(status) ? "unknown" : status.Trim().ToLowerInvariant();

// 75 signals the caller that a second approval is still outstanding.
private static int DeterminePolicyActivationExitCode(string outcome)
    => string.Equals(outcome, "pending_second_approval", StringComparison.Ordinal) ? 75 : 0;

/// <summary>
/// Renders an activation result either as Spectre tables (interactive console)
/// or as plain invariant-culture lines (redirected output).
/// </summary>
private static void RenderPolicyActivationResult(PolicyActivationResult result, PolicyActivationRequest request)
{
    if (AnsiConsole.Profile.Capabilities.Interactive)
    {
        var details = new Table().Expand();
        details.Border(TableBorder.Rounded);
        details.AddColumn(new TableColumn("[grey]Field[/]").LeftAligned());
        details.AddColumn(new TableColumn("[grey]Value[/]").LeftAligned());
        details.AddRow("Policy", Markup.Escape($"{result.Revision.PolicyId} v{result.Revision.Version}"));
        details.AddRow("Status", FormatActivationStatus(result.Status));
        details.AddRow("Requires 2 approvals", result.Revision.RequiresTwoPersonApproval ? "[yellow]yes[/]" : "[green]no[/]");
        details.AddRow("Created (UTC)", Markup.Escape(FormatUpdatedAt(result.Revision.CreatedAt)));
        details.AddRow("Activated (UTC)", result.Revision.ActivatedAt.HasValue
            ? Markup.Escape(FormatUpdatedAt(result.Revision.ActivatedAt.Value))
            : "[grey](not yet active)[/]");

        if (request.RunNow)
        {
            details.AddRow("Run", "[green]immediate[/]");
        }
        else if (request.ScheduledAt.HasValue)
        {
            details.AddRow("Scheduled at", Markup.Escape(FormatUpdatedAt(request.ScheduledAt.Value)));
        }

        if (!string.IsNullOrWhiteSpace(request.Priority))
        {
            details.AddRow("Priority", Markup.Escape(request.Priority!));
        }

        if (request.Rollback)
        {
            details.AddRow("Rollback", "[yellow]yes[/]");
        }

        if (!string.IsNullOrWhiteSpace(request.IncidentId))
        {
            details.AddRow("Incident", Markup.Escape(request.IncidentId!));
        }

        if (!string.IsNullOrWhiteSpace(request.Comment))
        {
            details.AddRow("Note", Markup.Escape(request.Comment!));
        }

        AnsiConsole.Write(details);

        if (result.Revision.Approvals.Count > 0)
        {
            var approvalTable = new Table().Title("[grey]Approvals[/]");
            approvalTable.Border(TableBorder.Minimal);
            approvalTable.AddColumn(new TableColumn("Actor").LeftAligned());
            approvalTable.AddColumn(new TableColumn("Approved (UTC)").LeftAligned());
            approvalTable.AddColumn(new TableColumn("Comment").LeftAligned());

            foreach (var approval in result.Revision.Approvals)
            {
                var comment = string.IsNullOrWhiteSpace(approval.Comment) ? "-" : approval.Comment!;
                approvalTable.AddRow(
                    Markup.Escape(approval.ActorId),
                    Markup.Escape(FormatUpdatedAt(approval.ApprovedAt)),
                    Markup.Escape(comment));
            }

            AnsiConsole.Write(approvalTable);
        }
        else
        {
            AnsiConsole.MarkupLine("[grey]No activation approvals recorded yet.[/]");
        }
    }
    else
    {
        // Non-interactive fallback: plain, invariant-culture lines.
        Console.WriteLine(FormattableString.Invariant($"Policy: {result.Revision.PolicyId} v{result.Revision.Version}"));
        Console.WriteLine(FormattableString.Invariant($"Status: {NormalizePolicyActivationOutcome(result.Status)}"));
        Console.WriteLine(FormattableString.Invariant($"Requires 2 approvals: {(result.Revision.RequiresTwoPersonApproval ? "yes" : "no")}"));
        Console.WriteLine(FormattableString.Invariant($"Created (UTC): {FormatUpdatedAt(result.Revision.CreatedAt)}"));
        Console.WriteLine(FormattableString.Invariant($"Activated (UTC): {(result.Revision.ActivatedAt.HasValue ? FormatUpdatedAt(result.Revision.ActivatedAt.Value) : "(not yet active)")}"));

        if (request.RunNow)
        {
            Console.WriteLine("Run: immediate");
        }
        else if (request.ScheduledAt.HasValue)
        {
            Console.WriteLine(FormattableString.Invariant($"Scheduled at: {FormatUpdatedAt(request.ScheduledAt.Value)}"));
        }

        if (!string.IsNullOrWhiteSpace(request.Priority))
        {
            Console.WriteLine(FormattableString.Invariant($"Priority: {request.Priority}"));
        }

        if (request.Rollback)
        {
            Console.WriteLine("Rollback: yes");
        }

        if (!string.IsNullOrWhiteSpace(request.IncidentId))
        {
            Console.WriteLine(FormattableString.Invariant($"Incident: {request.IncidentId}"));
        }

        if (!string.IsNullOrWhiteSpace(request.Comment))
        {
            Console.WriteLine(FormattableString.Invariant($"Note: {request.Comment}"));
        }

        if (result.Revision.Approvals.Count == 0)
        {
            Console.WriteLine("Approvals: none");
        }
        else
        {
            foreach (var approval in result.Revision.Approvals)
            {
                var comment = string.IsNullOrWhiteSpace(approval.Comment) ? "-" : approval.Comment;
                Console.WriteLine(FormattableString.Invariant($"Approval: {approval.ActorId} at {FormatUpdatedAt(approval.ApprovedAt)} ({comment})"));
            }
        }
    }
}

/// <summary>Maps an activation status to colour-coded Spectre markup.</summary>
private static string FormatActivationStatus(string status)
{
    var normalized = NormalizePolicyActivationOutcome(status);
    return normalized switch
    {
        "activated" => "[green]activated[/]",
        "already_active" => "[yellow]already_active[/]",
        "pending_second_approval" => "[yellow]pending_second_approval[/]",
        _ => "[red]" + Markup.Escape(string.IsNullOrWhiteSpace(status) ? "unknown" : status) + "[/]"
    };
}

/// <summary>
/// Parses the --since option as an ISO-8601 timestamp (UTC-normalized); null when blank.
/// </summary>
/// <exception cref="ArgumentException">Thrown when the value is not a valid timestamp.</exception>
private static DateTimeOffset? ParsePolicySince(string? value)
{
    if (string.IsNullOrWhiteSpace(value))
    {
        return null;
    }

    if (DateTimeOffset.TryParse(
        value.Trim(),
        CultureInfo.InvariantCulture,
        DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal,
        out var parsed))
    {
        return parsed.ToUniversalTime();
    }

    throw new ArgumentException("Invalid --since value. Use an ISO-8601 timestamp.");
}
// --- policy findings: option parsing, payloads, table rendering -----------

/// <summary>Lower-cases and trims the explain mode; blank input yields null.</summary>
private static string? NormalizeExplainMode(string? mode)
    => string.IsNullOrWhiteSpace(mode) ? null : mode.Trim().ToLowerInvariant();

/// <summary>
/// Resolves the findings output format: an explicit --format wins; otherwise JSON
/// when writing to a file or a redirected stream, else a human table.
/// </summary>
/// <exception cref="ArgumentException">Thrown for a format other than 'table' or 'json'.</exception>
private static PolicyFindingsOutputFormat DeterminePolicyFindingsFormat(string? value, string? outputPath)
{
    if (!string.IsNullOrWhiteSpace(value))
    {
        return value.Trim().ToLowerInvariant() switch
        {
            "table" => PolicyFindingsOutputFormat.Table,
            "json" => PolicyFindingsOutputFormat.Json,
            _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.")
        };
    }

    var preferJson = !string.IsNullOrWhiteSpace(outputPath) || Console.IsOutputRedirected;
    return preferJson ? PolicyFindingsOutputFormat.Json : PolicyFindingsOutputFormat.Table;
}

/// <summary>
/// Shapes a findings page (plus the query that produced it) into the anonymous
/// JSON payload emitted by the CLI. Timestamps are ISO-8601 UTC; MinValue maps to null.
/// </summary>
private static object BuildPolicyFindingsPayload(
    string policyId,
    PolicyFindingsQuery query,
    PolicyFindingsPage page)
    => new
    {
        policyId,
        filters = new
        {
            sbom = query.SbomIds,
            status = query.Statuses,
            severity = query.Severities,
            cursor = query.Cursor,
            page = query.Page,
            pageSize = query.PageSize,
            since = query.Since?.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture)
        },
        items = page.Items.Select(item => new
        {
            findingId = item.FindingId,
            status = item.Status,
            severity = new
            {
                normalized = item.Severity.Normalized,
                score = item.Severity.Score
            },
            sbomId = item.SbomId,
            advisoryIds = item.AdvisoryIds,
            vex = item.Vex is null ? null : new
            {
                winningStatementId = item.Vex.WinningStatementId,
                source = item.Vex.Source,
                status = item.Vex.Status
            },
            policyVersion = item.PolicyVersion,
            updatedAt = item.UpdatedAt == DateTimeOffset.MinValue ? null : item.UpdatedAt.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture),
            runId = item.RunId
        }),
        nextCursor = page.NextCursor,
        totalCount = page.TotalCount
    };

/// <summary>Shapes a single finding document into the CLI's JSON payload.</summary>
private static object BuildPolicyFindingPayload(string policyId, PolicyFindingDocument finding)
    => new
    {
        policyId,
        finding = new
        {
            findingId = finding.FindingId,
            status = finding.Status,
            severity = new
            {
                normalized = finding.Severity.Normalized,
                score = finding.Severity.Score
            },
            sbomId = finding.SbomId,
            advisoryIds = finding.AdvisoryIds,
            vex = finding.Vex is null ? null : new
            {
                winningStatementId = finding.Vex.WinningStatementId,
                source = finding.Vex.Source,
                status = finding.Vex.Status
            },
            policyVersion = finding.PolicyVersion,
            updatedAt = finding.UpdatedAt == DateTimeOffset.MinValue ? null : finding.UpdatedAt.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture),
            runId = finding.RunId
        }
    };

/// <summary>Shapes an explain trace (steps + sealed-mode hints) into the CLI's JSON payload.</summary>
private static object BuildPolicyFindingExplainPayload(
    string policyId,
    string findingId,
    string? mode,
    PolicyFindingExplainResult explain)
    => new
    {
        policyId,
        findingId,
        mode,
        explain = new
        {
            policyVersion = explain.PolicyVersion,
            steps = explain.Steps.Select(step => new
            {
                rule = step.Rule,
                status = step.Status,
                action = step.Action,
                score = step.Score,
                inputs = step.Inputs,
                evidence = step.Evidence
            }),
            sealedHints = explain.SealedHints.Select(hint => hint.Message)
        }
    };

/// <summary>
/// Renders a findings page as a Spectre table (interactive) or via the logger
/// (redirected output), followed by count / total / cursor summary lines.
/// </summary>
private static void RenderPolicyFindingsTable(ILogger logger, PolicyFindingsPage page)
{
    var items = page.Items;
    var interactive = AnsiConsole.Profile.Capabilities.Interactive;

    if (items.Count == 0)
    {
        if (interactive)
        {
            AnsiConsole.MarkupLine("[yellow]No findings matched the provided filters.[/]");
        }
        else
        {
            logger.LogWarning("No findings matched the provided filters.");
        }
        return;
    }

    if (interactive)
    {
        var table = new Table()
            .Border(TableBorder.Rounded)
            .Centered();

        table.AddColumn("Finding");
        table.AddColumn("Status");
        table.AddColumn("Severity");
        table.AddColumn("Score");
        table.AddColumn("SBOM");
        table.AddColumn("Advisories");
        table.AddColumn("Updated (UTC)");

        foreach (var item in items)
        {
            table.AddRow(
                Markup.Escape(item.FindingId),
                Markup.Escape(item.Status),
                Markup.Escape(item.Severity.Normalized),
                Markup.Escape(FormatScore(item.Severity.Score)),
                Markup.Escape(item.SbomId),
                Markup.Escape(FormatListPreview(item.AdvisoryIds)),
                Markup.Escape(FormatUpdatedAt(item.UpdatedAt)));
        }

        AnsiConsole.Write(table);
    }
    else
    {
        foreach (var item in items)
        {
            logger.LogInformation(
                "{Finding} — Status {Status}, Severity {Severity} ({Score}), SBOM {Sbom}, Updated {Updated}",
                item.FindingId,
                item.Status,
                item.Severity.Normalized,
                item.Severity.Score?.ToString("0.00", CultureInfo.InvariantCulture) ?? "n/a",
                item.SbomId,
                FormatUpdatedAt(item.UpdatedAt));
        }
    }

    logger.LogInformation("{Count} finding(s).", items.Count);

    if (page.TotalCount.HasValue)
    {
        logger.LogInformation("Total available: {Total}", page.TotalCount.Value);
    }

    if (!string.IsNullOrWhiteSpace(page.NextCursor))
    {
        logger.LogInformation("Next cursor: {Cursor}", page.NextCursor);
    }
}
// --- finding detail / explain rendering -----------------------------------

/// <summary>
/// Renders one finding as a two-column Spectre table (interactive) or as
/// indented logger lines (redirected output). Optional fields are elided
/// in the logger path but shown as placeholders in the table.
/// </summary>
private static void RenderPolicyFindingDetails(ILogger logger, PolicyFindingDocument finding)
{
    if (AnsiConsole.Profile.Capabilities.Interactive)
    {
        var table = new Table()
            .Border(TableBorder.Rounded)
            .AddColumn("Field")
            .AddColumn("Value");

        table.AddRow("Finding", Markup.Escape(finding.FindingId));
        table.AddRow("Status", Markup.Escape(finding.Status));
        table.AddRow("Severity", Markup.Escape(FormatSeverity(finding.Severity)));
        table.AddRow("SBOM", Markup.Escape(finding.SbomId));
        table.AddRow("Policy Version", Markup.Escape(finding.PolicyVersion.ToString(CultureInfo.InvariantCulture)));
        table.AddRow("Updated (UTC)", Markup.Escape(FormatUpdatedAt(finding.UpdatedAt)));
        table.AddRow("Run Id", Markup.Escape(string.IsNullOrWhiteSpace(finding.RunId) ? "(none)" : finding.RunId));
        table.AddRow("Advisories", Markup.Escape(FormatListPreview(finding.AdvisoryIds)));
        table.AddRow("VEX", Markup.Escape(FormatVexMetadata(finding.Vex)));

        AnsiConsole.Write(table);
        return;
    }

    logger.LogInformation("Finding {Finding}", finding.FindingId);
    logger.LogInformation("  Status: {Status}", finding.Status);
    logger.LogInformation("  Severity: {Severity}", FormatSeverity(finding.Severity));
    logger.LogInformation("  SBOM: {Sbom}", finding.SbomId);
    logger.LogInformation("  Policy version: {Version}", finding.PolicyVersion);
    logger.LogInformation("  Updated (UTC): {Updated}", FormatUpdatedAt(finding.UpdatedAt));
    if (!string.IsNullOrWhiteSpace(finding.RunId))
    {
        logger.LogInformation("  Run Id: {Run}", finding.RunId);
    }
    if (finding.AdvisoryIds.Count > 0)
    {
        logger.LogInformation("  Advisories: {Advisories}", string.Join(", ", finding.AdvisoryIds));
    }
    if (!string.IsNullOrWhiteSpace(FormatVexMetadata(finding.Vex)))
    {
        logger.LogInformation("  VEX: {Vex}", FormatVexMetadata(finding.Vex));
    }
}

/// <summary>
/// Renders an explain trace: a rule-by-rule table (interactive) or logger lines,
/// followed by any sealed-mode hints.
/// </summary>
private static void RenderPolicyFindingExplain(ILogger logger, PolicyFindingExplainResult explain)
{
    var interactive = AnsiConsole.Profile.Capabilities.Interactive;

    if (explain.Steps.Count == 0)
    {
        if (interactive)
        {
            AnsiConsole.MarkupLine("[yellow]No explain steps were returned.[/]");
        }
        else
        {
            logger.LogWarning("No explain steps were returned.");
        }
    }
    else if (interactive)
    {
        var table = new Table()
            .Border(TableBorder.Rounded)
            .AddColumn("Rule")
            .AddColumn("Status")
            .AddColumn("Action")
            .AddColumn("Score")
            .AddColumn("Inputs")
            .AddColumn("Evidence");

        foreach (var step in explain.Steps)
        {
            table.AddRow(
                Markup.Escape(step.Rule),
                Markup.Escape(step.Status ?? "(n/a)"),
                Markup.Escape(step.Action ?? "(n/a)"),
                Markup.Escape(step.Score.HasValue ? step.Score.Value.ToString("0.00", CultureInfo.InvariantCulture) : "-"),
                Markup.Escape(FormatKeyValuePairs(step.Inputs)),
                Markup.Escape(FormatKeyValuePairs(step.Evidence)));
        }

        AnsiConsole.Write(table);
    }
    else
    {
        logger.LogInformation("{Count} explain step(s).", explain.Steps.Count);
        foreach (var step in explain.Steps)
        {
            logger.LogInformation(
                "Rule {Rule} — Status {Status}, Action {Action}, Score {Score}, Inputs {Inputs}",
                step.Rule,
                step.Status ?? "n/a",
                step.Action ?? "n/a",
                step.Score?.ToString("0.00", CultureInfo.InvariantCulture) ?? "n/a",
                FormatKeyValuePairs(step.Inputs));

            if (step.Evidence is not null && step.Evidence.Count > 0)
            {
                logger.LogInformation("  Evidence: {Evidence}", FormatKeyValuePairs(step.Evidence));
            }
        }
    }

    if (explain.SealedHints.Count > 0)
    {
        if (interactive)
        {
            AnsiConsole.MarkupLine("[grey]Hints:[/]");
            foreach (var hint in explain.SealedHints)
            {
                AnsiConsole.MarkupLine($"  • {Markup.Escape(hint.Message)}");
            }
        }
        else
        {
            foreach (var hint in explain.SealedHints)
            {
                logger.LogInformation("Hint: {Hint}", hint.Message);
            }
        }
    }
}

/// <summary>Formats severity as "normalized (score)" when a score is present.</summary>
private static string FormatSeverity(PolicyFindingSeverity severity)
    => severity.Score.HasValue
        ? FormattableString.Invariant($"{severity.Normalized} ({severity.Score.Value:0.00})")
        : severity.Normalized;

/// <summary>
/// Joins up to three values for display, appending "(+N)" for the remainder;
/// empty input renders as "(none)".
/// </summary>
private static string FormatListPreview(IReadOnlyList<string> values)
{
    if (values is null || values.Count == 0)
    {
        return "(none)";
    }

    const int MaxItems = 3;
    if (values.Count <= MaxItems)
    {
        return string.Join(", ", values);
    }

    var preview = string.Join(", ", values.Take(MaxItems));
    return FormattableString.Invariant($"{preview} (+{values.Count - MaxItems})");
}

/// <summary>Formats a timestamp as "yyyy-MM-ddTHH:mm:ssZ" in UTC; MinValue renders as "(unknown)".</summary>
private static string FormatUpdatedAt(DateTimeOffset timestamp)
    => timestamp == DateTimeOffset.MinValue
        ? "(unknown)"
        : timestamp.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss'Z'", CultureInfo.InvariantCulture);
// --- small formatters, failure handling, shared statics -------------------

/// <summary>Formats an optional score to two decimals (invariant); "-" when absent.</summary>
private static string FormatScore(double? score)
    => score.HasValue ? score.Value.ToString("0.00", CultureInfo.InvariantCulture) : "-";

// NOTE(review): value type of the map is inferred from usage (pair.Value is
// interpolated directly) — confirm against the declaration stripped from this hunk.
/// <summary>Joins a map as "k=v, k=v"; null/empty renders as "(none)".</summary>
private static string FormatKeyValuePairs(IReadOnlyDictionary<string, object?>? values)
{
    if (values is null || values.Count == 0)
    {
        return "(none)";
    }

    return string.Join(", ", values.Select(pair => $"{pair.Key}={pair.Value}"));
}

/// <summary>
/// Summarizes VEX metadata as "winning=…, source=…, status=…", skipping blank
/// parts; null or fully blank metadata renders as "(none)".
/// </summary>
private static string FormatVexMetadata(PolicyFindingVexMetadata? value)
{
    if (value is null)
    {
        return "(none)";
    }

    var parts = new List<string>(3);
    if (!string.IsNullOrWhiteSpace(value.WinningStatementId))
    {
        parts.Add($"winning={value.WinningStatementId}");
    }

    if (!string.IsNullOrWhiteSpace(value.Source))
    {
        parts.Add($"source={value.Source}");
    }

    if (!string.IsNullOrWhiteSpace(value.Status))
    {
        parts.Add($"status={value.Status}");
    }

    return parts.Count == 0 ? "(none)" : string.Join(", ", parts);
}

/// <summary>
/// Logs a findings API failure, records the metric, and sets the exit code
/// (12 for auth failures, 1 otherwise).
/// </summary>
private static void HandlePolicyFindingsFailure(PolicyApiException exception, ILogger logger, Action<string> recordMetric)
{
    // Unauthorized/Forbidden → 12; every other status (incl. NotFound) → 1.
    var exitCode = exception.StatusCode is HttpStatusCode.Unauthorized or HttpStatusCode.Forbidden ? 12 : 1;

    if (string.IsNullOrWhiteSpace(exception.ErrorCode))
    {
        logger.LogError("Policy API request failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message);
    }
    else
    {
        logger.LogError("Policy API request failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message);
    }

    recordMetric("error");
    Environment.ExitCode = exitCode;
}

/// <summary>Formats an optional delta with thousands separators; "-" when absent.</summary>
private static string FormatDelta(int? value)
    => value.HasValue ? value.Value.ToString("N0", CultureInfo.InvariantCulture) : "-";

// Web defaults (camelCase) + indentation; shared so serializer metadata is cached.
private static readonly JsonSerializerOptions SimulationJsonOptions =
    new(JsonSerializerDefaults.Web) { WriteIndented = true };

private static readonly IReadOnlyDictionary<string, object?> EmptyPolicyEnvironment =
    new ReadOnlyDictionary<string, object?>(new Dictionary<string, object?>(0, StringComparer.Ordinal));

private static readonly IReadOnlyList<string> EmptyPolicySbomSet =
    new ReadOnlyCollection<string>(Array.Empty<string>());

// NOTE(review): element types were stripped from this hunk; string→string is
// assumed for label selectors — confirm against the original declaration.
private static readonly IReadOnlyDictionary<string, string> EmptyLabelSelectors =
    new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(0, StringComparer.OrdinalIgnoreCase));

private enum PolicySimulationOutputFormat
{
    Table,
    Json
}

private enum PolicyFindingsOutputFormat
{
    Table,
    Json
}

/// <summary>
/// Renders an arbitrary additional value deterministically: round-trip formats
/// for floating point, invariant culture for anything formattable.
/// </summary>
private static string FormatAdditionalValue(object? value)
{
    return value switch
    {
        null => "null",
        bool b => b ? "true" : "false",
        double d => d.ToString("G17", CultureInfo.InvariantCulture),
        float f => f.ToString("G9", CultureInfo.InvariantCulture),
        IFormattable formattable => formattable.ToString(null, CultureInfo.InvariantCulture),
        _ => value.ToString() ?? string.Empty
    };
}

/// <summary>Trims provider names and drops blanks, preserving order (duplicates kept).</summary>
private static IReadOnlyList<string> NormalizeProviders(IReadOnlyList<string> providers)
{
    if (providers is null || providers.Count == 0)
    {
        return Array.Empty<string>();
    }

    var cleaned = providers
        .Where(static provider => !string.IsNullOrWhiteSpace(provider))
        .Select(static provider => provider.Trim())
        .ToList();

    return cleaned.Count == 0 ? Array.Empty<string>() : cleaned;
}

/// <summary>
/// Resolves the tenant: explicit --tenant option wins, then the STELLA_TENANT
/// environment variable, else empty string.
/// </summary>
private static string ResolveTenant(string? tenantOption)
{
    if (!string.IsNullOrWhiteSpace(tenantOption))
    {
        return tenantOption.Trim();
    }

    var fromEnvironment = Environment.GetEnvironmentVariable("STELLA_TENANT");
    return string.IsNullOrWhiteSpace(fromEnvironment) ? string.Empty : fromEnvironment.Trim();
}
// --- ingest input loading -------------------------------------------------

// Shared client for ingest downloads. The original constructed a new
// HttpClient + HttpClientHandler per call, which leaks sockets under load
// (the classic HttpClient-per-request anti-pattern); a single static instance
// with automatic decompression preserves the download behaviour.
private static readonly HttpClient IngestHttpClient =
    new(new HttpClientHandler { AutomaticDecompression = DecompressionMethods.All });

/// <summary>
/// Loads an ingest document either from an http(s) URI or from a local file path.
/// </summary>
private static async Task<IngestInputPayload> LoadIngestInputAsync(string input, CancellationToken cancellationToken)
{
    if (Uri.TryCreate(input, UriKind.Absolute, out var uri) &&
        (uri.Scheme.Equals(Uri.UriSchemeHttp, StringComparison.OrdinalIgnoreCase) ||
         uri.Scheme.Equals(Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)))
    {
        return await LoadIngestInputFromHttpAsync(uri, cancellationToken).ConfigureAwait(false);
    }

    return await LoadIngestInputFromFileAsync(input, cancellationToken).ConfigureAwait(false);
}

/// <summary>
/// Downloads and normalizes a document over HTTP(S).
/// </summary>
/// <exception cref="InvalidOperationException">Thrown on a non-success HTTP status.</exception>
private static async Task<IngestInputPayload> LoadIngestInputFromHttpAsync(Uri uri, CancellationToken cancellationToken)
{
    using var response = await IngestHttpClient.GetAsync(uri, cancellationToken).ConfigureAwait(false);

    if (!response.IsSuccessStatusCode)
    {
        throw new InvalidOperationException($"Failed to download document from {uri} (HTTP {(int)response.StatusCode}).");
    }

    var contentType = response.Content.Headers.ContentType?.MediaType ?? "application/json";
    var contentEncoding = response.Content.Headers.ContentEncoding is { Count: > 0 }
        ? string.Join(",", response.Content.Headers.ContentEncoding)
        : null;

    var bytes = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false);
    var normalized = NormalizeDocument(bytes, contentType, contentEncoding);

    return new IngestInputPayload(
        "uri",
        uri.ToString(),
        normalized.Content,
        normalized.ContentType,
        normalized.ContentEncoding);
}

/// <summary>
/// Reads and normalizes a document from disk; content type is guessed from the extension.
/// </summary>
/// <exception cref="FileNotFoundException">Thrown when the path does not exist.</exception>
private static async Task<IngestInputPayload> LoadIngestInputFromFileAsync(string path, CancellationToken cancellationToken)
{
    var fullPath = Path.GetFullPath(path);
    if (!File.Exists(fullPath))
    {
        throw new FileNotFoundException("Input document not found.", fullPath);
    }

    var bytes = await File.ReadAllBytesAsync(fullPath, cancellationToken).ConfigureAwait(false);
    var normalized = NormalizeDocument(bytes, GuessContentTypeFromExtension(fullPath), null);

    return new IngestInputPayload(
        "file",
        Path.GetFileName(fullPath),
        normalized.Content,
        normalized.ContentType,
        normalized.ContentEncoding);
}

/// <summary>
/// Normalizes raw document bytes to JSON text: unwraps gzip, and if the decoded
/// text is not JSON-shaped ('{'/'['), attempts base64 (then gzip again).
/// Records the applied encodings as a "+"-joined label.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when input is empty before or after decoding.</exception>
private static DocumentNormalizationResult NormalizeDocument(byte[] bytes, string? contentType, string? encodingHint)
{
    if (bytes is null || bytes.Length == 0)
    {
        throw new InvalidOperationException("Input document is empty.");
    }

    var working = bytes;
    var encodings = new List<string>();
    if (!string.IsNullOrWhiteSpace(encodingHint))
    {
        encodings.Add(encodingHint);
    }

    if (IsGzip(working))
    {
        working = DecompressGzip(working);
        encodings.Add("gzip");
    }

    var text = DecodeText(working);
    var trimmed = text.TrimStart();

    // Text that does not start with '{' or '[' may be a base64 wrapper
    // (optionally around gzip) — try to unwrap once.
    if (!string.IsNullOrWhiteSpace(trimmed) && trimmed[0] != '{' && trimmed[0] != '[')
    {
        if (TryDecodeBase64(text, out var decodedBytes))
        {
            working = decodedBytes;
            encodings.Add("base64");

            if (IsGzip(working))
            {
                working = DecompressGzip(working);
                encodings.Add("gzip");
            }

            text = DecodeText(working);
        }
    }

    text = text.Trim();
    if (string.IsNullOrWhiteSpace(text))
    {
        throw new InvalidOperationException("Input document contained no data after decoding.");
    }

    var encodingLabel = encodings.Count == 0 ? null : string.Join("+", encodings);
    var finalContentType = string.IsNullOrWhiteSpace(contentType) ? "application/json" : contentType;

    return new DocumentNormalizationResult(text, finalContentType, encodingLabel);
}
// --- option parsing helpers (pure) ----------------------------------------

/// <summary>
/// Maps a file extension to a content type: ".xml" → application/xml,
/// everything else (including ".json"/".csaf" and no extension) → application/json.
/// </summary>
private static string GuessContentTypeFromExtension(string path)
{
    var extension = Path.GetExtension(path);
    return string.Equals(extension, ".xml", StringComparison.OrdinalIgnoreCase)
        ? "application/xml"
        : "application/json";
}

/// <summary>
/// Resolves the verification window start: default 24h ago, or an ISO-8601
/// timestamp, or a relative duration such as "24h" / "7d".
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when the value is neither.</exception>
private static DateTimeOffset DetermineVerificationSince(string? sinceOption)
{
    if (string.IsNullOrWhiteSpace(sinceOption))
    {
        return DateTimeOffset.UtcNow.AddHours(-24);
    }

    var candidate = sinceOption.Trim();

    if (DateTimeOffset.TryParse(
        candidate,
        CultureInfo.InvariantCulture,
        DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal,
        out var timestamp))
    {
        return timestamp.ToUniversalTime();
    }

    if (TryParseRelativeDuration(candidate, out var duration))
    {
        return DateTimeOffset.UtcNow.Subtract(duration);
    }

    throw new InvalidOperationException("Invalid --since value. Use ISO-8601 timestamp or duration (e.g. 24h, 7d).");
}

/// <summary>
/// Parses a relative duration with unit suffix s/m/h/d/w (e.g. "90m", "1.5d").
/// Only finite, strictly positive magnitudes are accepted.
/// </summary>
private static bool TryParseRelativeDuration(string value, out TimeSpan duration)
{
    duration = TimeSpan.Zero;
    if (string.IsNullOrWhiteSpace(value))
    {
        return false;
    }

    var normalized = value.Trim().ToLowerInvariant();
    if (normalized.Length < 2)
    {
        return false;
    }

    double secondsPerUnit = normalized[^1] switch
    {
        's' => 1,
        'm' => 60,
        'h' => 3600,
        'd' => 86400,
        'w' => 604800,
        _ => 0
    };

    if (secondsPerUnit == 0)
    {
        return false;
    }

    if (!double.TryParse(normalized[..^1], NumberStyles.Float, CultureInfo.InvariantCulture, out var magnitude))
    {
        return false;
    }

    // Positive multiplier: NaN/Infinity/non-positive magnitude propagates to the product.
    var totalSeconds = magnitude * secondsPerUnit;
    if (double.IsNaN(totalSeconds) || double.IsInfinity(totalSeconds) || totalSeconds <= 0)
    {
        return false;
    }

    duration = TimeSpan.FromSeconds(totalSeconds);
    return true;
}

/// <summary>Applies the default limit of 20; rejects negative values.</summary>
/// <exception cref="InvalidOperationException">Thrown for a negative limit.</exception>
private static int NormalizeLimit(int? limitOption)
{
    if (!limitOption.HasValue)
    {
        return 20;
    }

    if (limitOption.Value < 0)
    {
        throw new InvalidOperationException("Limit cannot be negative.");
    }

    return limitOption.Value;
}

/// <summary>
/// Splits a comma-separated option into trimmed, non-empty, case-insensitively
/// distinct tokens (first occurrence wins).
/// </summary>
private static IReadOnlyList<string> ParseCommaSeparatedList(string? raw)
{
    if (string.IsNullOrWhiteSpace(raw))
    {
        return Array.Empty<string>();
    }

    var tokens = raw
        .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
        .Distinct(StringComparer.OrdinalIgnoreCase)
        .ToArray();

    return tokens.Length == 0 ? Array.Empty<string>() : tokens;
}
// --- AOC verify formatting and rendering ----------------------------------

/// <summary>Formats a verify window as "from -> to" in round-trip UTC; unknown parts degrade gracefully.</summary>
private static string FormatWindowRange(AocVerifyWindow? window)
{
    if (window is null)
    {
        return "(unspecified)";
    }

    var fromText = window.From?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture) ?? "(unknown)";
    var toText = window.To?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture) ?? "(unknown)";
    return $"{fromText} -> {toText}";
}

/// <summary>Formats checked-document counts with thousands separators.</summary>
private static string FormatCheckedCounts(AocVerifyChecked? checkedCounts)
    => checkedCounts is null
        ? "(unspecified)"
        : FormattableString.Invariant($"advisories: {checkedCounts.Advisories:N0}, vex: {checkedCounts.Vex:N0}");

/// <summary>
/// Classifies a verify response: "truncated" when cut short with no violations,
/// "violations" when any violation count is positive, else "ok".
/// </summary>
private static string DetermineVerifyStatus(AocVerifyResponse? response)
{
    if (response is null)
    {
        return "unknown";
    }

    if (response.Truncated == true && (response.Violations is null || response.Violations.Count == 0))
    {
        return "truncated";
    }

    var total = response.Violations?.Sum(violation => Math.Max(0, violation?.Count ?? 0)) ?? 0;
    return total > 0 ? "violations" : "ok";
}

// "yes" is highlighted yellow (it flags truncation), "no" green.
private static string FormatBoolean(bool value, bool useColor)
{
    var text = value ? "yes" : "no";
    if (!useColor)
    {
        return text;
    }

    return value ? $"[yellow]{text}[/]" : $"[green]{text}[/]";
}

/// <summary>Colour-codes the verify status for Spectre output (ok/violations/truncated).</summary>
private static string FormatVerifyStatus(string? status, bool useColor)
{
    var normalized = string.IsNullOrWhiteSpace(status) ? "unknown" : status.Trim();
    var escaped = Markup.Escape(normalized);
    if (!useColor)
    {
        return escaped;
    }

    return normalized switch
    {
        "ok" => $"[green]{escaped}[/]",
        "violations" => $"[red]{escaped}[/]",
        "truncated" => $"[yellow]{escaped}[/]",
        _ => $"[grey]{escaped}[/]"
    };
}

/// <summary>
/// Summarizes a violation example as "source | documentId [contentHash]",
/// skipping blank parts; null renders as "(n/a)".
/// </summary>
private static string FormatViolationExample(AocVerifyViolationExample? example)
{
    if (example is null)
    {
        return "(n/a)";
    }

    var parts = new List<string>();
    if (!string.IsNullOrWhiteSpace(example.Source))
    {
        parts.Add(example.Source.Trim());
    }

    if (!string.IsNullOrWhiteSpace(example.DocumentId))
    {
        parts.Add(example.DocumentId.Trim());
    }

    var label = parts.Count == 0 ? "(n/a)" : string.Join(" | ", parts);
    if (!string.IsNullOrWhiteSpace(example.ContentHash))
    {
        label = $"{label} [{example.ContentHash.Trim()}]";
    }

    return label;
}

/// <summary>
/// Renders the AOC verify result: a summary table, then either a
/// no-violations message or a per-code violation table with sample documents.
/// </summary>
private static void RenderAocVerifyTable(AocVerifyResponse response, bool useColor, int limit)
{
    var summary = new Table().Border(TableBorder.Rounded);
    summary.AddColumn("Field");
    summary.AddColumn("Value");

    summary.AddRow("Tenant", Markup.Escape(string.IsNullOrWhiteSpace(response?.Tenant) ? "(unknown)" : response.Tenant!));
    summary.AddRow("Window", Markup.Escape(FormatWindowRange(response?.Window)));
    summary.AddRow("Checked", Markup.Escape(FormatCheckedCounts(response?.Checked)));
    summary.AddRow("Limit", Markup.Escape(limit <= 0 ? "unbounded" : limit.ToString(CultureInfo.InvariantCulture)));
    summary.AddRow("Status", FormatVerifyStatus(DetermineVerifyStatus(response), useColor));

    if (response?.Metrics?.IngestionWriteTotal is int writes)
    {
        summary.AddRow("Ingestion Writes", Markup.Escape(writes.ToString("N0", CultureInfo.InvariantCulture)));
    }

    if (response?.Metrics?.AocViolationTotal is int totalViolations)
    {
        summary.AddRow("Violations (total)", Markup.Escape(totalViolations.ToString("N0", CultureInfo.InvariantCulture)));
    }
    else
    {
        // No server-side total: derive it from the per-code counts.
        var computedViolations = response?.Violations?.Sum(violation => Math.Max(0, violation?.Count ?? 0)) ?? 0;
        summary.AddRow("Violations (total)", Markup.Escape(computedViolations.ToString("N0", CultureInfo.InvariantCulture)));
    }

    summary.AddRow("Truncated", FormatBoolean(response?.Truncated == true, useColor));

    AnsiConsole.Write(summary);

    if (response?.Violations is null || response.Violations.Count == 0)
    {
        var message = response?.Truncated == true
            ? "No violations reported, but results were truncated. Increase --limit to review full output."
            : "No AOC violations detected in the requested window.";

        if (useColor)
        {
            var color = response?.Truncated == true ? "yellow" : "green";
            AnsiConsole.MarkupLine($"[{color}]{Markup.Escape(message)}[/]");
        }
        else
        {
            Console.WriteLine(message);
        }

        return;
    }

    var violationTable = new Table().Border(TableBorder.Rounded);
    violationTable.AddColumn("Code");
    violationTable.AddColumn("Count");
    violationTable.AddColumn("Sample Document");
    violationTable.AddColumn("Path");

    foreach (var violation in response.Violations)
    {
        var codeDisplay = FormatViolationCode(violation.Code, useColor);
        var countDisplay = violation.Count.ToString("N0", CultureInfo.InvariantCulture);
        var example = violation.Examples?.FirstOrDefault();
        var documentDisplay = Markup.Escape(FormatViolationExample(example));
        var pathDisplay = example is null || string.IsNullOrWhiteSpace(example.Path)
            ? "(none)"
            : example.Path!;

        violationTable.AddRow(codeDisplay, countDisplay, documentDisplay, Markup.Escape(pathDisplay));
    }

    AnsiConsole.Write(violationTable);
}

/// <summary>
/// Maps a verify response to an exit code: the minimum mapped code among the
/// violations, 17 for unmapped violations, 18 when truncated, 0 when clean.
/// </summary>
private static int DetermineVerifyExitCode(AocVerifyResponse response)
{
    ArgumentNullException.ThrowIfNull(response);

    if (response.Violations is not null && response.Violations.Count > 0)
    {
        var mappedCodes = new List<int>();
        foreach (var violation in response.Violations)
        {
            if (string.IsNullOrWhiteSpace(violation.Code))
            {
                continue;
            }

            if (AocViolationExitCodeMap.TryGetValue(violation.Code, out var mapped))
            {
                mappedCodes.Add(mapped);
            }
        }

        if (mappedCodes.Count > 0)
        {
            return mappedCodes.Min();
        }

        // Violations present but none mapped: 18 if also truncated, else generic 17.
        return response.Truncated == true ? 18 : 17;
    }

    return response.Truncated == true ? 18 : 0;
}
18 : 17; + } + + if (response.Truncated == true) + { + return 18; + } + + return 0; + } + + private static async Task WriteJsonReportAsync(T payload, string destination, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(payload); + + if (string.IsNullOrWhiteSpace(destination)) + { + throw new InvalidOperationException("Output path must be provided."); + } + + var outputPath = Path.GetFullPath(destination); + var directory = Path.GetDirectoryName(outputPath); + if (!string.IsNullOrWhiteSpace(directory)) + { + Directory.CreateDirectory(directory); + } + + var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions + { + WriteIndented = true + }); + + await File.WriteAllTextAsync(outputPath, json, cancellationToken).ConfigureAwait(false); + return outputPath; + } + + private static void RenderDryRunTable(AocIngestDryRunResponse response, bool useColor) + { + var summary = new Table().Border(TableBorder.Rounded); + summary.AddColumn("Field"); + summary.AddColumn("Value"); + + summary.AddRow("Source", Markup.Escape(response?.Source ?? "(unknown)")); + summary.AddRow("Tenant", Markup.Escape(response?.Tenant ?? "(unknown)")); + summary.AddRow("Guard Version", Markup.Escape(response?.GuardVersion ?? "(unknown)")); + summary.AddRow("Status", FormatStatusMarkup(response?.Status, useColor)); + + var violationCount = response?.Violations?.Count ?? 0; + summary.AddRow("Violations", violationCount.ToString(CultureInfo.InvariantCulture)); + + if (!string.IsNullOrWhiteSpace(response?.Document?.ContentHash)) + { + summary.AddRow("Content Hash", Markup.Escape(response.Document.ContentHash!)); + } + + if (!string.IsNullOrWhiteSpace(response?.Document?.Supersedes)) + { + summary.AddRow("Supersedes", Markup.Escape(response.Document.Supersedes!)); + } + + if (!string.IsNullOrWhiteSpace(response?.Document?.Provenance?.Signature?.Format)) + { + var signature = response.Document.Provenance.Signature; + var summaryText = signature!.Present + ? 
signature.Format ?? "present" + : "missing"; + summary.AddRow("Signature", Markup.Escape(summaryText)); + } + + AnsiConsole.Write(summary); + + if (violationCount == 0) + { + if (useColor) + { + AnsiConsole.MarkupLine("[green]No AOC violations detected.[/]"); + } + else + { + Console.WriteLine("No AOC violations detected."); + } + + return; + } + + var violationTable = new Table().Border(TableBorder.Rounded); + violationTable.AddColumn("Code"); + violationTable.AddColumn("Path"); + violationTable.AddColumn("Message"); + + foreach (var violation in response!.Violations!) + { + var codeDisplay = FormatViolationCode(violation.Code, useColor); + var pathDisplay = string.IsNullOrWhiteSpace(violation.Path) ? "(root)" : violation.Path!; + var messageDisplay = string.IsNullOrWhiteSpace(violation.Message) ? "(unspecified)" : violation.Message!; + violationTable.AddRow(codeDisplay, Markup.Escape(pathDisplay), Markup.Escape(messageDisplay)); + } + + AnsiConsole.Write(violationTable); + } + + private static int DetermineDryRunExitCode(AocIngestDryRunResponse response) + { + if (response?.Violations is null || response.Violations.Count == 0) + { + return 0; + } + + var exitCodes = new List(); + foreach (var violation in response.Violations) + { + if (string.IsNullOrWhiteSpace(violation.Code)) + { + continue; + } + + if (AocViolationExitCodeMap.TryGetValue(violation.Code, out var mapped)) + { + exitCodes.Add(mapped); + } + } + + if (exitCodes.Count == 0) + { + return 17; + } + + return exitCodes.Min(); + } + + private static string FormatStatusMarkup(string? status, bool useColor) + { + var normalized = string.IsNullOrWhiteSpace(status) ? "unknown" : status.Trim(); + if (!useColor) + { + return Markup.Escape(normalized); + } + + return normalized.Equals("ok", StringComparison.OrdinalIgnoreCase) + ? 
$"[green]{Markup.Escape(normalized)}[/]" + : $"[red]{Markup.Escape(normalized)}[/]"; + } + + private static string FormatViolationCode(string code, bool useColor) + { + var sanitized = string.IsNullOrWhiteSpace(code) ? "(unknown)" : code.Trim(); + if (!useColor) + { + return Markup.Escape(sanitized); + } + + return $"[red]{Markup.Escape(sanitized)}[/]"; + } + + private static bool IsGzip(ReadOnlySpan data) + { + return data.Length >= 2 && data[0] == 0x1F && data[1] == 0x8B; + } + + private static byte[] DecompressGzip(byte[] payload) + { + using var input = new MemoryStream(payload); + using var gzip = new GZipStream(input, CompressionMode.Decompress); + using var output = new MemoryStream(); + gzip.CopyTo(output); + return output.ToArray(); + } + + private static string DecodeText(byte[] payload) + { + var encoding = DetectEncoding(payload); + return encoding.GetString(payload); + } + + private static Encoding DetectEncoding(ReadOnlySpan data) + { + if (data.Length >= 4) + { + if (data[0] == 0x00 && data[1] == 0x00 && data[2] == 0xFE && data[3] == 0xFF) + { + return new UTF32Encoding(bigEndian: true, byteOrderMark: true); + } + + if (data[0] == 0xFF && data[1] == 0xFE && data[2] == 0x00 && data[3] == 0x00) + { + return new UTF32Encoding(bigEndian: false, byteOrderMark: true); + } + } + + if (data.Length >= 2) + { + if (data[0] == 0xFE && data[1] == 0xFF) + { + return Encoding.BigEndianUnicode; + } + + if (data[0] == 0xFF && data[1] == 0xFE) + { + return Encoding.Unicode; + } + } + + if (data.Length >= 3 && data[0] == 0xEF && data[1] == 0xBB && data[2] == 0xBF) + { + return Encoding.UTF8; + } + + return Encoding.UTF8; + } + public static async Task HandleKmsExportAsync( IServiceProvider services, string? 
// NOTE(review): reconstructed from a whitespace-mangled diff in which this
// block was removed and re-added verbatim (indent churn); generic type
// arguments were stripped by the mangling and restored from usage — confirm
// against the repo.

/// <summary>
/// Attempts to interpret <paramref name="text"/> as canonical base64 after
/// stripping all whitespace. Rejects candidates shorter than 8 significant
/// characters, whose length is not a multiple of 4, or containing characters
/// outside the base64 alphabet.
/// </summary>
private static bool TryDecodeBase64(string text, out byte[] decoded)
{
    decoded = Array.Empty<byte>();
    if (string.IsNullOrWhiteSpace(text))
    {
        return false;
    }

    var candidate = string.Concat(text.Where(static ch => !char.IsWhiteSpace(ch)));
    if (candidate.Length < 8 || candidate.Length % 4 != 0)
    {
        return false;
    }

    foreach (var ch in candidate)
    {
        if (!char.IsLetterOrDigit(ch) && ch is not ('+' or '/' or '='))
        {
            return false;
        }
    }

    try
    {
        decoded = Convert.FromBase64String(candidate);
        return true;
    }
    catch (FormatException)
    {
        return false;
    }
}

// Payload shapes exchanged with the ingest endpoints.
private sealed record IngestInputPayload(string Kind, string Name, string Content, string ContentType, string? ContentEncoding);

private sealed record DocumentNormalizationResult(string Content, string ContentType, string? ContentEncoding);

// Maps AOC violation codes to CLI exit codes (11-17); lookup is
// case-insensitive to tolerate casing drift in server responses.
private static readonly IReadOnlyDictionary<string, int> AocViolationExitCodeMap =
    new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase)
    {
        ["ERR_AOC_001"] = 11,
        ["ERR_AOC_002"] = 12,
        ["ERR_AOC_003"] = 13,
        ["ERR_AOC_004"] = 14,
        ["ERR_AOC_005"] = 15,
        ["ERR_AOC_006"] = 16,
        ["ERR_AOC_007"] = 17
    };

/// <summary>
/// Removes entries whose value is null, mutating and returning
/// <paramref name="source"/> for call chaining.
/// </summary>
private static IDictionary<string, object?> RemoveNullValues(Dictionary<string, object?> source)
{
    // Snapshot the keys first so removal does not invalidate the enumerator.
    var nullKeys = source.Where(kvp => kvp.Value is null).Select(kvp => kvp.Key).ToList();
    foreach (var key in nullKeys)
    {
        source.Remove(key);
    }

    return source;
}

/// <summary>
/// Triggers a backend job and reflects the outcome in
/// <see cref="Environment.ExitCode"/>: 0 when accepted, 1 on failure.
/// </summary>
private static async Task TriggerJobAsync(
    IBackendOperationsClient client,
    ILogger logger,
    string jobKind,
    IDictionary<string, object?> parameters,
    CancellationToken cancellationToken)
{
    var result = await client.TriggerJobAsync(jobKind, parameters, cancellationToken).ConfigureAwait(false);

    if (!result.Success)
    {
        logger.LogError("Job '{JobKind}' failed: {Message}", jobKind, result.Message);
        Environment.ExitCode = 1;
        return;
    }

    if (!string.IsNullOrWhiteSpace(result.Location))
    {
        logger.LogInformation("Job accepted. Track status at {Location}.", result.Location);
    }
    else if (result.Run is not null)
    {
        logger.LogInformation("Job accepted. RunId: {RunId} Status: {Status}", result.Run.RunId, result.Run.Status);
    }
    else
    {
        logger.LogInformation("Job accepted.");
    }

    Environment.ExitCode = 0;
}
_tokenClient; - private readonly object _tokenSync = new(); - private string? _cachedAccessToken; - private DateTimeOffset _cachedAccessTokenExpiresAt = DateTimeOffset.MinValue; - - public BackendOperationsClient(HttpClient httpClient, StellaOpsCliOptions options, ILogger logger, IStellaOpsTokenClient? tokenClient = null) - { - _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _tokenClient = tokenClient; - - if (!string.IsNullOrWhiteSpace(_options.BackendUrl) && httpClient.BaseAddress is null) - { - if (Uri.TryCreate(_options.BackendUrl, UriKind.Absolute, out var baseUri)) - { - httpClient.BaseAddress = baseUri; - } - } - } - - public async Task DownloadScannerAsync(string channel, string outputPath, bool overwrite, bool verbose, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - channel = string.IsNullOrWhiteSpace(channel) ? 
"stable" : channel.Trim(); - outputPath = ResolveArtifactPath(outputPath, channel); - Directory.CreateDirectory(Path.GetDirectoryName(outputPath)!); - - if (!overwrite && File.Exists(outputPath)) - { - var existing = new FileInfo(outputPath); - _logger.LogInformation("Scanner artifact already cached at {Path} ({Size} bytes).", outputPath, existing.Length); - return new ScannerArtifactResult(outputPath, existing.Length, true); - } - - var attempt = 0; - var maxAttempts = Math.Max(1, _options.ScannerDownloadAttempts); - - while (true) - { - attempt++; - try - { - using var request = CreateRequest(HttpMethod.Get, $"api/scanner/artifacts/{channel}"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - return await ProcessScannerResponseAsync(response, outputPath, channel, verbose, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (attempt < maxAttempts) - { - var backoffSeconds = Math.Pow(2, attempt); - _logger.LogWarning(ex, "Scanner download attempt {Attempt}/{MaxAttempts} failed. 
Retrying in {Delay:F0}s...", attempt, maxAttempts, backoffSeconds); - await Task.Delay(TimeSpan.FromSeconds(backoffSeconds), cancellationToken).ConfigureAwait(false); - } - } - } - - private async Task ProcessScannerResponseAsync(HttpResponseMessage response, string outputPath, string channel, bool verbose, CancellationToken cancellationToken) - { - var tempFile = outputPath + ".tmp"; - await using (var payloadStream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false)) - await using (var fileStream = File.Create(tempFile)) - { - await payloadStream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false); - } - - var expectedDigest = ExtractHeaderValue(response.Headers, "X-StellaOps-Digest"); - var signatureHeader = ExtractHeaderValue(response.Headers, "X-StellaOps-Signature"); - - var digestHex = await ValidateDigestAsync(tempFile, expectedDigest, cancellationToken).ConfigureAwait(false); - await ValidateSignatureAsync(signatureHeader, digestHex, verbose, cancellationToken).ConfigureAwait(false); - - if (verbose) - { - var signatureNote = string.IsNullOrWhiteSpace(signatureHeader) ? 
"no signature" : "signature validated"; - _logger.LogDebug("Scanner digest sha256:{Digest} ({SignatureNote}).", digestHex, signatureNote); - } - - if (File.Exists(outputPath)) - { - File.Delete(outputPath); - } - - File.Move(tempFile, outputPath); - - PersistMetadata(outputPath, channel, digestHex, signatureHeader, response); - - var downloaded = new FileInfo(outputPath); - _logger.LogInformation("Scanner downloaded to {Path} ({Size} bytes).", outputPath, downloaded.Length); - - return new ScannerArtifactResult(outputPath, downloaded.Length, false); - } - - public async Task UploadScanResultsAsync(string filePath, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (!File.Exists(filePath)) - { - throw new FileNotFoundException("Scan result file not found.", filePath); - } - - var maxAttempts = Math.Max(1, _options.ScanUploadAttempts); - var attempt = 0; - - while (true) - { - attempt++; - try - { - using var content = new MultipartFormDataContent(); - await using var fileStream = File.OpenRead(filePath); - var streamContent = new StreamContent(fileStream); - streamContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); - content.Add(streamContent, "file", Path.GetFileName(filePath)); - - using var request = CreateRequest(HttpMethod.Post, "api/scanner/results"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - request.Content = content; - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (response.IsSuccessStatusCode) - { - _logger.LogInformation("Scan results uploaded from {Path}.", filePath); - return; - } - - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - if (attempt >= maxAttempts) - { - throw new InvalidOperationException(failure); - } - - var delay = GetRetryDelay(response, attempt); - _logger.LogWarning( - "Scan upload attempt {Attempt}/{MaxAttempts} failed 
({Reason}). Retrying in {Delay:F1}s...", - attempt, - maxAttempts, - failure, - delay.TotalSeconds); - await Task.Delay(delay, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (attempt < maxAttempts) - { - var delay = TimeSpan.FromSeconds(Math.Pow(2, attempt)); - _logger.LogWarning( - ex, - "Scan upload attempt {Attempt}/{MaxAttempts} threw an exception. Retrying in {Delay:F1}s...", - attempt, - maxAttempts, - delay.TotalSeconds); - await Task.Delay(delay, cancellationToken).ConfigureAwait(false); - } - } - } - - public async Task TriggerJobAsync(string jobKind, IDictionary parameters, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(jobKind)) - { - throw new ArgumentException("Job kind must be provided.", nameof(jobKind)); - } - - var requestBody = new JobTriggerRequest - { - Trigger = "cli", - Parameters = parameters is null ? new Dictionary(StringComparer.Ordinal) : new Dictionary(parameters, StringComparer.Ordinal) - }; - - var request = CreateRequest(HttpMethod.Post, $"jobs/{jobKind}"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - request.Content = JsonContent.Create(requestBody, options: SerializerOptions); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (response.StatusCode == HttpStatusCode.Accepted) - { - JobRunResponse? 
run = null; - if (response.Content.Headers.ContentLength is > 0) - { - try - { - run = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - _logger.LogWarning(ex, "Failed to deserialize job run response for job kind {Kind}.", jobKind); - } - } - - var location = response.Headers.Location?.ToString(); - return new JobTriggerResult(true, "Accepted", location, run); - } - - var failureMessage = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - return new JobTriggerResult(false, failureMessage, null, null); - } - - public async Task ExecuteExcititorOperationAsync(string route, HttpMethod method, object? payload, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(route)) - { - throw new ArgumentException("Route must be provided.", nameof(route)); - } - - var relative = route.TrimStart('/'); - using var request = CreateRequest(method, $"excititor/{relative}"); - - if (payload is not null && method != HttpMethod.Get && method != HttpMethod.Delete) - { - request.Content = JsonContent.Create(payload, options: SerializerOptions); - } - - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - - if (response.IsSuccessStatusCode) - { - var (message, payloadElement) = await ExtractExcititorResponseAsync(response, cancellationToken).ConfigureAwait(false); - var location = response.Headers.Location?.ToString(); - return new ExcititorOperationResult(true, message, location, payloadElement); - } - - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - return new ExcititorOperationResult(false, failure, null, null); - } - - public async Task DownloadExcititorExportAsync(string exportId, string destinationPath, string? 
expectedDigestAlgorithm, string? expectedDigest, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(exportId)) - { - throw new ArgumentException("Export id must be provided.", nameof(exportId)); - } - - if (string.IsNullOrWhiteSpace(destinationPath)) - { - throw new ArgumentException("Destination path must be provided.", nameof(destinationPath)); - } - - var fullPath = Path.GetFullPath(destinationPath); - var directory = Path.GetDirectoryName(fullPath); - if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory)) - { - Directory.CreateDirectory(directory); - } - - var normalizedAlgorithm = string.IsNullOrWhiteSpace(expectedDigestAlgorithm) - ? null - : expectedDigestAlgorithm.Trim(); - var normalizedDigest = NormalizeExpectedDigest(expectedDigest); - - if (File.Exists(fullPath) - && string.Equals(normalizedAlgorithm, "sha256", StringComparison.OrdinalIgnoreCase) - && !string.IsNullOrWhiteSpace(normalizedDigest)) - { - var existingDigest = await ComputeSha256Async(fullPath, cancellationToken).ConfigureAwait(false); - if (string.Equals(existingDigest, normalizedDigest, StringComparison.OrdinalIgnoreCase)) - { - var info = new FileInfo(fullPath); - _logger.LogDebug("Export {ExportId} already present at {Path}; digest matches.", exportId, fullPath); - return new ExcititorExportDownloadResult(fullPath, info.Length, true); - } - } - - var encodedId = Uri.EscapeDataString(exportId); - using var request = CreateRequest(HttpMethod.Get, $"excititor/export/{encodedId}/download"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - - var tempPath = fullPath + ".tmp"; - if (File.Exists(tempPath)) - { - File.Delete(tempPath); - } - - using (var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false)) - { - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, 
cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); - await using (var fileStream = File.Create(tempPath)) - { - await stream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false); - } - } - - if (!string.IsNullOrWhiteSpace(normalizedAlgorithm) && !string.IsNullOrWhiteSpace(normalizedDigest)) - { - if (string.Equals(normalizedAlgorithm, "sha256", StringComparison.OrdinalIgnoreCase)) - { - var computed = await ComputeSha256Async(tempPath, cancellationToken).ConfigureAwait(false); - if (!string.Equals(computed, normalizedDigest, StringComparison.OrdinalIgnoreCase)) - { - File.Delete(tempPath); - throw new InvalidOperationException($"Export digest mismatch. Expected sha256:{normalizedDigest}, computed sha256:{computed}."); - } - } - else - { - _logger.LogWarning("Export digest verification skipped. Unsupported algorithm {Algorithm}.", normalizedAlgorithm); - } - } - - if (File.Exists(fullPath)) - { - File.Delete(fullPath); - } - - File.Move(tempPath, fullPath); - - var downloaded = new FileInfo(fullPath); - return new ExcititorExportDownloadResult(fullPath, downloaded.Length, false); - } - - public async Task EvaluateRuntimePolicyAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (request is null) - { - throw new ArgumentNullException(nameof(request)); - } - - var images = NormalizeImages(request.Images); - if (images.Count == 0) - { - throw new ArgumentException("At least one image digest must be provided.", nameof(request)); - } - - var payload = new RuntimePolicyEvaluationRequestDocument - { - Namespace = string.IsNullOrWhiteSpace(request.Namespace) ? 
null : request.Namespace.Trim(), - Images = images - }; - - if (request.Labels.Count > 0) - { - payload.Labels = new Dictionary(StringComparer.Ordinal); - foreach (var label in request.Labels) - { - if (!string.IsNullOrWhiteSpace(label.Key)) - { - payload.Labels[label.Key] = label.Value ?? string.Empty; - } - } - } - - using var message = CreateRequest(HttpMethod.Post, "api/scanner/policy/runtime"); - await AuthorizeRequestAsync(message, cancellationToken).ConfigureAwait(false); - message.Content = JsonContent.Create(payload, options: SerializerOptions); - - using var response = await _httpClient.SendAsync(message, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - RuntimePolicyEvaluationResponseDocument? document; - try - { - document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = response.Content is null ? string.Empty : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse runtime policy response. {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - if (document is null) - { - throw new InvalidOperationException("Runtime policy response was empty."); - } - - var decisions = new Dictionary(StringComparer.Ordinal); - if (document.Results is not null) - { - foreach (var kvp in document.Results) - { - var image = kvp.Key; - var decision = kvp.Value; - if (string.IsNullOrWhiteSpace(image) || decision is null) - { - continue; - } - - var verdict = string.IsNullOrWhiteSpace(decision.PolicyVerdict) - ? 
"unknown" - : decision.PolicyVerdict!.Trim(); - - var reasons = ExtractReasons(decision.Reasons); - var metadata = ExtractExtensionMetadata(decision.ExtensionData); - - var hasSbom = decision.HasSbomReferrers ?? decision.HasSbomLegacy; - - RuntimePolicyRekorReference? rekor = null; - if (decision.Rekor is not null && - (!string.IsNullOrWhiteSpace(decision.Rekor.Uuid) || - !string.IsNullOrWhiteSpace(decision.Rekor.Url) || - decision.Rekor.Verified.HasValue)) - { - rekor = new RuntimePolicyRekorReference( - NormalizeOptionalString(decision.Rekor.Uuid), - NormalizeOptionalString(decision.Rekor.Url), - decision.Rekor.Verified); - } - - decisions[image] = new RuntimePolicyImageDecision( - verdict, - decision.Signed, - hasSbom, - reasons, - rekor, - metadata); - } - } - - var decisionsView = new ReadOnlyDictionary(decisions); - - return new RuntimePolicyEvaluationResult( - document.TtlSeconds ?? 0, - document.ExpiresAtUtc?.ToUniversalTime(), - string.IsNullOrWhiteSpace(document.PolicyRevision) ? null : document.PolicyRevision, - decisionsView); - } - - public async Task ActivatePolicyRevisionAsync(string policyId, int version, PolicyActivationRequest request, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (version <= 0) - { - throw new ArgumentOutOfRangeException(nameof(version), "Version must be greater than zero."); - } - - if (request is null) - { - throw new ArgumentNullException(nameof(request)); - } - - var requestDocument = new PolicyActivationRequestDocument - { - Comment = NormalizeOptionalString(request.Comment), - RunNow = request.RunNow ? true : null, - ScheduledAt = request.ScheduledAt, - Priority = NormalizeOptionalString(request.Priority), - Rollback = request.Rollback ? 
true : null, - IncidentId = NormalizeOptionalString(request.IncidentId) - }; - - var encodedPolicyId = Uri.EscapeDataString(policyId.Trim()); - using var httpRequest = CreateRequest(HttpMethod.Post, $"api/policy/policies/{encodedPolicyId}/versions/{version}:activate"); - await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); - httpRequest.Content = JsonContent.Create(requestDocument, options: SerializerOptions); - - using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); - var errorCode = ExtractProblemErrorCode(problem); - throw new PolicyApiException(message, response.StatusCode, errorCode); - } - - PolicyActivationResponseDocument? responseDocument; - try - { - responseDocument = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = response.Content is null ? 
string.Empty : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse policy activation response: {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - if (responseDocument is null) - { - throw new InvalidOperationException("Policy activation response was empty."); - } - - if (string.IsNullOrWhiteSpace(responseDocument.Status)) - { - throw new InvalidOperationException("Policy activation response missing status."); - } - - if (responseDocument.Revision is null) - { - throw new InvalidOperationException("Policy activation response missing revision."); - } - - return MapPolicyActivation(responseDocument); - } - - public async Task SimulatePolicyAsync(string policyId, PolicySimulationInput input, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (input is null) - { - throw new ArgumentNullException(nameof(input)); - } - - var requestDocument = new PolicySimulationRequestDocument - { - BaseVersion = input.BaseVersion, - CandidateVersion = input.CandidateVersion, - Explain = input.Explain ? 
true : null - }; - - if (input.SbomSet.Count > 0) - { - requestDocument.SbomSet = input.SbomSet; - } - - if (input.Environment.Count > 0) - { - var environment = new Dictionary(StringComparer.Ordinal); - foreach (var pair in input.Environment) - { - if (string.IsNullOrWhiteSpace(pair.Key)) - { - continue; - } - - environment[pair.Key] = SerializeEnvironmentValue(pair.Value); - } - - if (environment.Count > 0) - { - requestDocument.Env = environment; - } - } - - var encodedPolicyId = Uri.EscapeDataString(policyId); - using var request = CreateRequest(HttpMethod.Post, $"api/policy/policies/{encodedPolicyId}/simulate"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - request.Content = JsonContent.Create(requestDocument, options: SerializerOptions); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); - var errorCode = ExtractProblemErrorCode(problem); - throw new PolicyApiException(message, response.StatusCode, errorCode); - } - - if (response.Content is null || response.Content.Headers.ContentLength is 0) - { - throw new InvalidOperationException("Policy simulation response was empty."); - } - - PolicySimulationResponseDocument? 
document; - try - { - document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse policy simulation response: {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - if (document is null) - { - throw new InvalidOperationException("Policy simulation response was empty."); - } - - if (document.Diff is null) - { - throw new InvalidOperationException("Policy simulation response missing diff summary."); - } - - return MapPolicySimulation(document); - } - - public async Task GetPolicyFindingsAsync(PolicyFindingsQuery query, CancellationToken cancellationToken) - { - if (query is null) - { - throw new ArgumentNullException(nameof(query)); - } - - EnsureBackendConfigured(); - - var policyId = query.PolicyId; - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(query)); - } - - var encodedPolicyId = Uri.EscapeDataString(policyId.Trim()); - var relative = $"api/policy/findings/{encodedPolicyId}{BuildPolicyFindingsQueryString(query)}"; - - using var request = CreateRequest(HttpMethod.Get, relative); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); - var errorCode = ExtractProblemErrorCode(problem); - throw new PolicyApiException(message, response.StatusCode, errorCode); - } - - PolicyFindingsResponseDocument? 
document; - try - { - document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse policy findings response: {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - if (document is null) - { - throw new InvalidOperationException("Policy findings response was empty."); - } - - return MapPolicyFindings(document); - } - - public async Task GetPolicyFindingAsync(string policyId, string findingId, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (string.IsNullOrWhiteSpace(findingId)) - { - throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); - } - - var encodedPolicyId = Uri.EscapeDataString(policyId.Trim()); - var encodedFindingId = Uri.EscapeDataString(findingId.Trim()); - using var request = CreateRequest(HttpMethod.Get, $"api/policy/findings/{encodedPolicyId}/{encodedFindingId}"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); - var errorCode = ExtractProblemErrorCode(problem); - throw new PolicyApiException(message, response.StatusCode, errorCode); - } - - PolicyFindingDocumentDocument? 
document; - try - { - document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse policy finding response: {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - if (document is null) - { - throw new InvalidOperationException("Policy finding response was empty."); - } - - return MapPolicyFinding(document); - } - - public async Task GetPolicyFindingExplainAsync(string policyId, string findingId, string? mode, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (string.IsNullOrWhiteSpace(findingId)) - { - throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); - } - - var encodedPolicyId = Uri.EscapeDataString(policyId.Trim()); - var encodedFindingId = Uri.EscapeDataString(findingId.Trim()); - var query = string.IsNullOrWhiteSpace(mode) ? string.Empty : $"?mode={Uri.EscapeDataString(mode.Trim())}"; - - using var request = CreateRequest(HttpMethod.Get, $"api/policy/findings/{encodedPolicyId}/{encodedFindingId}/explain{query}"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); - var errorCode = ExtractProblemErrorCode(problem); - throw new PolicyApiException(message, response.StatusCode, errorCode); - } - - PolicyFindingExplainResponseDocument? 
document; - try - { - document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse policy finding explain response: {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - if (document is null) - { - throw new InvalidOperationException("Policy finding explain response was empty."); - } - - return MapPolicyFindingExplain(document); - } - - public async Task> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - var query = includeDisabled ? "?includeDisabled=true" : string.Empty; - using var request = CreateRequest(HttpMethod.Get, $"excititor/providers{query}"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - if (response.Content is null || response.Content.Headers.ContentLength is 0) - { - return Array.Empty(); - } - - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); - if (stream is null || stream.Length == 0) - { - return Array.Empty(); - } - - using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); - var root = document.RootElement; - if (root.ValueKind == JsonValueKind.Object && root.TryGetProperty("providers", out var providersProperty)) - { - root = providersProperty; - } - - if (root.ValueKind != JsonValueKind.Array) - { - return Array.Empty(); - } - - var list = new List(); - foreach (var item in 
root.EnumerateArray()) - { - var id = GetStringProperty(item, "id") ?? string.Empty; - if (string.IsNullOrWhiteSpace(id)) - { - continue; - } - - var kind = GetStringProperty(item, "kind") ?? "unknown"; - var displayName = GetStringProperty(item, "displayName") ?? id; - var trustTier = GetStringProperty(item, "trustTier") ?? string.Empty; - var enabled = GetBooleanProperty(item, "enabled", defaultValue: true); - var lastIngested = GetDateTimeOffsetProperty(item, "lastIngestedAt"); - - list.Add(new ExcititorProviderSummary(id, kind, displayName, trustTier, enabled, lastIngested)); - } - - return list; - } - - public async Task DownloadOfflineKitAsync(string? bundleId, string destinationDirectory, bool overwrite, bool resume, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - var rootDirectory = ResolveOfflineDirectory(destinationDirectory); - Directory.CreateDirectory(rootDirectory); - - var descriptor = await FetchOfflineKitDescriptorAsync(bundleId, cancellationToken).ConfigureAwait(false); - - var bundlePath = Path.Combine(rootDirectory, descriptor.BundleName); - var metadataPath = bundlePath + ".metadata.json"; - var manifestPath = Path.Combine(rootDirectory, descriptor.ManifestName); - var bundleSignaturePath = descriptor.BundleSignatureName is not null ? Path.Combine(rootDirectory, descriptor.BundleSignatureName) : null; - var manifestSignaturePath = descriptor.ManifestSignatureName is not null ? 
Path.Combine(rootDirectory, descriptor.ManifestSignatureName) : null; - - var fromCache = false; - if (!overwrite && File.Exists(bundlePath)) - { - var digest = await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false); - if (string.Equals(digest, descriptor.BundleSha256, StringComparison.OrdinalIgnoreCase)) - { - fromCache = true; - } - else if (resume) - { - var partial = bundlePath + ".partial"; - File.Move(bundlePath, partial, overwrite: true); - } - else - { - File.Delete(bundlePath); - } - } - - if (!fromCache) - { - await DownloadFileWithResumeAsync(descriptor.BundleDownloadUri, bundlePath, descriptor.BundleSha256, descriptor.BundleSize, resume, cancellationToken).ConfigureAwait(false); - } - - await DownloadFileWithResumeAsync(descriptor.ManifestDownloadUri, manifestPath, descriptor.ManifestSha256, descriptor.ManifestSize ?? 0, resume: false, cancellationToken).ConfigureAwait(false); - - if (descriptor.BundleSignatureDownloadUri is not null && bundleSignaturePath is not null) - { - await DownloadAuxiliaryFileAsync(descriptor.BundleSignatureDownloadUri, bundleSignaturePath, cancellationToken).ConfigureAwait(false); - } - - if (descriptor.ManifestSignatureDownloadUri is not null && manifestSignaturePath is not null) - { - await DownloadAuxiliaryFileAsync(descriptor.ManifestSignatureDownloadUri, manifestSignaturePath, cancellationToken).ConfigureAwait(false); - } - - await WriteOfflineKitMetadataAsync(metadataPath, descriptor, bundlePath, manifestPath, bundleSignaturePath, manifestSignaturePath, cancellationToken).ConfigureAwait(false); - - return new OfflineKitDownloadResult( - descriptor, - bundlePath, - manifestPath, - bundleSignaturePath, - manifestSignaturePath, - metadataPath, - fromCache); - } - - public async Task ImportOfflineKitAsync(OfflineKitImportRequest request, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (request is null) - { - throw new ArgumentNullException(nameof(request)); - } - - var 
bundlePath = Path.GetFullPath(request.BundlePath); - if (!File.Exists(bundlePath)) - { - throw new FileNotFoundException("Offline kit bundle not found.", bundlePath); - } - - string? manifestPath = null; - if (!string.IsNullOrWhiteSpace(request.ManifestPath)) - { - manifestPath = Path.GetFullPath(request.ManifestPath); - if (!File.Exists(manifestPath)) - { - throw new FileNotFoundException("Offline kit manifest not found.", manifestPath); - } - } - - string? bundleSignaturePath = null; - if (!string.IsNullOrWhiteSpace(request.BundleSignaturePath)) - { - bundleSignaturePath = Path.GetFullPath(request.BundleSignaturePath); - if (!File.Exists(bundleSignaturePath)) - { - throw new FileNotFoundException("Offline kit bundle signature not found.", bundleSignaturePath); - } - } - - string? manifestSignaturePath = null; - if (!string.IsNullOrWhiteSpace(request.ManifestSignaturePath)) - { - manifestSignaturePath = Path.GetFullPath(request.ManifestSignaturePath); - if (!File.Exists(manifestSignaturePath)) - { - throw new FileNotFoundException("Offline kit manifest signature not found.", manifestSignaturePath); - } - } - - var bundleSize = request.BundleSize ?? new FileInfo(bundlePath).Length; - var bundleSha = string.IsNullOrWhiteSpace(request.BundleSha256) - ? await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false) - : NormalizeSha(request.BundleSha256) ?? throw new InvalidOperationException("Bundle digest must not be empty."); - - string? manifestSha = null; - long? manifestSize = null; - if (manifestPath is not null) - { - manifestSize = request.ManifestSize ?? new FileInfo(manifestPath).Length; - manifestSha = string.IsNullOrWhiteSpace(request.ManifestSha256) - ? 
await ComputeSha256Async(manifestPath, cancellationToken).ConfigureAwait(false) - : NormalizeSha(request.ManifestSha256); - } - - var metadata = new OfflineKitImportMetadataPayload - { - BundleId = request.BundleId, - BundleSha256 = bundleSha, - BundleSize = bundleSize, - CapturedAt = request.CapturedAt, - Channel = request.Channel, - Kind = request.Kind, - IsDelta = request.IsDelta, - BaseBundleId = request.BaseBundleId, - ManifestSha256 = manifestSha, - ManifestSize = manifestSize - }; - - using var message = CreateRequest(HttpMethod.Post, "api/offline-kit/import"); - await AuthorizeRequestAsync(message, cancellationToken).ConfigureAwait(false); - - using var content = new MultipartFormDataContent(); - - var metadataOptions = new JsonSerializerOptions(SerializerOptions) - { - WriteIndented = false - }; - var metadataJson = JsonSerializer.Serialize(metadata, metadataOptions); - var metadataContent = new StringContent(metadataJson, Encoding.UTF8, "application/json"); - content.Add(metadataContent, "metadata"); - - var bundleStream = File.OpenRead(bundlePath); - var bundleContent = new StreamContent(bundleStream); - bundleContent.Headers.ContentType = new MediaTypeHeaderValue("application/gzip"); - content.Add(bundleContent, "bundle", Path.GetFileName(bundlePath)); - - if (manifestPath is not null) - { - var manifestStream = File.OpenRead(manifestPath); - var manifestContent = new StreamContent(manifestStream); - manifestContent.Headers.ContentType = new MediaTypeHeaderValue("application/json"); - content.Add(manifestContent, "manifest", Path.GetFileName(manifestPath)); - } - - if (bundleSignaturePath is not null) - { - var signatureStream = File.OpenRead(bundleSignaturePath); - var signatureContent = new StreamContent(signatureStream); - signatureContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); - content.Add(signatureContent, "bundleSignature", Path.GetFileName(bundleSignaturePath)); - } - - if (manifestSignaturePath is not null) 
- { - var manifestSignatureStream = File.OpenRead(manifestSignaturePath); - var manifestSignatureContent = new StreamContent(manifestSignatureStream); - manifestSignatureContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); - content.Add(manifestSignatureContent, "manifestSignature", Path.GetFileName(manifestSignaturePath)); - } - - message.Content = content; - - using var response = await _httpClient.SendAsync(message, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - OfflineKitImportResponseTransport? document; - try - { - document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse offline kit import response. {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - var submittedAt = document?.SubmittedAt ?? 
DateTimeOffset.UtcNow; - - return new OfflineKitImportResult( - document?.ImportId, - document?.Status, - submittedAt, - document?.Message); - } - - public async Task GetOfflineKitStatusAsync(CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - using var request = CreateRequest(HttpMethod.Get, "api/offline-kit/status"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - if (response.Content is null || response.Content.Headers.ContentLength is 0) - { - return new OfflineKitStatus(null, null, null, false, null, null, null, null, null, Array.Empty()); - } - - OfflineKitStatusTransport? document; - try - { - document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse offline kit status response. {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - var current = document?.Current; - var components = MapOfflineComponents(document?.Components); - - if (current is null) - { - return new OfflineKitStatus(null, null, null, false, null, null, null, null, null, components); - } - - return new OfflineKitStatus( - NormalizeOptionalString(current.BundleId), - NormalizeOptionalString(current.Channel), - NormalizeOptionalString(current.Kind), - current.IsDelta ?? 
false, - NormalizeOptionalString(current.BaseBundleId), - current.CapturedAt?.ToUniversalTime(), - current.ImportedAt?.ToUniversalTime(), - NormalizeSha(current.BundleSha256), - current.BundleSize, - components); - } - - public async Task ExecuteAocIngestDryRunAsync(AocIngestDryRunRequest requestBody, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - ArgumentNullException.ThrowIfNull(requestBody); - - using var request = CreateRequest(HttpMethod.Post, "api/aoc/ingest/dry-run"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - request.Content = JsonContent.Create(requestBody, options: SerializerOptions); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - try - { - var result = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - return result ?? new AocIngestDryRunResponse(); - } - catch (JsonException ex) - { - var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse ingest dry-run response. 
{ex.Message}", ex) - { - Data = { ["payload"] = payload } - }; - } - } - - public async Task ExecuteAocVerifyAsync(AocVerifyRequest requestBody, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - ArgumentNullException.ThrowIfNull(requestBody); - - using var request = CreateRequest(HttpMethod.Post, "api/aoc/verify"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - request.Content = JsonContent.Create(requestBody, options: SerializerOptions); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - try - { - var result = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - return result ?? new AocVerifyResponse(); - } - catch (JsonException ex) - { - var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse AOC verification response. {ex.Message}", ex) - { - Data = { ["payload"] = payload } - }; - } - } - - private string ResolveOfflineDirectory(string destinationDirectory) - { - if (!string.IsNullOrWhiteSpace(destinationDirectory)) - { - return Path.GetFullPath(destinationDirectory); - } - - var configured = _options.Offline?.KitsDirectory; - if (!string.IsNullOrWhiteSpace(configured)) - { - return Path.GetFullPath(configured); - } - - return Path.GetFullPath(Path.Combine(Environment.CurrentDirectory, "offline-kits")); - } - - private async Task FetchOfflineKitDescriptorAsync(string? bundleId, CancellationToken cancellationToken) - { - var route = string.IsNullOrWhiteSpace(bundleId) - ? 
"api/offline-kit/bundles/latest" - : $"api/offline-kit/bundles/{Uri.EscapeDataString(bundleId)}"; - - using var request = CreateRequest(HttpMethod.Get, route); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - OfflineKitBundleDescriptorTransport? payload; - try - { - payload = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse offline kit metadata. {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - if (payload is null) - { - throw new InvalidOperationException("Offline kit metadata response was empty."); - } - - return MapOfflineKitDescriptor(payload); - } - - private OfflineKitBundleDescriptor MapOfflineKitDescriptor(OfflineKitBundleDescriptorTransport transport) - { - if (transport is null) - { - throw new ArgumentNullException(nameof(transport)); - } - - var bundleName = string.IsNullOrWhiteSpace(transport.BundleName) - ? throw new InvalidOperationException("Offline kit metadata missing bundleName.") - : transport.BundleName!.Trim(); - - var bundleId = string.IsNullOrWhiteSpace(transport.BundleId) ? bundleName : transport.BundleId!.Trim(); - var bundleSha = NormalizeSha(transport.BundleSha256) ?? 
throw new InvalidOperationException("Offline kit metadata missing bundleSha256."); - - var bundleSize = transport.BundleSize; - if (bundleSize <= 0) - { - throw new InvalidOperationException("Offline kit metadata missing bundle size."); - } - - var manifestName = string.IsNullOrWhiteSpace(transport.ManifestName) ? "offline-manifest.json" : transport.ManifestName!.Trim(); - var manifestSha = NormalizeSha(transport.ManifestSha256) ?? throw new InvalidOperationException("Offline kit metadata missing manifestSha256."); - var capturedAt = transport.CapturedAt?.ToUniversalTime() ?? DateTimeOffset.UtcNow; - - var bundleDownloadUri = ResolveDownloadUri(transport.BundleUrl, transport.BundlePath, bundleName); - var manifestDownloadUri = ResolveDownloadUri(transport.ManifestUrl, transport.ManifestPath, manifestName); - var bundleSignatureUri = ResolveOptionalDownloadUri(transport.BundleSignatureUrl, transport.BundleSignaturePath, transport.BundleSignatureName); - var manifestSignatureUri = ResolveOptionalDownloadUri(transport.ManifestSignatureUrl, transport.ManifestSignaturePath, transport.ManifestSignatureName); - var bundleSignatureName = ResolveArtifactName(transport.BundleSignatureName, bundleSignatureUri); - var manifestSignatureName = ResolveArtifactName(transport.ManifestSignatureName, manifestSignatureUri); - - return new OfflineKitBundleDescriptor( - bundleId, - bundleName, - bundleSha, - bundleSize, - bundleDownloadUri, - manifestName, - manifestSha, - manifestDownloadUri, - capturedAt, - NormalizeOptionalString(transport.Channel), - NormalizeOptionalString(transport.Kind), - transport.IsDelta ?? false, - NormalizeOptionalString(transport.BaseBundleId), - bundleSignatureName, - bundleSignatureUri, - manifestSignatureName, - manifestSignatureUri, - transport.ManifestSize); - } - - private static string? ResolveArtifactName(string? explicitName, Uri? 
uri) - { - if (!string.IsNullOrWhiteSpace(explicitName)) - { - return explicitName.Trim(); - } - - if (uri is not null) - { - var name = Path.GetFileName(uri.LocalPath); - return string.IsNullOrWhiteSpace(name) ? null : name; - } - - return null; - } - - private Uri ResolveDownloadUri(string? absoluteOrRelativeUrl, string? relativePath, string fallbackFileName) - { - if (!string.IsNullOrWhiteSpace(absoluteOrRelativeUrl)) - { - var candidate = new Uri(absoluteOrRelativeUrl, UriKind.RelativeOrAbsolute); - if (candidate.IsAbsoluteUri) - { - return candidate; - } - - if (_httpClient.BaseAddress is not null) - { - return new Uri(_httpClient.BaseAddress, candidate); - } - - return BuildUriFromRelative(candidate.ToString()); - } - - if (!string.IsNullOrWhiteSpace(relativePath)) - { - return BuildUriFromRelative(relativePath); - } - - if (!string.IsNullOrWhiteSpace(fallbackFileName)) - { - return BuildUriFromRelative(fallbackFileName); - } - - throw new InvalidOperationException("Offline kit metadata did not include a download URL."); - } - - private Uri BuildUriFromRelative(string relative) - { - var normalized = relative.TrimStart('/'); - if (!string.IsNullOrWhiteSpace(_options.Offline?.MirrorUrl) && - Uri.TryCreate(_options.Offline.MirrorUrl, UriKind.Absolute, out var mirrorBase)) - { - if (!mirrorBase.AbsoluteUri.EndsWith("/")) - { - mirrorBase = new Uri(mirrorBase.AbsoluteUri + "/"); - } - - return new Uri(mirrorBase, normalized); - } - - if (_httpClient.BaseAddress is not null) - { - return new Uri(_httpClient.BaseAddress, normalized); - } - - throw new InvalidOperationException($"Cannot resolve offline kit URI for '{relative}' because no mirror or backend base address is configured."); - } - - private Uri? ResolveOptionalDownloadUri(string? absoluteOrRelativeUrl, string? relativePath, string? 
fallbackName) - { - var hasData = !string.IsNullOrWhiteSpace(absoluteOrRelativeUrl) || - !string.IsNullOrWhiteSpace(relativePath) || - !string.IsNullOrWhiteSpace(fallbackName); - - if (!hasData) - { - return null; - } - - try - { - return ResolveDownloadUri(absoluteOrRelativeUrl, relativePath, fallbackName ?? string.Empty); - } - catch - { - return null; - } - } - - private async Task DownloadFileWithResumeAsync(Uri downloadUri, string targetPath, string expectedSha256, long expectedSize, bool resume, CancellationToken cancellationToken) - { - var directory = Path.GetDirectoryName(targetPath); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - var partialPath = resume ? targetPath + ".partial" : targetPath + ".tmp"; - - if (!resume && File.Exists(targetPath)) - { - File.Delete(targetPath); - } - - if (resume && File.Exists(targetPath)) - { - File.Move(targetPath, partialPath, overwrite: true); - } - - long existingLength = 0; - if (resume && File.Exists(partialPath)) - { - existingLength = new FileInfo(partialPath).Length; - if (expectedSize > 0 && existingLength >= expectedSize) - { - existingLength = expectedSize; - } - } - - while (true) - { - using var request = new HttpRequestMessage(HttpMethod.Get, downloadUri); - if (resume && existingLength > 0 && expectedSize > 0 && existingLength < expectedSize) - { - request.Headers.Range = new RangeHeaderValue(existingLength, null); - } - - using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); - - if (resume && existingLength > 0 && expectedSize > 0 && existingLength < expectedSize && response.StatusCode == HttpStatusCode.OK) - { - existingLength = 0; - if (File.Exists(partialPath)) - { - File.Delete(partialPath); - } - - continue; - } - - if (!response.IsSuccessStatusCode && - !(resume && existingLength > 0 && response.StatusCode == HttpStatusCode.PartialContent)) - { - var failure = await 
CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - var destination = resume ? partialPath : targetPath; - var mode = resume && existingLength > 0 ? FileMode.Append : FileMode.Create; - - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); - await using (var file = new FileStream(destination, mode, FileAccess.Write, FileShare.None, 81920, useAsync: true)) - { - await stream.CopyToAsync(file, cancellationToken).ConfigureAwait(false); - } - - break; - } - - if (resume && File.Exists(partialPath)) - { - File.Move(partialPath, targetPath, overwrite: true); - } - - var digest = await ComputeSha256Async(targetPath, cancellationToken).ConfigureAwait(false); - if (!string.Equals(digest, expectedSha256, StringComparison.OrdinalIgnoreCase)) - { - File.Delete(targetPath); - throw new InvalidOperationException($"Digest mismatch for {Path.GetFileName(targetPath)}. Expected {expectedSha256} but computed {digest}."); - } - - if (expectedSize > 0) - { - var actualSize = new FileInfo(targetPath).Length; - if (actualSize != expectedSize) - { - File.Delete(targetPath); - throw new InvalidOperationException($"Size mismatch for {Path.GetFileName(targetPath)}. 
Expected {expectedSize:N0} bytes but downloaded {actualSize:N0} bytes."); - } - } - } - - private async Task DownloadAuxiliaryFileAsync(Uri downloadUri, string targetPath, CancellationToken cancellationToken) - { - var directory = Path.GetDirectoryName(targetPath); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - using var request = new HttpRequestMessage(HttpMethod.Get, downloadUri); - using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); - await using var file = new FileStream(targetPath, FileMode.Create, FileAccess.Write, FileShare.None, 81920, useAsync: true); - await stream.CopyToAsync(file, cancellationToken).ConfigureAwait(false); - } - - private static async Task WriteOfflineKitMetadataAsync( - string metadataPath, - OfflineKitBundleDescriptor descriptor, - string bundlePath, - string manifestPath, - string? bundleSignaturePath, - string? 
manifestSignaturePath, - CancellationToken cancellationToken) - { - var document = new OfflineKitMetadataDocument - { - BundleId = descriptor.BundleId, - BundleName = descriptor.BundleName, - BundleSha256 = descriptor.BundleSha256, - BundleSize = descriptor.BundleSize, - BundlePath = Path.GetFullPath(bundlePath), - CapturedAt = descriptor.CapturedAt, - DownloadedAt = DateTimeOffset.UtcNow, - Channel = descriptor.Channel, - Kind = descriptor.Kind, - IsDelta = descriptor.IsDelta, - BaseBundleId = descriptor.BaseBundleId, - ManifestName = descriptor.ManifestName, - ManifestSha256 = descriptor.ManifestSha256, - ManifestSize = descriptor.ManifestSize, - ManifestPath = Path.GetFullPath(manifestPath), - BundleSignaturePath = bundleSignaturePath is null ? null : Path.GetFullPath(bundleSignaturePath), - ManifestSignaturePath = manifestSignaturePath is null ? null : Path.GetFullPath(manifestSignaturePath) - }; - - var options = new JsonSerializerOptions(SerializerOptions) - { - WriteIndented = true - }; - - var payload = JsonSerializer.Serialize(document, options); - await File.WriteAllTextAsync(metadataPath, payload, cancellationToken).ConfigureAwait(false); - } - - private static IReadOnlyList MapOfflineComponents(List? transports) - { - if (transports is null || transports.Count == 0) - { - return Array.Empty(); - } - - var list = new List(); - foreach (var transport in transports) - { - if (transport is null || string.IsNullOrWhiteSpace(transport.Name)) - { - continue; - } - - list.Add(new OfflineKitComponentStatus( - transport.Name.Trim(), - NormalizeOptionalString(transport.Version), - NormalizeSha(transport.Digest), - transport.CapturedAt?.ToUniversalTime(), - transport.SizeBytes)); - } - - return list.Count == 0 ? Array.Empty() : list; - } - - private static string? NormalizeSha(string? 
digest) - { - if (string.IsNullOrWhiteSpace(digest)) - { - return null; - } - - var value = digest.Trim(); - if (value.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) - { - value = value.Substring("sha256:".Length); - } - - return value.ToLowerInvariant(); - } - - private sealed class OfflineKitImportMetadataPayload - { - public string? BundleId { get; set; } - - public string BundleSha256 { get; set; } = string.Empty; - - public long BundleSize { get; set; } - - public DateTimeOffset? CapturedAt { get; set; } - - public string? Channel { get; set; } - - public string? Kind { get; set; } - - public bool? IsDelta { get; set; } - - public string? BaseBundleId { get; set; } - - public string? ManifestSha256 { get; set; } - - public long? ManifestSize { get; set; } - } - - private static List NormalizeImages(IReadOnlyList images) - { - var normalized = new List(); - if (images is null) - { - return normalized; - } - - var seen = new HashSet(StringComparer.Ordinal); - foreach (var entry in images) - { - if (string.IsNullOrWhiteSpace(entry)) - { - continue; - } - - var trimmed = entry.Trim(); - if (seen.Add(trimmed)) - { - normalized.Add(trimmed); - } - } - - return normalized; - } - - private static IReadOnlyList ExtractReasons(List? reasons) - { - if (reasons is null || reasons.Count == 0) - { - return Array.Empty(); - } - - var list = new List(); - foreach (var reason in reasons) - { - if (!string.IsNullOrWhiteSpace(reason)) - { - list.Add(reason.Trim()); - } - } - - return list.Count == 0 ? Array.Empty() : list; - } - - private static IReadOnlyDictionary ExtractExtensionMetadata(Dictionary? 
extensionData) - { - if (extensionData is null || extensionData.Count == 0) - { - return EmptyMetadata; - } - - var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase); - foreach (var kvp in extensionData) - { - var value = ConvertJsonElementToObject(kvp.Value); - if (value is not null) - { - metadata[kvp.Key] = value; - } - } - - if (metadata.Count == 0) - { - return EmptyMetadata; - } - - return new ReadOnlyDictionary(metadata); - } - - private static object? ConvertJsonElementToObject(JsonElement element) - { - return element.ValueKind switch - { - JsonValueKind.String => element.GetString(), - JsonValueKind.True => true, - JsonValueKind.False => false, - JsonValueKind.Number when element.TryGetInt64(out var integer) => integer, - JsonValueKind.Number when element.TryGetDouble(out var @double) => @double, - JsonValueKind.Null or JsonValueKind.Undefined => null, - _ => element.GetRawText() - }; - } - - private static string? NormalizeOptionalString(string? value) - { - return string.IsNullOrWhiteSpace(value) ? null : value.Trim(); - } - - private HttpRequestMessage CreateRequest(HttpMethod method, string relativeUri) - { - if (!Uri.TryCreate(relativeUri, UriKind.RelativeOrAbsolute, out var requestUri)) - { - throw new InvalidOperationException($"Invalid request URI '{relativeUri}'."); - } - - if (requestUri.IsAbsoluteUri) - { - // Nothing to normalize. - } - else - { - requestUri = new Uri(relativeUri.TrimStart('/'), UriKind.Relative); - } - - return new HttpRequestMessage(method, requestUri); - } - - private async Task AuthorizeRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - var token = await ResolveAccessTokenAsync(cancellationToken).ConfigureAwait(false); - if (!string.IsNullOrWhiteSpace(token)) - { - request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token); - } - } - - private IReadOnlyDictionary? ResolveOperatorMetadataIfNeeded(string? 
scope) - { - if (string.IsNullOrWhiteSpace(scope) || !scope.Contains("orch:operate", StringComparison.OrdinalIgnoreCase)) - { - return null; - } - - var reason = _options.Authority.OperatorReason?.Trim(); - var ticket = _options.Authority.OperatorTicket?.Trim(); - - if (string.IsNullOrWhiteSpace(reason) || string.IsNullOrWhiteSpace(ticket)) - { - throw new InvalidOperationException("Authority.OperatorReason and Authority.OperatorTicket must be configured when requesting orch:operate tokens. Set STELLAOPS_ORCH_REASON and STELLAOPS_ORCH_TICKET or the corresponding configuration values."); - } - - return new Dictionary(StringComparer.Ordinal) - { - [OperatorReasonParameterName] = reason, - [OperatorTicketParameterName] = ticket - }; - } - - private async Task ResolveAccessTokenAsync(CancellationToken cancellationToken) - { - if (!string.IsNullOrWhiteSpace(_options.ApiKey)) - { - return _options.ApiKey; - } - - if (_tokenClient is null || string.IsNullOrWhiteSpace(_options.Authority.Url)) - { - return null; - } - - var now = DateTimeOffset.UtcNow; - - lock (_tokenSync) - { - if (!string.IsNullOrEmpty(_cachedAccessToken) && now < _cachedAccessTokenExpiresAt - TokenRefreshSkew) - { - return _cachedAccessToken; - } - } - - var cacheKey = AuthorityTokenUtilities.BuildCacheKey(_options); - var cachedEntry = await _tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - if (cachedEntry is not null && now < cachedEntry.ExpiresAtUtc - TokenRefreshSkew) - { - lock (_tokenSync) - { - _cachedAccessToken = cachedEntry.AccessToken; - _cachedAccessTokenExpiresAt = cachedEntry.ExpiresAtUtc; - return _cachedAccessToken; - } - } - - var scope = AuthorityTokenUtilities.ResolveScope(_options); - var operatorMetadata = ResolveOperatorMetadataIfNeeded(scope); - - StellaOpsTokenResult token; - if (!string.IsNullOrWhiteSpace(_options.Authority.Username)) - { - if (string.IsNullOrWhiteSpace(_options.Authority.Password)) - { - throw new 
InvalidOperationException("Authority password must be configured when username is provided."); - } - - token = await _tokenClient.RequestPasswordTokenAsync( - _options.Authority.Username, - _options.Authority.Password!, - scope, - null, - cancellationToken).ConfigureAwait(false); - } - else - { - token = await _tokenClient.RequestClientCredentialsTokenAsync(scope, operatorMetadata, cancellationToken).ConfigureAwait(false); - } - - await _tokenClient.CacheTokenAsync(cacheKey, token.ToCacheEntry(), cancellationToken).ConfigureAwait(false); - - lock (_tokenSync) - { - _cachedAccessToken = token.AccessToken; - _cachedAccessTokenExpiresAt = token.ExpiresAtUtc; - return _cachedAccessToken; - } - } - - private async Task<(string Message, JsonElement? Payload)> ExtractExcititorResponseAsync(HttpResponseMessage response, CancellationToken cancellationToken) - { - if (response.Content is null || response.Content.Headers.ContentLength is 0) - { - return ($"HTTP {(int)response.StatusCode}", null); - } - - try - { - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); - if (stream is null || stream.Length == 0) - { - return ($"HTTP {(int)response.StatusCode}", null); - } - - using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); - var root = document.RootElement.Clone(); - string? message = null; - if (root.ValueKind == JsonValueKind.Object) - { - message = GetStringProperty(root, "message") ?? GetStringProperty(root, "status"); - } - - if (string.IsNullOrWhiteSpace(message)) - { - message = root.ValueKind == JsonValueKind.Object || root.ValueKind == JsonValueKind.Array - ? root.ToString() - : root.GetRawText(); - } - - return (message ?? $"HTTP {(int)response.StatusCode}", root); - } - catch (JsonException) - { - var text = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - return (string.IsNullOrWhiteSpace(text) ? 
$"HTTP {(int)response.StatusCode}" : text.Trim(), null); - } - } - - private static bool TryGetPropertyCaseInsensitive(JsonElement element, string propertyName, out JsonElement property) - { - if (element.ValueKind == JsonValueKind.Object && element.TryGetProperty(propertyName, out property)) - { - return true; - } - - if (element.ValueKind == JsonValueKind.Object) - { - foreach (var candidate in element.EnumerateObject()) - { - if (string.Equals(candidate.Name, propertyName, StringComparison.OrdinalIgnoreCase)) - { - property = candidate.Value; - return true; - } - } - } - - property = default; - return false; - } - - private static string? GetStringProperty(JsonElement element, string propertyName) - { - if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) - { - if (property.ValueKind == JsonValueKind.String) - { - return property.GetString(); - } - } - - return null; - } - - private static bool GetBooleanProperty(JsonElement element, string propertyName, bool defaultValue) - { - if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) - { - return property.ValueKind switch - { - JsonValueKind.True => true, - JsonValueKind.False => false, - JsonValueKind.String when bool.TryParse(property.GetString(), out var parsed) => parsed, - _ => defaultValue - }; - } - - return defaultValue; - } - - private static DateTimeOffset? GetDateTimeOffsetProperty(JsonElement element, string propertyName) - { - if (TryGetPropertyCaseInsensitive(element, propertyName, out var property) && property.ValueKind == JsonValueKind.String) - { - if (DateTimeOffset.TryParse(property.GetString(), CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed)) - { - return parsed.ToUniversalTime(); - } - } - - return null; - } - - private static JsonElement SerializeEnvironmentValue(object? 
value) - { - if (value is JsonElement element) - { - return element; - } - - return JsonSerializer.SerializeToElement(value, SerializerOptions); - } - - private static string? ExtractProblemErrorCode(ProblemDocument? problem) - { - if (problem?.Extensions is null || problem.Extensions.Count == 0) - { - return null; - } - - if (problem.Extensions.TryGetValue("code", out var value)) - { - switch (value) - { - case string code when !string.IsNullOrWhiteSpace(code): - return code; - case JsonElement element when element.ValueKind == JsonValueKind.String: - var text = element.GetString(); - return string.IsNullOrWhiteSpace(text) ? null : text; - } - } - - return null; - } - - private static string BuildPolicyFindingsQueryString(PolicyFindingsQuery query) - { - var parameters = new List(); - - if (query.SbomIds is not null) - { - foreach (var sbom in query.SbomIds) - { - if (!string.IsNullOrWhiteSpace(sbom)) - { - parameters.Add($"sbomId={Uri.EscapeDataString(sbom)}"); - } - } - } - - if (query.Statuses is not null && query.Statuses.Count > 0) - { - var joined = string.Join(",", query.Statuses.Where(s => !string.IsNullOrWhiteSpace(s))); - if (!string.IsNullOrWhiteSpace(joined)) - { - parameters.Add($"status={Uri.EscapeDataString(joined)}"); - } - } - - if (query.Severities is not null && query.Severities.Count > 0) - { - var joined = string.Join(",", query.Severities.Where(s => !string.IsNullOrWhiteSpace(s))); - if (!string.IsNullOrWhiteSpace(joined)) - { - parameters.Add($"severity={Uri.EscapeDataString(joined)}"); - } - } - - if (!string.IsNullOrWhiteSpace(query.Cursor)) - { - parameters.Add($"cursor={Uri.EscapeDataString(query.Cursor)}"); - } - - if (query.Page.HasValue) - { - parameters.Add($"page={query.Page.Value}"); - } - - if (query.PageSize.HasValue) - { - parameters.Add($"pageSize={query.PageSize.Value}"); - } - - if (query.Since.HasValue) - { - var value = query.Since.Value.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture); - 
parameters.Add($"since={Uri.EscapeDataString(value)}"); - } - - if (parameters.Count == 0) - { - return string.Empty; - } - - return "?" + string.Join("&", parameters); - } - - private static PolicyFindingsPage MapPolicyFindings(PolicyFindingsResponseDocument document) - { - var items = document.Items is null - ? new List(capacity: 0) - : document.Items - .Where(item => item is not null) - .Select(item => MapPolicyFinding(item!)) - .ToList(); - - var nextCursor = string.IsNullOrWhiteSpace(document.NextCursor) ? null : document.NextCursor; - var view = new ReadOnlyCollection(items); - return new PolicyFindingsPage(view, nextCursor, document.TotalCount); - } - - private static PolicyFindingDocument MapPolicyFinding(PolicyFindingDocumentDocument document) - { - var findingId = document.FindingId; - if (string.IsNullOrWhiteSpace(findingId)) - { - throw new InvalidOperationException("Policy finding response missing findingId."); - } - - var status = string.IsNullOrWhiteSpace(document.Status) ? "unknown" : document.Status!; - var severityNormalized = document.Severity?.Normalized; - if (string.IsNullOrWhiteSpace(severityNormalized)) - { - severityNormalized = "unknown"; - } - - var severity = new PolicyFindingSeverity(severityNormalized!, document.Severity?.Score); - - var sbomId = string.IsNullOrWhiteSpace(document.SbomId) ? "(unknown)" : document.SbomId!; - - IReadOnlyList advisoryIds; - if (document.AdvisoryIds is null || document.AdvisoryIds.Count == 0) - { - advisoryIds = Array.Empty(); - } - else - { - advisoryIds = document.AdvisoryIds - .Where(id => !string.IsNullOrWhiteSpace(id)) - .ToArray(); - } - - PolicyFindingVexMetadata? vex = null; - if (document.Vex is not null) - { - if (!string.IsNullOrWhiteSpace(document.Vex.WinningStatementId) - || !string.IsNullOrWhiteSpace(document.Vex.Source) - || !string.IsNullOrWhiteSpace(document.Vex.Status)) - { - vex = new PolicyFindingVexMetadata( - string.IsNullOrWhiteSpace(document.Vex.WinningStatementId) ? 
null : document.Vex.WinningStatementId, - string.IsNullOrWhiteSpace(document.Vex.Source) ? null : document.Vex.Source, - string.IsNullOrWhiteSpace(document.Vex.Status) ? null : document.Vex.Status); - } - } - - var updatedAt = document.UpdatedAt ?? DateTimeOffset.MinValue; - - return new PolicyFindingDocument( - findingId, - status, - severity, - sbomId, - advisoryIds, - vex, - document.PolicyVersion ?? 0, - updatedAt, - string.IsNullOrWhiteSpace(document.RunId) ? null : document.RunId); - } - - private static PolicyFindingExplainResult MapPolicyFindingExplain(PolicyFindingExplainResponseDocument document) - { - var findingId = document.FindingId; - if (string.IsNullOrWhiteSpace(findingId)) - { - throw new InvalidOperationException("Policy finding explain response missing findingId."); - } - - var steps = document.Steps is null - ? new List(capacity: 0) - : document.Steps - .Where(step => step is not null) - .Select(step => MapPolicyFindingExplainStep(step!)) - .ToList(); - - var hints = document.SealedHints is null - ? new List(capacity: 0) - : document.SealedHints - .Where(hint => hint is not null && !string.IsNullOrWhiteSpace(hint!.Message)) - .Select(hint => new PolicyFindingExplainHint(hint!.Message!.Trim())) - .ToList(); - - return new PolicyFindingExplainResult( - findingId, - document.PolicyVersion ?? 0, - new ReadOnlyCollection(steps), - new ReadOnlyCollection(hints)); - } - - private static PolicyFindingExplainStep MapPolicyFindingExplainStep(PolicyFindingExplainStepDocument document) - { - var rule = string.IsNullOrWhiteSpace(document.Rule) ? "(unknown)" : document.Rule!; - var status = string.IsNullOrWhiteSpace(document.Status) ? null : document.Status; - var action = string.IsNullOrWhiteSpace(document.Action) ? null : document.Action; - - IReadOnlyDictionary inputs = document.Inputs is null - ? 
new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)) - : new ReadOnlyDictionary(document.Inputs - .Where(kvp => !string.IsNullOrWhiteSpace(kvp.Key)) - .ToDictionary( - kvp => kvp.Key, - kvp => ConvertJsonElementToString(kvp.Value), - StringComparer.Ordinal)); - - IReadOnlyDictionary? evidence = null; - if (document.Evidence is not null && document.Evidence.Count > 0) - { - var evidenceDict = document.Evidence - .Where(kvp => !string.IsNullOrWhiteSpace(kvp.Key)) - .ToDictionary( - kvp => kvp.Key, - kvp => ConvertJsonElementToString(kvp.Value), - StringComparer.Ordinal); - - evidence = new ReadOnlyDictionary(evidenceDict); - } - - return new PolicyFindingExplainStep( - rule, - status, - action, - document.Score, - inputs, - evidence); - } - - private static string ConvertJsonElementToString(JsonElement element) - { - return element.ValueKind switch - { - JsonValueKind.String => element.GetString() ?? string.Empty, - JsonValueKind.Number => element.TryGetInt64(out var longValue) - ? 
longValue.ToString(CultureInfo.InvariantCulture) - : element.GetDouble().ToString(CultureInfo.InvariantCulture), - JsonValueKind.True => "true", - JsonValueKind.False => "false", - JsonValueKind.Null => "null", - JsonValueKind.Array => string.Join(", ", element.EnumerateArray().Select(ConvertJsonElementToString)), - JsonValueKind.Object => element.GetRawText(), - _ => element.GetRawText() - }; - } - - private static PolicyActivationResult MapPolicyActivation(PolicyActivationResponseDocument document) - { - if (document.Revision is null) - { - throw new InvalidOperationException("Policy activation response missing revision data."); - } - - var revisionDocument = document.Revision; - if (string.IsNullOrWhiteSpace(revisionDocument.PackId)) - { - throw new InvalidOperationException("Policy activation revision missing policy identifier."); - } - - if (!revisionDocument.Version.HasValue) - { - throw new InvalidOperationException("Policy activation revision missing version number."); - } - - var approvals = new List(); - if (revisionDocument.Approvals is not null) - { - foreach (var approval in revisionDocument.Approvals) - { - if (approval is null || string.IsNullOrWhiteSpace(approval.ActorId) || !approval.ApprovedAt.HasValue) - { - continue; - } - - approvals.Add(new PolicyActivationApproval( - approval.ActorId, - approval.ApprovedAt.Value.ToUniversalTime(), - NormalizeOptionalString(approval.Comment))); - } - } - - var revision = new PolicyActivationRevision( - revisionDocument.PackId, - revisionDocument.Version.Value, - NormalizeOptionalString(revisionDocument.Status) ?? "unknown", - revisionDocument.RequiresTwoPersonApproval ?? false, - revisionDocument.CreatedAt?.ToUniversalTime() ?? DateTimeOffset.MinValue, - revisionDocument.ActivatedAt?.ToUniversalTime(), - new ReadOnlyCollection(approvals)); - - return new PolicyActivationResult( - NormalizeOptionalString(document.Status) ?? 
"unknown", - revision); - } - - private static PolicySimulationResult MapPolicySimulation(PolicySimulationResponseDocument document) - { - var diffDocument = document.Diff ?? throw new InvalidOperationException("Policy simulation response missing diff summary."); - - var severity = diffDocument.BySeverity is null - ? new Dictionary(0, StringComparer.Ordinal) - : diffDocument.BySeverity - .Where(kvp => !string.IsNullOrWhiteSpace(kvp.Key) && kvp.Value is not null) - .ToDictionary( - kvp => kvp.Key, - kvp => new PolicySimulationSeverityDelta(kvp.Value!.Up, kvp.Value.Down), - StringComparer.Ordinal); - - var severityView = new ReadOnlyDictionary(severity); - - var ruleHits = diffDocument.RuleHits is null - ? new List() - : diffDocument.RuleHits - .Where(hit => hit is not null) - .Select(hit => new PolicySimulationRuleDelta( - hit!.RuleId ?? string.Empty, - hit.RuleName ?? string.Empty, - hit.Up, - hit.Down)) - .ToList(); - - var ruleHitsView = ruleHits.AsReadOnly(); - - var diff = new PolicySimulationDiff( - string.IsNullOrWhiteSpace(diffDocument.SchemaVersion) ? null : diffDocument.SchemaVersion, - diffDocument.Added ?? 0, - diffDocument.Removed ?? 0, - diffDocument.Unchanged ?? 0, - severityView, - ruleHitsView); - - return new PolicySimulationResult( - diff, - string.IsNullOrWhiteSpace(document.ExplainUri) ? null : document.ExplainUri); - } - - private void EnsureBackendConfigured() - { - if (_httpClient.BaseAddress is null) - { - throw new InvalidOperationException("Backend URL is not configured. Provide STELLAOPS_BACKEND_URL or configure appsettings."); - } - } - - private string ResolveArtifactPath(string outputPath, string channel) - { - if (!string.IsNullOrWhiteSpace(outputPath)) - { - return Path.GetFullPath(outputPath); - } - - var directory = string.IsNullOrWhiteSpace(_options.ScannerCacheDirectory) - ? 
Directory.GetCurrentDirectory() - : Path.GetFullPath(_options.ScannerCacheDirectory); - - Directory.CreateDirectory(directory); - var fileName = $"stellaops-scanner-{channel}.tar.gz"; - return Path.Combine(directory, fileName); - } - - private async Task CreateFailureMessageAsync(HttpResponseMessage response, CancellationToken cancellationToken) - { - var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); - return message; - } - - private async Task<(string Message, ProblemDocument? Problem)> CreateFailureDetailsAsync(HttpResponseMessage response, CancellationToken cancellationToken) - { - var statusCode = (int)response.StatusCode; - var builder = new StringBuilder(); - builder.Append("Backend request failed with status "); - builder.Append(statusCode); - builder.Append(' '); - builder.Append(response.ReasonPhrase ?? "Unknown"); - - ProblemDocument? problem = null; - - if (response.Content is not null && response.Content.Headers.ContentLength is > 0) - { - string? raw = null; - try - { - raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - if (!string.IsNullOrWhiteSpace(raw)) - { - problem = JsonSerializer.Deserialize(raw, SerializerOptions); - } - } - catch (JsonException) - { - problem = null; - } - - if (problem is not null) - { - if (!string.IsNullOrWhiteSpace(problem.Title)) - { - builder.AppendLine().Append(problem.Title); - } - - if (!string.IsNullOrWhiteSpace(problem.Detail)) - { - builder.AppendLine().Append(problem.Detail); - } - } - else if (!string.IsNullOrWhiteSpace(raw)) - { - builder.AppendLine().Append(raw); - } - } - - return (builder.ToString(), problem); - } - - private static string? ExtractHeaderValue(HttpResponseHeaders headers, string name) - { - if (headers.TryGetValues(name, out var values)) - { - return values.FirstOrDefault(); - } - - return null; - } - - private static string? NormalizeExpectedDigest(string? 
digest) - { - if (string.IsNullOrWhiteSpace(digest)) - { - return null; - } - - var trimmed = digest.Trim(); - return trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) - ? trimmed[7..] - : trimmed; - } - - private async Task ValidateDigestAsync(string filePath, string? expectedDigest, CancellationToken cancellationToken) - { - string digestHex; - await using (var stream = File.OpenRead(filePath)) - { - var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); - digestHex = Convert.ToHexString(hash).ToLowerInvariant(); - } - - if (!string.IsNullOrWhiteSpace(expectedDigest)) - { - var normalized = NormalizeDigest(expectedDigest); - if (!normalized.Equals(digestHex, StringComparison.OrdinalIgnoreCase)) - { - File.Delete(filePath); - throw new InvalidOperationException($"Scanner digest mismatch. Expected sha256:{normalized}, calculated sha256:{digestHex}."); - } - } - else - { - _logger.LogWarning("Scanner download missing X-StellaOps-Digest header; relying on computed digest only."); - } - - return digestHex; - } - - private static string NormalizeDigest(string digest) - { - if (digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) - { - return digest[7..]; - } - - return digest; - } - - private static async Task ComputeSha256Async(string filePath, CancellationToken cancellationToken) - { - await using var stream = File.OpenRead(filePath); - var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); - return Convert.ToHexString(hash).ToLowerInvariant(); - } - - private async Task ValidateSignatureAsync(string? 
signatureHeader, string digestHex, bool verbose, CancellationToken cancellationToken) - { - if (string.IsNullOrWhiteSpace(_options.ScannerSignaturePublicKeyPath)) - { - if (!string.IsNullOrWhiteSpace(signatureHeader)) - { - _logger.LogDebug("Signature header present but no public key configured; skipping validation."); - } - return; - } - - if (string.IsNullOrWhiteSpace(signatureHeader)) - { - throw new InvalidOperationException("Scanner signature missing while a public key is configured."); - } - - var publicKeyPath = Path.GetFullPath(_options.ScannerSignaturePublicKeyPath); - if (!File.Exists(publicKeyPath)) - { - throw new FileNotFoundException("Scanner signature public key not found.", publicKeyPath); - } - - var signatureBytes = Convert.FromBase64String(signatureHeader); - var digestBytes = Convert.FromHexString(digestHex); - - var pem = await File.ReadAllTextAsync(publicKeyPath, cancellationToken).ConfigureAwait(false); - using var rsa = RSA.Create(); - rsa.ImportFromPem(pem); - - var valid = rsa.VerifyHash(digestBytes, signatureBytes, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - if (!valid) - { - throw new InvalidOperationException("Scanner signature validation failed."); - } - - if (verbose) - { - _logger.LogDebug("Scanner signature validated using key {KeyPath}.", publicKeyPath); - } - } - - private void PersistMetadata(string outputPath, string channel, string digestHex, string? 
signatureHeader, HttpResponseMessage response) - { - var metadata = new - { - channel, - digest = $"sha256:{digestHex}", - signature = signatureHeader, - downloadedAt = DateTimeOffset.UtcNow, - source = response.RequestMessage?.RequestUri?.ToString(), - sizeBytes = new FileInfo(outputPath).Length, - headers = new - { - etag = response.Headers.ETag?.Tag, - lastModified = response.Content.Headers.LastModified, - contentType = response.Content.Headers.ContentType?.ToString() - } - }; - - var metadataPath = outputPath + ".metadata.json"; - var json = JsonSerializer.Serialize(metadata, new JsonSerializerOptions - { - WriteIndented = true - }); - - File.WriteAllText(metadataPath, json); - } - - private static TimeSpan GetRetryDelay(HttpResponseMessage response, int attempt) - { - if (response.Headers.TryGetValues("Retry-After", out var retryValues)) - { - var value = retryValues.FirstOrDefault(); - if (!string.IsNullOrWhiteSpace(value)) - { - if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var seconds) && seconds >= 0) - { - return TimeSpan.FromSeconds(Math.Min(seconds, 300)); - } - - if (DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var when)) - { - var delta = when - DateTimeOffset.UtcNow; - if (delta > TimeSpan.Zero) - { - return delta < TimeSpan.FromMinutes(5) ? 
delta : TimeSpan.FromMinutes(5); - } - } - } - } - - var fallbackSeconds = Math.Min(60, Math.Pow(2, attempt)); - return TimeSpan.FromSeconds(fallbackSeconds); - } -} +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Linq; +using System.Net.Http.Headers; +using System.Net.Http.Json; +using System.Globalization; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.Client; +using StellaOps.Cli.Configuration; +using StellaOps.Cli.Services.Models; +using StellaOps.Cli.Services.Models.Transport; + +namespace StellaOps.Cli.Services; + +internal sealed class BackendOperationsClient : IBackendOperationsClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); + private static readonly TimeSpan TokenRefreshSkew = TimeSpan.FromSeconds(30); + private static readonly IReadOnlyDictionary EmptyMetadata = + new ReadOnlyDictionary(new Dictionary(0, StringComparer.OrdinalIgnoreCase)); + + private const string OperatorReasonParameterName = "operator_reason"; + private const string OperatorTicketParameterName = "operator_ticket"; + + private readonly HttpClient _httpClient; + private readonly StellaOpsCliOptions _options; + private readonly ILogger _logger; + private readonly IStellaOpsTokenClient? _tokenClient; + private readonly object _tokenSync = new(); + private string? _cachedAccessToken; + private DateTimeOffset _cachedAccessTokenExpiresAt = DateTimeOffset.MinValue; + + public BackendOperationsClient(HttpClient httpClient, StellaOpsCliOptions options, ILogger logger, IStellaOpsTokenClient? tokenClient = null) + { + _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + _options = options ?? 
throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _tokenClient = tokenClient; + + if (!string.IsNullOrWhiteSpace(_options.BackendUrl) && httpClient.BaseAddress is null) + { + if (Uri.TryCreate(_options.BackendUrl, UriKind.Absolute, out var baseUri)) + { + httpClient.BaseAddress = baseUri; + } + } + } + + public async Task DownloadScannerAsync(string channel, string outputPath, bool overwrite, bool verbose, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + channel = string.IsNullOrWhiteSpace(channel) ? "stable" : channel.Trim(); + outputPath = ResolveArtifactPath(outputPath, channel); + Directory.CreateDirectory(Path.GetDirectoryName(outputPath)!); + + if (!overwrite && File.Exists(outputPath)) + { + var existing = new FileInfo(outputPath); + _logger.LogInformation("Scanner artifact already cached at {Path} ({Size} bytes).", outputPath, existing.Length); + return new ScannerArtifactResult(outputPath, existing.Length, true); + } + + var attempt = 0; + var maxAttempts = Math.Max(1, _options.ScannerDownloadAttempts); + + while (true) + { + attempt++; + try + { + using var request = CreateRequest(HttpMethod.Get, $"api/scanner/artifacts/{channel}"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + return await ProcessScannerResponseAsync(response, outputPath, channel, verbose, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (attempt < maxAttempts) + { + var backoffSeconds = Math.Pow(2, attempt); + _logger.LogWarning(ex, "Scanner download attempt {Attempt}/{MaxAttempts} failed. 
Retrying in {Delay:F0}s...", attempt, maxAttempts, backoffSeconds);
                await Task.Delay(TimeSpan.FromSeconds(backoffSeconds), cancellationToken).ConfigureAwait(false);
            }
        }
    }

    /// <summary>
    /// Streams the scanner payload to <paramref name="outputPath"/> via a ".tmp" sibling file,
    /// validates the digest/signature response headers, then atomically moves it into place
    /// and persists download metadata.
    /// </summary>
    /// <returns>The downloaded artifact path and size; <c>fromCache</c> is always false here.</returns>
    private async Task<ScannerArtifactResult> ProcessScannerResponseAsync(HttpResponseMessage response, string outputPath, string channel, bool verbose, CancellationToken cancellationToken)
    {
        var tempFile = outputPath + ".tmp";
        await using (var payloadStream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false))
        await using (var fileStream = File.Create(tempFile))
        {
            await payloadStream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false);
        }

        var expectedDigest = ExtractHeaderValue(response.Headers, "X-StellaOps-Digest");
        var signatureHeader = ExtractHeaderValue(response.Headers, "X-StellaOps-Signature");

        // Digest validation happens before the temp file is promoted to the final path.
        var digestHex = await ValidateDigestAsync(tempFile, expectedDigest, cancellationToken).ConfigureAwait(false);
        await ValidateSignatureAsync(signatureHeader, digestHex, verbose, cancellationToken).ConfigureAwait(false);

        if (verbose)
        {
            var signatureNote = string.IsNullOrWhiteSpace(signatureHeader) ? "no signature" : "signature validated";
            _logger.LogDebug("Scanner digest sha256:{Digest} ({SignatureNote}).", digestHex, signatureNote);
        }

        if (File.Exists(outputPath))
        {
            File.Delete(outputPath);
        }

        File.Move(tempFile, outputPath);

        PersistMetadata(outputPath, channel, digestHex, signatureHeader, response);

        var downloaded = new FileInfo(outputPath);
        _logger.LogInformation("Scanner downloaded to {Path} ({Size} bytes).", outputPath, downloaded.Length);

        return new ScannerArtifactResult(outputPath, downloaded.Length, false);
    }

    /// <summary>
    /// Uploads a scan result file as multipart form data, retrying failed attempts up to
    /// <c>ScanUploadAttempts</c> with server-advised or exponential backoff.
    /// </summary>
    /// <exception cref="FileNotFoundException">The result file does not exist.</exception>
    /// <exception cref="InvalidOperationException">All attempts failed with an HTTP error.</exception>
    public async Task UploadScanResultsAsync(string filePath, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        if (!File.Exists(filePath))
        {
            throw new FileNotFoundException("Scan result file not found.", filePath);
        }

        var maxAttempts = Math.Max(1, _options.ScanUploadAttempts);
        var attempt = 0;

        while (true)
        {
            attempt++;
            try
            {
                using var content = new MultipartFormDataContent();
                await using var fileStream = File.OpenRead(filePath);
                var streamContent = new StreamContent(fileStream);
                streamContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
                content.Add(streamContent, "file", Path.GetFileName(filePath));

                using var request = CreateRequest(HttpMethod.Post, "api/scanner/results");
                await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
                request.Content = content;

                using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
                if (response.IsSuccessStatusCode)
                {
                    _logger.LogInformation("Scan results uploaded from {Path}.", filePath);
                    return;
                }

                var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
                if (attempt >= maxAttempts)
                {
                    throw new InvalidOperationException(failure);
                }

                var delay = GetRetryDelay(response, attempt);
                _logger.LogWarning(
                    "Scan upload attempt {Attempt}/{MaxAttempts} failed ({Reason}). Retrying in {Delay:F1}s...",
                    attempt,
                    maxAttempts,
                    failure,
                    delay.TotalSeconds);
                await Task.Delay(delay, cancellationToken).ConfigureAwait(false);
            }
            catch (Exception ex) when (attempt < maxAttempts)
            {
                // Transport-level failure: retry with exponential backoff until attempts run out.
                var delay = TimeSpan.FromSeconds(Math.Pow(2, attempt));
                _logger.LogWarning(
                    ex,
                    "Scan upload attempt {Attempt}/{MaxAttempts} threw an exception. Retrying in {Delay:F1}s...",
                    attempt,
                    maxAttempts,
                    delay.TotalSeconds);
                await Task.Delay(delay, cancellationToken).ConfigureAwait(false);
            }
        }
    }

    /// <summary>
    /// Triggers a backend job of the given kind; returns the accepted run (when the backend
    /// echoes one back) or a failure message.
    /// </summary>
    /// <remarks>Parameter/element types were reconstructed from the mangled source — TODO confirm <c>object?</c> values.</remarks>
    public async Task<JobTriggerResult> TriggerJobAsync(string jobKind, IDictionary<string, object?> parameters, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        if (string.IsNullOrWhiteSpace(jobKind))
        {
            throw new ArgumentException("Job kind must be provided.", nameof(jobKind));
        }

        var requestBody = new JobTriggerRequest
        {
            Trigger = "cli",
            Parameters = parameters is null ? new Dictionary<string, object?>(StringComparer.Ordinal) : new Dictionary<string, object?>(parameters, StringComparer.Ordinal)
        };

        // FIX: dispose the request message like every other endpoint in this client does.
        using var request = CreateRequest(HttpMethod.Post, $"jobs/{jobKind}");
        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
        request.Content = JsonContent.Create(requestBody, options: SerializerOptions);

        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
        if (response.StatusCode == HttpStatusCode.Accepted)
        {
            JobRunResponse? run = null;
            if (response.Content.Headers.ContentLength is > 0)
            {
                try
                {
                    run = await response.Content.ReadFromJsonAsync<JobRunResponse>(SerializerOptions, cancellationToken).ConfigureAwait(false);
                }
                catch (JsonException ex)
                {
                    // A malformed body does not fail the trigger — the job was accepted.
                    _logger.LogWarning(ex, "Failed to deserialize job run response for job kind {Kind}.", jobKind);
                }
            }

            var location = response.Headers.Location?.ToString();
            return new JobTriggerResult(true, "Accepted", location, run);
        }

        var failureMessage = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
        return new JobTriggerResult(false, failureMessage, null, null);
    }

    /// <summary>
    /// Executes an arbitrary Excititor operation against <c>excititor/{route}</c>; the payload is
    /// only attached for non-GET/DELETE methods.
    /// </summary>
    public async Task<ExcititorOperationResult> ExecuteExcititorOperationAsync(string route, HttpMethod method, object? payload, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        if (string.IsNullOrWhiteSpace(route))
        {
            throw new ArgumentException("Route must be provided.", nameof(route));
        }

        var relative = route.TrimStart('/');
        using var request = CreateRequest(method, $"excititor/{relative}");

        if (payload is not null && method != HttpMethod.Get && method != HttpMethod.Delete)
        {
            request.Content = JsonContent.Create(payload, options: SerializerOptions);
        }

        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);

        if (response.IsSuccessStatusCode)
        {
            var (message, payloadElement) = await ExtractExcititorResponseAsync(response, cancellationToken).ConfigureAwait(false);
            var location = response.Headers.Location?.ToString();
            return new ExcititorOperationResult(true, message, location, payloadElement);
        }

        var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
        return new ExcititorOperationResult(false, failure, null, null);
    }

    public async Task<ExcititorExportDownloadResult> DownloadExcititorExportAsync(string exportId, string destinationPath, string?
expectedDigestAlgorithm, string? expectedDigest, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        if (string.IsNullOrWhiteSpace(exportId))
        {
            throw new ArgumentException("Export id must be provided.", nameof(exportId));
        }

        if (string.IsNullOrWhiteSpace(destinationPath))
        {
            throw new ArgumentException("Destination path must be provided.", nameof(destinationPath));
        }

        var fullPath = Path.GetFullPath(destinationPath);
        var directory = Path.GetDirectoryName(fullPath);
        if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var normalizedAlgorithm = string.IsNullOrWhiteSpace(expectedDigestAlgorithm)
            ? null
            : expectedDigestAlgorithm.Trim();
        var normalizedDigest = NormalizeExpectedDigest(expectedDigest);

        // Short-circuit when the file already exists and its sha256 matches the expected digest.
        if (File.Exists(fullPath)
            && string.Equals(normalizedAlgorithm, "sha256", StringComparison.OrdinalIgnoreCase)
            && !string.IsNullOrWhiteSpace(normalizedDigest))
        {
            var existingDigest = await ComputeSha256Async(fullPath, cancellationToken).ConfigureAwait(false);
            if (string.Equals(existingDigest, normalizedDigest, StringComparison.OrdinalIgnoreCase))
            {
                var info = new FileInfo(fullPath);
                _logger.LogDebug("Export {ExportId} already present at {Path}; digest matches.", exportId, fullPath);
                return new ExcititorExportDownloadResult(fullPath, info.Length, true);
            }
        }

        var encodedId = Uri.EscapeDataString(exportId);
        using var request = CreateRequest(HttpMethod.Get, $"excititor/export/{encodedId}/download");
        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);

        var tempPath = fullPath + ".tmp";
        if (File.Exists(tempPath))
        {
            File.Delete(tempPath);
        }

        using (var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false))
        {
            if (!response.IsSuccessStatusCode)
            {
                var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
                throw new InvalidOperationException(failure);
            }

            await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
            await using (var fileStream = File.Create(tempPath))
            {
                await stream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false);
            }
        }

        // Verify the downloaded temp file before promoting it; only sha256 is supported.
        if (!string.IsNullOrWhiteSpace(normalizedAlgorithm) && !string.IsNullOrWhiteSpace(normalizedDigest))
        {
            if (string.Equals(normalizedAlgorithm, "sha256", StringComparison.OrdinalIgnoreCase))
            {
                var computed = await ComputeSha256Async(tempPath, cancellationToken).ConfigureAwait(false);
                if (!string.Equals(computed, normalizedDigest, StringComparison.OrdinalIgnoreCase))
                {
                    File.Delete(tempPath);
                    throw new InvalidOperationException($"Export digest mismatch. Expected sha256:{normalizedDigest}, computed sha256:{computed}.");
                }
            }
            else
            {
                _logger.LogWarning("Export digest verification skipped. Unsupported algorithm {Algorithm}.", normalizedAlgorithm);
            }
        }

        if (File.Exists(fullPath))
        {
            File.Delete(fullPath);
        }

        File.Move(tempPath, fullPath);

        var downloaded = new FileInfo(fullPath);
        return new ExcititorExportDownloadResult(fullPath, downloaded.Length, false);
    }

    /// <summary>
    /// Evaluates runtime policy for a set of image digests and maps the backend verdicts into
    /// per-image decisions.
    /// </summary>
    /// <exception cref="ArgumentException">No image digests were supplied.</exception>
    public async Task<RuntimePolicyEvaluationResult> EvaluateRuntimePolicyAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        if (request is null)
        {
            throw new ArgumentNullException(nameof(request));
        }

        var images = NormalizeImages(request.Images);
        if (images.Count == 0)
        {
            throw new ArgumentException("At least one image digest must be provided.", nameof(request));
        }

        var payload = new RuntimePolicyEvaluationRequestDocument
        {
            Namespace = string.IsNullOrWhiteSpace(request.Namespace) ?
null : request.Namespace.Trim(),
            Images = images
        };

        if (request.Labels.Count > 0)
        {
            // Element types reconstructed (lost in mangling); values default to "" for null labels.
            payload.Labels = new Dictionary<string, string>(StringComparer.Ordinal);
            foreach (var label in request.Labels)
            {
                if (!string.IsNullOrWhiteSpace(label.Key))
                {
                    payload.Labels[label.Key] = label.Value ?? string.Empty;
                }
            }
        }

        using var message = CreateRequest(HttpMethod.Post, "api/scanner/policy/runtime");
        await AuthorizeRequestAsync(message, cancellationToken).ConfigureAwait(false);
        message.Content = JsonContent.Create(payload, options: SerializerOptions);

        using var response = await _httpClient.SendAsync(message, cancellationToken).ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException(failure);
        }

        RuntimePolicyEvaluationResponseDocument? document;
        try
        {
            document = await response.Content.ReadFromJsonAsync<RuntimePolicyEvaluationResponseDocument>(SerializerOptions, cancellationToken).ConfigureAwait(false);
        }
        catch (JsonException ex)
        {
            var raw = response.Content is null ? string.Empty : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            // Preserve the raw payload for diagnostics.
            throw new InvalidOperationException($"Failed to parse runtime policy response. {ex.Message}", ex)
            {
                Data = { ["payload"] = raw }
            };
        }

        if (document is null)
        {
            throw new InvalidOperationException("Runtime policy response was empty.");
        }

        var decisions = new Dictionary<string, RuntimePolicyImageDecision>(StringComparer.Ordinal);
        if (document.Results is not null)
        {
            foreach (var kvp in document.Results)
            {
                var image = kvp.Key;
                var decision = kvp.Value;
                if (string.IsNullOrWhiteSpace(image) || decision is null)
                {
                    continue;
                }

                var verdict = string.IsNullOrWhiteSpace(decision.PolicyVerdict)
                    ? "unknown"
                    : decision.PolicyVerdict!.Trim();

                var reasons = ExtractReasons(decision.Reasons);
                var metadata = ExtractExtensionMetadata(decision.ExtensionData);

                // New-style referrer flag wins; fall back to the legacy SBOM flag.
                var hasSbom = decision.HasSbomReferrers ?? decision.HasSbomLegacy;

                RuntimePolicyRekorReference? rekor = null;
                if (decision.Rekor is not null &&
                    (!string.IsNullOrWhiteSpace(decision.Rekor.Uuid) ||
                     !string.IsNullOrWhiteSpace(decision.Rekor.Url) ||
                     decision.Rekor.Verified.HasValue))
                {
                    rekor = new RuntimePolicyRekorReference(
                        NormalizeOptionalString(decision.Rekor.Uuid),
                        NormalizeOptionalString(decision.Rekor.Url),
                        decision.Rekor.Verified);
                }

                decisions[image] = new RuntimePolicyImageDecision(
                    verdict,
                    decision.Signed,
                    hasSbom,
                    reasons,
                    rekor,
                    metadata);
            }
        }

        var decisionsView = new ReadOnlyDictionary<string, RuntimePolicyImageDecision>(decisions);

        return new RuntimePolicyEvaluationResult(
            document.TtlSeconds ?? 0,
            document.ExpiresAtUtc?.ToUniversalTime(),
            string.IsNullOrWhiteSpace(document.PolicyRevision) ? null : document.PolicyRevision,
            decisionsView);
    }

    /// <summary>
    /// Activates a specific policy revision via the <c>:activate</c> endpoint.
    /// </summary>
    /// <exception cref="PolicyApiException">The backend rejected the activation.</exception>
    /// <remarks>Generic return type reconstructed from <c>MapPolicyActivation</c> — TODO confirm against original source.</remarks>
    public async Task<PolicyActivationResult> ActivatePolicyRevisionAsync(string policyId, int version, PolicyActivationRequest request, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        if (string.IsNullOrWhiteSpace(policyId))
        {
            throw new ArgumentException("Policy identifier must be provided.", nameof(policyId));
        }

        if (version <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(version), "Version must be greater than zero.");
        }

        if (request is null)
        {
            throw new ArgumentNullException(nameof(request));
        }

        var requestDocument = new PolicyActivationRequestDocument
        {
            Comment = NormalizeOptionalString(request.Comment),
            RunNow = request.RunNow ? true : null,
            ScheduledAt = request.ScheduledAt,
            Priority = NormalizeOptionalString(request.Priority),
            Rollback = request.Rollback ?
true : null,
            IncidentId = NormalizeOptionalString(request.IncidentId)
        };

        var encodedPolicyId = Uri.EscapeDataString(policyId.Trim());
        using var httpRequest = CreateRequest(HttpMethod.Post, $"api/policy/policies/{encodedPolicyId}/versions/{version}:activate");
        await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        httpRequest.Content = JsonContent.Create(requestDocument, options: SerializerOptions);

        using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
            var errorCode = ExtractProblemErrorCode(problem);
            throw new PolicyApiException(message, response.StatusCode, errorCode);
        }

        PolicyActivationResponseDocument? responseDocument;
        try
        {
            responseDocument = await response.Content.ReadFromJsonAsync<PolicyActivationResponseDocument>(SerializerOptions, cancellationToken).ConfigureAwait(false);
        }
        catch (JsonException ex)
        {
            var raw = response.Content is null ? string.Empty : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException($"Failed to parse policy activation response: {ex.Message}", ex)
            {
                Data = { ["payload"] = raw }
            };
        }

        if (responseDocument is null)
        {
            throw new InvalidOperationException("Policy activation response was empty.");
        }

        if (string.IsNullOrWhiteSpace(responseDocument.Status))
        {
            throw new InvalidOperationException("Policy activation response missing status.");
        }

        if (responseDocument.Revision is null)
        {
            throw new InvalidOperationException("Policy activation response missing revision.");
        }

        return MapPolicyActivation(responseDocument);
    }

    /// <summary>
    /// Runs a policy simulation (optionally between two versions, with an SBOM set and
    /// environment overrides) and maps the resulting diff.
    /// </summary>
    /// <remarks>Generic return type reconstructed from <c>MapPolicySimulation</c> — TODO confirm against original source.</remarks>
    public async Task<PolicySimulationResult> SimulatePolicyAsync(string policyId, PolicySimulationInput input, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        if (string.IsNullOrWhiteSpace(policyId))
        {
            throw new ArgumentException("Policy identifier must be provided.", nameof(policyId));
        }

        if (input is null)
        {
            throw new ArgumentNullException(nameof(input));
        }

        var requestDocument = new PolicySimulationRequestDocument
        {
            BaseVersion = input.BaseVersion,
            CandidateVersion = input.CandidateVersion,
            Explain = input.Explain ? true : null
        };

        if (input.SbomSet.Count > 0)
        {
            requestDocument.SbomSet = input.SbomSet;
        }

        if (input.Environment.Count > 0)
        {
            // Value type reconstructed; depends on what SerializeEnvironmentValue returns — TODO confirm.
            var environment = new Dictionary<string, object?>(StringComparer.Ordinal);
            foreach (var pair in input.Environment)
            {
                if (string.IsNullOrWhiteSpace(pair.Key))
                {
                    continue;
                }

                environment[pair.Key] = SerializeEnvironmentValue(pair.Value);
            }

            if (environment.Count > 0)
            {
                requestDocument.Env = environment;
            }
        }

        // FIX: trim before escaping, consistent with the activation/findings endpoints.
        var encodedPolicyId = Uri.EscapeDataString(policyId.Trim());
        using var request = CreateRequest(HttpMethod.Post, $"api/policy/policies/{encodedPolicyId}/simulate");
        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
        request.Content = JsonContent.Create(requestDocument, options: SerializerOptions);

        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
            var errorCode = ExtractProblemErrorCode(problem);
            throw new PolicyApiException(message, response.StatusCode, errorCode);
        }

        if (response.Content is null || response.Content.Headers.ContentLength is 0)
        {
            throw new InvalidOperationException("Policy simulation response was empty.");
        }

        PolicySimulationResponseDocument?
document;
        try
        {
            document = await response.Content.ReadFromJsonAsync<PolicySimulationResponseDocument>(SerializerOptions, cancellationToken).ConfigureAwait(false);
        }
        catch (JsonException ex)
        {
            var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException($"Failed to parse policy simulation response: {ex.Message}", ex)
            {
                Data = { ["payload"] = raw }
            };
        }

        if (document is null)
        {
            throw new InvalidOperationException("Policy simulation response was empty.");
        }

        if (document.Diff is null)
        {
            throw new InvalidOperationException("Policy simulation response missing diff summary.");
        }

        return MapPolicySimulation(document);
    }

    /// <summary>
    /// Lists policy findings for a policy, applying the query's filters/paging via
    /// <c>BuildPolicyFindingsQueryString</c>.
    /// </summary>
    /// <remarks>Generic return type reconstructed from <c>MapPolicyFindings</c> — TODO confirm against original source.</remarks>
    public async Task<PolicyFindingsPage> GetPolicyFindingsAsync(PolicyFindingsQuery query, CancellationToken cancellationToken)
    {
        if (query is null)
        {
            throw new ArgumentNullException(nameof(query));
        }

        EnsureBackendConfigured();

        var policyId = query.PolicyId;
        if (string.IsNullOrWhiteSpace(policyId))
        {
            throw new ArgumentException("Policy identifier must be provided.", nameof(query));
        }

        var encodedPolicyId = Uri.EscapeDataString(policyId.Trim());
        var relative = $"api/policy/findings/{encodedPolicyId}{BuildPolicyFindingsQueryString(query)}";

        using var request = CreateRequest(HttpMethod.Get, relative);
        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);

        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
            var errorCode = ExtractProblemErrorCode(problem);
            throw new PolicyApiException(message, response.StatusCode, errorCode);
        }

        PolicyFindingsResponseDocument? document;
        try
        {
            document = await response.Content.ReadFromJsonAsync<PolicyFindingsResponseDocument>(SerializerOptions, cancellationToken).ConfigureAwait(false);
        }
        catch (JsonException ex)
        {
            var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException($"Failed to parse policy findings response: {ex.Message}", ex)
            {
                Data = { ["payload"] = raw }
            };
        }

        if (document is null)
        {
            throw new InvalidOperationException("Policy findings response was empty.");
        }

        return MapPolicyFindings(document);
    }

    /// <summary>
    /// Fetches a single policy finding by identifier.
    /// </summary>
    /// <remarks>Generic return type reconstructed from <c>MapPolicyFinding</c> — TODO confirm against original source.</remarks>
    public async Task<PolicyFindingDetail> GetPolicyFindingAsync(string policyId, string findingId, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        if (string.IsNullOrWhiteSpace(policyId))
        {
            throw new ArgumentException("Policy identifier must be provided.", nameof(policyId));
        }

        if (string.IsNullOrWhiteSpace(findingId))
        {
            throw new ArgumentException("Finding identifier must be provided.", nameof(findingId));
        }

        var encodedPolicyId = Uri.EscapeDataString(policyId.Trim());
        var encodedFindingId = Uri.EscapeDataString(findingId.Trim());
        using var request = CreateRequest(HttpMethod.Get, $"api/policy/findings/{encodedPolicyId}/{encodedFindingId}");
        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);

        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
            var errorCode = ExtractProblemErrorCode(problem);
            throw new PolicyApiException(message, response.StatusCode, errorCode);
        }

        PolicyFindingDocumentDocument? document;
        try
        {
            document = await response.Content.ReadFromJsonAsync<PolicyFindingDocumentDocument>(SerializerOptions, cancellationToken).ConfigureAwait(false);
        }
        catch (JsonException ex)
        {
            var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException($"Failed to parse policy finding response: {ex.Message}", ex)
            {
                Data = { ["payload"] = raw }
            };
        }

        if (document is null)
        {
            throw new InvalidOperationException("Policy finding response was empty.");
        }

        return MapPolicyFinding(document);
    }

    /// <summary>
    /// Fetches the explain trace for a policy finding; <paramref name="mode"/> is optional.
    /// </summary>
    /// <remarks>Generic return type reconstructed from <c>MapPolicyFindingExplain</c> — TODO confirm against original source.</remarks>
    public async Task<PolicyFindingExplainResult> GetPolicyFindingExplainAsync(string policyId, string findingId, string? mode, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        if (string.IsNullOrWhiteSpace(policyId))
        {
            throw new ArgumentException("Policy identifier must be provided.", nameof(policyId));
        }

        if (string.IsNullOrWhiteSpace(findingId))
        {
            throw new ArgumentException("Finding identifier must be provided.", nameof(findingId));
        }

        var encodedPolicyId = Uri.EscapeDataString(policyId.Trim());
        var encodedFindingId = Uri.EscapeDataString(findingId.Trim());
        var query = string.IsNullOrWhiteSpace(mode) ? string.Empty : $"?mode={Uri.EscapeDataString(mode.Trim())}";

        using var request = CreateRequest(HttpMethod.Get, $"api/policy/findings/{encodedPolicyId}/{encodedFindingId}/explain{query}");
        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);

        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
            var errorCode = ExtractProblemErrorCode(problem);
            throw new PolicyApiException(message, response.StatusCode, errorCode);
        }

        PolicyFindingExplainResponseDocument? document;
        try
        {
            document = await response.Content.ReadFromJsonAsync<PolicyFindingExplainResponseDocument>(SerializerOptions, cancellationToken).ConfigureAwait(false);
        }
        catch (JsonException ex)
        {
            var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException($"Failed to parse policy finding explain response: {ex.Message}", ex)
            {
                Data = { ["payload"] = raw }
            };
        }

        if (document is null)
        {
            throw new InvalidOperationException("Policy finding explain response was empty.");
        }

        return MapPolicyFindingExplain(document);
    }

    /// <summary>
    /// Retrieves the entry-trace document for a scan; returns <c>null</c> on 404.
    /// </summary>
    /// <remarks>Generic return type reconstructed — TODO confirm against original source.</remarks>
    public async Task<EntryTraceResponse?> GetEntryTraceAsync(string scanId, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        if (string.IsNullOrWhiteSpace(scanId))
        {
            throw new ArgumentException("Scan identifier is required.", nameof(scanId));
        }

        // FIX: escape the route parameter like every other endpoint in this client.
        using var request = CreateRequest(HttpMethod.Get, $"api/scans/{Uri.EscapeDataString(scanId)}/entrytrace");
        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);

        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
        if (response.StatusCode == HttpStatusCode.NotFound)
        {
            return null;
        }

        if (!response.IsSuccessStatusCode)
        {
            var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException(failure);
        }

        var result = await response.Content.ReadFromJsonAsync<EntryTraceResponse>(SerializerOptions, cancellationToken).ConfigureAwait(false);
        if (result is null)
        {
            throw new InvalidOperationException("EntryTrace response payload was empty.");
        }

        return result;
    }

    /// <summary>
    /// Lists Excititor providers, optionally including disabled ones.
    /// </summary>
    public async Task<IReadOnlyList<ExcititorProviderSummary>> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        var query = includeDisabled ?
"?includeDisabled=true" : string.Empty; + using var request = CreateRequest(HttpMethod.Get, $"excititor/providers{query}"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + if (response.Content is null || response.Content.Headers.ContentLength is 0) + { + return Array.Empty(); + } + + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + if (stream is null || stream.Length == 0) + { + return Array.Empty(); + } + + using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); + var root = document.RootElement; + if (root.ValueKind == JsonValueKind.Object && root.TryGetProperty("providers", out var providersProperty)) + { + root = providersProperty; + } + + if (root.ValueKind != JsonValueKind.Array) + { + return Array.Empty(); + } + + var list = new List(); + foreach (var item in root.EnumerateArray()) + { + var id = GetStringProperty(item, "id") ?? string.Empty; + if (string.IsNullOrWhiteSpace(id)) + { + continue; + } + + var kind = GetStringProperty(item, "kind") ?? "unknown"; + var displayName = GetStringProperty(item, "displayName") ?? id; + var trustTier = GetStringProperty(item, "trustTier") ?? string.Empty; + var enabled = GetBooleanProperty(item, "enabled", defaultValue: true); + var lastIngested = GetDateTimeOffsetProperty(item, "lastIngestedAt"); + + list.Add(new ExcititorProviderSummary(id, kind, displayName, trustTier, enabled, lastIngested)); + } + + return list; + } + + public async Task DownloadOfflineKitAsync(string? 
bundleId, string destinationDirectory, bool overwrite, bool resume, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + var rootDirectory = ResolveOfflineDirectory(destinationDirectory); + Directory.CreateDirectory(rootDirectory); + + var descriptor = await FetchOfflineKitDescriptorAsync(bundleId, cancellationToken).ConfigureAwait(false); + + var bundlePath = Path.Combine(rootDirectory, descriptor.BundleName); + var metadataPath = bundlePath + ".metadata.json"; + var manifestPath = Path.Combine(rootDirectory, descriptor.ManifestName); + var bundleSignaturePath = descriptor.BundleSignatureName is not null ? Path.Combine(rootDirectory, descriptor.BundleSignatureName) : null; + var manifestSignaturePath = descriptor.ManifestSignatureName is not null ? Path.Combine(rootDirectory, descriptor.ManifestSignatureName) : null; + + var fromCache = false; + if (!overwrite && File.Exists(bundlePath)) + { + var digest = await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false); + if (string.Equals(digest, descriptor.BundleSha256, StringComparison.OrdinalIgnoreCase)) + { + fromCache = true; + } + else if (resume) + { + var partial = bundlePath + ".partial"; + File.Move(bundlePath, partial, overwrite: true); + } + else + { + File.Delete(bundlePath); + } + } + + if (!fromCache) + { + await DownloadFileWithResumeAsync(descriptor.BundleDownloadUri, bundlePath, descriptor.BundleSha256, descriptor.BundleSize, resume, cancellationToken).ConfigureAwait(false); + } + + await DownloadFileWithResumeAsync(descriptor.ManifestDownloadUri, manifestPath, descriptor.ManifestSha256, descriptor.ManifestSize ?? 
0, resume: false, cancellationToken).ConfigureAwait(false); + + if (descriptor.BundleSignatureDownloadUri is not null && bundleSignaturePath is not null) + { + await DownloadAuxiliaryFileAsync(descriptor.BundleSignatureDownloadUri, bundleSignaturePath, cancellationToken).ConfigureAwait(false); + } + + if (descriptor.ManifestSignatureDownloadUri is not null && manifestSignaturePath is not null) + { + await DownloadAuxiliaryFileAsync(descriptor.ManifestSignatureDownloadUri, manifestSignaturePath, cancellationToken).ConfigureAwait(false); + } + + await WriteOfflineKitMetadataAsync(metadataPath, descriptor, bundlePath, manifestPath, bundleSignaturePath, manifestSignaturePath, cancellationToken).ConfigureAwait(false); + + return new OfflineKitDownloadResult( + descriptor, + bundlePath, + manifestPath, + bundleSignaturePath, + manifestSignaturePath, + metadataPath, + fromCache); + } + + public async Task ImportOfflineKitAsync(OfflineKitImportRequest request, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + var bundlePath = Path.GetFullPath(request.BundlePath); + if (!File.Exists(bundlePath)) + { + throw new FileNotFoundException("Offline kit bundle not found.", bundlePath); + } + + string? manifestPath = null; + if (!string.IsNullOrWhiteSpace(request.ManifestPath)) + { + manifestPath = Path.GetFullPath(request.ManifestPath); + if (!File.Exists(manifestPath)) + { + throw new FileNotFoundException("Offline kit manifest not found.", manifestPath); + } + } + + string? bundleSignaturePath = null; + if (!string.IsNullOrWhiteSpace(request.BundleSignaturePath)) + { + bundleSignaturePath = Path.GetFullPath(request.BundleSignaturePath); + if (!File.Exists(bundleSignaturePath)) + { + throw new FileNotFoundException("Offline kit bundle signature not found.", bundleSignaturePath); + } + } + + string? 
manifestSignaturePath = null; + if (!string.IsNullOrWhiteSpace(request.ManifestSignaturePath)) + { + manifestSignaturePath = Path.GetFullPath(request.ManifestSignaturePath); + if (!File.Exists(manifestSignaturePath)) + { + throw new FileNotFoundException("Offline kit manifest signature not found.", manifestSignaturePath); + } + } + + var bundleSize = request.BundleSize ?? new FileInfo(bundlePath).Length; + var bundleSha = string.IsNullOrWhiteSpace(request.BundleSha256) + ? await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false) + : NormalizeSha(request.BundleSha256) ?? throw new InvalidOperationException("Bundle digest must not be empty."); + + string? manifestSha = null; + long? manifestSize = null; + if (manifestPath is not null) + { + manifestSize = request.ManifestSize ?? new FileInfo(manifestPath).Length; + manifestSha = string.IsNullOrWhiteSpace(request.ManifestSha256) + ? await ComputeSha256Async(manifestPath, cancellationToken).ConfigureAwait(false) + : NormalizeSha(request.ManifestSha256); + } + + var metadata = new OfflineKitImportMetadataPayload + { + BundleId = request.BundleId, + BundleSha256 = bundleSha, + BundleSize = bundleSize, + CapturedAt = request.CapturedAt, + Channel = request.Channel, + Kind = request.Kind, + IsDelta = request.IsDelta, + BaseBundleId = request.BaseBundleId, + ManifestSha256 = manifestSha, + ManifestSize = manifestSize + }; + + using var message = CreateRequest(HttpMethod.Post, "api/offline-kit/import"); + await AuthorizeRequestAsync(message, cancellationToken).ConfigureAwait(false); + + using var content = new MultipartFormDataContent(); + + var metadataOptions = new JsonSerializerOptions(SerializerOptions) + { + WriteIndented = false + }; + var metadataJson = JsonSerializer.Serialize(metadata, metadataOptions); + var metadataContent = new StringContent(metadataJson, Encoding.UTF8, "application/json"); + content.Add(metadataContent, "metadata"); + + var bundleStream = File.OpenRead(bundlePath); + var 
bundleContent = new StreamContent(bundleStream); + bundleContent.Headers.ContentType = new MediaTypeHeaderValue("application/gzip"); + content.Add(bundleContent, "bundle", Path.GetFileName(bundlePath)); + + if (manifestPath is not null) + { + var manifestStream = File.OpenRead(manifestPath); + var manifestContent = new StreamContent(manifestStream); + manifestContent.Headers.ContentType = new MediaTypeHeaderValue("application/json"); + content.Add(manifestContent, "manifest", Path.GetFileName(manifestPath)); + } + + if (bundleSignaturePath is not null) + { + var signatureStream = File.OpenRead(bundleSignaturePath); + var signatureContent = new StreamContent(signatureStream); + signatureContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); + content.Add(signatureContent, "bundleSignature", Path.GetFileName(bundleSignaturePath)); + } + + if (manifestSignaturePath is not null) + { + var manifestSignatureStream = File.OpenRead(manifestSignaturePath); + var manifestSignatureContent = new StreamContent(manifestSignatureStream); + manifestSignatureContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); + content.Add(manifestSignatureContent, "manifestSignature", Path.GetFileName(manifestSignaturePath)); + } + + message.Content = content; + + using var response = await _httpClient.SendAsync(message, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + OfflineKitImportResponseTransport? document; + try + { + document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse offline kit import response. 
{ex.Message}", ex)
            {
                Data = { ["payload"] = raw }
            };
        }

        var submittedAt = document?.SubmittedAt ?? DateTimeOffset.UtcNow;

        return new OfflineKitImportResult(
            document?.ImportId,
            document?.Status,
            submittedAt,
            document?.Message);
    }

    /// <summary>
    /// Fetches the current offline-kit installation status from the backend.
    /// Returns an empty <c>OfflineKitStatus</c> when the backend replies with no body
    /// or with no "current" bundle entry.
    /// </summary>
    /// <exception cref="InvalidOperationException">Backend returned a failure status or an unparsable payload.</exception>
    public async Task<OfflineKitStatus> GetOfflineKitStatusAsync(CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        using var request = CreateRequest(HttpMethod.Get, "api/offline-kit/status");
        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);

        if (!response.IsSuccessStatusCode)
        {
            var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException(failure);
        }

        if (response.Content is null || response.Content.Headers.ContentLength is 0)
        {
            return new OfflineKitStatus(null, null, null, false, null, null, null, null, null, Array.Empty<OfflineKitComponentStatus>());
        }

        OfflineKitStatusTransport? document;
        try
        {
            document = await response.Content.ReadFromJsonAsync<OfflineKitStatusTransport>(SerializerOptions, cancellationToken).ConfigureAwait(false);
        }
        catch (JsonException ex)
        {
            // Attach the raw payload so operators can inspect what the backend actually sent.
            var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException($"Failed to parse offline kit status response. {ex.Message}", ex)
            {
                Data = { ["payload"] = raw }
            };
        }

        var current = document?.Current;
        var components = MapOfflineComponents(document?.Components);

        if (current is null)
        {
            // No bundle installed yet; still surface any per-component status rows.
            return new OfflineKitStatus(null, null, null, false, null, null, null, null, null, components);
        }

        return new OfflineKitStatus(
            NormalizeOptionalString(current.BundleId),
            NormalizeOptionalString(current.Channel),
            NormalizeOptionalString(current.Kind),
            current.IsDelta ?? false,
            NormalizeOptionalString(current.BaseBundleId),
            current.CapturedAt?.ToUniversalTime(),
            current.ImportedAt?.ToUniversalTime(),
            NormalizeSha(current.BundleSha256),
            current.BundleSize,
            components);
    }

    /// <summary>
    /// Submits an Aggregation-of-Concerns ingest dry-run request and returns the backend's evaluation.
    /// An empty (but successful) response body maps to a default <c>AocIngestDryRunResponse</c>.
    /// </summary>
    public async Task<AocIngestDryRunResponse> ExecuteAocIngestDryRunAsync(AocIngestDryRunRequest requestBody, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();
        ArgumentNullException.ThrowIfNull(requestBody);

        using var request = CreateRequest(HttpMethod.Post, "api/aoc/ingest/dry-run");
        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
        request.Content = JsonContent.Create(requestBody, options: SerializerOptions);

        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException(failure);
        }

        try
        {
            var result = await response.Content.ReadFromJsonAsync<AocIngestDryRunResponse>(SerializerOptions, cancellationToken).ConfigureAwait(false);
            return result ?? new AocIngestDryRunResponse();
        }
        catch (JsonException ex)
        {
            var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException($"Failed to parse ingest dry-run response. {ex.Message}", ex)
            {
                Data = { ["payload"] = payload }
            };
        }
    }

    /// <summary>
    /// Submits an AOC verification request; mirrors the dry-run flow above.
    /// </summary>
    public async Task<AocVerifyResponse> ExecuteAocVerifyAsync(AocVerifyRequest requestBody, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();
        ArgumentNullException.ThrowIfNull(requestBody);

        using var request = CreateRequest(HttpMethod.Post, "api/aoc/verify");
        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
        request.Content = JsonContent.Create(requestBody, options: SerializerOptions);

        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException(failure);
        }

        try
        {
            var result = await response.Content.ReadFromJsonAsync<AocVerifyResponse>(SerializerOptions, cancellationToken).ConfigureAwait(false);
            return result ?? new AocVerifyResponse();
        }
        catch (JsonException ex)
        {
            var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException($"Failed to parse AOC verification response. {ex.Message}", ex)
            {
                Data = { ["payload"] = payload }
            };
        }
    }

    // Resolves the directory offline kits are written to: explicit argument first,
    // then the configured Offline.KitsDirectory, then ./offline-kits under the CWD.
    private string ResolveOfflineDirectory(string destinationDirectory)
    {
        if (!string.IsNullOrWhiteSpace(destinationDirectory))
        {
            return Path.GetFullPath(destinationDirectory);
        }

        var configured = _options.Offline?.KitsDirectory;
        if (!string.IsNullOrWhiteSpace(configured))
        {
            return Path.GetFullPath(configured);
        }

        return Path.GetFullPath(Path.Combine(Environment.CurrentDirectory, "offline-kits"));
    }

    // Downloads bundle metadata for a specific bundle id, or the latest bundle when id is omitted.
    private async Task<OfflineKitBundleDescriptor> FetchOfflineKitDescriptorAsync(string? bundleId, CancellationToken cancellationToken)
    {
        var route = string.IsNullOrWhiteSpace(bundleId)
            ?
"api/offline-kit/bundles/latest"
            : $"api/offline-kit/bundles/{Uri.EscapeDataString(bundleId)}";

        using var request = CreateRequest(HttpMethod.Get, route);
        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);

        if (!response.IsSuccessStatusCode)
        {
            var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException(failure);
        }

        OfflineKitBundleDescriptorTransport? payload;
        try
        {
            payload = await response.Content.ReadFromJsonAsync<OfflineKitBundleDescriptorTransport>(SerializerOptions, cancellationToken).ConfigureAwait(false);
        }
        catch (JsonException ex)
        {
            var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException($"Failed to parse offline kit metadata. {ex.Message}", ex)
            {
                Data = { ["payload"] = raw }
            };
        }

        if (payload is null)
        {
            throw new InvalidOperationException("Offline kit metadata response was empty.");
        }

        return MapOfflineKitDescriptor(payload);
    }

    /// <summary>
    /// Validates the transport payload and maps it to an <c>OfflineKitBundleDescriptor</c>.
    /// Bundle name/sha/size and manifest sha are mandatory; signature artefacts are optional.
    /// </summary>
    /// <exception cref="InvalidOperationException">A mandatory metadata field is missing or invalid.</exception>
    private OfflineKitBundleDescriptor MapOfflineKitDescriptor(OfflineKitBundleDescriptorTransport transport)
    {
        ArgumentNullException.ThrowIfNull(transport);

        var bundleName = string.IsNullOrWhiteSpace(transport.BundleName)
            ? throw new InvalidOperationException("Offline kit metadata missing bundleName.")
            : transport.BundleName!.Trim();

        // Fall back to the bundle name when the server omits a dedicated id.
        var bundleId = string.IsNullOrWhiteSpace(transport.BundleId) ? bundleName : transport.BundleId!.Trim();
        var bundleSha = NormalizeSha(transport.BundleSha256) ?? throw new InvalidOperationException("Offline kit metadata missing bundleSha256.");

        var bundleSize = transport.BundleSize;
        if (bundleSize <= 0)
        {
            throw new InvalidOperationException("Offline kit metadata missing bundle size.");
        }

        var manifestName = string.IsNullOrWhiteSpace(transport.ManifestName) ? "offline-manifest.json" : transport.ManifestName!.Trim();
        var manifestSha = NormalizeSha(transport.ManifestSha256) ?? throw new InvalidOperationException("Offline kit metadata missing manifestSha256.");
        var capturedAt = transport.CapturedAt?.ToUniversalTime() ?? DateTimeOffset.UtcNow;

        var bundleDownloadUri = ResolveDownloadUri(transport.BundleUrl, transport.BundlePath, bundleName);
        var manifestDownloadUri = ResolveDownloadUri(transport.ManifestUrl, transport.ManifestPath, manifestName);
        var bundleSignatureUri = ResolveOptionalDownloadUri(transport.BundleSignatureUrl, transport.BundleSignaturePath, transport.BundleSignatureName);
        var manifestSignatureUri = ResolveOptionalDownloadUri(transport.ManifestSignatureUrl, transport.ManifestSignaturePath, transport.ManifestSignatureName);
        var bundleSignatureName = ResolveArtifactName(transport.BundleSignatureName, bundleSignatureUri);
        var manifestSignatureName = ResolveArtifactName(transport.ManifestSignatureName, manifestSignatureUri);

        return new OfflineKitBundleDescriptor(
            bundleId,
            bundleName,
            bundleSha,
            bundleSize,
            bundleDownloadUri,
            manifestName,
            manifestSha,
            manifestDownloadUri,
            capturedAt,
            NormalizeOptionalString(transport.Channel),
            NormalizeOptionalString(transport.Kind),
            transport.IsDelta ?? false,
            NormalizeOptionalString(transport.BaseBundleId),
            bundleSignatureName,
            bundleSignatureUri,
            manifestSignatureName,
            manifestSignatureUri,
            transport.ManifestSize);
    }

    // Prefers the explicitly supplied artefact name; otherwise derives it from the
    // final path segment of the download URI. Returns null when neither yields a name.
    private static string? ResolveArtifactName(string? explicitName, Uri? uri)
    {
        if (!string.IsNullOrWhiteSpace(explicitName))
        {
            return explicitName.Trim();
        }

        if (uri is not null)
        {
            var name = Path.GetFileName(uri.LocalPath);
            return string.IsNullOrWhiteSpace(name) ? null : name;
        }

        return null;
    }

    // Resolution order: absolute URL as-is; relative URL against the HTTP client base
    // address; then relative path / fallback file name against the mirror or base address.
    private Uri ResolveDownloadUri(string? absoluteOrRelativeUrl, string? relativePath, string fallbackFileName)
    {
        if (!string.IsNullOrWhiteSpace(absoluteOrRelativeUrl))
        {
            var candidate = new Uri(absoluteOrRelativeUrl, UriKind.RelativeOrAbsolute);
            if (candidate.IsAbsoluteUri)
            {
                return candidate;
            }

            if (_httpClient.BaseAddress is not null)
            {
                return new Uri(_httpClient.BaseAddress, candidate);
            }

            return BuildUriFromRelative(candidate.ToString());
        }

        if (!string.IsNullOrWhiteSpace(relativePath))
        {
            return BuildUriFromRelative(relativePath);
        }

        if (!string.IsNullOrWhiteSpace(fallbackFileName))
        {
            return BuildUriFromRelative(fallbackFileName);
        }

        throw new InvalidOperationException("Offline kit metadata did not include a download URL.");
    }

    // Combines a relative path with the configured mirror URL (preferred) or the
    // backend base address. Ensures the mirror base ends with '/' so Uri composition
    // does not drop its final path segment.
    private Uri BuildUriFromRelative(string relative)
    {
        var normalized = relative.TrimStart('/');
        if (!string.IsNullOrWhiteSpace(_options.Offline?.MirrorUrl) &&
            Uri.TryCreate(_options.Offline.MirrorUrl, UriKind.Absolute, out var mirrorBase))
        {
            if (!mirrorBase.AbsoluteUri.EndsWith('/'))
            {
                mirrorBase = new Uri(mirrorBase.AbsoluteUri + "/");
            }

            return new Uri(mirrorBase, normalized);
        }

        if (_httpClient.BaseAddress is not null)
        {
            return new Uri(_httpClient.BaseAddress, normalized);
        }

        throw new InvalidOperationException($"Cannot resolve offline kit URI for '{relative}' because no mirror or backend base address is configured.");
    }

    // Like ResolveDownloadUri but for optional artefacts (signatures): returns null
    // instead of throwing when nothing resolvable was provided.
    private Uri? ResolveOptionalDownloadUri(string? absoluteOrRelativeUrl, string? relativePath, string?
fallbackName)
    {
        var hasData = !string.IsNullOrWhiteSpace(absoluteOrRelativeUrl) ||
                      !string.IsNullOrWhiteSpace(relativePath) ||
                      !string.IsNullOrWhiteSpace(fallbackName);

        if (!hasData)
        {
            return null;
        }

        try
        {
            return ResolveDownloadUri(absoluteOrRelativeUrl, relativePath, fallbackName ?? string.Empty);
        }
        catch
        {
            // Signature artefacts are optional; a resolution failure is deliberately non-fatal.
            return null;
        }
    }

    /// <summary>
    /// Downloads <paramref name="downloadUri"/> to <paramref name="targetPath"/>, optionally
    /// resuming a previous partial download via HTTP Range requests, then verifies the
    /// SHA-256 digest and (when known) the byte size of the result.
    /// </summary>
    /// <exception cref="InvalidOperationException">HTTP failure, digest mismatch, or size mismatch.</exception>
    private async Task DownloadFileWithResumeAsync(Uri downloadUri, string targetPath, string expectedSha256, long expectedSize, bool resume, CancellationToken cancellationToken)
    {
        var directory = Path.GetDirectoryName(targetPath);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var partialPath = resume ? targetPath + ".partial" : targetPath + ".tmp";

        if (!resume && File.Exists(targetPath))
        {
            File.Delete(targetPath);
        }

        if (resume && File.Exists(targetPath))
        {
            // Treat an existing final file as resumable state; digest check below re-validates it.
            File.Move(targetPath, partialPath, overwrite: true);
        }

        long existingLength = 0;
        if (resume && File.Exists(partialPath))
        {
            existingLength = new FileInfo(partialPath).Length;
            if (expectedSize > 0 && existingLength >= expectedSize)
            {
                existingLength = expectedSize;
            }
        }

        while (true)
        {
            using var request = new HttpRequestMessage(HttpMethod.Get, downloadUri);
            if (resume && existingLength > 0 && expectedSize > 0 && existingLength < expectedSize)
            {
                request.Headers.Range = new RangeHeaderValue(existingLength, null);
            }

            using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);

            if (resume && existingLength > 0 && expectedSize > 0 && existingLength < expectedSize && response.StatusCode == HttpStatusCode.OK)
            {
                // Server ignored the Range header and sent the whole file: discard the
                // partial and retry once from scratch (existingLength == 0 suppresses Range).
                existingLength = 0;
                if (File.Exists(partialPath))
                {
                    File.Delete(partialPath);
                }

                continue;
            }

            if (!response.IsSuccessStatusCode &&
                !(resume && existingLength > 0 && response.StatusCode == HttpStatusCode.PartialContent))
            {
                var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
                throw new InvalidOperationException(failure);
            }

            var destination = resume ? partialPath : targetPath;
            // NOTE(review): when expectedSize is unknown (<= 0) no Range header is sent but we
            // still append to an existing partial; a corrupted result is caught by the digest
            // check below rather than prevented here — confirm this is the intended trade-off.
            var mode = resume && existingLength > 0 ? FileMode.Append : FileMode.Create;

            await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
            await using (var file = new FileStream(destination, mode, FileAccess.Write, FileShare.None, 81920, useAsync: true))
            {
                await stream.CopyToAsync(file, cancellationToken).ConfigureAwait(false);
            }

            break;
        }

        if (resume && File.Exists(partialPath))
        {
            File.Move(partialPath, targetPath, overwrite: true);
        }

        var digest = await ComputeSha256Async(targetPath, cancellationToken).ConfigureAwait(false);
        if (!string.Equals(digest, expectedSha256, StringComparison.OrdinalIgnoreCase))
        {
            File.Delete(targetPath);
            throw new InvalidOperationException($"Digest mismatch for {Path.GetFileName(targetPath)}. Expected {expectedSha256} but computed {digest}.");
        }

        if (expectedSize > 0)
        {
            var actualSize = new FileInfo(targetPath).Length;
            if (actualSize != expectedSize)
            {
                File.Delete(targetPath);
                throw new InvalidOperationException($"Size mismatch for {Path.GetFileName(targetPath)}. Expected {expectedSize:N0} bytes but downloaded {actualSize:N0} bytes.");
            }
        }
    }

    // Simple one-shot download for auxiliary artefacts (manifests, signatures).
    // NOTE(review): unlike the main API calls this sends no Authorization header —
    // presumably mirror endpoints are public; confirm.
    private async Task DownloadAuxiliaryFileAsync(Uri downloadUri, string targetPath, CancellationToken cancellationToken)
    {
        var directory = Path.GetDirectoryName(targetPath);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        using var request = new HttpRequestMessage(HttpMethod.Get, downloadUri);
        using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);

        if (!response.IsSuccessStatusCode)
        {
            var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException(failure);
        }

        await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
        await using var file = new FileStream(targetPath, FileMode.Create, FileAccess.Write, FileShare.None, 81920, useAsync: true);
        await stream.CopyToAsync(file, cancellationToken).ConfigureAwait(false);
    }

    // Persists a local metadata record (indented JSON) describing the downloaded kit,
    // with all artefact paths made absolute.
    private static async Task WriteOfflineKitMetadataAsync(
        string metadataPath,
        OfflineKitBundleDescriptor descriptor,
        string bundlePath,
        string manifestPath,
        string? bundleSignaturePath,
        string? manifestSignaturePath,
        CancellationToken cancellationToken)
    {
        var document = new OfflineKitMetadataDocument
        {
            BundleId = descriptor.BundleId,
            BundleName = descriptor.BundleName,
            BundleSha256 = descriptor.BundleSha256,
            BundleSize = descriptor.BundleSize,
            BundlePath = Path.GetFullPath(bundlePath),
            CapturedAt = descriptor.CapturedAt,
            DownloadedAt = DateTimeOffset.UtcNow,
            Channel = descriptor.Channel,
            Kind = descriptor.Kind,
            IsDelta = descriptor.IsDelta,
            BaseBundleId = descriptor.BaseBundleId,
            ManifestName = descriptor.ManifestName,
            ManifestSha256 = descriptor.ManifestSha256,
            ManifestSize = descriptor.ManifestSize,
            ManifestPath = Path.GetFullPath(manifestPath),
            BundleSignaturePath = bundleSignaturePath is null ? null : Path.GetFullPath(bundleSignaturePath),
            ManifestSignaturePath = manifestSignaturePath is null ? null : Path.GetFullPath(manifestSignaturePath)
        };

        var options = new JsonSerializerOptions(SerializerOptions)
        {
            WriteIndented = true
        };

        var payload = JsonSerializer.Serialize(document, options);
        await File.WriteAllTextAsync(metadataPath, payload, cancellationToken).ConfigureAwait(false);
    }

    // Maps component transport rows to status models, skipping null/unnamed entries.
    // Element type name follows the file's *Transport convention — confirm against the transport DTOs.
    private static IReadOnlyList<OfflineKitComponentStatus> MapOfflineComponents(List<OfflineKitComponentTransport>? transports)
    {
        if (transports is null || transports.Count == 0)
        {
            return Array.Empty<OfflineKitComponentStatus>();
        }

        var list = new List<OfflineKitComponentStatus>();
        foreach (var transport in transports)
        {
            if (transport is null || string.IsNullOrWhiteSpace(transport.Name))
            {
                continue;
            }

            list.Add(new OfflineKitComponentStatus(
                transport.Name.Trim(),
                NormalizeOptionalString(transport.Version),
                NormalizeSha(transport.Digest),
                transport.CapturedAt?.ToUniversalTime(),
                transport.SizeBytes));
        }

        return list.Count == 0 ? Array.Empty<OfflineKitComponentStatus>() : list;
    }

    // Strips an optional "sha256:" prefix and lower-cases the hex digest; null for blank input.
    private static string? NormalizeSha(string?
digest)
    {
        if (string.IsNullOrWhiteSpace(digest))
        {
            return null;
        }

        var value = digest.Trim();
        if (value.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
        {
            value = value.Substring("sha256:".Length);
        }

        return value.ToLowerInvariant();
    }

    // Metadata payload serialized into the multipart offline-kit import request.
    private sealed class OfflineKitImportMetadataPayload
    {
        public string? BundleId { get; set; }

        public string BundleSha256 { get; set; } = string.Empty;

        public long BundleSize { get; set; }

        public DateTimeOffset? CapturedAt { get; set; }

        public string? Channel { get; set; }

        public string? Kind { get; set; }

        public bool? IsDelta { get; set; }

        public string? BaseBundleId { get; set; }

        public string? ManifestSha256 { get; set; }

        public long? ManifestSize { get; set; }
    }

    // Trims, de-duplicates (ordinal), and preserves the first-seen order of image references.
    // A null input yields an empty list rather than throwing.
    private static List<string> NormalizeImages(IReadOnlyList<string> images)
    {
        var normalized = new List<string>();
        if (images is null)
        {
            return normalized;
        }

        var seen = new HashSet<string>(StringComparer.Ordinal);
        foreach (var entry in images)
        {
            if (string.IsNullOrWhiteSpace(entry))
            {
                continue;
            }

            var trimmed = entry.Trim();
            if (seen.Add(trimmed))
            {
                normalized.Add(trimmed);
            }
        }

        return normalized;
    }

    // Returns the non-blank reasons, trimmed; an empty array when none remain.
    private static IReadOnlyList<string> ExtractReasons(List<string>? reasons)
    {
        if (reasons is null || reasons.Count == 0)
        {
            return Array.Empty<string>();
        }

        var list = new List<string>();
        foreach (var reason in reasons)
        {
            if (!string.IsNullOrWhiteSpace(reason))
            {
                list.Add(reason.Trim());
            }
        }

        return list.Count == 0 ? Array.Empty<string>() : list;
    }

    // Converts JSON extension data into a case-insensitive metadata dictionary,
    // dropping entries whose values convert to null.
    private static IReadOnlyDictionary<string, object?> ExtractExtensionMetadata(Dictionary<string, JsonElement>? extensionData)
    {
        if (extensionData is null || extensionData.Count == 0)
        {
            return EmptyMetadata;
        }

        var metadata = new Dictionary<string, object?>(StringComparer.OrdinalIgnoreCase);
        foreach (var kvp in extensionData)
        {
            var value = ConvertJsonElementToObject(kvp.Value);
            if (value is not null)
            {
                metadata[kvp.Key] = value;
            }
        }

        if (metadata.Count == 0)
        {
            return EmptyMetadata;
        }

        return new ReadOnlyDictionary<string, object?>(metadata);
    }

    // Best-effort CLR mapping of a JsonElement: strings/bools/integers/doubles become
    // native values, null/undefined become null, everything else keeps its raw JSON text.
    private static object? ConvertJsonElementToObject(JsonElement element)
    {
        return element.ValueKind switch
        {
            JsonValueKind.String => element.GetString(),
            JsonValueKind.True => true,
            JsonValueKind.False => false,
            JsonValueKind.Number when element.TryGetInt64(out var integer) => integer,
            JsonValueKind.Number when element.TryGetDouble(out var @double) => @double,
            JsonValueKind.Null or JsonValueKind.Undefined => null,
            _ => element.GetRawText()
        };
    }

    // Trims a string, mapping blank input to null.
    private static string? NormalizeOptionalString(string? value)
    {
        return string.IsNullOrWhiteSpace(value) ? null : value.Trim();
    }

    // Builds a request message, normalizing relative URIs so they combine cleanly
    // with the client's base address (a leading '/' would discard its path segment).
    private HttpRequestMessage CreateRequest(HttpMethod method, string relativeUri)
    {
        if (!Uri.TryCreate(relativeUri, UriKind.RelativeOrAbsolute, out var requestUri))
        {
            throw new InvalidOperationException($"Invalid request URI '{relativeUri}'.");
        }

        if (!requestUri.IsAbsoluteUri)
        {
            requestUri = new Uri(relativeUri.TrimStart('/'), UriKind.Relative);
        }

        return new HttpRequestMessage(method, requestUri);
    }

    // Attaches a bearer token when one can be resolved; anonymous otherwise.
    private async Task AuthorizeRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken)
    {
        var token = await ResolveAccessTokenAsync(cancellationToken).ConfigureAwait(false);
        if (!string.IsNullOrWhiteSpace(token))
        {
            request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token);
        }
    }

    // Builds the operator reason/ticket token parameters required for orch:operate scopes.
    private IReadOnlyDictionary<string, string>? ResolveOperatorMetadataIfNeeded(string?
scope)
    {
        if (string.IsNullOrWhiteSpace(scope) || !scope.Contains("orch:operate", StringComparison.OrdinalIgnoreCase))
        {
            return null;
        }

        var reason = _options.Authority.OperatorReason?.Trim();
        var ticket = _options.Authority.OperatorTicket?.Trim();

        if (string.IsNullOrWhiteSpace(reason) || string.IsNullOrWhiteSpace(ticket))
        {
            throw new InvalidOperationException("Authority.OperatorReason and Authority.OperatorTicket must be configured when requesting orch:operate tokens. Set STELLAOPS_ORCH_REASON and STELLAOPS_ORCH_TICKET or the corresponding configuration values.");
        }

        return new Dictionary<string, string>(StringComparer.Ordinal)
        {
            [OperatorReasonParameterName] = reason,
            [OperatorTicketParameterName] = ticket
        };
    }

    /// <summary>
    /// Resolves an access token: static API key first, then the in-memory cache, then the
    /// token client's persisted cache, and finally a fresh password or client-credentials
    /// grant. Returns null when no authority is configured.
    /// </summary>
    private async Task<string?> ResolveAccessTokenAsync(CancellationToken cancellationToken)
    {
        if (!string.IsNullOrWhiteSpace(_options.ApiKey))
        {
            return _options.ApiKey;
        }

        if (_tokenClient is null || string.IsNullOrWhiteSpace(_options.Authority.Url))
        {
            return null;
        }

        var now = DateTimeOffset.UtcNow;

        lock (_tokenSync)
        {
            // Refresh slightly before expiry (TokenRefreshSkew) to avoid races at the boundary.
            if (!string.IsNullOrEmpty(_cachedAccessToken) && now < _cachedAccessTokenExpiresAt - TokenRefreshSkew)
            {
                return _cachedAccessToken;
            }
        }

        var cacheKey = AuthorityTokenUtilities.BuildCacheKey(_options);
        var cachedEntry = await _tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false);
        if (cachedEntry is not null && now < cachedEntry.ExpiresAtUtc - TokenRefreshSkew)
        {
            lock (_tokenSync)
            {
                _cachedAccessToken = cachedEntry.AccessToken;
                _cachedAccessTokenExpiresAt = cachedEntry.ExpiresAtUtc;
                return _cachedAccessToken;
            }
        }

        var scope = AuthorityTokenUtilities.ResolveScope(_options);
        var operatorMetadata = ResolveOperatorMetadataIfNeeded(scope);

        StellaOpsTokenResult token;
        if (!string.IsNullOrWhiteSpace(_options.Authority.Username))
        {
            if (string.IsNullOrWhiteSpace(_options.Authority.Password))
            {
                throw new InvalidOperationException("Authority password must be configured when username is provided.");
            }

            token = await _tokenClient.RequestPasswordTokenAsync(
                _options.Authority.Username,
                _options.Authority.Password!,
                scope,
                null,
                cancellationToken).ConfigureAwait(false);
        }
        else
        {
            token = await _tokenClient.RequestClientCredentialsTokenAsync(scope, operatorMetadata, cancellationToken).ConfigureAwait(false);
        }

        await _tokenClient.CacheTokenAsync(cacheKey, token.ToCacheEntry(), cancellationToken).ConfigureAwait(false);

        lock (_tokenSync)
        {
            _cachedAccessToken = token.AccessToken;
            _cachedAccessTokenExpiresAt = token.ExpiresAtUtc;
            return _cachedAccessToken;
        }
    }

    // Extracts a human-readable message (and the parsed JSON payload, when any) from an
    // Excititor response; falls back to "HTTP <code>" or the raw body text.
    private async Task<(string Message, JsonElement? Payload)> ExtractExcititorResponseAsync(HttpResponseMessage response, CancellationToken cancellationToken)
    {
        if (response.Content is null || response.Content.Headers.ContentLength is 0)
        {
            return ($"HTTP {(int)response.StatusCode}", null);
        }

        try
        {
            await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
            // NOTE(review): stream.Length throws on non-seekable streams — presumably the
            // content here is always buffered; confirm before enabling response streaming.
            if (stream is null || stream.Length == 0)
            {
                return ($"HTTP {(int)response.StatusCode}", null);
            }

            using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false);
            // Clone so the element survives the JsonDocument's disposal.
            var root = document.RootElement.Clone();
            string? message = null;
            if (root.ValueKind == JsonValueKind.Object)
            {
                message = GetStringProperty(root, "message") ?? GetStringProperty(root, "status");
            }

            if (string.IsNullOrWhiteSpace(message))
            {
                message = root.ValueKind == JsonValueKind.Object || root.ValueKind == JsonValueKind.Array
                    ? root.ToString()
                    : root.GetRawText();
            }

            return (message ?? $"HTTP {(int)response.StatusCode}", root);
        }
        catch (JsonException)
        {
            var text = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            return (string.IsNullOrWhiteSpace(text) ? $"HTTP {(int)response.StatusCode}" : text.Trim(), null);
        }
    }

    // Exact-case lookup first, then a linear case-insensitive scan over the object's properties.
    private static bool TryGetPropertyCaseInsensitive(JsonElement element, string propertyName, out JsonElement property)
    {
        if (element.ValueKind == JsonValueKind.Object && element.TryGetProperty(propertyName, out property))
        {
            return true;
        }

        if (element.ValueKind == JsonValueKind.Object)
        {
            foreach (var candidate in element.EnumerateObject())
            {
                if (string.Equals(candidate.Name, propertyName, StringComparison.OrdinalIgnoreCase))
                {
                    property = candidate.Value;
                    return true;
                }
            }
        }

        property = default;
        return false;
    }

    // Returns a string-valued property (case-insensitive name match) or null.
    private static string? GetStringProperty(JsonElement element, string propertyName)
    {
        if (TryGetPropertyCaseInsensitive(element, propertyName, out var property))
        {
            if (property.ValueKind == JsonValueKind.String)
            {
                return property.GetString();
            }
        }

        return null;
    }

    // Reads a boolean property, also accepting "true"/"false" strings; defaultValue otherwise.
    private static bool GetBooleanProperty(JsonElement element, string propertyName, bool defaultValue)
    {
        if (TryGetPropertyCaseInsensitive(element, propertyName, out var property))
        {
            return property.ValueKind switch
            {
                JsonValueKind.True => true,
                JsonValueKind.False => false,
                JsonValueKind.String when bool.TryParse(property.GetString(), out var parsed) => parsed,
                _ => defaultValue
            };
        }

        return defaultValue;
    }

    // Parses an ISO-8601 string property into a UTC DateTimeOffset; null when absent/invalid.
    private static DateTimeOffset? GetDateTimeOffsetProperty(JsonElement element, string propertyName)
    {
        if (TryGetPropertyCaseInsensitive(element, propertyName, out var property) && property.ValueKind == JsonValueKind.String)
        {
            if (DateTimeOffset.TryParse(property.GetString(), CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed))
            {
                return parsed.ToUniversalTime();
            }
        }

        return null;
    }

    // Serializes an arbitrary environment value to a JsonElement; passes JsonElements through untouched.
    private static JsonElement SerializeEnvironmentValue(object?
value)
    {
        if (value is JsonElement element)
        {
            return element;
        }

        return JsonSerializer.SerializeToElement(value, SerializerOptions);
    }

    // Pulls the optional "code" extension out of an RFC 7807 problem document,
    // handling both string and JsonElement representations.
    private static string? ExtractProblemErrorCode(ProblemDocument? problem)
    {
        if (problem?.Extensions is null || problem.Extensions.Count == 0)
        {
            return null;
        }

        if (problem.Extensions.TryGetValue("code", out var value))
        {
            switch (value)
            {
                case string code when !string.IsNullOrWhiteSpace(code):
                    return code;
                case JsonElement element when element.ValueKind == JsonValueKind.String:
                    var text = element.GetString();
                    return string.IsNullOrWhiteSpace(text) ? null : text;
            }
        }

        return null;
    }

    // Builds the query string for the policy findings endpoint. Statuses and severities
    // are joined comma-separated into single parameters; sbomId repeats per value.
    // Returns "" when no parameter applies.
    private static string BuildPolicyFindingsQueryString(PolicyFindingsQuery query)
    {
        var parameters = new List<string>();

        if (query.SbomIds is not null)
        {
            foreach (var sbom in query.SbomIds)
            {
                if (!string.IsNullOrWhiteSpace(sbom))
                {
                    parameters.Add($"sbomId={Uri.EscapeDataString(sbom)}");
                }
            }
        }

        if (query.Statuses is not null && query.Statuses.Count > 0)
        {
            var joined = string.Join(",", query.Statuses.Where(s => !string.IsNullOrWhiteSpace(s)));
            if (!string.IsNullOrWhiteSpace(joined))
            {
                parameters.Add($"status={Uri.EscapeDataString(joined)}");
            }
        }

        if (query.Severities is not null && query.Severities.Count > 0)
        {
            var joined = string.Join(",", query.Severities.Where(s => !string.IsNullOrWhiteSpace(s)));
            if (!string.IsNullOrWhiteSpace(joined))
            {
                parameters.Add($"severity={Uri.EscapeDataString(joined)}");
            }
        }

        if (!string.IsNullOrWhiteSpace(query.Cursor))
        {
            parameters.Add($"cursor={Uri.EscapeDataString(query.Cursor)}");
        }

        if (query.Page.HasValue)
        {
            parameters.Add($"page={query.Page.Value}");
        }

        if (query.PageSize.HasValue)
        {
            parameters.Add($"pageSize={query.PageSize.Value}");
        }

        if (query.Since.HasValue)
        {
            // Round-trip ("o") format keeps the timestamp culture-invariant and sortable.
            var value = query.Since.Value.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture);
            parameters.Add($"since={Uri.EscapeDataString(value)}");
        }

        if (parameters.Count == 0)
        {
            return string.Empty;
        }

        return "?" + string.Join("&", parameters);
    }

    // Maps a findings response page, dropping null items and normalizing a blank cursor to null.
    private static PolicyFindingsPage MapPolicyFindings(PolicyFindingsResponseDocument document)
    {
        var items = document.Items is null
            ? new List<PolicyFindingDocument>(capacity: 0)
            : document.Items
                .Where(item => item is not null)
                .Select(item => MapPolicyFinding(item!))
                .ToList();

        var nextCursor = string.IsNullOrWhiteSpace(document.NextCursor) ? null : document.NextCursor;
        var view = new ReadOnlyCollection<PolicyFindingDocument>(items);
        return new PolicyFindingsPage(view, nextCursor, document.TotalCount);
    }

    /// <summary>
    /// Maps a single policy finding, substituting "unknown"/"(unknown)" placeholders for
    /// missing status/severity/sbomId; findingId is mandatory.
    /// </summary>
    /// <exception cref="InvalidOperationException">The response row has no findingId.</exception>
    private static PolicyFindingDocument MapPolicyFinding(PolicyFindingDocumentDocument document)
    {
        var findingId = document.FindingId;
        if (string.IsNullOrWhiteSpace(findingId))
        {
            throw new InvalidOperationException("Policy finding response missing findingId.");
        }

        var status = string.IsNullOrWhiteSpace(document.Status) ? "unknown" : document.Status!;
        var severityNormalized = document.Severity?.Normalized;
        if (string.IsNullOrWhiteSpace(severityNormalized))
        {
            severityNormalized = "unknown";
        }

        var severity = new PolicyFindingSeverity(severityNormalized!, document.Severity?.Score);

        var sbomId = string.IsNullOrWhiteSpace(document.SbomId) ? "(unknown)" : document.SbomId!;

        IReadOnlyList<string> advisoryIds;
        if (document.AdvisoryIds is null || document.AdvisoryIds.Count == 0)
        {
            advisoryIds = Array.Empty<string>();
        }
        else
        {
            advisoryIds = document.AdvisoryIds
                .Where(id => !string.IsNullOrWhiteSpace(id))
                .ToArray();
        }

        // Only materialize VEX metadata when at least one field carries a value.
        PolicyFindingVexMetadata? vex = null;
        if (document.Vex is not null)
        {
            if (!string.IsNullOrWhiteSpace(document.Vex.WinningStatementId)
                || !string.IsNullOrWhiteSpace(document.Vex.Source)
                || !string.IsNullOrWhiteSpace(document.Vex.Status))
            {
                vex = new PolicyFindingVexMetadata(
                    string.IsNullOrWhiteSpace(document.Vex.WinningStatementId) ? null : document.Vex.WinningStatementId,
                    string.IsNullOrWhiteSpace(document.Vex.Source) ? null : document.Vex.Source,
                    string.IsNullOrWhiteSpace(document.Vex.Status) ? null : document.Vex.Status);
            }
        }

        var updatedAt = document.UpdatedAt ?? DateTimeOffset.MinValue;

        return new PolicyFindingDocument(
            findingId,
            status,
            severity,
            sbomId,
            advisoryIds,
            vex,
            document.PolicyVersion ?? 0,
            updatedAt,
            string.IsNullOrWhiteSpace(document.RunId) ? null : document.RunId);
    }

    // Maps an explain response: steps plus non-blank sealed-mode hints; findingId is mandatory.
    private static PolicyFindingExplainResult MapPolicyFindingExplain(PolicyFindingExplainResponseDocument document)
    {
        var findingId = document.FindingId;
        if (string.IsNullOrWhiteSpace(findingId))
        {
            throw new InvalidOperationException("Policy finding explain response missing findingId.");
        }

        var steps = document.Steps is null
            ? new List<PolicyFindingExplainStep>(capacity: 0)
            : document.Steps
                .Where(step => step is not null)
                .Select(step => MapPolicyFindingExplainStep(step!))
                .ToList();

        var hints = document.SealedHints is null
            ? new List<PolicyFindingExplainHint>(capacity: 0)
            : document.SealedHints
                .Where(hint => hint is not null && !string.IsNullOrWhiteSpace(hint!.Message))
                .Select(hint => new PolicyFindingExplainHint(hint!.Message!.Trim()))
                .ToList();

        return new PolicyFindingExplainResult(
            findingId,
            document.PolicyVersion ?? 0,
            new ReadOnlyCollection<PolicyFindingExplainStep>(steps),
            new ReadOnlyCollection<PolicyFindingExplainHint>(hints));
    }

    // Maps one explain step; inputs always present (possibly empty), evidence only when supplied.
    private static PolicyFindingExplainStep MapPolicyFindingExplainStep(PolicyFindingExplainStepDocument document)
    {
        var rule = string.IsNullOrWhiteSpace(document.Rule) ? "(unknown)" : document.Rule!;
        var status = string.IsNullOrWhiteSpace(document.Status) ? null : document.Status;
        var action = string.IsNullOrWhiteSpace(document.Action) ? null : document.Action;

        IReadOnlyDictionary<string, string> inputs = document.Inputs is null
            ?
new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(0, StringComparer.Ordinal))
            : new ReadOnlyDictionary<string, string>(document.Inputs
                .Where(kvp => !string.IsNullOrWhiteSpace(kvp.Key))
                .ToDictionary(
                    kvp => kvp.Key,
                    kvp => ConvertJsonElementToString(kvp.Value),
                    StringComparer.Ordinal));

        IReadOnlyDictionary<string, string>? evidence = null;
        if (document.Evidence is not null && document.Evidence.Count > 0)
        {
            var evidenceDict = document.Evidence
                .Where(kvp => !string.IsNullOrWhiteSpace(kvp.Key))
                .ToDictionary(
                    kvp => kvp.Key,
                    kvp => ConvertJsonElementToString(kvp.Value),
                    StringComparer.Ordinal);

            evidence = new ReadOnlyDictionary<string, string>(evidenceDict);
        }

        return new PolicyFindingExplainStep(
            rule,
            status,
            action,
            document.Score,
            inputs,
            evidence);
    }

    // Flattens a JsonElement to display text: scalars become invariant-culture strings,
    // arrays are comma-joined recursively, objects keep their raw JSON.
    private static string ConvertJsonElementToString(JsonElement element)
    {
        return element.ValueKind switch
        {
            JsonValueKind.String => element.GetString() ?? string.Empty,
            JsonValueKind.Number => element.TryGetInt64(out var longValue)
                ? longValue.ToString(CultureInfo.InvariantCulture)
                : element.GetDouble().ToString(CultureInfo.InvariantCulture),
            JsonValueKind.True => "true",
            JsonValueKind.False => "false",
            JsonValueKind.Null => "null",
            JsonValueKind.Array => string.Join(", ", element.EnumerateArray().Select(ConvertJsonElementToString)),
            JsonValueKind.Object => element.GetRawText(),
            _ => element.GetRawText()
        };
    }

    /// <summary>
    /// Maps a policy activation response. Revision, its pack id, and its version are
    /// mandatory; approvals missing an actor or timestamp are skipped.
    /// </summary>
    /// <exception cref="InvalidOperationException">Mandatory revision data is missing.</exception>
    private static PolicyActivationResult MapPolicyActivation(PolicyActivationResponseDocument document)
    {
        if (document.Revision is null)
        {
            throw new InvalidOperationException("Policy activation response missing revision data.");
        }

        var revisionDocument = document.Revision;
        if (string.IsNullOrWhiteSpace(revisionDocument.PackId))
        {
            throw new InvalidOperationException("Policy activation revision missing policy identifier.");
        }

        if (!revisionDocument.Version.HasValue)
        {
            throw new InvalidOperationException("Policy activation revision missing version number.");
        }

        var approvals = new List<PolicyActivationApproval>();
        if (revisionDocument.Approvals is not null)
        {
            foreach (var approval in revisionDocument.Approvals)
            {
                if (approval is null || string.IsNullOrWhiteSpace(approval.ActorId) || !approval.ApprovedAt.HasValue)
                {
                    continue;
                }

                approvals.Add(new PolicyActivationApproval(
                    approval.ActorId,
                    approval.ApprovedAt.Value.ToUniversalTime(),
                    NormalizeOptionalString(approval.Comment)));
            }
        }

        var revision = new PolicyActivationRevision(
            revisionDocument.PackId,
            revisionDocument.Version.Value,
            NormalizeOptionalString(revisionDocument.Status) ?? "unknown",
            revisionDocument.RequiresTwoPersonApproval ?? false,
            revisionDocument.CreatedAt?.ToUniversalTime() ?? DateTimeOffset.MinValue,
            revisionDocument.ActivatedAt?.ToUniversalTime(),
            new ReadOnlyCollection<PolicyActivationApproval>(approvals));

        return new PolicyActivationResult(
            NormalizeOptionalString(document.Status) ?? "unknown",
            revision);
    }

    // Maps a simulation response to severity/rule deltas; the diff summary is mandatory.
    private static PolicySimulationResult MapPolicySimulation(PolicySimulationResponseDocument document)
    {
        var diffDocument = document.Diff ?? throw new InvalidOperationException("Policy simulation response missing diff summary.");

        var severity = diffDocument.BySeverity is null
            ? new Dictionary<string, PolicySimulationSeverityDelta>(0, StringComparer.Ordinal)
            : diffDocument.BySeverity
                .Where(kvp => !string.IsNullOrWhiteSpace(kvp.Key) && kvp.Value is not null)
                .ToDictionary(
                    kvp => kvp.Key,
                    kvp => new PolicySimulationSeverityDelta(kvp.Value!.Up, kvp.Value.Down),
                    StringComparer.Ordinal);

        var severityView = new ReadOnlyDictionary<string, PolicySimulationSeverityDelta>(severity);

        var ruleHits = diffDocument.RuleHits is null
            ? new List<PolicySimulationRuleDelta>()
            : diffDocument.RuleHits
                .Where(hit => hit is not null)
                .Select(hit => new PolicySimulationRuleDelta(
                    hit!.RuleId ?? string.Empty,
                    hit.RuleName ?? string.Empty,
                    hit.Up,
                    hit.Down))
                .ToList();

        var ruleHitsView = ruleHits.AsReadOnly();

        var diff = new PolicySimulationDiff(
            string.IsNullOrWhiteSpace(diffDocument.SchemaVersion) ? null : diffDocument.SchemaVersion,
            diffDocument.Added ?? 0,
            diffDocument.Removed ?? 0,
            diffDocument.Unchanged ?? 0,
            severityView,
            ruleHitsView);

        return new PolicySimulationResult(
            diff,
            string.IsNullOrWhiteSpace(document.ExplainUri) ? null : document.ExplainUri);
    }

    // Guard: all backend calls require a configured base address.
    private void EnsureBackendConfigured()
    {
        if (_httpClient.BaseAddress is null)
        {
            throw new InvalidOperationException("Backend URL is not configured. Provide STELLAOPS_BACKEND_URL or configure appsettings.");
        }
    }

    // Resolves where a scanner artifact is written: explicit path, else the scanner
    // cache directory (created if needed), else the current directory.
    private string ResolveArtifactPath(string outputPath, string channel)
    {
        if (!string.IsNullOrWhiteSpace(outputPath))
        {
            return Path.GetFullPath(outputPath);
        }

        var directory = string.IsNullOrWhiteSpace(_options.ScannerCacheDirectory)
            ? Directory.GetCurrentDirectory()
            : Path.GetFullPath(_options.ScannerCacheDirectory);

        Directory.CreateDirectory(directory);
        var fileName = $"stellaops-scanner-{channel}.tar.gz";
        return Path.Combine(directory, fileName);
    }

    // Convenience wrapper that discards the parsed problem document.
    private async Task<string> CreateFailureMessageAsync(HttpResponseMessage response, CancellationToken cancellationToken)
    {
        var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
        return message;
    }

    /// <summary>
    /// Builds a failure message from a non-success response, enriching it with the RFC 7807
    /// problem title/detail when the body parses as one, or the raw body text otherwise.
    /// Returns the parsed problem document (when any) alongside the message.
    /// </summary>
    private async Task<(string Message, ProblemDocument? Problem)> CreateFailureDetailsAsync(HttpResponseMessage response, CancellationToken cancellationToken)
    {
        var statusCode = (int)response.StatusCode;
        var builder = new StringBuilder();
        builder.Append("Backend request failed with status ");
        builder.Append(statusCode);
        builder.Append(' ');
        builder.Append(response.ReasonPhrase ?? "Unknown");

        ProblemDocument? problem = null;

        if (response.Content is not null && response.Content.Headers.ContentLength is > 0)
        {
            string? raw = null;
            try
            {
                raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
                if (!string.IsNullOrWhiteSpace(raw))
                {
                    problem = JsonSerializer.Deserialize<ProblemDocument>(raw, SerializerOptions);
                }
            }
            catch (JsonException)
            {
                // Non-JSON body; fall through and append the raw text instead.
                problem = null;
            }

            if (problem is not null)
            {
                if (!string.IsNullOrWhiteSpace(problem.Title))
                {
                    builder.AppendLine().Append(problem.Title);
                }

                if (!string.IsNullOrWhiteSpace(problem.Detail))
                {
                    builder.AppendLine().Append(problem.Detail);
                }
            }
            else if (!string.IsNullOrWhiteSpace(raw))
            {
                builder.AppendLine().Append(raw);
            }
        }

        return (builder.ToString(), problem);
    }

    // Returns the first value of the named response header, or null when absent.
    private static string? ExtractHeaderValue(HttpResponseHeaders headers, string name)
    {
        if (headers.TryGetValues(name, out var values))
        {
            return values.FirstOrDefault();
        }

        return null;
    }

    private static string? NormalizeExpectedDigest(string?
digest) + { + if (string.IsNullOrWhiteSpace(digest)) + { + return null; + } + + var trimmed = digest.Trim(); + return trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) + ? trimmed[7..] + : trimmed; + } + + private async Task ValidateDigestAsync(string filePath, string? expectedDigest, CancellationToken cancellationToken) + { + string digestHex; + await using (var stream = File.OpenRead(filePath)) + { + var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); + digestHex = Convert.ToHexString(hash).ToLowerInvariant(); + } + + if (!string.IsNullOrWhiteSpace(expectedDigest)) + { + var normalized = NormalizeDigest(expectedDigest); + if (!normalized.Equals(digestHex, StringComparison.OrdinalIgnoreCase)) + { + File.Delete(filePath); + throw new InvalidOperationException($"Scanner digest mismatch. Expected sha256:{normalized}, calculated sha256:{digestHex}."); + } + } + else + { + _logger.LogWarning("Scanner download missing X-StellaOps-Digest header; relying on computed digest only."); + } + + return digestHex; + } + + private static string NormalizeDigest(string digest) + { + if (digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) + { + return digest[7..]; + } + + return digest; + } + + private static async Task ComputeSha256Async(string filePath, CancellationToken cancellationToken) + { + await using var stream = File.OpenRead(filePath); + var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + private async Task ValidateSignatureAsync(string? 
signatureHeader, string digestHex, bool verbose, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(_options.ScannerSignaturePublicKeyPath)) + { + if (!string.IsNullOrWhiteSpace(signatureHeader)) + { + _logger.LogDebug("Signature header present but no public key configured; skipping validation."); + } + return; + } + + if (string.IsNullOrWhiteSpace(signatureHeader)) + { + throw new InvalidOperationException("Scanner signature missing while a public key is configured."); + } + + var publicKeyPath = Path.GetFullPath(_options.ScannerSignaturePublicKeyPath); + if (!File.Exists(publicKeyPath)) + { + throw new FileNotFoundException("Scanner signature public key not found.", publicKeyPath); + } + + var signatureBytes = Convert.FromBase64String(signatureHeader); + var digestBytes = Convert.FromHexString(digestHex); + + var pem = await File.ReadAllTextAsync(publicKeyPath, cancellationToken).ConfigureAwait(false); + using var rsa = RSA.Create(); + rsa.ImportFromPem(pem); + + var valid = rsa.VerifyHash(digestBytes, signatureBytes, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + if (!valid) + { + throw new InvalidOperationException("Scanner signature validation failed."); + } + + if (verbose) + { + _logger.LogDebug("Scanner signature validated using key {KeyPath}.", publicKeyPath); + } + } + + private void PersistMetadata(string outputPath, string channel, string digestHex, string? 
signatureHeader, HttpResponseMessage response) + { + var metadata = new + { + channel, + digest = $"sha256:{digestHex}", + signature = signatureHeader, + downloadedAt = DateTimeOffset.UtcNow, + source = response.RequestMessage?.RequestUri?.ToString(), + sizeBytes = new FileInfo(outputPath).Length, + headers = new + { + etag = response.Headers.ETag?.Tag, + lastModified = response.Content.Headers.LastModified, + contentType = response.Content.Headers.ContentType?.ToString() + } + }; + + var metadataPath = outputPath + ".metadata.json"; + var json = JsonSerializer.Serialize(metadata, new JsonSerializerOptions + { + WriteIndented = true + }); + + File.WriteAllText(metadataPath, json); + } + + private static TimeSpan GetRetryDelay(HttpResponseMessage response, int attempt) + { + if (response.Headers.TryGetValues("Retry-After", out var retryValues)) + { + var value = retryValues.FirstOrDefault(); + if (!string.IsNullOrWhiteSpace(value)) + { + if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var seconds) && seconds >= 0) + { + return TimeSpan.FromSeconds(Math.Min(seconds, 300)); + } + + if (DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var when)) + { + var delta = when - DateTimeOffset.UtcNow; + if (delta > TimeSpan.Zero) + { + return delta < TimeSpan.FromMinutes(5) ? 
delta : TimeSpan.FromMinutes(5); + } + } + } + } + + var fallbackSeconds = Math.Min(60, Math.Pow(2, attempt)); + return TimeSpan.FromSeconds(fallbackSeconds); + } +} diff --git a/src/Cli/StellaOps.Cli/Services/IBackendOperationsClient.cs b/src/Cli/StellaOps.Cli/Services/IBackendOperationsClient.cs index 1faa13eb..be433597 100644 --- a/src/Cli/StellaOps.Cli/Services/IBackendOperationsClient.cs +++ b/src/Cli/StellaOps.Cli/Services/IBackendOperationsClient.cs @@ -42,4 +42,6 @@ internal interface IBackendOperationsClient Task GetPolicyFindingAsync(string policyId, string findingId, CancellationToken cancellationToken); Task GetPolicyFindingExplainAsync(string policyId, string findingId, string? mode, CancellationToken cancellationToken); + + Task GetEntryTraceAsync(string scanId, CancellationToken cancellationToken); } diff --git a/src/Cli/StellaOps.Cli/Services/Models/EntryTraceResponseModel.cs b/src/Cli/StellaOps.Cli/Services/Models/EntryTraceResponseModel.cs new file mode 100644 index 00000000..05f8d7d4 --- /dev/null +++ b/src/Cli/StellaOps.Cli/Services/Models/EntryTraceResponseModel.cs @@ -0,0 +1,12 @@ +using System; +using System.Collections.Generic; +using StellaOps.Scanner.EntryTrace; + +namespace StellaOps.Cli.Services.Models; + +internal sealed record EntryTraceResponseModel( + string ScanId, + string ImageDigest, + DateTimeOffset GeneratedAt, + EntryTraceGraph Graph, + IReadOnlyList Ndjson); diff --git a/src/Cli/StellaOps.Cli/StellaOps.Cli.csproj b/src/Cli/StellaOps.Cli/StellaOps.Cli.csproj index 04dbd0aa..1fa99450 100644 --- a/src/Cli/StellaOps.Cli/StellaOps.Cli.csproj +++ b/src/Cli/StellaOps.Cli/StellaOps.Cli.csproj @@ -44,6 +44,7 @@ + diff --git a/src/Cli/StellaOps.Cli/TASKS.md b/src/Cli/StellaOps.Cli/TASKS.md index 3f9e8cd6..3588ae64 100644 --- a/src/Cli/StellaOps.Cli/TASKS.md +++ b/src/Cli/StellaOps.Cli/TASKS.md @@ -10,6 +10,7 @@ > 2025-10-27: Added JSON/table Spectre output, integration tests for exit-code handling, CLI metrics, and updated 
quickstart/architecture docs to cover guard workflows. > Docs note (2025-10-26): `docs/modules/cli/guides/cli-reference.md` now describes both commands, exit codes, and offline usage—sync help text once implementation lands. > 2025-10-27: CLI reference now reflects final summary fields/JSON schema, quickstart includes verification/dry-run workflows, and API reference tables list both `sources ingest --dry-run` and `aoc verify`. +> 2025-11-01: Update CLI auth defaults to request `attestor.verify` (and `attestor.read` for list/detail) after Attestor scope split; tokens without new scopes will fail verification calls. ## Policy Engine v2 diff --git a/src/Cli/__Tests/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs b/src/Cli/__Tests/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs index debc4473..d082c3fb 100644 --- a/src/Cli/__Tests/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs +++ b/src/Cli/__Tests/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs @@ -1,6 +1,7 @@ using System; -using System.Collections.Generic; -using System.Collections.ObjectModel; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Collections.ObjectModel; using System.IO; using System.Linq; using System.Net; @@ -25,6 +26,7 @@ using StellaOps.Cli.Telemetry; using StellaOps.Cli.Tests.Testing; using StellaOps.Cryptography; using StellaOps.Cryptography.Kms; +using StellaOps.Scanner.EntryTrace; using Spectre.Console; using Spectre.Console.Testing; @@ -82,11 +84,11 @@ public sealed class CommandHandlersTests } [Fact] - public async Task HandleScannerRunAsync_AutomaticallyUploadsResults() - { - using var tempDir = new TempDirectory(); - var resultsFile = Path.Combine(tempDir.Path, "results", "scan.json"); - var backend = new StubBackendClient(new JobTriggerResult(true, "Accepted", null, null)); + public async Task HandleScannerRunAsync_AutomaticallyUploadsResults() + { + using var tempDir = new TempDirectory(); + var resultsFile = 
Path.Combine(tempDir.Path, "results", "scan.json"); + var backend = new StubBackendClient(new JobTriggerResult(true, "Accepted", null, null)); var metadataFile = Path.Combine(tempDir.Path, "results", "scan-run.json"); var executor = new StubExecutor(new ScannerExecutionResult(0, resultsFile, metadataFile)); var options = new StellaOpsCliOptions @@ -117,13 +119,114 @@ public sealed class CommandHandlersTests finally { Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleAuthLoginAsync_UsesClientCredentialsFlow() - { - var original = Environment.ExitCode; + } + } + + [Fact] + public async Task HandleScanEntryTraceAsync_RendersPlansAndNdjson() + { + var originalExit = Environment.ExitCode; + var console = new TestConsole(); + var originalConsole = AnsiConsole.Console; + + var graph = new EntryTraceGraph( + EntryTraceOutcome.Resolved, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Create(new EntryTracePlan( + ImmutableArray.Create("/usr/bin/python", "app.py"), + ImmutableDictionary.Empty, + "/workspace", + "appuser", + "/usr/bin/python", + EntryTraceTerminalType.Managed, + "python", + 0.95, + ImmutableDictionary.Empty)), + ImmutableArray.Create(new EntryTraceTerminal( + "/usr/bin/python", + EntryTraceTerminalType.Managed, + "python", + 0.95, + ImmutableDictionary.Empty, + "appuser", + "/workspace", + ImmutableArray.Empty))); + + var backend = new StubBackendClient(new JobTriggerResult(true, "Accepted", null, null)) + { + EntryTraceResponse = new EntryTraceResponseModel( + "scan-123", + "sha256:deadbeef", + DateTimeOffset.Parse("2025-11-02T12:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal), + graph, + new[] { "{\"type\":\"terminal\"}" }) + }; + + var provider = BuildServiceProvider(backend); + AnsiConsole.Console = console; + + try + { + await CommandHandlers.HandleScanEntryTraceAsync( + provider, + "scan-123", + includeNdjson: true, + verbose: 
false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + Assert.Equal("scan-123", backend.LastEntryTraceScanId); + + var output = console.Output; + Assert.Contains("scan-123", output, StringComparison.OrdinalIgnoreCase); + Assert.Contains("NDJSON Output", output, StringComparison.OrdinalIgnoreCase); + Assert.Contains("{\"type\":\"terminal\"}", output, StringComparison.Ordinal); + Assert.Contains("/usr/bin/python", output, StringComparison.OrdinalIgnoreCase); + } + finally + { + Environment.ExitCode = originalExit; + AnsiConsole.Console = originalConsole; + } + } + + [Fact] + public async Task HandleScanEntryTraceAsync_WarnsWhenResultMissing() + { + var originalExit = Environment.ExitCode; + var console = new TestConsole(); + var originalConsole = AnsiConsole.Console; + + var backend = new StubBackendClient(new JobTriggerResult(true, "Accepted", null, null)); + var provider = BuildServiceProvider(backend); + AnsiConsole.Console = console; + + try + { + await CommandHandlers.HandleScanEntryTraceAsync( + provider, + "scan-missing", + includeNdjson: false, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(1, Environment.ExitCode); + Assert.Equal("scan-missing", backend.LastEntryTraceScanId); + Assert.Contains("No EntryTrace data", console.Output, StringComparison.OrdinalIgnoreCase); + } + finally + { + Environment.ExitCode = originalExit; + AnsiConsole.Console = originalConsole; + } + } + + [Fact] + public async Task HandleAuthLoginAsync_UsesClientCredentialsFlow() + { + var original = Environment.ExitCode; using var tempDir = new TempDirectory(); try @@ -2327,13 +2430,16 @@ public sealed class CommandHandlersTests null); public (string PolicyId, string FindingId)? LastFindingGet { get; private set; } public PolicyApiException? 
FindingGetException { get; set; } - public PolicyFindingExplainResult ExplainResult { get; set; } = new PolicyFindingExplainResult( - "finding-default", - 1, - new ReadOnlyCollection(Array.Empty()), - new ReadOnlyCollection(Array.Empty())); - public (string PolicyId, string FindingId, string? Mode)? LastFindingExplain { get; private set; } - public PolicyApiException? FindingExplainException { get; set; } + public PolicyFindingExplainResult ExplainResult { get; set; } = new PolicyFindingExplainResult( + "finding-default", + 1, + new ReadOnlyCollection(Array.Empty()), + new ReadOnlyCollection(Array.Empty())); + public (string PolicyId, string FindingId, string? Mode)? LastFindingExplain { get; private set; } + public PolicyApiException? FindingExplainException { get; set; } + public EntryTraceResponseModel? EntryTraceResponse { get; set; } + public Exception? EntryTraceException { get; set; } + public string? LastEntryTraceScanId { get; private set; } public Task DownloadScannerAsync(string channel, string outputPath, bool overwrite, bool verbose, CancellationToken cancellationToken) => throw new NotImplementedException(); @@ -2445,27 +2551,37 @@ public sealed class CommandHandlersTests return Task.FromResult(FindingDocument); } - public Task GetPolicyFindingExplainAsync(string policyId, string findingId, string? mode, CancellationToken cancellationToken) - { - LastFindingExplain = (policyId, findingId, mode); - if (FindingExplainException is not null) - { - throw FindingExplainException; - } - - return Task.FromResult(ExplainResult); - } - - - public Task DownloadOfflineKitAsync(string? bundleId, string destinationDirectory, bool overwrite, bool resume, CancellationToken cancellationToken) + public Task GetPolicyFindingExplainAsync(string policyId, string findingId, string? 
mode, CancellationToken cancellationToken) + { + LastFindingExplain = (policyId, findingId, mode); + if (FindingExplainException is not null) + { + throw FindingExplainException; + } + + return Task.FromResult(ExplainResult); + } + + public Task DownloadOfflineKitAsync(string? bundleId, string destinationDirectory, bool overwrite, bool resume, CancellationToken cancellationToken) + => throw new NotSupportedException(); + + public Task ImportOfflineKitAsync(OfflineKitImportRequest request, CancellationToken cancellationToken) => throw new NotSupportedException(); - public Task ImportOfflineKitAsync(OfflineKitImportRequest request, CancellationToken cancellationToken) - => throw new NotSupportedException(); - - public Task GetOfflineKitStatusAsync(CancellationToken cancellationToken) - => throw new NotSupportedException(); - } + public Task GetOfflineKitStatusAsync(CancellationToken cancellationToken) + => throw new NotSupportedException(); + + public Task GetEntryTraceAsync(string scanId, CancellationToken cancellationToken) + { + LastEntryTraceScanId = scanId; + if (EntryTraceException is not null) + { + throw EntryTraceException; + } + + return Task.FromResult(EntryTraceResponse); + } + } private sealed class StubExecutor : IScannerExecutor { diff --git a/src/Cli/__Tests/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs b/src/Cli/__Tests/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs index 84f78dbf..0bf9a872 100644 --- a/src/Cli/__Tests/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs +++ b/src/Cli/__Tests/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs @@ -1,6 +1,7 @@ using System; -using System.Collections.Generic; -using System.Collections.ObjectModel; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Collections.ObjectModel; using System.Globalization; using System.IO; using System.Net; @@ -17,9 +18,10 @@ using StellaOps.Auth.Abstractions; using StellaOps.Auth.Client; using 
StellaOps.Cli.Configuration; using StellaOps.Cli.Services; -using StellaOps.Cli.Services.Models; -using StellaOps.Cli.Services.Models.Transport; -using StellaOps.Cli.Tests.Testing; +using StellaOps.Cli.Services.Models; +using StellaOps.Cli.Services.Models.Transport; +using StellaOps.Cli.Tests.Testing; +using StellaOps.Scanner.EntryTrace; using System.Linq; namespace StellaOps.Cli.Tests.Services; @@ -170,11 +172,11 @@ public sealed class BackendOperationsClientTests } [Fact] - public async Task UploadScanResultsAsync_RetriesOnRetryAfter() - { - using var temp = new TempDirectory(); - var filePath = Path.Combine(temp.Path, "scan.json"); - await File.WriteAllTextAsync(filePath, "{}"); + public async Task UploadScanResultsAsync_RetriesOnRetryAfter() + { + using var temp = new TempDirectory(); + var filePath = Path.Combine(temp.Path, "scan.json"); + await File.WriteAllTextAsync(filePath, "{}"); var attempts = 0; var handler = new StubHttpMessageHandler( @@ -250,9 +252,103 @@ public sealed class BackendOperationsClientTests var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - await Assert.ThrowsAsync(() => client.UploadScanResultsAsync(filePath, CancellationToken.None)); - Assert.Equal(2, attempts); - } + await Assert.ThrowsAsync(() => client.UploadScanResultsAsync(filePath, CancellationToken.None)); + Assert.Equal(2, attempts); + } + + [Fact] + public async Task GetEntryTraceAsync_ReturnsResponse() + { + var scanId = $"scan-{Guid.NewGuid():n}"; + var generatedAt = new DateTimeOffset(2025, 11, 1, 8, 30, 0, TimeSpan.Zero); + var plan = new EntryTracePlan( + ImmutableArray.Create("/usr/bin/app"), + ImmutableDictionary.Empty, + "/work", + "root", + "/usr/bin/app", + EntryTraceTerminalType.Native, + "go", + 80d, + ImmutableDictionary.Empty); + var terminal = new EntryTraceTerminal( + "/usr/bin/app", + EntryTraceTerminalType.Native, + "go", + 
80d, + ImmutableDictionary.Empty, + "root", + "/work", + ImmutableArray.Empty); + var graph = new EntryTraceGraph( + EntryTraceOutcome.Resolved, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Create(plan), + ImmutableArray.Create(terminal)); + var responseModel = new EntryTraceResponseModel( + scanId, + "sha256:test", + generatedAt, + graph, + EntryTraceNdjsonWriter.Serialize(graph, new EntryTraceNdjsonMetadata(scanId, "sha256:test", generatedAt))); + var json = JsonSerializer.Serialize(responseModel, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + + var handler = new StubHttpMessageHandler((request, _) => + { + var message = new HttpResponseMessage(HttpStatusCode.OK) + { + RequestMessage = request, + Content = new StringContent(json, Encoding.UTF8, "application/json") + }; + return message; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://scanner.example") + }; + + var options = new StellaOpsCliOptions + { + BackendUrl = "https://scanner.example" + }; + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var result = await client.GetEntryTraceAsync(scanId, CancellationToken.None); + + Assert.NotNull(result); + Assert.Equal(responseModel.ScanId, result!.ScanId); + Assert.Equal(responseModel.ImageDigest, result.ImageDigest); + Assert.Equal(responseModel.Graph.Plans.Length, result.Graph.Plans.Length); + Assert.Equal(responseModel.Ndjson.Count, result.Ndjson.Count); + } + + [Fact] + public async Task GetEntryTraceAsync_ReturnsNullWhenNotFound() + { + var handler = new StubHttpMessageHandler((request, _) => new HttpResponseMessage(HttpStatusCode.NotFound) + { + RequestMessage = request + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://scanner.example") + }; + + var options = new StellaOpsCliOptions + { + 
BackendUrl = "https://scanner.example" + }; + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var result = await client.GetEntryTraceAsync("scan-missing", CancellationToken.None); + Assert.Null(result); + } [Fact] public async Task TriggerJobAsync_ReturnsAcceptedResult() @@ -809,13 +905,13 @@ public sealed class BackendOperationsClientTests switch (name) { case "metadata": - MetadataJson = await part.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + MetadataJson = await part.ReadAsStringAsync(cancellationToken); break; case "bundle": - BundlePayload = await part.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); + BundlePayload = await part.ReadAsByteArrayAsync(cancellationToken); break; case "manifest": - ManifestPayload = await part.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); + ManifestPayload = await part.ReadAsByteArrayAsync(cancellationToken); break; } } diff --git a/src/Concelier/StellaOps.Concelier.WebService/Program.cs b/src/Concelier/StellaOps.Concelier.WebService/Program.cs index 09ed085d..5374ce3e 100644 --- a/src/Concelier/StellaOps.Concelier.WebService/Program.cs +++ b/src/Concelier/StellaOps.Concelier.WebService/Program.cs @@ -101,6 +101,7 @@ builder.Services.AddSingleton(); builder.Services.AddSingleton(sp => new ServiceStatus(sp.GetRequiredService())); builder.Services.AddAocGuard(); @@ -209,6 +210,50 @@ if (resolvedAuthority.Enabled && resolvedAuthority.AllowAnonymousFallback) app.MapConcelierMirrorEndpoints(authorityConfigured, enforceAuthority); +app.MapGet("/.well-known/openapi", (OpenApiDiscoveryDocumentProvider provider, HttpContext context) => +{ + var (payload, etag) = provider.GetDocument(); + + if (context.Request.Headers.IfNoneMatch.Count > 0) + { + foreach (var candidate in context.Request.Headers.IfNoneMatch) + { + if (Matches(candidate, etag)) + { + 
context.Response.Headers.ETag = etag; + context.Response.Headers.CacheControl = "public, max-age=300, immutable"; + return Results.StatusCode(StatusCodes.Status304NotModified); + } + } + } + + context.Response.Headers.ETag = etag; + context.Response.Headers.CacheControl = "public, max-age=300, immutable"; + return Results.Text(payload, "application/vnd.oai.openapi+json;version=3.1"); + + static bool Matches(string? candidate, string expected) + { + if (string.IsNullOrWhiteSpace(candidate)) + { + return false; + } + + var trimmed = candidate.Trim(); + if (string.Equals(trimmed, expected, StringComparison.Ordinal)) + { + return true; + } + + if (trimmed.StartsWith("W/", StringComparison.OrdinalIgnoreCase)) + { + var weakValue = trimmed[2..].TrimStart(); + return string.Equals(weakValue, expected, StringComparison.Ordinal); + } + + return false; + } +}).WithName("GetConcelierOpenApiDocument"); + var jsonOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web); jsonOptions.Converters.Add(new JsonStringEnumConverter()); diff --git a/src/Concelier/StellaOps.Concelier.WebService/Services/OpenApiDiscoveryDocumentProvider.cs b/src/Concelier/StellaOps.Concelier.WebService/Services/OpenApiDiscoveryDocumentProvider.cs new file mode 100644 index 00000000..4412886b --- /dev/null +++ b/src/Concelier/StellaOps.Concelier.WebService/Services/OpenApiDiscoveryDocumentProvider.cs @@ -0,0 +1,383 @@ +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Globalization; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.RegularExpressions; +using System.Linq; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Routing; +using Microsoft.AspNetCore.Routing.Patterns; + +namespace StellaOps.Concelier.WebService.Services; + +internal sealed class OpenApiDiscoveryDocumentProvider +{ + private static readonly string[] MethodPreference = + [ + HttpMethods.Get, + 
HttpMethods.Post, + HttpMethods.Put, + HttpMethods.Patch, + HttpMethods.Delete, + HttpMethods.Options, + HttpMethods.Head, + HttpMethods.Trace + ]; + + private readonly EndpointDataSource _endpointDataSource; + private readonly object _syncRoot = new(); + + private string? _cachedDocumentJson; + private string? _cachedEtag; + + public OpenApiDiscoveryDocumentProvider(EndpointDataSource endpointDataSource) + { + _endpointDataSource = endpointDataSource; + } + + public (string Payload, string ETag) GetDocument() + { + lock (_syncRoot) + { + if (_cachedDocumentJson is { } cached && _cachedEtag is { } etag) + { + return (cached, etag); + } + + var document = BuildDocument(); + + var json = JsonSerializer.Serialize( + document, + new JsonSerializerOptions + { + PropertyNamingPolicy = null, + WriteIndented = true + }); + + var bytes = Encoding.UTF8.GetBytes(json); + var hash = Convert.ToHexString(SHA256.HashData(bytes)).ToLowerInvariant(); + var computedEtag = $"\"{hash}\""; + + _cachedDocumentJson = json; + _cachedEtag = computedEtag; + + return (json, computedEtag); + } + } + + private JsonObject BuildDocument() + { + var info = new JsonObject + { + ["title"] = "StellaOps Concelier API", + ["version"] = ResolveAssemblyVersion(), + ["description"] = "Programmatic contract for Concelier advisory ingestion, observation replay, evidence exports, and job orchestration." + }; + + var servers = new JsonArray + { + new JsonObject + { + ["url"] = "/", + ["description"] = "Relative base path (API Gateway rewrites in production)." 
+ } + }; + + var pathGroups = CollectEndpointMetadata(); + var pathsObject = new JsonObject(); + + foreach (var (path, entries) in pathGroups) + { + var pathItem = new JsonObject(); + foreach (var entry in entries) + { + pathItem[entry.Method.ToLowerInvariant()] = BuildOperation(entry); + } + + pathsObject[path] = pathItem; + } + + return new JsonObject + { + ["openapi"] = "3.1.0", + ["info"] = info, + ["servers"] = servers, + ["paths"] = pathsObject, + ["components"] = new JsonObject() // ready for future schemas + }; + } + + private static string ResolveAssemblyVersion() + { + var assembly = typeof(OpenApiDiscoveryDocumentProvider).Assembly; + + var informationalVersion = assembly + .GetCustomAttributes(typeof(System.Reflection.AssemblyInformationalVersionAttribute), inherit: false) + .OfType() + .FirstOrDefault() + ?.InformationalVersion; + + if (!string.IsNullOrWhiteSpace(informationalVersion)) + { + return informationalVersion!; + } + + var version = assembly.GetName().Version; + return version is { } v ? 
$"{v.Major}.{v.Minor}.{v.Build}" : "0.0.0"; + } + + private IReadOnlyDictionary> CollectEndpointMetadata() + { + var endpointEntries = new List(); + + foreach (var endpoint in _endpointDataSource.Endpoints.OfType()) + { + var httpMetadata = endpoint.Metadata.GetMetadata(); + if (httpMetadata is null || httpMetadata.HttpMethods is null) + { + continue; + } + + var normalizedPath = NormalizeRoutePattern(endpoint.RoutePattern); + if (normalizedPath is null) + { + continue; + } + + foreach (var method in httpMetadata.HttpMethods) + { + endpointEntries.Add(new EndpointEntry(normalizedPath, method, endpoint)); + } + } + + var comparer = StringComparer.OrdinalIgnoreCase; + var grouped = endpointEntries + .OrderBy(e => e.Path, comparer) + .ThenBy(e => GetMethodOrder(e.Method)) + .GroupBy(e => e.Path, comparer) + .ToDictionary( + group => group.Key, + group => (IReadOnlyList)group.ToList(), + comparer); + + return grouped; + } + + private static int GetMethodOrder(string method) + { + var index = Array.IndexOf(MethodPreference, method); + return index >= 0 ? 
index : MethodPreference.Length; + } + + private static JsonObject BuildOperation(EndpointEntry entry) + { + var endpoint = entry.Endpoint; + var operationId = BuildOperationId(entry.Method, entry.Path); + var summary = NormalizeSummary(endpoint.DisplayName, entry.Method, entry.Path); + + var operation = new JsonObject + { + ["operationId"] = operationId, + ["summary"] = summary + }; + + var tags = BuildTags(entry.Path); + if (tags is { Count: > 0 }) + { + var tagArray = new JsonArray(tags.Select(tag => JsonValue.Create(tag)!).ToArray()); + operation["tags"] = tagArray; + } + + var parameters = BuildParameters(endpoint.RoutePattern); + if (parameters.Count > 0) + { + operation["parameters"] = new JsonArray(parameters.ToArray()); + } + + if (RequiresBody(entry.Method)) + { + operation["requestBody"] = new JsonObject + { + ["required"] = true, + ["content"] = new JsonObject + { + ["application/json"] = new JsonObject + { + ["schema"] = new JsonObject { ["type"] = "object" } + } + } + }; + } + + operation["responses"] = BuildResponses(entry.Method); + + return operation; + } + + private static JsonObject BuildResponses(string method) + { + var responses = new JsonObject + { + ["200"] = new JsonObject + { + ["description"] = "Request processed successfully." + } + }; + + if (string.Equals(method, HttpMethods.Post, StringComparison.OrdinalIgnoreCase)) + { + responses["202"] = new JsonObject + { + ["description"] = "Accepted for asynchronous processing." + }; + } + + responses["401"] = new JsonObject + { + ["description"] = "Authentication required." + }; + responses["403"] = new JsonObject + { + ["description"] = "Authorization failed for the requested scope." 
+ }; + + return responses; + } + + private static bool RequiresBody(string method) => + string.Equals(method, HttpMethods.Post, StringComparison.OrdinalIgnoreCase) || + string.Equals(method, HttpMethods.Put, StringComparison.OrdinalIgnoreCase) || + string.Equals(method, HttpMethods.Patch, StringComparison.OrdinalIgnoreCase); + + private static List<JsonObject> BuildParameters(RoutePattern pattern) + { + var results = new List<JsonObject>(); + + foreach (var parameter in pattern.Parameters) + { + var schema = new JsonObject + { + ["type"] = "string" + }; + + if (parameter.ParameterKind == RoutePatternParameterKind.CatchAll) + { + schema["description"] = "Catch-all segment"; + } + + var parameterObject = new JsonObject + { + ["name"] = parameter.Name, + ["in"] = "path", + ["required"] = true, + ["schema"] = schema + }; + + results.Add(parameterObject); + } + + return results; + } + + private static IReadOnlyList<string> BuildTags(string path) + { + var segments = path.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (segments.Length == 0) + { + return Array.Empty<string>(); + } + + // Tag by top-level segment (e.g., "concelier", "advisories", "jobs"). + return new[] { CultureInfo.InvariantCulture.TextInfo.ToTitleCase(segments[0]) }; + } + + private static string NormalizeSummary(string? 
displayName, string method, string path) + { + if (string.IsNullOrWhiteSpace(displayName)) + { + return $"{method.ToUpperInvariant()} {path}"; + } + + var summary = displayName!; + if (summary.StartsWith("HTTP:", StringComparison.OrdinalIgnoreCase)) + { + summary = summary[5..].Trim(); + } + + return summary; + } + + private static string BuildOperationId(string method, string path) + { + var builder = new StringBuilder(); + builder.Append(method.ToLowerInvariant()); + builder.Append('_'); + + foreach (var ch in path) + { + if (char.IsLetterOrDigit(ch)) + { + builder.Append(char.ToLowerInvariant(ch)); + } + else if (ch == '{' || ch == '}' || ch == '/' || ch == '-') + { + builder.Append('_'); + } + } + + return Regex.Replace(builder.ToString(), "_{2,}", "_").TrimEnd('_'); + } + + private static string? NormalizeRoutePattern(RoutePattern pattern) + { + if (!string.IsNullOrWhiteSpace(pattern.RawText)) + { + return NormalizeRawPattern(pattern.RawText!); + } + + var segments = new List<string>(); + foreach (var segment in pattern.PathSegments) + { + var segmentBuilder = new StringBuilder(); + foreach (var part in segment.Parts) + { + switch (part) + { + case RoutePatternLiteralPart literal: + segmentBuilder.Append(literal.Content); + break; + case RoutePatternParameterPart parameter: + segmentBuilder.Append('{'); + segmentBuilder.Append(parameter.Name); + segmentBuilder.Append('}'); + break; + } + } + + segments.Add(segmentBuilder.ToString()); + } + + var combined = "/" + string.Join('/', segments); + return NormalizeRawPattern(combined); + } + + private static string NormalizeRawPattern(string raw) + { + var normalized = raw; + if (!normalized.StartsWith('/')) + { + normalized = "/" + normalized; + } + + normalized = normalized.Replace("**", "*", StringComparison.Ordinal); + normalized = Regex.Replace(normalized, @"\{(\*?)([A-Za-z0-9_]+)(:[^}]+)?\}", "{$2}", RegexOptions.Compiled); + normalized = Regex.Replace(normalized, "/{0,}$", string.Empty, RegexOptions.Compiled); + + 
return string.IsNullOrWhiteSpace(normalized) ? "/" : normalized; + } + + private readonly record struct EndpointEntry(string Path, string Method, RouteEndpoint Endpoint); +} diff --git a/src/Concelier/StellaOps.Concelier.WebService/TASKS.md b/src/Concelier/StellaOps.Concelier.WebService/TASKS.md index bec66333..51f0f77c 100644 --- a/src/Concelier/StellaOps.Concelier.WebService/TASKS.md +++ b/src/Concelier/StellaOps.Concelier.WebService/TASKS.md @@ -88,7 +88,7 @@ ## SDKs & OpenAPI (Epic 17) | ID | Status | Owner(s) | Depends on | Notes | |----|--------|----------|------------|-------| -| CONCELIER-WEB-OAS-61-001 `/.well-known/openapi` | TODO | Concelier WebService Guild | OAS-61-001 | Implement discovery endpoint emitting Concelier spec with version metadata and ETag. | +| CONCELIER-WEB-OAS-61-001 `/.well-known/openapi` | DONE (2025-11-02) | Concelier WebService Guild | OAS-61-001 | Implement discovery endpoint emitting Concelier spec with version metadata and ETag. | | CONCELIER-WEB-OAS-61-002 `Error envelope migration` | TODO | Concelier WebService Guild | APIGOV-61-001 | Ensure all API responses use standardized error envelope; update controllers/tests. | | CONCELIER-WEB-OAS-62-001 `Examples expansion` | TODO | Concelier WebService Guild | CONCELIER-OAS-61-002 | Add curated examples for advisory observations/linksets/conflicts; integrate into dev portal. | | CONCELIER-WEB-OAS-63-001 `Deprecation headers` | TODO | Concelier WebService Guild, API Governance Guild | APIGOV-63-001 | Add Sunset/Deprecation headers for retiring endpoints and update documentation/notifications. 
| diff --git a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Program.cs b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Program.cs index 3917ef1b..2051d51c 100644 --- a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Program.cs +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Program.cs @@ -1,41 +1,46 @@ -var builder = WebApplication.CreateBuilder(args); - -// Add services to the container. -// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi -builder.Services.AddOpenApi(); - -var app = builder.Build(); - -// Configure the HTTP request pipeline. -if (app.Environment.IsDevelopment()) -{ - app.MapOpenApi(); -} - -app.UseHttpsRedirection(); - -var summaries = new[] -{ - "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" -}; - -app.MapGet("/weatherforecast", () => -{ - var forecast = Enumerable.Range(1, 5).Select(index => - new WeatherForecast - ( - DateOnly.FromDateTime(DateTime.Now.AddDays(index)), - Random.Shared.Next(-20, 55), - summaries[Random.Shared.Next(summaries.Length)] - )) - .ToArray(); - return forecast; -}) -.WithName("GetWeatherForecast"); - -app.Run(); - -record WeatherForecast(DateOnly Date, int TemperatureC, string? 
Summary) -{ - public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); -} +using Microsoft.AspNetCore.Authorization; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.ServerIntegration; + +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddStellaOpsResourceServerAuthentication( + builder.Configuration, + configure: options => + { + options.RequiredScopes.Clear(); + }); + +builder.Services.AddAuthorization(options => +{ + options.AddObservabilityResourcePolicies(); + options.DefaultPolicy = new AuthorizationPolicyBuilder() + .RequireAuthenticatedUser() + .AddRequirements(new StellaOpsScopeRequirement(new[] { StellaOpsScopes.EvidenceRead })) + .Build(); + options.FallbackPolicy = options.DefaultPolicy; +}); + +builder.Services.AddOpenApi(); + +var app = builder.Build(); + +if (app.Environment.IsDevelopment()) +{ + app.MapOpenApi(); +} + +app.UseHttpsRedirection(); +app.UseAuthentication(); +app.UseAuthorization(); + +app.MapGet("/evidence/{id}", (string id) => Results.Ok(new { id, status = "available" })) + .RequireAuthorization(StellaOpsResourceServerPolicies.EvidenceRead); + +app.MapPost("/evidence", () => Results.Accepted("/evidence", new { status = "queued" })) + .RequireAuthorization(StellaOpsResourceServerPolicies.EvidenceCreate); + +app.MapPost("/evidence/{id}/hold", (string id) => Results.Accepted($"/evidence/{id}/hold", new { id, status = "on-hold" })) + .RequireAuthorization(StellaOpsResourceServerPolicies.EvidenceHold); + +app.Run(); diff --git a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj index 5f453c1c..d9eb6b45 100644 --- a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj +++ 
b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj @@ -1,41 +1,20 @@ - - - - - - - - - net10.0 - enable - enable - preview - true - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + net10.0 + enable + enable + preview + true + + + + + + + + + + + diff --git a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json index 4d566948..4e180bf7 100644 --- a/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json @@ -1,9 +1,20 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - }, - "AllowedHosts": "*" -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "Authority": { + "ResourceServer": { + "Authority": "https://authority.localtest.me", + "Audiences": [ + "api://evidence-locker" + ], + "RequiredTenants": [ + "tenant-default" + ] + } + }, + "AllowedHosts": "*" +} diff --git a/src/Excititor/StellaOps.Excititor.Worker/Program.cs b/src/Excititor/StellaOps.Excititor.Worker/Program.cs index 28517696..a6f62510 100644 --- a/src/Excititor/StellaOps.Excititor.Worker/Program.cs +++ b/src/Excititor/StellaOps.Excititor.Worker/Program.cs @@ -17,6 +17,7 @@ using StellaOps.Excititor.Worker.Scheduling; using StellaOps.Excititor.Worker.Signature; using StellaOps.Excititor.Attestation.Extensions; using StellaOps.Excititor.Attestation.Verification; +using StellaOps.IssuerDirectory.Client; var builder = Host.CreateApplicationBuilder(args); var services = builder.Services; @@ -39,6 +40,15 @@ services.AddOpenVexNormalizer(); services.AddSingleton(); services.AddVexAttestation(); 
services.Configure(configuration.GetSection("Excititor:Attestation:Verification")); +var issuerDirectorySection = configuration.GetSection("Excititor:IssuerDirectory"); +if (issuerDirectorySection.Exists()) +{ + services.AddIssuerDirectoryClient(issuerDirectorySection); +} +else +{ + services.AddIssuerDirectoryClient(configuration); +} services.PostConfigure(options => { // Workers operate in offline-first environments; allow verification to succeed without Rekor. diff --git a/src/Excititor/StellaOps.Excititor.Worker/Signature/VerifyingVexRawDocumentSink.cs b/src/Excititor/StellaOps.Excititor.Worker/Signature/VerifyingVexRawDocumentSink.cs index a32651c7..4e984921 100644 --- a/src/Excititor/StellaOps.Excititor.Worker/Signature/VerifyingVexRawDocumentSink.cs +++ b/src/Excititor/StellaOps.Excititor.Worker/Signature/VerifyingVexRawDocumentSink.cs @@ -1,8 +1,9 @@ -using System.Collections.Immutable; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Storage.Mongo; - -namespace StellaOps.Excititor.Worker.Signature; +using System.Collections.Immutable; +using System.Globalization; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; + +namespace StellaOps.Excititor.Worker.Signature; internal sealed class VerifyingVexRawDocumentSink : IVexRawDocumentSink { @@ -59,11 +60,20 @@ internal sealed class VerifyingVexRawDocumentSink : IVexRawDocumentSink builder["vex.signature.verifiedAt"] = signature.VerifiedAt.Value.ToString("O"); } - if (!string.IsNullOrWhiteSpace(signature.TransparencyLogReference)) - { - builder["vex.signature.transparencyLogReference"] = signature.TransparencyLogReference!; - } - - return builder.ToImmutable(); - } -} + if (!string.IsNullOrWhiteSpace(signature.TransparencyLogReference)) + { + builder["vex.signature.transparencyLogReference"] = signature.TransparencyLogReference!; + } + + if (signature.Trust is not null) + { + builder["vex.signature.trust.weight"] = 
signature.Trust.EffectiveWeight.ToString(CultureInfo.InvariantCulture); + builder["vex.signature.trust.tenantId"] = signature.Trust.TenantId; + builder["vex.signature.trust.issuerId"] = signature.Trust.IssuerId; + builder["vex.signature.trust.tenantOverrideApplied"] = signature.Trust.TenantOverrideApplied ? "true" : "false"; + builder["vex.signature.trust.retrievedAtUtc"] = signature.Trust.RetrievedAtUtc.ToString("O"); + } + + return builder.ToImmutable(); + } +} diff --git a/src/Excititor/StellaOps.Excititor.Worker/Signature/WorkerSignatureVerifier.cs b/src/Excititor/StellaOps.Excititor.Worker/Signature/WorkerSignatureVerifier.cs index 044afcfc..93360102 100644 --- a/src/Excititor/StellaOps.Excititor.Worker/Signature/WorkerSignatureVerifier.cs +++ b/src/Excititor/StellaOps.Excititor.Worker/Signature/WorkerSignatureVerifier.cs @@ -9,12 +9,13 @@ using System.Text.Json.Serialization; using Microsoft.Extensions.Logging; using StellaOps.Aoc; using StellaOps.Excititor.Attestation.Dsse; -using StellaOps.Excititor.Attestation.Models; -using StellaOps.Excititor.Attestation.Verification; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Core.Aoc; - -namespace StellaOps.Excititor.Worker.Signature; +using StellaOps.Excititor.Attestation.Models; +using StellaOps.Excititor.Attestation.Verification; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Core.Aoc; +using StellaOps.IssuerDirectory.Client; + +namespace StellaOps.Excititor.Worker.Signature; /// /// Enforces checksum validation and records signature verification metadata. @@ -26,9 +27,10 @@ internal sealed class WorkerSignatureVerifier : IVexSignatureVerifier "ingestion_signature_verified_total", description: "Counts signature and checksum verification results for Excititor worker ingestion."); - private readonly ILogger _logger; - private readonly IVexAttestationVerifier? 
_attestationVerifier; - private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + private readonly IVexAttestationVerifier? _attestationVerifier; + private readonly TimeProvider _timeProvider; + private readonly IIssuerDirectoryClient? _issuerDirectoryClient; private static readonly JsonSerializerOptions EnvelopeSerializerOptions = new() { @@ -43,15 +45,17 @@ internal sealed class WorkerSignatureVerifier : IVexSignatureVerifier Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }, }; - public WorkerSignatureVerifier( - ILogger logger, - IVexAttestationVerifier? attestationVerifier = null, - TimeProvider? timeProvider = null) - { - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _attestationVerifier = attestationVerifier; - _timeProvider = timeProvider ?? TimeProvider.System; - } + public WorkerSignatureVerifier( + ILogger logger, + IVexAttestationVerifier? attestationVerifier = null, + TimeProvider? timeProvider = null, + IIssuerDirectoryClient? issuerDirectoryClient = null) + { + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _attestationVerifier = attestationVerifier; + _timeProvider = timeProvider ?? TimeProvider.System; + _issuerDirectoryClient = issuerDirectoryClient; + } public async ValueTask VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) { @@ -82,13 +86,17 @@ internal sealed class WorkerSignatureVerifier : IVexSignatureVerifier VexSignatureMetadata? signatureMetadata = null; if (document.Format == VexDocumentFormat.OciAttestation && _attestationVerifier is not null) - { - signatureMetadata = await VerifyAttestationAsync(document, metadata, cancellationToken).ConfigureAwait(false); - } - - signatureMetadata ??= ExtractSignatureMetadata(metadata); - var resultLabel = signatureMetadata is null ? 
"skipped" : "ok"; - RecordVerification(document.ProviderId, metadata, resultLabel); + { + signatureMetadata = await VerifyAttestationAsync(document, metadata, cancellationToken).ConfigureAwait(false); + } + + signatureMetadata ??= ExtractSignatureMetadata(metadata); + if (signatureMetadata is not null) + { + signatureMetadata = await AttachIssuerTrustAsync(signatureMetadata, metadata, cancellationToken).ConfigureAwait(false); + } + var resultLabel = signatureMetadata is null ? "skipped" : "ok"; + RecordVerification(document.ProviderId, metadata, resultLabel); if (resultLabel == "skipped") { @@ -322,11 +330,11 @@ internal sealed class WorkerSignatureVerifier : IVexSignatureVerifier return "sha256:" + Convert.ToHexString(buffer).ToLowerInvariant(); } - private static VexSignatureMetadata? ExtractSignatureMetadata(ImmutableDictionary metadata) - { - if (!metadata.TryGetValue("vex.signature.type", out var type) || string.IsNullOrWhiteSpace(type)) - { - return null; + private static VexSignatureMetadata? 
ExtractSignatureMetadata(ImmutableDictionary metadata) + { + if (!metadata.TryGetValue("vex.signature.type", out var type) || string.IsNullOrWhiteSpace(type)) + { + return null; } metadata.TryGetValue("vex.signature.subject", out var subject); @@ -341,11 +349,11 @@ internal sealed class WorkerSignatureVerifier : IVexSignatureVerifier verifiedAt = parsed; } - return new VexSignatureMetadata(type, subject, issuer, keyId, verifiedAt, tlog); - } - - private static void RecordVerification(string providerId, ImmutableDictionary metadata, string result) - { + return new VexSignatureMetadata(type, subject, issuer, keyId, verifiedAt, tlog); + } + + private static void RecordVerification(string providerId, ImmutableDictionary metadata, string result) + { var tags = new List>(3) { new("source", providerId), @@ -359,6 +367,143 @@ internal sealed class WorkerSignatureVerifier : IVexSignatureVerifier tags.Add(new KeyValuePair("tenant", tenant)); - SignatureVerificationCounter.Add(1, tags.ToArray()); - } -} + SignatureVerificationCounter.Add(1, tags.ToArray()); + } + + private async ValueTask AttachIssuerTrustAsync( + VexSignatureMetadata signature, + ImmutableDictionary metadata, + CancellationToken cancellationToken) + { + if (_issuerDirectoryClient is null) + { + return signature; + } + + var tenantId = ResolveTenantId(metadata); + var issuerId = ResolveIssuerId(signature, metadata); + var keyId = signature.KeyId; + + if (string.IsNullOrWhiteSpace(tenantId) || + string.IsNullOrWhiteSpace(issuerId) || + string.IsNullOrWhiteSpace(keyId)) + { + return signature; + } + + IReadOnlyList keys; + try + { + keys = await _issuerDirectoryClient + .GetIssuerKeysAsync(tenantId, issuerId, includeGlobal: true, cancellationToken) + .ConfigureAwait(false); + } + catch (OperationCanceledException) + { + throw; + } + catch (Exception ex) + { + _logger.LogWarning( + ex, + "Issuer Directory key lookup failed for issuer {IssuerId} (tenant={TenantId}).", + issuerId, + tenantId); + return signature; 
+ } + + var key = keys.FirstOrDefault(k => string.Equals(k.Id, keyId, StringComparison.OrdinalIgnoreCase)); + if (key is null) + { + _logger.LogWarning( + "Issuer Directory has no key {KeyId} for issuer {IssuerId} (tenant={TenantId}).", + keyId, + issuerId, + tenantId); + return signature; + } + + if (!string.Equals(key.Status, "Active", StringComparison.OrdinalIgnoreCase)) + { + _logger.LogWarning( + "Issuer Directory key {KeyId} for issuer {IssuerId} (tenant={TenantId}) is {Status}; skipping trust enrichment.", + keyId, + issuerId, + tenantId, + key.Status); + return signature; + } + + IssuerTrustResponseModel trustResponse; + try + { + trustResponse = await _issuerDirectoryClient + .GetIssuerTrustAsync(tenantId, issuerId, includeGlobal: true, cancellationToken) + .ConfigureAwait(false); + } + catch (OperationCanceledException) + { + throw; + } + catch (Exception ex) + { + _logger.LogWarning( + ex, + "Issuer Directory trust lookup failed for issuer {IssuerId} (tenant={TenantId}).", + issuerId, + tenantId); + return signature; + } + + var trust = new VexSignatureTrustMetadata( + trustResponse.EffectiveWeight, + tenantId, + issuerId, + trustResponse.TenantOverride is not null, + _timeProvider.GetUtcNow()); + + return new VexSignatureMetadata( + signature.Type, + signature.Subject, + signature.Issuer, + signature.KeyId, + signature.VerifiedAt, + signature.TransparencyLogReference, + trust); + } + + private static string? ResolveTenantId(ImmutableDictionary metadata) + { + if (metadata.TryGetValue("tenant", out var tenant) && !string.IsNullOrWhiteSpace(tenant)) + { + return tenant.Trim(); + } + + if (metadata.TryGetValue("tenantId", out var tenantId) && !string.IsNullOrWhiteSpace(tenantId)) + { + return tenantId.Trim(); + } + + return null; + } + + private static string? 
ResolveIssuerId(VexSignatureMetadata signature, ImmutableDictionary metadata) + { + if (!string.IsNullOrWhiteSpace(signature.Issuer)) + { + return signature.Issuer; + } + + if (metadata.TryGetValue("vex.signature.issuer", out var issuer) && !string.IsNullOrWhiteSpace(issuer)) + { + return issuer.Trim(); + } + + if (metadata.TryGetValue("verification.issuer", out var diagIssuer) && !string.IsNullOrWhiteSpace(diagIssuer)) + { + return diagIssuer.Trim(); + } + + return null; + } +} diff --git a/src/Excititor/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj b/src/Excititor/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj index ac03aea8..56200f55 100644 --- a/src/Excititor/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj +++ b/src/Excititor/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj @@ -21,5 +21,6 @@ + - \ No newline at end of file + diff --git a/src/Excititor/StellaOps.Excititor.sln b/src/Excititor/StellaOps.Excititor.sln index 0b34bc11..a00bf2e4 100644 --- a/src/Excititor/StellaOps.Excititor.sln +++ b/src/Excititor/StellaOps.Excititor.sln @@ -99,6 +99,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.WebServ EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Worker.Tests", "__Tests\StellaOps.Excititor.Worker.Tests\StellaOps.Excititor.Worker.Tests.csproj", "{3F51027B-F194-4321-AC7B-E00DA5CD47E3}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.IssuerDirectory.Client", "..\__Libraries\StellaOps.IssuerDirectory.Client\StellaOps.IssuerDirectory.Client.csproj", "{E1558326-7169-467B-BB8C-498ACA5DF579}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -661,6 +663,18 @@ Global {3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Release|x64.Build.0 = Release|Any CPU {3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Release|x86.ActiveCfg = Release|Any CPU 
{3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Release|x86.Build.0 = Release|Any CPU + {E1558326-7169-467B-BB8C-498ACA5DF579}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E1558326-7169-467B-BB8C-498ACA5DF579}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E1558326-7169-467B-BB8C-498ACA5DF579}.Debug|x64.ActiveCfg = Debug|Any CPU + {E1558326-7169-467B-BB8C-498ACA5DF579}.Debug|x64.Build.0 = Debug|Any CPU + {E1558326-7169-467B-BB8C-498ACA5DF579}.Debug|x86.ActiveCfg = Debug|Any CPU + {E1558326-7169-467B-BB8C-498ACA5DF579}.Debug|x86.Build.0 = Debug|Any CPU + {E1558326-7169-467B-BB8C-498ACA5DF579}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E1558326-7169-467B-BB8C-498ACA5DF579}.Release|Any CPU.Build.0 = Release|Any CPU + {E1558326-7169-467B-BB8C-498ACA5DF579}.Release|x64.ActiveCfg = Release|Any CPU + {E1558326-7169-467B-BB8C-498ACA5DF579}.Release|x64.Build.0 = Release|Any CPU + {E1558326-7169-467B-BB8C-498ACA5DF579}.Release|x86.ActiveCfg = Release|Any CPU + {E1558326-7169-467B-BB8C-498ACA5DF579}.Release|x86.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj index f13e5e9f..4742074e 100644 --- a/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj @@ -1,17 +1,18 @@ - - - net10.0 - preview - enable - enable - true - - - - - - - - - - + + + net10.0 + preview + enable + enable + true + + + + + + + + + + + diff --git a/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexClaim.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexClaim.cs index 6ff42310..26baefe4 100644 --- a/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexClaim.cs +++ 
b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexClaim.cs @@ -184,43 +184,74 @@ public sealed record VexClaimDocument public VexSignatureMetadata? Signature { get; } } -public sealed record VexSignatureMetadata -{ - public VexSignatureMetadata( - string type, - string? subject = null, - string? issuer = null, - string? keyId = null, - DateTimeOffset? verifiedAt = null, - string? transparencyLogReference = null) - { - if (string.IsNullOrWhiteSpace(type)) - { - throw new ArgumentException("Signature type must be provided.", nameof(type)); - } - - Type = type.Trim(); - Subject = string.IsNullOrWhiteSpace(subject) ? null : subject.Trim(); - Issuer = string.IsNullOrWhiteSpace(issuer) ? null : issuer.Trim(); - KeyId = string.IsNullOrWhiteSpace(keyId) ? null : keyId.Trim(); - VerifiedAt = verifiedAt; - TransparencyLogReference = string.IsNullOrWhiteSpace(transparencyLogReference) - ? null - : transparencyLogReference.Trim(); - } - - public string Type { get; } - - public string? Subject { get; } - - public string? Issuer { get; } - - public string? KeyId { get; } - - public DateTimeOffset? VerifiedAt { get; } - - public string? TransparencyLogReference { get; } -} +public sealed record VexSignatureMetadata +{ + public VexSignatureMetadata( + string type, + string? subject = null, + string? issuer = null, + string? keyId = null, + DateTimeOffset? verifiedAt = null, + string? transparencyLogReference = null, + VexSignatureTrustMetadata? trust = null) + { + if (string.IsNullOrWhiteSpace(type)) + { + throw new ArgumentException("Signature type must be provided.", nameof(type)); + } + + Type = type.Trim(); + Subject = string.IsNullOrWhiteSpace(subject) ? null : subject.Trim(); + Issuer = string.IsNullOrWhiteSpace(issuer) ? null : issuer.Trim(); + KeyId = string.IsNullOrWhiteSpace(keyId) ? null : keyId.Trim(); + VerifiedAt = verifiedAt; + TransparencyLogReference = string.IsNullOrWhiteSpace(transparencyLogReference) + ? 
null + : transparencyLogReference.Trim(); + Trust = trust; + } + + public string Type { get; } + + public string? Subject { get; } + + public string? Issuer { get; } + + public string? KeyId { get; } + + public DateTimeOffset? VerifiedAt { get; } + + public string? TransparencyLogReference { get; } + + public VexSignatureTrustMetadata? Trust { get; } +} + +public sealed record VexSignatureTrustMetadata +{ + public VexSignatureTrustMetadata( + decimal effectiveWeight, + string tenantId, + string issuerId, + bool tenantOverrideApplied, + DateTimeOffset retrievedAtUtc) + { + EffectiveWeight = effectiveWeight; + TenantId = string.IsNullOrWhiteSpace(tenantId) ? "@unknown" : tenantId.Trim(); + IssuerId = string.IsNullOrWhiteSpace(issuerId) ? "unknown" : issuerId.Trim(); + TenantOverrideApplied = tenantOverrideApplied; + RetrievedAtUtc = retrievedAtUtc.ToUniversalTime(); + } + + public decimal EffectiveWeight { get; } + + public string TenantId { get; } + + public string IssuerId { get; } + + public bool TenantOverrideApplied { get; } + + public DateTimeOffset RetrievedAtUtc { get; } +} public sealed record VexConfidence { diff --git a/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/VexMongoModels.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/VexMongoModels.cs index 4c96c2a8..84286c0e 100644 --- a/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/VexMongoModels.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/VexMongoModels.cs @@ -783,43 +783,76 @@ internal sealed class VexSignatureMetadataDocument public string? Issuer { get; set; } = null; - public string? KeyId { get; set; } - = null; - - public DateTime? VerifiedAt { get; set; } - = null; - - public string? TransparencyLogReference { get; set; } - = null; - - public static VexSignatureMetadataDocument? FromDomain(VexSignatureMetadata? signature) - => signature is null - ? 
null - : new VexSignatureMetadataDocument - { - Type = signature.Type, - Subject = signature.Subject, - Issuer = signature.Issuer, - KeyId = signature.KeyId, - VerifiedAt = signature.VerifiedAt?.UtcDateTime, - TransparencyLogReference = signature.TransparencyLogReference, - }; - - public VexSignatureMetadata ToDomain() - { - var verifiedAt = VerifiedAt.HasValue - ? new DateTimeOffset(DateTime.SpecifyKind(VerifiedAt.Value, DateTimeKind.Utc)) - : (DateTimeOffset?)null; - - return new VexSignatureMetadata( - Type, - Subject, - Issuer, - KeyId, - verifiedAt, - TransparencyLogReference); - } -} + public string? KeyId { get; set; } + = null; + + public DateTime? VerifiedAt { get; set; } + = null; + + public string? TransparencyLogReference { get; set; } + = null; + + public decimal? TrustWeight { get; set; } + = null; + + public string? TrustTenantId { get; set; } + = null; + + public string? TrustIssuerId { get; set; } + = null; + + public bool? TrustTenantOverrideApplied { get; set; } + = null; + + public DateTime? TrustRetrievedAtUtc { get; set; } + = null; + + public static VexSignatureMetadataDocument? FromDomain(VexSignatureMetadata? signature) + => signature is null + ? null + : new VexSignatureMetadataDocument + { + Type = signature.Type, + Subject = signature.Subject, + Issuer = signature.Issuer, + KeyId = signature.KeyId, + VerifiedAt = signature.VerifiedAt?.UtcDateTime, + TransparencyLogReference = signature.TransparencyLogReference, + TrustWeight = signature.Trust?.EffectiveWeight, + TrustTenantId = signature.Trust?.TenantId, + TrustIssuerId = signature.Trust?.IssuerId, + TrustTenantOverrideApplied = signature.Trust?.TenantOverrideApplied, + TrustRetrievedAtUtc = signature.Trust?.RetrievedAtUtc.UtcDateTime + }; + + public VexSignatureMetadata ToDomain() + { + var verifiedAt = VerifiedAt.HasValue + ? new DateTimeOffset(DateTime.SpecifyKind(VerifiedAt.Value, DateTimeKind.Utc)) + : (DateTimeOffset?)null; + + VexSignatureTrustMetadata? 
trust = null; + if (TrustWeight is not null && TrustRetrievedAtUtc is not null) + { + var retrievedOffset = new DateTimeOffset(DateTime.SpecifyKind(TrustRetrievedAtUtc.Value, DateTimeKind.Utc)); + trust = new VexSignatureTrustMetadata( + TrustWeight.Value, + TrustTenantId ?? "@unknown", + TrustIssuerId ?? "unknown", + TrustTenantOverrideApplied ?? false, + retrievedOffset); + } + + return new VexSignatureMetadata( + Type, + Subject, + Issuer, + KeyId, + verifiedAt, + TransparencyLogReference, + trust); + } +} [BsonIgnoreExtraElements] internal sealed class VexClaimDocumentRecord diff --git a/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerTests.cs index 4d63162d..b8909851 100644 --- a/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerTests.cs @@ -19,12 +19,13 @@ using StellaOps.Excititor.Connectors.Abstractions; using StellaOps.Excititor.Core; using StellaOps.Excititor.Core.Aoc; using StellaOps.Excititor.Storage.Mongo; -using StellaOps.Excititor.Worker.Options; -using StellaOps.Excititor.Worker.Scheduling; -using StellaOps.Excititor.Worker.Signature; -using StellaOps.Aoc; -using Xunit; -using System.Runtime.CompilerServices; +using StellaOps.Excititor.Worker.Options; +using StellaOps.Excititor.Worker.Scheduling; +using StellaOps.Excititor.Worker.Signature; +using StellaOps.Aoc; +using Xunit; +using System.Runtime.CompilerServices; +using StellaOps.IssuerDirectory.Client; namespace StellaOps.Excititor.Worker.Tests; @@ -285,11 +286,12 @@ public sealed class DefaultVexProviderRunnerTests .Add("verification.issuer", "issuer-from-verifier") .Add("verification.keyId", "key-from-verifier"); - var attestationVerifier = new StubAttestationVerifier(true, diagnostics); - var signatureVerifier = new WorkerSignatureVerifier( - 
NullLogger.Instance, - attestationVerifier, - time); + var attestationVerifier = new StubAttestationVerifier(true, diagnostics); + var signatureVerifier = new WorkerSignatureVerifier( + NullLogger.Instance, + attestationVerifier, + time, + TestIssuerDirectoryClient.Instance); var connector = TestConnector.WithDocuments("excititor:test", document); var stateRepository = new InMemoryStateRepository(); @@ -465,28 +467,49 @@ public sealed class DefaultVexProviderRunnerTests => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray.Empty, ImmutableDictionary.Empty)); } - private sealed class StubNormalizerRouter : IVexNormalizerRouter - { - private readonly ImmutableArray _claims; - - public StubNormalizerRouter(IEnumerable claims) - { - _claims = claims.ToImmutableArray(); - } - - public int CallCount { get; private set; } - - public ValueTask NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) - { - CallCount++; - return ValueTask.FromResult(new VexClaimBatch(document, _claims, ImmutableDictionary.Empty)); - } - } - - private sealed class NoopSignatureVerifier : IVexSignatureVerifier - { - public ValueTask VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult(null); + private sealed class StubNormalizerRouter : IVexNormalizerRouter + { + private readonly ImmutableArray _claims; + + public StubNormalizerRouter(IEnumerable claims) + { + _claims = claims.ToImmutableArray(); + } + + public int CallCount { get; private set; } + + public ValueTask NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + { + CallCount++; + return ValueTask.FromResult(new VexClaimBatch(document, _claims, ImmutableDictionary.Empty)); + } + } + + private sealed class TestIssuerDirectoryClient : IIssuerDirectoryClient + { + public static TestIssuerDirectoryClient Instance { get; } = new(); + + private static readonly IssuerTrustResponseModel DefaultTrust = new(null, null, 1m); + + public 
ValueTask> GetIssuerKeysAsync( + string tenantId, + string issuerId, + bool includeGlobal, + CancellationToken cancellationToken) + => ValueTask.FromResult>(Array.Empty()); + + public ValueTask GetIssuerTrustAsync( + string tenantId, + string issuerId, + bool includeGlobal, + CancellationToken cancellationToken) + => ValueTask.FromResult(DefaultTrust); + } + + private sealed class NoopSignatureVerifier : IVexSignatureVerifier + { + public ValueTask VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(null); } private sealed class InMemoryStateRepository : IVexConnectorStateRepository diff --git a/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/Signature/WorkerSignatureVerifierTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/Signature/WorkerSignatureVerifierTests.cs index 1261bcdc..41134ca7 100644 --- a/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/Signature/WorkerSignatureVerifierTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/Signature/WorkerSignatureVerifierTests.cs @@ -1,4 +1,5 @@ -using System.Collections.Immutable; +using System; +using System.Collections.Immutable; using System.Security.Cryptography; using System.Text; using System.Text.Json; @@ -6,13 +7,14 @@ using System.Text.Json.Serialization; using FluentAssertions; using Microsoft.Extensions.Logging.Abstractions; using StellaOps.Aoc; -using StellaOps.Excititor.Attestation.Dsse; -using StellaOps.Excititor.Attestation.Models; -using StellaOps.Excititor.Attestation.Verification; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Core.Aoc; -using StellaOps.Excititor.Worker.Signature; -using Xunit; +using StellaOps.Excititor.Attestation.Dsse; +using StellaOps.Excititor.Attestation.Models; +using StellaOps.Excititor.Attestation.Verification; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Core.Aoc; +using StellaOps.Excititor.Worker.Signature; +using StellaOps.IssuerDirectory.Client; 
+using Xunit; namespace StellaOps.Excititor.Worker.Tests.Signature; @@ -41,7 +43,9 @@ public sealed class WorkerSignatureVerifierTests content, metadata); - var verifier = new WorkerSignatureVerifier(NullLogger.Instance); + var verifier = new WorkerSignatureVerifier( + NullLogger.Instance, + issuerDirectoryClient: StubIssuerDirectoryClient.DefaultFor("tenant-a", "issuer-a", "kid")); var result = await verifier.VerifyAsync(document, CancellationToken.None); @@ -67,7 +71,9 @@ public sealed class WorkerSignatureVerifierTests content, metadata); - var verifier = new WorkerSignatureVerifier(NullLogger.Instance); + var verifier = new WorkerSignatureVerifier( + NullLogger.Instance, + issuerDirectoryClient: StubIssuerDirectoryClient.Empty()); var exception = await Assert.ThrowsAsync(() => verifier.VerifyAsync(document, CancellationToken.None).AsTask()); exception.PrimaryErrorCode.Should().Be("ERR_AOC_005"); @@ -79,8 +85,12 @@ public sealed class WorkerSignatureVerifierTests var now = DateTimeOffset.UtcNow; var (document, metadata) = CreateAttestationDocument(now, subject: "export-1", includeRekor: true); - var attestationVerifier = new StubAttestationVerifier(true); - var verifier = new WorkerSignatureVerifier(NullLogger.Instance, attestationVerifier, TimeProvider.System); + var attestationVerifier = new StubAttestationVerifier(true); + var verifier = new WorkerSignatureVerifier( + NullLogger.Instance, + attestationVerifier, + TimeProvider.System, + StubIssuerDirectoryClient.Empty()); var result = await verifier.VerifyAsync(document with { Metadata = metadata }, CancellationToken.None); @@ -96,8 +106,12 @@ public sealed class WorkerSignatureVerifierTests var now = DateTimeOffset.UtcNow; var (document, metadata) = CreateAttestationDocument(now, subject: "export-2", includeRekor: true); - var attestationVerifier = new StubAttestationVerifier(false); - var verifier = new WorkerSignatureVerifier(NullLogger.Instance, attestationVerifier, TimeProvider.System); + var 
attestationVerifier = new StubAttestationVerifier(false); + var verifier = new WorkerSignatureVerifier( + NullLogger.Instance, + attestationVerifier, + TimeProvider.System, + StubIssuerDirectoryClient.Empty()); await Assert.ThrowsAsync(() => verifier.VerifyAsync(document with { Metadata = metadata }, CancellationToken.None).AsTask()); attestationVerifier.Invocations.Should().Be(1); @@ -113,27 +127,64 @@ public sealed class WorkerSignatureVerifierTests .Add("verification.issuer", "issuer-from-attestation") .Add("verification.keyId", "kid-from-attestation"); - var attestationVerifier = new StubAttestationVerifier(true, diagnostics); - var verifier = new WorkerSignatureVerifier( - NullLogger.Instance, - attestationVerifier, - new FixedTimeProvider(now)); + var attestationVerifier = new StubAttestationVerifier(true, diagnostics); + var verifier = new WorkerSignatureVerifier( + NullLogger.Instance, + attestationVerifier, + new FixedTimeProvider(now), + StubIssuerDirectoryClient.DefaultFor("tenant-a", "issuer-from-attestation", "kid-from-attestation")); var result = await verifier.VerifyAsync(document, CancellationToken.None); result.Should().NotBeNull(); result!.Issuer.Should().Be("issuer-from-attestation"); result.KeyId.Should().Be("kid-from-attestation"); - result.TransparencyLogReference.Should().BeNull(); - result.VerifiedAt.Should().Be(now); - attestationVerifier.Invocations.Should().Be(1); - } - - private static string ComputeDigest(ReadOnlySpan payload) - { - Span buffer = stackalloc byte[32]; - return SHA256.TryHashData(payload, buffer, out _) - ? 
"sha256:" + Convert.ToHexString(buffer).ToLowerInvariant() + result.TransparencyLogReference.Should().BeNull(); + result.VerifiedAt.Should().Be(now); + attestationVerifier.Invocations.Should().Be(1); + } + + [Fact] + public async Task VerifyAsync_AttachesIssuerTrustMetadata() + { + var now = DateTimeOffset.UtcNow; + var content = Encoding.UTF8.GetBytes("{\"id\":\"trust\"}"); + var digest = ComputeDigest(content); + var metadata = ImmutableDictionary.Empty + .Add("tenant", "tenant-a") + .Add("vex.signature.type", "cosign") + .Add("vex.signature.issuer", "issuer-a") + .Add("vex.signature.keyId", "key-1") + .Add("vex.signature.verifiedAt", now.ToString("O")); + + var document = new VexRawDocument( + "provider-a", + VexDocumentFormat.Csaf, + new Uri("https://example.org/vex-trust.json"), + now, + digest, + content, + metadata); + + var issuerClient = StubIssuerDirectoryClient.DefaultFor("tenant-a", "issuer-a", "key-1", 0.85m); + var verifier = new WorkerSignatureVerifier( + NullLogger.Instance, + issuerDirectoryClient: issuerClient); + + var result = await verifier.VerifyAsync(document, CancellationToken.None); + + result.Should().NotBeNull(); + result!.Trust.Should().NotBeNull(); + result.Trust!.EffectiveWeight.Should().Be(0.85m); + result.Trust!.TenantId.Should().Be("tenant-a"); + result.Trust!.IssuerId.Should().Be("issuer-a"); + } + + private static string ComputeDigest(ReadOnlySpan payload) + { + Span buffer = stackalloc byte[32]; + return SHA256.TryHashData(payload, buffer, out _) + ? 
"sha256:" + Convert.ToHexString(buffer).ToLowerInvariant() : "sha256:" + Convert.ToHexString(SHA256.HashData(payload.ToArray())).ToLowerInvariant(); } @@ -195,12 +246,12 @@ public sealed class WorkerSignatureVerifierTests return (document, metadataBuilder.ToImmutable()); } - private sealed class StubAttestationVerifier : IVexAttestationVerifier - { - private readonly bool _isValid; - private readonly ImmutableDictionary _diagnostics; - - public StubAttestationVerifier(bool isValid, ImmutableDictionary? diagnostics = null) + private sealed class StubAttestationVerifier : IVexAttestationVerifier + { + private readonly bool _isValid; + private readonly ImmutableDictionary _diagnostics; + + public StubAttestationVerifier(bool isValid, ImmutableDictionary? diagnostics = null) { _isValid = isValid; _diagnostics = diagnostics ?? ImmutableDictionary.Empty; @@ -211,15 +262,73 @@ public sealed class WorkerSignatureVerifierTests public ValueTask VerifyAsync(VexAttestationVerificationRequest request, CancellationToken cancellationToken) { Invocations++; - return ValueTask.FromResult(new VexAttestationVerification(_isValid, _diagnostics)); - } - } - - private sealed class FixedTimeProvider : TimeProvider - { - private readonly DateTimeOffset _utcNow; - - public FixedTimeProvider(DateTimeOffset utcNow) + return ValueTask.FromResult(new VexAttestationVerification(_isValid, _diagnostics)); + } + } + + private sealed class StubIssuerDirectoryClient : IIssuerDirectoryClient + { + private readonly IReadOnlyList _keys; + private readonly IssuerTrustResponseModel _trust; + + private StubIssuerDirectoryClient( + IReadOnlyList keys, + IssuerTrustResponseModel trust) + { + _keys = keys; + _trust = trust; + } + + public static StubIssuerDirectoryClient Empty() + => new(Array.Empty(), new IssuerTrustResponseModel(null, null, 0m)); + + public static StubIssuerDirectoryClient DefaultFor( + string tenantId, + string issuerId, + string keyId, + decimal weight = 1m) + { + var key = new 
IssuerKeyModel( + keyId, + issuerId, + tenantId, + "Ed25519PublicKey", + "Active", + "base64", + Convert.ToBase64String(new byte[32]), + "fingerprint-" + keyId, + null, + null, + null, + null); + + var now = DateTimeOffset.UtcNow; + var overrideModel = new IssuerTrustOverrideModel(weight, "stub", now, "test", now, "test"); + return new StubIssuerDirectoryClient( + new[] { key }, + new IssuerTrustResponseModel(overrideModel, null, weight)); + } + + public ValueTask> GetIssuerKeysAsync( + string tenantId, + string issuerId, + bool includeGlobal, + CancellationToken cancellationToken) + => ValueTask.FromResult(_keys); + + public ValueTask GetIssuerTrustAsync( + string tenantId, + string issuerId, + bool includeGlobal, + CancellationToken cancellationToken) + => ValueTask.FromResult(_trust); + } + + private sealed class FixedTimeProvider : TimeProvider + { + private readonly DateTimeOffset _utcNow; + + public FixedTimeProvider(DateTimeOffset utcNow) { _utcNow = utcNow; } diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs index 3917ef1b..c19d0562 100644 --- a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs @@ -1,41 +1,46 @@ -var builder = WebApplication.CreateBuilder(args); - -// Add services to the container. -// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi -builder.Services.AddOpenApi(); - -var app = builder.Build(); - -// Configure the HTTP request pipeline. 
-if (app.Environment.IsDevelopment()) -{ - app.MapOpenApi(); -} - -app.UseHttpsRedirection(); - -var summaries = new[] -{ - "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" -}; - -app.MapGet("/weatherforecast", () => -{ - var forecast = Enumerable.Range(1, 5).Select(index => - new WeatherForecast - ( - DateOnly.FromDateTime(DateTime.Now.AddDays(index)), - Random.Shared.Next(-20, 55), - summaries[Random.Shared.Next(summaries.Length)] - )) - .ToArray(); - return forecast; -}) -.WithName("GetWeatherForecast"); - -app.Run(); - -record WeatherForecast(DateOnly Date, int TemperatureC, string? Summary) -{ - public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); -} +using Microsoft.AspNetCore.Authorization; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.ServerIntegration; + +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddStellaOpsResourceServerAuthentication( + builder.Configuration, + configure: options => + { + options.RequiredScopes.Clear(); + }); + +builder.Services.AddAuthorization(options => +{ + options.AddObservabilityResourcePolicies(); + options.DefaultPolicy = new AuthorizationPolicyBuilder() + .RequireAuthenticatedUser() + .AddRequirements(new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ExportViewer })) + .Build(); + options.FallbackPolicy = options.DefaultPolicy; +}); + +builder.Services.AddOpenApi(); + +var app = builder.Build(); + +if (app.Environment.IsDevelopment()) +{ + app.MapOpenApi(); +} + +app.UseHttpsRedirection(); +app.UseAuthentication(); +app.UseAuthorization(); + +app.MapGet("/exports", () => Results.Ok(Array.Empty())) + .RequireAuthorization(StellaOpsResourceServerPolicies.ExportViewer); + +app.MapPost("/exports", () => Results.Accepted("/exports", new { status = "scheduled" })) + .RequireAuthorization(StellaOpsResourceServerPolicies.ExportOperator); + +app.MapDelete("/exports/{id}", (string id) => Results.NoContent()) + 
.RequireAuthorization(StellaOpsResourceServerPolicies.ExportAdmin); + +app.Run(); diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj index f13b07bf..a53d5c72 100644 --- a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj @@ -1,41 +1,20 @@ - - - - - - - - - net10.0 - enable - enable - preview - true - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + net10.0 + enable + enable + preview + true + + + + + + + + + + + diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.json b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.json index 4d566948..977ee273 100644 --- a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.json +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.json @@ -1,9 +1,20 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - }, - "AllowedHosts": "*" -} +{ + Logging: { + LogLevel: { + Default: Information, + Microsoft.AspNetCore: Warning + } + }, + Authority: { + ResourceServer: { + Authority: https://authority.localtest.me, + Audiences: [ + api://export-center + ], + RequiredTenants: [ + tenant-default + ] + } + }, + AllowedHosts: * +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.cs new file mode 100644 index 00000000..40d97288 --- /dev/null +++ 
b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerDirectoryServiceTests.cs @@ -0,0 +1,191 @@ +using System.Collections.Concurrent; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Time.Testing; +using StellaOps.IssuerDirectory.Core.Abstractions; +using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Core.Services; +using Xunit; + +namespace StellaOps.IssuerDirectory.Core.Tests.Services; + +public class IssuerDirectoryServiceTests +{ + private readonly FakeIssuerRepository _repository = new(); + private readonly FakeIssuerAuditSink _auditSink = new(); + private readonly FakeTimeProvider _timeProvider = new(DateTimeOffset.Parse("2025-11-01T12:00:00Z")); + private readonly IssuerDirectoryService _service; + + public IssuerDirectoryServiceTests() + { + _service = new IssuerDirectoryService(_repository, _auditSink, _timeProvider, NullLogger.Instance); + } + + [Fact] + public async Task CreateAsync_PersistsIssuerAndAuditEntry() + { + var issuer = await _service.CreateAsync( + tenantId: "tenant-a", + issuerId: "red-hat", + displayName: "Red Hat", + slug: "red-hat", + description: "Vendor", + contact: new IssuerContact("sec@example.com", null, new Uri("https://example.com"), null), + metadata: new IssuerMetadata("org", "publisher", new Uri("https://example.com/cve"), null, new[] { "en" }, null), + endpoints: new[] { new IssuerEndpoint("csaf", new Uri("https://example.com/csaf"), "csaf", false) }, + tags: new[] { "vendor" }, + actor: "tester", + reason: "initial", + cancellationToken: CancellationToken.None); + + var stored = await _repository.GetAsync("tenant-a", "red-hat", CancellationToken.None); + stored.Should().NotBeNull(); + stored!.DisplayName.Should().Be("Red Hat"); + stored.CreatedBy.Should().Be("tester"); + + _auditSink.Entries.Should().ContainSingle(entry => entry.Action == "created" && entry.TenantId == "tenant-a"); + 
issuer.CreatedAtUtc.Should().Be(_timeProvider.GetUtcNow()); + } + + [Fact] + public async Task UpdateAsync_ReplacesMetadataAndRecordsAudit() + { + await CreateSampleAsync(); + _timeProvider.Advance(TimeSpan.FromHours(1)); + + var updated = await _service.UpdateAsync( + tenantId: "tenant-a", + issuerId: "red-hat", + displayName: "Red Hat Security", + description: "Updated vendor", + contact: new IssuerContact("sec@example.com", null, new Uri("https://example.com/security"), null), + metadata: new IssuerMetadata("org", "publisher", new Uri("https://example.com/new"), null, new[] { "en", "de" }, null), + endpoints: new[] { new IssuerEndpoint("csaf", new Uri("https://example.com/csaf"), "csaf", false) }, + tags: new[] { "vendor", "trusted" }, + actor: "editor", + reason: "update", + cancellationToken: CancellationToken.None); + + updated.DisplayName.Should().Be("Red Hat Security"); + updated.Tags.Should().Contain(new[] { "vendor", "trusted" }); + updated.UpdatedBy.Should().Be("editor"); + updated.UpdatedAtUtc.Should().Be(_timeProvider.GetUtcNow()); + + _auditSink.Entries.Should().Contain(entry => entry.Action == "updated"); + } + + [Fact] + public async Task DeleteAsync_RemovesIssuerAndWritesAudit() + { + await CreateSampleAsync(); + + await _service.DeleteAsync("tenant-a", "red-hat", "deleter", "cleanup", CancellationToken.None); + + var stored = await _repository.GetAsync("tenant-a", "red-hat", CancellationToken.None); + stored.Should().BeNull(); + + _auditSink.Entries.Should().Contain(entry => entry.Action == "deleted" && entry.Actor == "deleter"); + } + + [Fact] + public async Task SeedAsync_InsertsOnlyMissingSeeds() + { + var seedRecord = IssuerRecord.Create( + id: "red-hat", + tenantId: IssuerTenants.Global, + displayName: "Red Hat", + slug: "red-hat", + description: null, + contact: new IssuerContact(null, null, null, null), + metadata: new IssuerMetadata(null, null, null, null, Array.Empty(), null), + endpoints: Array.Empty(), + tags: Array.Empty(), + 
timestampUtc: _timeProvider.GetUtcNow(), + actor: "seed", + isSystemSeed: true); + + await _service.SeedAsync(new[] { seedRecord }, CancellationToken.None); + _auditSink.Entries.Should().Contain(entry => entry.Action == "seeded"); + + _auditSink.Clear(); + _timeProvider.Advance(TimeSpan.FromMinutes(10)); + + await _service.SeedAsync(new[] { seedRecord }, CancellationToken.None); + _auditSink.Entries.Should().BeEmpty("existing seeds should not emit duplicate audit entries"); + } + + private async Task CreateSampleAsync() + { + await _service.CreateAsync( + tenantId: "tenant-a", + issuerId: "red-hat", + displayName: "Red Hat", + slug: "red-hat", + description: "Vendor", + contact: new IssuerContact("sec@example.com", null, new Uri("https://example.com"), null), + metadata: new IssuerMetadata("org", "publisher", new Uri("https://example.com/cve"), null, new[] { "en" }, null), + endpoints: new[] { new IssuerEndpoint("csaf", new Uri("https://example.com/csaf"), "csaf", false) }, + tags: new[] { "vendor" }, + actor: "tester", + reason: "initial", + cancellationToken: CancellationToken.None); + + _auditSink.Clear(); + } + + private sealed class FakeIssuerRepository : IIssuerRepository + { + private readonly ConcurrentDictionary<(string Tenant, string Id), IssuerRecord> _store = new(); + + public Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryGetValue((tenantId, issuerId), out var record); + return Task.FromResult(record); + } + + public Task> ListAsync(string tenantId, CancellationToken cancellationToken) + { + var results = _store + .Where(pair => pair.Key.Tenant.Equals(tenantId, StringComparison.Ordinal)) + .Select(pair => pair.Value) + .ToArray(); + return Task.FromResult((IReadOnlyCollection)results); + } + + public Task> ListGlobalAsync(CancellationToken cancellationToken) + { + var results = _store + .Where(pair => pair.Key.Tenant.Equals(IssuerTenants.Global, StringComparison.Ordinal)) + .Select(pair => 
pair.Value) + .ToArray(); + return Task.FromResult((IReadOnlyCollection)results); + } + + public Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken) + { + _store[(record.TenantId, record.Id)] = record; + return Task.CompletedTask; + } + + public Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryRemove((tenantId, issuerId), out _); + return Task.CompletedTask; + } + } + + private sealed class FakeIssuerAuditSink : IIssuerAuditSink + { + private readonly ConcurrentBag _entries = new(); + + public IReadOnlyCollection Entries => _entries.ToArray(); + + public Task WriteAsync(IssuerAuditEntry entry, CancellationToken cancellationToken) + { + _entries.Add(entry); + return Task.CompletedTask; + } + + public void Clear() => _entries.Clear(); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerKeyServiceTests.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerKeyServiceTests.cs new file mode 100644 index 00000000..4fe7a195 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerKeyServiceTests.cs @@ -0,0 +1,198 @@ +using System.Collections.Concurrent; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Time.Testing; +using StellaOps.IssuerDirectory.Core.Abstractions; +using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Core.Services; +using Xunit; + +namespace StellaOps.IssuerDirectory.Core.Tests.Services; + +public class IssuerKeyServiceTests +{ + private readonly FakeIssuerRepository _issuerRepository = new(); + private readonly FakeIssuerKeyRepository _keyRepository = new(); + private readonly FakeIssuerAuditSink _auditSink = new(); + private readonly FakeTimeProvider _timeProvider = new(DateTimeOffset.Parse("2025-11-01T12:00:00Z")); + private 
readonly IssuerKeyService _service; + + public IssuerKeyServiceTests() + { + _service = new IssuerKeyService( + _issuerRepository, + _keyRepository, + _auditSink, + _timeProvider, + NullLogger.Instance); + + var issuer = IssuerRecord.Create( + id: "red-hat", + tenantId: "tenant-a", + displayName: "Red Hat", + slug: "red-hat", + description: null, + contact: new IssuerContact(null, null, null, null), + metadata: new IssuerMetadata(null, null, null, null, Array.Empty(), null), + endpoints: Array.Empty(), + tags: Array.Empty(), + timestampUtc: _timeProvider.GetUtcNow(), + actor: "seed", + isSystemSeed: false); + + _issuerRepository.Add(issuer); + } + + [Fact] + public async Task AddAsync_StoresKeyAndWritesAudit() + { + var material = new IssuerKeyMaterial("base64", Convert.ToBase64String(new byte[32])); + + var record = await _service.AddAsync( + tenantId: "tenant-a", + issuerId: "red-hat", + type: IssuerKeyType.Ed25519PublicKey, + material, + expiresAtUtc: null, + actor: "tester", + reason: "initial", + cancellationToken: CancellationToken.None); + + record.Status.Should().Be(IssuerKeyStatus.Active); + record.Fingerprint.Should().NotBeNullOrWhiteSpace(); + _auditSink.Entries.Should().Contain(entry => entry.Action == "key_created"); + } + + [Fact] + public async Task AddAsync_DuplicateFingerprint_Throws() + { + var material = new IssuerKeyMaterial("base64", Convert.ToBase64String(new byte[32])); + + await _service.AddAsync("tenant-a", "red-hat", IssuerKeyType.Ed25519PublicKey, material, null, "tester", null, CancellationToken.None); + + var action = async () => await _service.AddAsync("tenant-a", "red-hat", IssuerKeyType.Ed25519PublicKey, material, null, "tester", null, CancellationToken.None); + + await action.Should().ThrowAsync(); + } + + [Fact] + public async Task RotateAsync_RetiresOldKeyAndCreatesReplacement() + { + var originalMaterial = new IssuerKeyMaterial("base64", Convert.ToBase64String(new byte[32] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 
17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32 })); + var original = await _service.AddAsync("tenant-a", "red-hat", IssuerKeyType.Ed25519PublicKey, originalMaterial, null, "tester", null, CancellationToken.None); + + var newMaterial = new IssuerKeyMaterial("base64", Convert.ToBase64String(Enumerable.Repeat(99, 32).ToArray())); + var replacement = await _service.RotateAsync("tenant-a", "red-hat", original.Id, IssuerKeyType.Ed25519PublicKey, newMaterial, null, "tester", "rotation", CancellationToken.None); + + replacement.ReplacesKeyId.Should().Be(original.Id); + var retired = await _keyRepository.GetAsync("tenant-a", "red-hat", original.Id, CancellationToken.None); + retired!.Status.Should().Be(IssuerKeyStatus.Retired); + } + + [Fact] + public async Task RevokeAsync_SetsStatusToRevoked() + { + var material = new IssuerKeyMaterial("base64", Convert.ToBase64String(Enumerable.Repeat(77, 32).ToArray())); + var key = await _service.AddAsync("tenant-a", "red-hat", IssuerKeyType.Ed25519PublicKey, material, null, "tester", null, CancellationToken.None); + + await _service.RevokeAsync("tenant-a", "red-hat", key.Id, "tester", "compromised", CancellationToken.None); + + var revoked = await _keyRepository.GetAsync("tenant-a", "red-hat", key.Id, CancellationToken.None); + revoked!.Status.Should().Be(IssuerKeyStatus.Revoked); + _auditSink.Entries.Should().Contain(entry => entry.Action == "key_revoked"); + } + + private sealed class FakeIssuerRepository : IIssuerRepository + { + private readonly ConcurrentDictionary<(string Tenant, string Id), IssuerRecord> _store = new(); + + public void Add(IssuerRecord record) + { + _store[(record.TenantId, record.Id)] = record; + } + + public Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryGetValue((tenantId, issuerId), out var record); + return Task.FromResult(record); + } + + public Task> ListAsync(string tenantId, CancellationToken cancellationToken) + { + throw new 
NotImplementedException(); + } + + public Task> ListGlobalAsync(CancellationToken cancellationToken) + { + throw new NotImplementedException(); + } + + public Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken) + { + _store[(record.TenantId, record.Id)] = record; + return Task.CompletedTask; + } + + public Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryRemove((tenantId, issuerId), out _); + return Task.CompletedTask; + } + } + + private sealed class FakeIssuerKeyRepository : IIssuerKeyRepository + { + private readonly ConcurrentDictionary<(string Tenant, string Issuer, string KeyId), IssuerKeyRecord> _store = new(); + + public Task GetAsync(string tenantId, string issuerId, string keyId, CancellationToken cancellationToken) + { + _store.TryGetValue((tenantId, issuerId, keyId), out var value); + return Task.FromResult(value); + } + + public Task GetByFingerprintAsync(string tenantId, string issuerId, string fingerprint, CancellationToken cancellationToken) + { + var record = _store.Values.FirstOrDefault(key => key.TenantId == tenantId && key.IssuerId == issuerId && key.Fingerprint == fingerprint); + return Task.FromResult(record); + } + + public Task> ListAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + var records = _store + .Where(pair => pair.Key.Tenant == tenantId && pair.Key.Issuer == issuerId) + .Select(pair => pair.Value) + .ToArray(); + + return Task.FromResult((IReadOnlyCollection)records); + } + + public Task> ListGlobalAsync(string issuerId, CancellationToken cancellationToken) + { + var records = _store + .Where(pair => pair.Key.Tenant == IssuerTenants.Global && pair.Key.Issuer == issuerId) + .Select(pair => pair.Value) + .ToArray(); + + return Task.FromResult((IReadOnlyCollection)records); + } + + public Task UpsertAsync(IssuerKeyRecord record, CancellationToken cancellationToken) + { + _store[(record.TenantId, record.IssuerId, record.Id)] = 
record; + return Task.CompletedTask; + } + } + + private sealed class FakeIssuerAuditSink : IIssuerAuditSink + { + private readonly ConcurrentBag _entries = new(); + + public IReadOnlyCollection Entries => _entries.ToArray(); + + public Task WriteAsync(IssuerAuditEntry entry, CancellationToken cancellationToken) + { + _entries.Add(entry); + return Task.CompletedTask; + } + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerTrustServiceTests.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerTrustServiceTests.cs new file mode 100644 index 00000000..61fe1e94 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/Services/IssuerTrustServiceTests.cs @@ -0,0 +1,153 @@ +using System.Collections.Concurrent; +using FluentAssertions; +using Microsoft.Extensions.Time.Testing; +using StellaOps.IssuerDirectory.Core.Abstractions; +using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Core.Services; +using Xunit; + +namespace StellaOps.IssuerDirectory.Core.Tests.Services; + +public class IssuerTrustServiceTests +{ + private readonly FakeIssuerRepository _issuerRepository = new(); + private readonly FakeIssuerTrustRepository _trustRepository = new(); + private readonly FakeIssuerAuditSink _auditSink = new(); + private readonly FakeTimeProvider _timeProvider = new(DateTimeOffset.Parse("2025-11-01T00:00:00Z")); + private readonly IssuerTrustService _service; + + public IssuerTrustServiceTests() + { + _service = new IssuerTrustService(_issuerRepository, _trustRepository, _auditSink, _timeProvider); + + var issuer = IssuerRecord.Create( + id: "issuer-1", + tenantId: "tenant-a", + displayName: "Issuer", + slug: "issuer", + description: null, + contact: new IssuerContact(null, null, null, null), + metadata: new IssuerMetadata(null, null, null, null, Array.Empty(), null), + endpoints: Array.Empty(), + 
tags: Array.Empty(), + timestampUtc: _timeProvider.GetUtcNow(), + actor: "seed", + isSystemSeed: false); + + _issuerRepository.Add(issuer); + _issuerRepository.Add(issuer with { TenantId = IssuerTenants.Global, IsSystemSeed = true }); + } + + [Fact] + public async Task SetAsync_SavesOverrideWithinBounds() + { + var result = await _service.SetAsync("tenant-a", "issuer-1", 4.5m, "reason", "actor", CancellationToken.None); + + result.Weight.Should().Be(4.5m); + result.UpdatedBy.Should().Be("actor"); + + var view = await _service.GetAsync("tenant-a", "issuer-1", includeGlobal: true, CancellationToken.None); + view.EffectiveWeight.Should().Be(4.5m); + _auditSink.Entries.Should().Contain(entry => entry.Action == "trust_override_set"); + } + + [Fact] + public async Task SetAsync_InvalidWeight_Throws() + { + var action = async () => await _service.SetAsync("tenant-a", "issuer-1", 20m, null, "actor", CancellationToken.None); + await action.Should().ThrowAsync(); + } + + [Fact] + public async Task GetAsync_FallsBackToGlobal() + { + await _service.SetAsync(IssuerTenants.Global, "issuer-1", -2m, null, "seed", CancellationToken.None); + + var view = await _service.GetAsync("tenant-b", "issuer-1", includeGlobal: true, CancellationToken.None); + view.EffectiveWeight.Should().Be(-2m); + view.GlobalOverride.Should().NotBeNull(); + } + + [Fact] + public async Task DeleteAsync_RemovesOverride() + { + await _service.SetAsync("tenant-a", "issuer-1", 1m, null, "actor", CancellationToken.None); + + await _service.DeleteAsync("tenant-a", "issuer-1", "actor", "clearing", CancellationToken.None); + + var view = await _service.GetAsync("tenant-a", "issuer-1", includeGlobal: false, CancellationToken.None); + view.TenantOverride.Should().BeNull(); + _auditSink.Entries.Should().Contain(entry => entry.Action == "trust_override_deleted"); + } + + private sealed class FakeIssuerRepository : IIssuerRepository + { + private readonly ConcurrentDictionary<(string Tenant, string Id), IssuerRecord> 
_store = new(); + + public void Add(IssuerRecord record) => _store[(record.TenantId, record.Id)] = record; + + public Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryGetValue((tenantId, issuerId), out var record); + return Task.FromResult(record); + } + + public Task> ListAsync(string tenantId, CancellationToken cancellationToken) + { + throw new NotImplementedException(); + } + + public Task> ListGlobalAsync(CancellationToken cancellationToken) + { + throw new NotImplementedException(); + } + + public Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken) + { + _store[(record.TenantId, record.Id)] = record; + return Task.CompletedTask; + } + + public Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryRemove((tenantId, issuerId), out _); + return Task.CompletedTask; + } + } + + private sealed class FakeIssuerTrustRepository : IIssuerTrustRepository + { + private readonly ConcurrentDictionary<(string Tenant, string Issuer), IssuerTrustOverrideRecord> _store = new(); + + public Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryGetValue((tenantId, issuerId), out var record); + return Task.FromResult(record); + } + + public Task UpsertAsync(IssuerTrustOverrideRecord record, CancellationToken cancellationToken) + { + _store[(record.TenantId, record.IssuerId)] = record; + return Task.CompletedTask; + } + + public Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken) + { + _store.TryRemove((tenantId, issuerId), out _); + return Task.CompletedTask; + } + } + + private sealed class FakeIssuerAuditSink : IIssuerAuditSink + { + private readonly ConcurrentBag _entries = new(); + + public IReadOnlyCollection Entries => _entries.ToArray(); + + public Task WriteAsync(IssuerAuditEntry entry, CancellationToken cancellationToken) + { + _entries.Add(entry); + return 
Task.CompletedTask; + } + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/StellaOps.IssuerDirectory.Core.Tests.csproj b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/StellaOps.IssuerDirectory.Core.Tests.csproj new file mode 100644 index 00000000..5aa1dbfd --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core.Tests/StellaOps.IssuerDirectory.Core.Tests.csproj @@ -0,0 +1,16 @@ + + + net10.0 + preview + false + enable + enable + true + + + + + + + + diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerAuditSink.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerAuditSink.cs new file mode 100644 index 00000000..55beeca9 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerAuditSink.cs @@ -0,0 +1,11 @@ +namespace StellaOps.IssuerDirectory.Core.Abstractions; + +using StellaOps.IssuerDirectory.Core.Domain; + +/// +/// Persists audit events describing issuer changes. +/// +public interface IIssuerAuditSink +{ + Task WriteAsync(IssuerAuditEntry entry, CancellationToken cancellationToken); +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerKeyRepository.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerKeyRepository.cs new file mode 100644 index 00000000..6a5ddcc4 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerKeyRepository.cs @@ -0,0 +1,19 @@ +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Abstractions; + +/// +/// Repository abstraction for issuer key persistence. 
+/// +public interface IIssuerKeyRepository +{ + Task GetAsync(string tenantId, string issuerId, string keyId, CancellationToken cancellationToken); + + Task GetByFingerprintAsync(string tenantId, string issuerId, string fingerprint, CancellationToken cancellationToken); + + Task> ListAsync(string tenantId, string issuerId, CancellationToken cancellationToken); + + Task> ListGlobalAsync(string issuerId, CancellationToken cancellationToken); + + Task UpsertAsync(IssuerKeyRecord record, CancellationToken cancellationToken); +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerRepository.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerRepository.cs new file mode 100644 index 00000000..bbcd152d --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerRepository.cs @@ -0,0 +1,19 @@ +namespace StellaOps.IssuerDirectory.Core.Abstractions; + +using StellaOps.IssuerDirectory.Core.Domain; + +/// +/// Repository abstraction for issuer directory persistence. 
+/// +public interface IIssuerRepository +{ + Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken); + + Task> ListAsync(string tenantId, CancellationToken cancellationToken); + + Task> ListGlobalAsync(CancellationToken cancellationToken); + + Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken); + + Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken); +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerTrustRepository.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerTrustRepository.cs new file mode 100644 index 00000000..f507789f --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Abstractions/IIssuerTrustRepository.cs @@ -0,0 +1,15 @@ +using StellaOps.IssuerDirectory.Core.Domain; + +namespace StellaOps.IssuerDirectory.Core.Abstractions; + +/// +/// Repository abstraction for trust weight overrides. +/// +public interface IIssuerTrustRepository +{ + Task GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken); + + Task UpsertAsync(IssuerTrustOverrideRecord record, CancellationToken cancellationToken); + + Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken); +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerAuditEntry.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerAuditEntry.cs new file mode 100644 index 00000000..b166be43 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerAuditEntry.cs @@ -0,0 +1,51 @@ +namespace StellaOps.IssuerDirectory.Core.Domain; + +/// +/// Represents an audit log describing an issuer change. 
+/// +public sealed class IssuerAuditEntry +{ + public IssuerAuditEntry( + string tenantId, + string issuerId, + string action, + DateTimeOffset timestampUtc, + string actor, + string? reason, + IReadOnlyDictionary? metadata) + { + TenantId = Normalize(tenantId, nameof(tenantId)); + IssuerId = Normalize(issuerId, nameof(issuerId)); + Action = Normalize(action, nameof(action)); + TimestampUtc = timestampUtc.ToUniversalTime(); + Actor = Normalize(actor, nameof(actor)); + Reason = string.IsNullOrWhiteSpace(reason) ? null : reason.Trim(); + Metadata = metadata is null + ? new Dictionary() + : new Dictionary(metadata, StringComparer.OrdinalIgnoreCase); + } + + public string TenantId { get; } + + public string IssuerId { get; } + + public string Action { get; } + + public DateTimeOffset TimestampUtc { get; } + + public string Actor { get; } + + public string? Reason { get; } + + public IReadOnlyDictionary Metadata { get; } + + private static string Normalize(string value, string argumentName) + { + if (string.IsNullOrWhiteSpace(value)) + { + throw new ArgumentException("Value is required.", argumentName); + } + + return value.Trim(); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerContact.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerContact.cs new file mode 100644 index 00000000..1d0bdee9 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerContact.cs @@ -0,0 +1,28 @@ +namespace StellaOps.IssuerDirectory.Core.Domain; + +/// +/// Contact information for a publisher or issuer. +/// +public sealed class IssuerContact +{ + public IssuerContact(string? email, string? phone, Uri? website, string? timezone) + { + Email = Normalize(email); + Phone = Normalize(phone); + Website = website; + Timezone = Normalize(timezone); + } + + public string? Email { get; } + + public string? Phone { get; } + + public Uri? 
Website { get; } + + public string? Timezone { get; } + + private static string? Normalize(string? value) + { + return string.IsNullOrWhiteSpace(value) ? null : value.Trim(); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerEndpoint.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerEndpoint.cs new file mode 100644 index 00000000..bd7323f2 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerEndpoint.cs @@ -0,0 +1,33 @@ +namespace StellaOps.IssuerDirectory.Core.Domain; + +/// +/// Represents an endpoint that exposes attestation or CSAF data for an issuer. +/// +public sealed class IssuerEndpoint +{ + public IssuerEndpoint(string kind, Uri url, string? format, bool requiresAuthentication) + { + Kind = Normalize(kind, nameof(kind)); + Url = url ?? throw new ArgumentNullException(nameof(url)); + Format = string.IsNullOrWhiteSpace(format) ? null : format.Trim().ToLowerInvariant(); + RequiresAuthentication = requiresAuthentication; + } + + public string Kind { get; } + + public Uri Url { get; } + + public string? 
Format { get; } + + public bool RequiresAuthentication { get; } + + private static string Normalize(string value, string argumentName) + { + if (string.IsNullOrWhiteSpace(value)) + { + throw new ArgumentException("Value is required.", argumentName); + } + + return value.Trim().ToLowerInvariant(); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyMaterial.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyMaterial.cs new file mode 100644 index 00000000..3df909e2 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyMaterial.cs @@ -0,0 +1,21 @@ +namespace StellaOps.IssuerDirectory.Core.Domain; + +/// +/// Represents the encoded key material. +/// +public sealed record IssuerKeyMaterial(string Format, string Value) +{ + public string Format { get; } = Normalize(Format, nameof(Format)); + + public string Value { get; } = Normalize(Value, nameof(Value)); + + private static string Normalize(string value, string argumentName) + { + if (string.IsNullOrWhiteSpace(value)) + { + throw new ArgumentException("Value must be provided.", argumentName); + } + + return value.Trim(); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyRecord.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyRecord.cs new file mode 100644 index 00000000..bbc6600f --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyRecord.cs @@ -0,0 +1,112 @@ +namespace StellaOps.IssuerDirectory.Core.Domain; + +/// +/// Represents an issuer signing key. 
+/// +public sealed record IssuerKeyRecord +{ + public required string Id { get; init; } + + public required string IssuerId { get; init; } + + public required string TenantId { get; init; } + + public required IssuerKeyType Type { get; init; } + + public required IssuerKeyStatus Status { get; init; } + + public required IssuerKeyMaterial Material { get; init; } + + public required string Fingerprint { get; init; } + + public required DateTimeOffset CreatedAtUtc { get; init; } + + public required string CreatedBy { get; init; } + + public required DateTimeOffset UpdatedAtUtc { get; init; } + + public required string UpdatedBy { get; init; } + + public DateTimeOffset? ExpiresAtUtc { get; init; } + + public DateTimeOffset? RetiredAtUtc { get; init; } + + public DateTimeOffset? RevokedAtUtc { get; init; } + + public string? ReplacesKeyId { get; init; } + + public static IssuerKeyRecord Create( + string id, + string issuerId, + string tenantId, + IssuerKeyType type, + IssuerKeyMaterial material, + string fingerprint, + DateTimeOffset createdAtUtc, + string createdBy, + DateTimeOffset? expiresAtUtc, + string? replacesKeyId) + { + ArgumentException.ThrowIfNullOrWhiteSpace(id); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentNullException.ThrowIfNull(material); + ArgumentException.ThrowIfNullOrWhiteSpace(fingerprint); + ArgumentException.ThrowIfNullOrWhiteSpace(createdBy); + + return new IssuerKeyRecord + { + Id = id.Trim(), + IssuerId = issuerId.Trim(), + TenantId = tenantId.Trim(), + Type = type, + Status = IssuerKeyStatus.Active, + Material = material, + Fingerprint = fingerprint.Trim(), + CreatedAtUtc = createdAtUtc, + CreatedBy = createdBy.Trim(), + UpdatedAtUtc = createdAtUtc, + UpdatedBy = createdBy.Trim(), + ExpiresAtUtc = expiresAtUtc?.ToUniversalTime(), + RetiredAtUtc = null, + RevokedAtUtc = null, + ReplacesKeyId = string.IsNullOrWhiteSpace(replacesKeyId) ? 
null : replacesKeyId.Trim() + }; + } + + public IssuerKeyRecord WithStatus( + IssuerKeyStatus status, + DateTimeOffset timestampUtc, + string updatedBy) + { + ArgumentException.ThrowIfNullOrWhiteSpace(updatedBy); + + return status switch + { + IssuerKeyStatus.Active => this with + { + Status = status, + UpdatedAtUtc = timestampUtc, + UpdatedBy = updatedBy.Trim(), + RetiredAtUtc = null, + RevokedAtUtc = null + }, + IssuerKeyStatus.Retired => this with + { + Status = status, + UpdatedAtUtc = timestampUtc, + UpdatedBy = updatedBy.Trim(), + RetiredAtUtc = timestampUtc, + RevokedAtUtc = null + }, + IssuerKeyStatus.Revoked => this with + { + Status = status, + UpdatedAtUtc = timestampUtc, + UpdatedBy = updatedBy.Trim(), + RevokedAtUtc = timestampUtc + }, + _ => throw new ArgumentOutOfRangeException(nameof(status), status, "Unsupported key status.") + }; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyStatus.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyStatus.cs new file mode 100644 index 00000000..eb00d262 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyStatus.cs @@ -0,0 +1,11 @@ +namespace StellaOps.IssuerDirectory.Core.Domain; + +/// +/// Lifecycle status for issuer keys. +/// +public enum IssuerKeyStatus +{ + Active, + Retired, + Revoked +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyType.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyType.cs new file mode 100644 index 00000000..e3d2de2f --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerKeyType.cs @@ -0,0 +1,11 @@ +namespace StellaOps.IssuerDirectory.Core.Domain; + +/// +/// Supported issuer key kinds. 
+/// +public enum IssuerKeyType +{ + Ed25519PublicKey, + X509Certificate, + DssePublicKey +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerMetadata.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerMetadata.cs new file mode 100644 index 00000000..a36ef3d7 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerMetadata.cs @@ -0,0 +1,61 @@ +using System.Collections.ObjectModel; + +namespace StellaOps.IssuerDirectory.Core.Domain; + +/// +/// Domain metadata describing issuer provenance and publication capabilities. +/// +public sealed class IssuerMetadata +{ + private readonly IReadOnlyCollection _languages; + + public IssuerMetadata( + string? cveOrgId, + string? csafPublisherId, + Uri? securityAdvisoriesUrl, + Uri? catalogUrl, + IEnumerable? supportedLanguages, + IDictionary? attributes) + { + CveOrgId = Normalize(cveOrgId); + CsafPublisherId = Normalize(csafPublisherId); + SecurityAdvisoriesUrl = securityAdvisoriesUrl; + CatalogUrl = catalogUrl; + _languages = BuildLanguages(supportedLanguages); + Attributes = attributes is null + ? new ReadOnlyDictionary(new Dictionary()) + : new ReadOnlyDictionary( + attributes.ToDictionary( + pair => pair.Key.Trim(), + pair => pair.Value.Trim())); + } + + public string? CveOrgId { get; } + + public string? CsafPublisherId { get; } + + public Uri? SecurityAdvisoriesUrl { get; } + + public Uri? CatalogUrl { get; } + + public IReadOnlyCollection SupportedLanguages => _languages; + + public IReadOnlyDictionary Attributes { get; } + + private static string? Normalize(string? value) + { + return string.IsNullOrWhiteSpace(value) ? null : value.Trim(); + } + + private static IReadOnlyCollection BuildLanguages(IEnumerable? languages) + { + var normalized = languages? 
+ .Select(language => language?.Trim()) + .Where(language => !string.IsNullOrWhiteSpace(language)) + .Select(language => language!.ToLowerInvariant()) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray() ?? Array.Empty(); + + return new ReadOnlyCollection(normalized); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.cs new file mode 100644 index 00000000..a6b994ff --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerRecord.cs @@ -0,0 +1,160 @@ +namespace StellaOps.IssuerDirectory.Core.Domain; + +/// +/// Represents a VEX issuer or CSAF publisher entry managed by the Issuer Directory. +/// +public sealed record IssuerRecord +{ + private static readonly StringComparer TagComparer = StringComparer.OrdinalIgnoreCase; + + public required string Id { get; init; } + + public required string TenantId { get; init; } + + public required string DisplayName { get; init; } + + public required string Slug { get; init; } + + public string? Description { get; init; } + + public required IssuerContact Contact { get; init; } + + public required IssuerMetadata Metadata { get; init; } + + public IReadOnlyCollection Endpoints { get; init; } = Array.Empty(); + + public IReadOnlyCollection Tags { get; init; } = Array.Empty(); + + public required DateTimeOffset CreatedAtUtc { get; init; } + + public required string CreatedBy { get; init; } + + public required DateTimeOffset UpdatedAtUtc { get; init; } + + public required string UpdatedBy { get; init; } + + public bool IsSystemSeed { get; init; } + + public static IssuerRecord Create( + string id, + string tenantId, + string displayName, + string slug, + string? description, + IssuerContact contact, + IssuerMetadata metadata, + IEnumerable? endpoints, + IEnumerable? 
tags, + DateTimeOffset timestampUtc, + string actor, + bool isSystemSeed) + { + if (string.IsNullOrWhiteSpace(id)) + { + throw new ArgumentException("Identifier is required.", nameof(id)); + } + + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant must be provided.", nameof(tenantId)); + } + + if (string.IsNullOrWhiteSpace(displayName)) + { + throw new ArgumentException("Display name is required.", nameof(displayName)); + } + + if (string.IsNullOrWhiteSpace(slug)) + { + throw new ArgumentException("Slug is required.", nameof(slug)); + } + + if (contact is null) + { + throw new ArgumentNullException(nameof(contact)); + } + + if (metadata is null) + { + throw new ArgumentNullException(nameof(metadata)); + } + + if (string.IsNullOrWhiteSpace(actor)) + { + throw new ArgumentException("Actor is required.", nameof(actor)); + } + + var normalizedTags = (tags ?? Array.Empty()) + .Where(tag => !string.IsNullOrWhiteSpace(tag)) + .Select(tag => tag.Trim().ToLowerInvariant()) + .Distinct(TagComparer) + .ToArray(); + + return new IssuerRecord + { + Id = id.Trim(), + TenantId = tenantId.Trim(), + DisplayName = displayName.Trim(), + Slug = slug.Trim().ToLowerInvariant(), + Description = string.IsNullOrWhiteSpace(description) ? null : description.Trim(), + Contact = contact, + Metadata = metadata, + Endpoints = (endpoints ?? Array.Empty()).ToArray(), + Tags = normalizedTags, + CreatedAtUtc = timestampUtc.ToUniversalTime(), + CreatedBy = actor.Trim(), + UpdatedAtUtc = timestampUtc.ToUniversalTime(), + UpdatedBy = actor.Trim(), + IsSystemSeed = isSystemSeed + }; + } + + public IssuerRecord WithUpdated( + IssuerContact contact, + IssuerMetadata metadata, + IEnumerable? endpoints, + IEnumerable? tags, + string displayName, + string? 
description, + DateTimeOffset updatedAtUtc, + string updatedBy) + { + if (contact is null) + { + throw new ArgumentNullException(nameof(contact)); + } + + if (metadata is null) + { + throw new ArgumentNullException(nameof(metadata)); + } + + if (string.IsNullOrWhiteSpace(displayName)) + { + throw new ArgumentException("Display name is required.", nameof(displayName)); + } + + if (string.IsNullOrWhiteSpace(updatedBy)) + { + throw new ArgumentException("Actor is required.", nameof(updatedBy)); + } + + var normalizedTags = (tags ?? Array.Empty()) + .Where(tag => !string.IsNullOrWhiteSpace(tag)) + .Select(tag => tag.Trim().ToLowerInvariant()) + .Distinct(TagComparer) + .ToArray(); + + return this with + { + DisplayName = displayName.Trim(), + Description = string.IsNullOrWhiteSpace(description) ? null : description.Trim(), + Contact = contact, + Metadata = metadata, + Endpoints = (endpoints ?? Array.Empty()).ToArray(), + Tags = normalizedTags, + UpdatedAtUtc = updatedAtUtc.ToUniversalTime(), + UpdatedBy = updatedBy.Trim() + }; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerTenants.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerTenants.cs new file mode 100644 index 00000000..f2891ca1 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerTenants.cs @@ -0,0 +1,12 @@ +namespace StellaOps.IssuerDirectory.Core.Domain; + +/// +/// Well-known tenant identifiers for issuer directory entries. +/// +public static class IssuerTenants +{ + /// + /// Global issuer used for system-wide CSAF publishers available to all tenants. 
+ /// + public const string Global = "@global"; +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerTrustOverrideRecord.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerTrustOverrideRecord.cs new file mode 100644 index 00000000..31bde33c --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Domain/IssuerTrustOverrideRecord.cs @@ -0,0 +1,72 @@ +namespace StellaOps.IssuerDirectory.Core.Domain; + +/// +/// Represents a tenant-specific trust weight override for an issuer. +/// +public sealed record IssuerTrustOverrideRecord +{ + public required string IssuerId { get; init; } + + public required string TenantId { get; init; } + + public required decimal Weight { get; init; } + + public string? Reason { get; init; } + + public required DateTimeOffset UpdatedAtUtc { get; init; } + + public required string UpdatedBy { get; init; } + + public required DateTimeOffset CreatedAtUtc { get; init; } + + public required string CreatedBy { get; init; } + + public static IssuerTrustOverrideRecord Create( + string issuerId, + string tenantId, + decimal weight, + string? reason, + DateTimeOffset timestampUtc, + string actor) + { + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(actor); + + ValidateWeight(weight); + + return new IssuerTrustOverrideRecord + { + IssuerId = issuerId.Trim(), + TenantId = tenantId.Trim(), + Weight = weight, + Reason = string.IsNullOrWhiteSpace(reason) ? null : reason.Trim(), + CreatedAtUtc = timestampUtc, + CreatedBy = actor.Trim(), + UpdatedAtUtc = timestampUtc, + UpdatedBy = actor.Trim() + }; + } + + public IssuerTrustOverrideRecord WithUpdated(decimal weight, string? 
reason, DateTimeOffset timestampUtc, string actor) + { + ArgumentException.ThrowIfNullOrWhiteSpace(actor); + ValidateWeight(weight); + + return this with + { + Weight = weight, + Reason = string.IsNullOrWhiteSpace(reason) ? null : reason.Trim(), + UpdatedAtUtc = timestampUtc, + UpdatedBy = actor.Trim() + }; + } + + public static void ValidateWeight(decimal weight) + { + if (weight is < -10m or > 10m) + { + throw new InvalidOperationException("Trust weight must be between -10 and 10 inclusive."); + } + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Observability/IssuerDirectoryMetrics.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Observability/IssuerDirectoryMetrics.cs new file mode 100644 index 00000000..7d8f8d4c --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Observability/IssuerDirectoryMetrics.cs @@ -0,0 +1,54 @@ +using System.Diagnostics.Metrics; + +namespace StellaOps.IssuerDirectory.Core.Observability; + +internal static class IssuerDirectoryMetrics +{ + private static readonly Meter Meter = new("StellaOps.IssuerDirectory", "1.0"); + + private static readonly Counter IssuerChangeCounter = Meter.CreateCounter( + "issuer_directory_changes_total", + description: "Counts issuer create/update/delete events."); + + private static readonly Counter KeyOperationCounter = Meter.CreateCounter( + "issuer_directory_key_operations_total", + description: "Counts issuer key create/rotate/revoke operations."); + + private static readonly Counter KeyValidationFailureCounter = Meter.CreateCounter( + "issuer_directory_key_validation_failures_total", + description: "Counts issuer key validation or verification failures."); + + public static void RecordIssuerChange(string tenantId, string issuerId, string action) + { + IssuerChangeCounter.Add(1, new TagList + { + { "tenant", NormalizeTag(tenantId) }, + { "issuer", NormalizeTag(issuerId) }, + { 
"action", action } + }); + } + + public static void RecordKeyOperation(string tenantId, string issuerId, string operation, string keyType) + { + KeyOperationCounter.Add(1, new TagList + { + { "tenant", NormalizeTag(tenantId) }, + { "issuer", NormalizeTag(issuerId) }, + { "operation", operation }, + { "key_type", keyType } + }); + } + + public static void RecordKeyValidationFailure(string tenantId, string issuerId, string reason) + { + KeyValidationFailureCounter.Add(1, new TagList + { + { "tenant", NormalizeTag(tenantId) }, + { "issuer", NormalizeTag(issuerId) }, + { "reason", reason } + }); + } + + private static string NormalizeTag(string? value) + => string.IsNullOrWhiteSpace(value) ? "unknown" : value.Trim(); +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.cs new file mode 100644 index 00000000..91ae3e4f --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerDirectoryService.cs @@ -0,0 +1,252 @@ +using Microsoft.Extensions.Logging; +using StellaOps.IssuerDirectory.Core.Abstractions; +using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Core.Observability; + +namespace StellaOps.IssuerDirectory.Core.Services; + +/// +/// Coordinates issuer directory operations with persistence, validation, and auditing. +/// +public sealed class IssuerDirectoryService +{ + private readonly IIssuerRepository _repository; + private readonly IIssuerAuditSink _auditSink; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + + public IssuerDirectoryService( + IIssuerRepository repository, + IIssuerAuditSink auditSink, + TimeProvider timeProvider, + ILogger logger) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _auditSink = auditSink ?? 
throw new ArgumentNullException(nameof(auditSink)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task> ListAsync( + string tenantId, + bool includeGlobal, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + + var tenantIssuers = await _repository.ListAsync(tenantId, cancellationToken).ConfigureAwait(false); + if (!includeGlobal) + { + return tenantIssuers.OrderBy(record => record.Slug, StringComparer.Ordinal).ToArray(); + } + + var globalIssuers = await _repository.ListGlobalAsync(cancellationToken).ConfigureAwait(false); + return tenantIssuers.Concat(globalIssuers) + .DistinctBy(record => (record.TenantId, record.Id)) + .OrderBy(record => record.Slug, StringComparer.Ordinal) + .ToArray(); + } + + public async Task GetAsync( + string tenantId, + string issuerId, + bool includeGlobal, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + + var issuer = await _repository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); + if (issuer is not null || !includeGlobal) + { + return issuer; + } + + return await _repository.GetAsync(IssuerTenants.Global, issuerId, cancellationToken).ConfigureAwait(false); + } + + public async Task CreateAsync( + string tenantId, + string issuerId, + string displayName, + string slug, + string? description, + IssuerContact contact, + IssuerMetadata metadata, + IEnumerable? endpoints, + IEnumerable? tags, + string actor, + string? 
reason, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + + var timestamp = _timeProvider.GetUtcNow(); + var record = IssuerRecord.Create( + issuerId, + tenantId, + displayName, + slug, + description, + contact, + metadata, + endpoints, + tags, + timestamp, + actor, + isSystemSeed: false); + + await _repository.UpsertAsync(record, cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(record, "created", actor, reason, cancellationToken).ConfigureAwait(false); + + IssuerDirectoryMetrics.RecordIssuerChange(tenantId, issuerId, "created"); + _logger.LogInformation( + "Issuer {IssuerId} created for tenant {TenantId} by {Actor}.", + issuerId, + tenantId, + actor); + + return record; + } + + public async Task UpdateAsync( + string tenantId, + string issuerId, + string displayName, + string? description, + IssuerContact contact, + IssuerMetadata metadata, + IEnumerable? endpoints, + IEnumerable? tags, + string actor, + string? reason, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + + var existing = await _repository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false) + ?? 
throw new InvalidOperationException($"Issuer '{issuerId}' not found for tenant '{tenantId}'."); + + var timestamp = _timeProvider.GetUtcNow(); + var updated = existing.WithUpdated( + contact, + metadata, + endpoints, + tags, + displayName, + description, + timestamp, + actor); + + await _repository.UpsertAsync(updated, cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(updated, "updated", actor, reason, cancellationToken).ConfigureAwait(false); + + IssuerDirectoryMetrics.RecordIssuerChange(tenantId, issuerId, "updated"); + _logger.LogInformation( + "Issuer {IssuerId} updated for tenant {TenantId} by {Actor}.", + issuerId, + tenantId, + actor); + + return updated; + } + + public async Task DeleteAsync( + string tenantId, + string issuerId, + string actor, + string? reason, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + + await _repository.DeleteAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false); + var timestamp = _timeProvider.GetUtcNow(); + var audit = new IssuerAuditEntry( + tenantId, + issuerId, + action: "deleted", + timestampUtc: timestamp, + actor: actor, + reason: reason, + metadata: null); + await _auditSink.WriteAsync(audit, cancellationToken).ConfigureAwait(false); + + IssuerDirectoryMetrics.RecordIssuerChange(tenantId, issuerId, "deleted"); + _logger.LogInformation( + "Issuer {IssuerId} deleted for tenant {TenantId} by {Actor}.", + issuerId, + tenantId, + actor); + } + + public async Task SeedAsync(IEnumerable seeds, CancellationToken cancellationToken) + { + if (seeds is null) + { + throw new ArgumentNullException(nameof(seeds)); + } + + foreach (var seed in seeds) + { + if (!seed.IsSystemSeed) + { + continue; + } + + var existing = await _repository.GetAsync(seed.TenantId, seed.Id, cancellationToken).ConfigureAwait(false); + + if (existing is null) + { + await _repository.UpsertAsync(seed, 
cancellationToken).ConfigureAwait(false); + await WriteAuditAsync(seed, "seeded", seed.UpdatedBy, "CSAF bootstrap import", cancellationToken) + .ConfigureAwait(false); + } + else + { + var refreshed = existing.WithUpdated( + seed.Contact, + seed.Metadata, + seed.Endpoints, + seed.Tags, + seed.DisplayName, + seed.Description, + _timeProvider.GetUtcNow(), + seed.UpdatedBy) + with + { + IsSystemSeed = true + }; + + await _repository.UpsertAsync(refreshed, cancellationToken).ConfigureAwait(false); + } + } + } + + private async Task WriteAuditAsync( + IssuerRecord record, + string action, + string actor, + string? reason, + CancellationToken cancellationToken) + { + var audit = new IssuerAuditEntry( + record.TenantId, + record.Id, + action, + _timeProvider.GetUtcNow(), + actor, + reason, + metadata: new Dictionary + { + ["display_name"] = record.DisplayName, + ["slug"] = record.Slug, + ["is_system_seed"] = record.IsSystemSeed.ToString() + }); + + await _auditSink.WriteAsync(audit, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.cs new file mode 100644 index 00000000..a1e88a12 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Core/Services/IssuerKeyService.cs @@ -0,0 +1,322 @@ +using System.Security.Cryptography; +using Microsoft.Extensions.Logging; +using StellaOps.IssuerDirectory.Core.Abstractions; +using StellaOps.IssuerDirectory.Core.Domain; +using StellaOps.IssuerDirectory.Core.Observability; +using StellaOps.IssuerDirectory.Core.Validation; + +namespace StellaOps.IssuerDirectory.Core.Services; + +/// +/// Manages issuer signing keys. 
/// <summary>
/// Manages issuer signing keys: listing, registration, rotation, and revocation.
/// Mutations validate key material, enforce fingerprint uniqueness among active keys,
/// and emit audit entries plus key-operation metrics.
/// </summary>
public sealed class IssuerKeyService
{
    private readonly IIssuerRepository _issuerRepository;
    private readonly IIssuerKeyRepository _keyRepository;
    private readonly IIssuerAuditSink _auditSink;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<IssuerKeyService> _logger;

    public IssuerKeyService(
        IIssuerRepository issuerRepository,
        IIssuerKeyRepository keyRepository,
        IIssuerAuditSink auditSink,
        TimeProvider timeProvider,
        ILogger<IssuerKeyService> logger)
    {
        ArgumentNullException.ThrowIfNull(issuerRepository);
        ArgumentNullException.ThrowIfNull(keyRepository);
        ArgumentNullException.ThrowIfNull(auditSink);
        ArgumentNullException.ThrowIfNull(timeProvider);
        ArgumentNullException.ThrowIfNull(logger);

        _issuerRepository = issuerRepository;
        _keyRepository = keyRepository;
        _auditSink = auditSink;
        _timeProvider = timeProvider;
        _logger = logger;
    }

    /// <summary>
    /// Lists keys for an issuer ordered by creation time, optionally merged with keys
    /// registered under the global tenant (tenant keys shadow global duplicates).
    /// </summary>
    public async Task<IReadOnlyCollection<IssuerKeyRecord>> ListAsync(
        string tenantId,
        string issuerId,
        bool includeGlobal,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);

        var scoped = await _keyRepository.ListAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false);
        if (!includeGlobal)
        {
            return scoped.OrderBy(key => key.CreatedAtUtc).ToArray();
        }

        var global = await _keyRepository.ListGlobalAsync(issuerId, cancellationToken).ConfigureAwait(false);
        return scoped.Concat(global)
            .DistinctBy(key => (key.TenantId, key.Id))
            .OrderBy(key => key.CreatedAtUtc)
            .ToArray();
    }

    /// <summary>
    /// Registers a new key after validating its material and rejecting a fingerprint
    /// that already belongs to an active key of the same issuer.
    /// </summary>
    /// <exception cref="InvalidOperationException">Issuer missing, material invalid, or duplicate active key.</exception>
    public async Task<IssuerKeyRecord> AddAsync(
        string tenantId,
        string issuerId,
        IssuerKeyType type,
        IssuerKeyMaterial material,
        DateTimeOffset? expiresAtUtc,
        string actor,
        string? reason,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);
        ArgumentException.ThrowIfNullOrWhiteSpace(actor);

        await EnsureIssuerExistsAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false);

        IssuerKeyValidationResult validated;
        try
        {
            validated = IssuerKeyValidator.Validate(type, material, expiresAtUtc, _timeProvider);
        }
        catch (Exception ex)
        {
            IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, ex.GetType().Name);
            _logger.LogWarning(
                ex,
                "Key validation failed for issuer {IssuerId} (tenant={TenantId}) during add.",
                issuerId,
                tenantId);
            throw;
        }

        var fingerprint = ComputeFingerprint(validated.RawKeyBytes);
        var collision = await _keyRepository.GetByFingerprintAsync(tenantId, issuerId, fingerprint, cancellationToken)
            .ConfigureAwait(false);
        if (collision is { Status: IssuerKeyStatus.Active })
        {
            IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "duplicate_fingerprint");
            _logger.LogWarning(
                "Duplicate active key detected for issuer {IssuerId} (tenant={TenantId}).",
                issuerId,
                tenantId);
            throw new InvalidOperationException("An identical active key already exists for this issuer.");
        }

        var created = IssuerKeyRecord.Create(
            Guid.NewGuid().ToString("n"),
            issuerId,
            tenantId,
            type,
            validated.Material,
            fingerprint,
            _timeProvider.GetUtcNow(),
            actor,
            validated.ExpiresAtUtc,
            replacesKeyId: null);

        await _keyRepository.UpsertAsync(created, cancellationToken).ConfigureAwait(false);
        await WriteAuditAsync(created, "key_created", actor, reason, cancellationToken).ConfigureAwait(false);

        IssuerDirectoryMetrics.RecordKeyOperation(tenantId, issuerId, "created", type.ToString());
        _logger.LogInformation(
            "Issuer key {KeyId} created for issuer {IssuerId} (tenant={TenantId}) by {Actor}.",
            created.Id,
            issuerId,
            tenantId,
            actor);

        return created;
    }

    /// <summary>
    /// Retires an active key and registers a replacement that records the old key id.
    /// NOTE(review): retire + create are two independent upserts with no transaction;
    /// a failure between them leaves the old key retired without a replacement.
    /// </summary>
    /// <exception cref="InvalidOperationException">Key missing/not active, material invalid, or duplicate active key.</exception>
    public async Task<IssuerKeyRecord> RotateAsync(
        string tenantId,
        string issuerId,
        string keyId,
        IssuerKeyType newType,
        IssuerKeyMaterial newMaterial,
        DateTimeOffset? expiresAtUtc,
        string actor,
        string? reason,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);
        ArgumentException.ThrowIfNullOrWhiteSpace(actor);

        var current = await _keyRepository.GetAsync(tenantId, issuerId, keyId, cancellationToken).ConfigureAwait(false);
        if (current is null)
        {
            IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "key_not_found");
            _logger.LogWarning(
                "Attempted to rotate missing key {KeyId} for issuer {IssuerId} (tenant={TenantId}).",
                keyId,
                issuerId,
                tenantId);
            throw new InvalidOperationException("Key not found for rotation.");
        }

        if (current.Status != IssuerKeyStatus.Active)
        {
            IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "key_not_active");
            _logger.LogWarning(
                "Attempted to rotate non-active key {KeyId} (status={Status}) for issuer {IssuerId} (tenant={TenantId}).",
                keyId,
                current.Status,
                issuerId,
                tenantId);
            throw new InvalidOperationException("Only active keys can be rotated.");
        }

        await EnsureIssuerExistsAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false);

        IssuerKeyValidationResult validated;
        try
        {
            validated = IssuerKeyValidator.Validate(newType, newMaterial, expiresAtUtc, _timeProvider);
        }
        catch (Exception ex)
        {
            IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, ex.GetType().Name);
            _logger.LogWarning(
                ex,
                "Key validation failed for issuer {IssuerId} (tenant={TenantId}) during rotation.",
                issuerId,
                tenantId);
            throw;
        }

        var fingerprint = ComputeFingerprint(validated.RawKeyBytes);
        var collision = await _keyRepository.GetByFingerprintAsync(tenantId, issuerId, fingerprint, cancellationToken)
            .ConfigureAwait(false);
        if (collision is { Status: IssuerKeyStatus.Active })
        {
            IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "duplicate_fingerprint");
            _logger.LogWarning(
                "Duplicate active key detected during rotation for issuer {IssuerId} (tenant={TenantId}).",
                issuerId,
                tenantId);
            throw new InvalidOperationException("An identical active key already exists for this issuer.");
        }

        var now = _timeProvider.GetUtcNow();

        var retired = current.WithStatus(IssuerKeyStatus.Retired, now, actor);
        await _keyRepository.UpsertAsync(retired, cancellationToken).ConfigureAwait(false);
        await WriteAuditAsync(retired, "key_retired", actor, reason ?? "rotation", cancellationToken)
            .ConfigureAwait(false);

        var replacement = IssuerKeyRecord.Create(
            Guid.NewGuid().ToString("n"),
            issuerId,
            tenantId,
            newType,
            validated.Material,
            fingerprint,
            now,
            actor,
            validated.ExpiresAtUtc,
            replacesKeyId: current.Id);

        await _keyRepository.UpsertAsync(replacement, cancellationToken).ConfigureAwait(false);
        await WriteAuditAsync(replacement, "key_rotated", actor, reason, cancellationToken).ConfigureAwait(false);

        IssuerDirectoryMetrics.RecordKeyOperation(tenantId, issuerId, "rotated", newType.ToString());
        _logger.LogInformation(
            "Issuer key {OldKeyId} rotated for issuer {IssuerId} (tenant={TenantId}) by {Actor}; new key {NewKeyId}.",
            current.Id,
            issuerId,
            tenantId,
            actor,
            replacement.Id);

        return replacement;
    }

    /// <summary>
    /// Marks a key as revoked. Revoking an already revoked key is a no-op.
    /// </summary>
    /// <exception cref="InvalidOperationException">The key does not exist.</exception>
    public async Task RevokeAsync(
        string tenantId,
        string issuerId,
        string keyId,
        string actor,
        string? reason,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);
        ArgumentException.ThrowIfNullOrWhiteSpace(actor);

        var current = await _keyRepository.GetAsync(tenantId, issuerId, keyId, cancellationToken).ConfigureAwait(false);
        if (current is null)
        {
            IssuerDirectoryMetrics.RecordKeyValidationFailure(tenantId, issuerId, "key_not_found");
            _logger.LogWarning(
                "Attempted to revoke missing key {KeyId} for issuer {IssuerId} (tenant={TenantId}).",
                keyId,
                issuerId,
                tenantId);
            throw new InvalidOperationException("Key not found for revocation.");
        }

        if (current.Status == IssuerKeyStatus.Revoked)
        {
            return;
        }

        var revoked = current.WithStatus(IssuerKeyStatus.Revoked, _timeProvider.GetUtcNow(), actor);
        await _keyRepository.UpsertAsync(revoked, cancellationToken).ConfigureAwait(false);
        await WriteAuditAsync(revoked, "key_revoked", actor, reason, cancellationToken).ConfigureAwait(false);

        IssuerDirectoryMetrics.RecordKeyOperation(tenantId, issuerId, "revoked", current.Type.ToString());
        _logger.LogInformation(
            "Issuer key {KeyId} revoked for issuer {IssuerId} (tenant={TenantId}) by {Actor}.",
            keyId,
            issuerId,
            tenantId,
            actor);
    }

    // The issuer must exist either under the tenant or under the shared global tenant.
    private async Task EnsureIssuerExistsAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        var issuer = await _issuerRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false)
            ?? await _issuerRepository.GetAsync(IssuerTenants.Global, issuerId, cancellationToken).ConfigureAwait(false);

        if (issuer is null)
        {
            throw new InvalidOperationException("Issuer does not exist.");
        }
    }

    // Standard audit payload for key-level changes.
    private async Task WriteAuditAsync(
        IssuerKeyRecord record,
        string action,
        string actor,
        string? reason,
        CancellationToken cancellationToken)
    {
        var audit = new IssuerAuditEntry(
            record.TenantId,
            record.IssuerId,
            action,
            _timeProvider.GetUtcNow(),
            actor,
            reason,
            new Dictionary<string, string>
            {
                ["key_id"] = record.Id,
                ["key_type"] = record.Type.ToString(),
                ["fingerprint"] = record.Fingerprint,
                ["status"] = record.Status.ToString()
            });

        await _auditSink.WriteAsync(audit, cancellationToken).ConfigureAwait(false);
    }

    // Fingerprint = lowercase hex SHA-256 over the raw public key bytes.
    private static string ComputeFingerprint(byte[] rawKeyBytes)
        => Convert.ToHexString(SHA256.HashData(rawKeyBytes)).ToLowerInvariant();
}
using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Core.Domain;

namespace StellaOps.IssuerDirectory.Core.Services;

/// <summary>
/// Handles issuer trust weight overrides at tenant and global scope.
/// </summary>
public sealed class IssuerTrustService
{
    private readonly IIssuerRepository _issuerRepository;
    private readonly IIssuerTrustRepository _trustRepository;
    private readonly IIssuerAuditSink _auditSink;
    private readonly TimeProvider _timeProvider;

    public IssuerTrustService(
        IIssuerRepository issuerRepository,
        IIssuerTrustRepository trustRepository,
        IIssuerAuditSink auditSink,
        TimeProvider timeProvider)
    {
        ArgumentNullException.ThrowIfNull(issuerRepository);
        ArgumentNullException.ThrowIfNull(trustRepository);
        ArgumentNullException.ThrowIfNull(auditSink);
        ArgumentNullException.ThrowIfNull(timeProvider);

        _issuerRepository = issuerRepository;
        _trustRepository = trustRepository;
        _auditSink = auditSink;
        _timeProvider = timeProvider;
    }

    /// <summary>
    /// Resolves the trust view for an issuer: tenant override wins over the global
    /// override; when neither exists the effective weight defaults to 0.
    /// </summary>
    public async Task<IssuerTrustView> GetAsync(
        string tenantId,
        string issuerId,
        bool includeGlobal,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);

        var tenantOverride = await _trustRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false);

        IssuerTrustOverrideRecord? globalOverride = null;
        if (includeGlobal && !string.Equals(tenantId, IssuerTenants.Global, StringComparison.Ordinal))
        {
            globalOverride = await _trustRepository.GetAsync(IssuerTenants.Global, issuerId, cancellationToken).ConfigureAwait(false);
        }

        var effectiveWeight = tenantOverride?.Weight ?? globalOverride?.Weight ?? 0m;
        return new IssuerTrustView(tenantOverride, globalOverride, effectiveWeight);
    }

    /// <summary>
    /// Creates or updates the trust override for an issuer and audits the change.
    /// </summary>
    /// <exception cref="InvalidOperationException">The issuer does not exist.</exception>
    public async Task<IssuerTrustOverrideRecord> SetAsync(
        string tenantId,
        string issuerId,
        decimal weight,
        string? reason,
        string actor,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);
        ArgumentException.ThrowIfNullOrWhiteSpace(actor);

        await EnsureIssuerExistsAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false);

        var existing = await _trustRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false);
        var now = _timeProvider.GetUtcNow();

        var record = existing is null
            ? IssuerTrustOverrideRecord.Create(issuerId, tenantId, weight, reason, now, actor)
            : existing.WithUpdated(weight, reason, now, actor);

        await _trustRepository.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
        await WriteAuditAsync(record, "trust_override_set", actor, reason, cancellationToken).ConfigureAwait(false);

        return record;
    }

    /// <summary>
    /// Removes the trust override for an issuer. A missing override is a no-op.
    /// </summary>
    public async Task DeleteAsync(
        string tenantId,
        string issuerId,
        string actor,
        string? reason,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);
        ArgumentException.ThrowIfNullOrWhiteSpace(actor);

        var existing = await _trustRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false);
        if (existing is null)
        {
            return;
        }

        await _trustRepository.DeleteAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false);
        await WriteAuditAsync(existing, "trust_override_deleted", actor, reason, cancellationToken).ConfigureAwait(false);
    }

    // The issuer must exist either under the tenant or under the shared global tenant.
    private async Task EnsureIssuerExistsAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        var issuer = await _issuerRepository.GetAsync(tenantId, issuerId, cancellationToken).ConfigureAwait(false)
            ?? await _issuerRepository.GetAsync(IssuerTenants.Global, issuerId, cancellationToken).ConfigureAwait(false);

        if (issuer is null)
        {
            throw new InvalidOperationException("Issuer does not exist.");
        }
    }

    private async Task WriteAuditAsync(
        IssuerTrustOverrideRecord record,
        string action,
        string actor,
        string? reason,
        CancellationToken cancellationToken)
    {
        var audit = new IssuerAuditEntry(
            record.TenantId,
            record.IssuerId,
            action,
            _timeProvider.GetUtcNow(),
            actor,
            reason,
            new Dictionary<string, string>
            {
                // Invariant culture keeps the audit trail stable across host locales.
                ["weight"] = record.Weight.ToString("0.###", System.Globalization.CultureInfo.InvariantCulture)
            });

        await _auditSink.WriteAsync(audit, cancellationToken).ConfigureAwait(false);
    }
}

/// <summary>
/// Tenant and global trust overrides for an issuer plus the weight that actually applies.
/// </summary>
public sealed record IssuerTrustView(
    IssuerTrustOverrideRecord? TenantOverride,
    IssuerTrustOverrideRecord? GlobalOverride,
    decimal EffectiveWeight);

<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <!-- NOTE(review): property element names were lost during extraction; values were
         net10.0 / preview / enable / enable / true. Confirm names against sibling projects. -->
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>
</Project>

using StellaOps.IssuerDirectory.Core.Domain;

namespace StellaOps.IssuerDirectory.Core.Validation;

/// <summary>
/// Result of validating an issuer key request: the normalized material, the decoded
/// raw public key bytes, and the (UTC-normalized) optional expiry.
/// </summary>
public sealed class IssuerKeyValidationResult
{
    public IssuerKeyValidationResult(
        IssuerKeyMaterial material,
        byte[] rawKeyBytes,
        DateTimeOffset? expiresAtUtc)
    {
        Material = material ?? throw new ArgumentNullException(nameof(material));
        RawKeyBytes = rawKeyBytes ?? throw new ArgumentNullException(nameof(rawKeyBytes));
        // Normalize to UTC so downstream comparisons never mix offsets.
        ExpiresAtUtc = expiresAtUtc?.ToUniversalTime();
    }

    public IssuerKeyMaterial Material { get; }

    public byte[] RawKeyBytes { get; }

    public DateTimeOffset? ExpiresAtUtc { get; }
}
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using StellaOps.IssuerDirectory.Core.Domain;

namespace StellaOps.IssuerDirectory.Core.Validation;

/// <summary>
/// Performs validation and normalization of issuer key material.
/// </summary>
public static class IssuerKeyValidator
{
    /// <summary>
    /// Validates <paramref name="material"/> for the given key <paramref name="type"/>,
    /// normalizes it (lowercase format, trimmed value), decodes the raw key bytes, and
    /// rejects expiry timestamps that are not strictly in the future.
    /// </summary>
    /// <exception cref="InvalidOperationException">Material is malformed or the expiry is not in the future.</exception>
    /// <exception cref="ArgumentOutOfRangeException">Unsupported key type.</exception>
    public static IssuerKeyValidationResult Validate(
        IssuerKeyType type,
        IssuerKeyMaterial material,
        DateTimeOffset? expiresAtUtc,
        TimeProvider timeProvider)
    {
        if (material is null)
        {
            throw new ArgumentNullException(nameof(material));
        }

        ArgumentNullException.ThrowIfNull(timeProvider);

        var normalizedMaterial = NormalizeMaterial(material);
        var rawKey = type switch
        {
            IssuerKeyType.Ed25519PublicKey => ValidateEd25519(normalizedMaterial),
            IssuerKeyType.X509Certificate => ValidateCertificate(normalizedMaterial),
            IssuerKeyType.DssePublicKey => ValidateDsseKey(normalizedMaterial),
            _ => throw new ArgumentOutOfRangeException(nameof(type), type, "Unsupported issuer key type.")
        };

        if (expiresAtUtc is { } expiry)
        {
            var now = timeProvider.GetUtcNow();
            if (expiry.ToUniversalTime() <= now)
            {
                throw new InvalidOperationException("Key expiry must be in the future.");
            }
        }

        return new IssuerKeyValidationResult(normalizedMaterial, rawKey, expiresAtUtc);
    }

    // Lowercases the format and trims the value so comparisons/fingerprints are stable.
    private static IssuerKeyMaterial NormalizeMaterial(IssuerKeyMaterial material)
    {
        return new IssuerKeyMaterial(material.Format.ToLowerInvariant(), material.Value.Trim());
    }

    // Ed25519 public keys: base64 format, exactly 32 raw bytes.
    private static byte[] ValidateEd25519(IssuerKeyMaterial material)
    {
        if (!string.Equals(material.Format, "base64", StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException("Ed25519 keys must use base64 format.");
        }

        byte[] rawBytes;
        try
        {
            rawBytes = Convert.FromBase64String(material.Value);
        }
        catch (FormatException ex)
        {
            throw new InvalidOperationException("Ed25519 key material must be valid base64.", ex);
        }

        if (rawBytes.Length != 32)
        {
            throw new InvalidOperationException("Ed25519 public keys must contain 32 bytes.");
        }

        return rawBytes;
    }

    // X.509 certificates: PEM or base64-encoded DER; returns the DER bytes.
    private static byte[] ValidateCertificate(IssuerKeyMaterial material)
    {
        if (!string.Equals(material.Format, "pem", StringComparison.OrdinalIgnoreCase) &&
            !string.Equals(material.Format, "base64", StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException("X.509 certificates must be provided as PEM or base64.");
        }

        try
        {
            if (string.Equals(material.Format, "pem", StringComparison.OrdinalIgnoreCase))
            {
                using var pemCertificate = X509Certificate2.CreateFromPem(material.Value);
                return pemCertificate.RawData;
            }

            var raw = Convert.FromBase64String(material.Value);
            // X509CertificateLoader (.NET 9+) rejects non-certificate payloads up front.
            using var loadedCertificate = X509CertificateLoader.LoadCertificate(raw);
            return loadedCertificate.RawData;
        }
        catch (Exception ex) when (ex is CryptographicException || ex is FormatException)
        {
            throw new InvalidOperationException("Certificate material is invalid or unsupported.", ex);
        }
    }

    // DSSE public keys: base64 format, 32/48/64 raw bytes (Ed25519, P-384, or 64-byte keys).
    private static byte[] ValidateDsseKey(IssuerKeyMaterial material)
    {
        if (!string.Equals(material.Format, "base64", StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException("DSSE keys must use base64 format.");
        }

        byte[] rawBytes;
        try
        {
            rawBytes = Convert.FromBase64String(material.Value);
        }
        catch (FormatException ex)
        {
            throw new InvalidOperationException("DSSE key material must be valid base64.", ex);
        }

        if (rawBytes.Length is not (32 or 48 or 64))
        {
            throw new InvalidOperationException("DSSE keys must contain 32, 48, or 64 bytes of public key material.");
        }

        return rawBytes;
    }
}

using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Core.Domain;
using StellaOps.IssuerDirectory.Infrastructure.Documents;
using StellaOps.IssuerDirectory.Infrastructure.Internal;

namespace StellaOps.IssuerDirectory.Infrastructure.Audit;

/// <summary>
/// Persists issuer audit entries to the MongoDB audit collection.
/// </summary>
public sealed class MongoIssuerAuditSink : IIssuerAuditSink
{
    private readonly IssuerDirectoryMongoContext _context;

    public MongoIssuerAuditSink(IssuerDirectoryMongoContext context)
    {
        _context = context ?? throw new ArgumentNullException(nameof(context));
    }

    /// <summary>
    /// Writes a single audit entry. Entries without metadata (e.g. issuer deletions)
    /// are stored with an empty metadata map.
    /// </summary>
    public async Task WriteAsync(IssuerAuditEntry entry, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(entry);

        // BUG FIX: IssuerDirectoryService.DeleteAsync emits entries with metadata: null,
        // and the Dictionary copy constructor throws ArgumentNullException on a null
        // source — which crashed every delete audit. Fall back to an empty map.
        var metadata = entry.Metadata is null
            ? new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
            : new Dictionary<string, string>(entry.Metadata, StringComparer.OrdinalIgnoreCase);

        var document = new IssuerAuditDocument
        {
            Id = Guid.NewGuid().ToString("N"),
            TenantId = entry.TenantId,
            IssuerId = entry.IssuerId,
            Action = entry.Action,
            TimestampUtc = entry.TimestampUtc,
            Actor = entry.Actor,
            Reason = entry.Reason,
            Metadata = metadata
        };

        await _context.Audits.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
    }
}

using MongoDB.Bson.Serialization.Attributes;

namespace StellaOps.IssuerDirectory.Infrastructure.Documents;

/// <summary>
/// MongoDB projection of a single issuer audit entry.
/// </summary>
[BsonIgnoreExtraElements]
public sealed class IssuerAuditDocument
{
    [BsonId]
    public string Id { get; set; } = Guid.NewGuid().ToString("N");

    [BsonElement("tenant_id")]
    public string TenantId { get; set; } = string.Empty;

    [BsonElement("issuer_id")]
    public string IssuerId { get; set; } = string.Empty;

    [BsonElement("action")]
    public string Action { get; set; } = string.Empty;

    [BsonElement("timestamp")]
    public DateTimeOffset TimestampUtc { get; set; }

    [BsonElement("actor")]
    public string Actor { get; set; } = string.Empty;

    [BsonElement("reason")]
    public string? Reason { get; set; }

    [BsonElement("metadata")]
    public Dictionary<string, string> Metadata { get; set; } = new(StringComparer.OrdinalIgnoreCase);
}
using MongoDB.Bson.Serialization.Attributes;

namespace StellaOps.IssuerDirectory.Infrastructure.Documents;

/// <summary>
/// MongoDB projection of an issuer record, including contact, metadata, and endpoints.
/// </summary>
[BsonIgnoreExtraElements]
public sealed class IssuerDocument
{
    [BsonId]
    public string Id { get; set; } = string.Empty;

    [BsonElement("tenant_id")]
    public string TenantId { get; set; } = string.Empty;

    [BsonElement("display_name")]
    public string DisplayName { get; set; } = string.Empty;

    [BsonElement("slug")]
    public string Slug { get; set; } = string.Empty;

    [BsonElement("description")]
    public string? Description { get; set; }

    [BsonElement("contact")]
    public IssuerContactDocument Contact { get; set; } = new();

    [BsonElement("metadata")]
    public IssuerMetadataDocument Metadata { get; set; } = new();

    [BsonElement("endpoints")]
    public List<IssuerEndpointDocument> Endpoints { get; set; } = new();

    [BsonElement("tags")]
    public List<string> Tags { get; set; } = new();

    [BsonElement("created_at")]
    public DateTimeOffset CreatedAtUtc { get; set; }

    [BsonElement("created_by")]
    public string CreatedBy { get; set; } = string.Empty;

    [BsonElement("updated_at")]
    public DateTimeOffset UpdatedAtUtc { get; set; }

    [BsonElement("updated_by")]
    public string UpdatedBy { get; set; } = string.Empty;

    // True for records installed by the CSAF bootstrap seed import.
    [BsonElement("is_seed")]
    public bool IsSystemSeed { get; set; }
}

/// <summary>
/// Contact details attached to an issuer.
/// </summary>
[BsonIgnoreExtraElements]
public sealed class IssuerContactDocument
{
    [BsonElement("email")]
    public string? Email { get; set; }

    [BsonElement("phone")]
    public string? Phone { get; set; }

    [BsonElement("website")]
    public string? Website { get; set; }

    [BsonElement("timezone")]
    public string? Timezone { get; set; }
}

/// <summary>
/// Advisory-ecosystem identifiers and free-form attributes for an issuer.
/// </summary>
[BsonIgnoreExtraElements]
public sealed class IssuerMetadataDocument
{
    [BsonElement("cve_org_id")]
    public string? CveOrgId { get; set; }

    [BsonElement("csaf_publisher_id")]
    public string? CsafPublisherId { get; set; }

    [BsonElement("security_advisories_url")]
    public string? SecurityAdvisoriesUrl { get; set; }

    [BsonElement("catalog_url")]
    public string? CatalogUrl { get; set; }

    [BsonElement("languages")]
    public List<string> Languages { get; set; } = new();

    [BsonElement("attributes")]
    public Dictionary<string, string> Attributes { get; set; } = new(StringComparer.OrdinalIgnoreCase);
}

/// <summary>
/// A single advisory/catalog endpoint published by an issuer.
/// </summary>
[BsonIgnoreExtraElements]
public sealed class IssuerEndpointDocument
{
    [BsonElement("kind")]
    public string Kind { get; set; } = string.Empty;

    [BsonElement("url")]
    public string Url { get; set; } = string.Empty;

    [BsonElement("format")]
    public string? Format { get; set; }

    [BsonElement("requires_auth")]
    public bool RequiresAuthentication { get; set; }
}

using MongoDB.Bson.Serialization.Attributes;

namespace StellaOps.IssuerDirectory.Infrastructure.Documents;

/// <summary>
/// MongoDB projection of an issuer signing key, including lifecycle timestamps.
/// </summary>
[BsonIgnoreExtraElements]
public sealed class IssuerKeyDocument
{
    [BsonId]
    public string Id { get; set; } = string.Empty;

    [BsonElement("issuer_id")]
    public string IssuerId { get; set; } = string.Empty;

    [BsonElement("tenant_id")]
    public string TenantId { get; set; } = string.Empty;

    [BsonElement("type")]
    public string Type { get; set; } = string.Empty;

    [BsonElement("status")]
    public string Status { get; set; } = string.Empty;

    [BsonElement("material_format")]
    public string MaterialFormat { get; set; } = string.Empty;

    [BsonElement("material_value")]
    public string MaterialValue { get; set; } = string.Empty;

    // Lowercase hex SHA-256 of the raw public key bytes.
    [BsonElement("fingerprint")]
    public string Fingerprint { get; set; } = string.Empty;

    [BsonElement("created_at")]
    public DateTimeOffset CreatedAtUtc { get; set; }

    [BsonElement("created_by")]
    public string CreatedBy { get; set; } = string.Empty;

    [BsonElement("updated_at")]
    public DateTimeOffset UpdatedAtUtc { get; set; }

    [BsonElement("updated_by")]
    public string UpdatedBy { get; set; } = string.Empty;

    [BsonElement("expires_at")]
    public DateTimeOffset? ExpiresAtUtc { get; set; }

    [BsonElement("retired_at")]
    public DateTimeOffset? RetiredAtUtc { get; set; }

    [BsonElement("revoked_at")]
    public DateTimeOffset? RevokedAtUtc { get; set; }

    // Set on rotation: the id of the key this one replaced.
    [BsonElement("replaces_key_id")]
    public string? ReplacesKeyId { get; set; }
}

using MongoDB.Bson.Serialization.Attributes;

namespace StellaOps.IssuerDirectory.Infrastructure.Documents;

/// <summary>
/// MongoDB projection of a tenant- or global-scoped issuer trust weight override.
/// </summary>
[BsonIgnoreExtraElements]
public sealed class IssuerTrustDocument
{
    [BsonId]
    public string Id { get; set; } = string.Empty;

    [BsonElement("issuer_id")]
    public string IssuerId { get; set; } = string.Empty;

    [BsonElement("tenant_id")]
    public string TenantId { get; set; } = string.Empty;

    [BsonElement("weight")]
    public decimal Weight { get; set; }

    [BsonElement("reason")]
    public string?
Reason { get; set; }

    [BsonElement("created_at")]
    public DateTimeOffset CreatedAtUtc { get; set; }

    [BsonElement("created_by")]
    public string CreatedBy { get; set; } = string.Empty;

    [BsonElement("updated_at")]
    public DateTimeOffset UpdatedAtUtc { get; set; }

    [BsonElement("updated_by")]
    public string UpdatedBy { get; set; } = string.Empty;
}

// ----- file: src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Internal/IssuerDirectoryMongoContext.cs -----

using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.IssuerDirectory.Infrastructure.Documents;
using StellaOps.IssuerDirectory.Infrastructure.Options;

namespace StellaOps.IssuerDirectory.Infrastructure.Internal;

/// <summary>
/// MongoDB context for Issuer Directory persistence. Validates options,
/// connects, resolves the collections, and ensures supporting indexes exist
/// at construction time.
/// </summary>
public sealed class IssuerDirectoryMongoContext
{
    public IssuerDirectoryMongoContext(
        IOptions<IssuerDirectoryMongoOptions> options,
        ILogger<IssuerDirectoryMongoContext> logger)
    {
        ArgumentNullException.ThrowIfNull(options);
        ArgumentNullException.ThrowIfNull(logger);

        var value = options.Value ?? throw new InvalidOperationException("Mongo options must be provided.");
        value.Validate();

        var mongoUrl = new MongoUrl(value.ConnectionString);
        var settings = MongoClientSettings.FromUrl(mongoUrl);

        // Harden TLS when enabled; only applies when the URL produced SSL settings.
        if (mongoUrl.UseTls && settings.SslSettings is not null)
        {
            settings.SslSettings.CheckCertificateRevocation = true;
        }

        var client = new MongoClient(settings);
        var database = client.GetDatabase(value.Database);

        logger.LogDebug("IssuerDirectory Mongo connected to {Database}", value.Database);

        Issuers = database.GetCollection<IssuerDocument>(value.IssuersCollection);
        IssuerKeys = database.GetCollection<IssuerKeyDocument>(value.IssuerKeysCollection);
        IssuerTrustOverrides = database.GetCollection<IssuerTrustDocument>(value.IssuerTrustCollection);
        // NOTE(review): the audit document element type was lost in extraction —
        // confirm the concrete type used for the audit collection.
        Audits = database.GetCollection<IssuerAuditDocument>(value.AuditCollection);

        // FIX: the original blocked on an async method from the constructor
        // (EnsureIndexes().GetAwaiter().GetResult()) — a sync-over-async pattern
        // that risks deadlocks/thread-pool starvation. Use the driver's
        // synchronous index API instead; same indexes, same construction-time
        // guarantee, no async-to-sync bridge.
        EnsureIndexes();
    }

    public IMongoCollection<IssuerDocument> Issuers { get; }

    public IMongoCollection<IssuerKeyDocument> IssuerKeys { get; }

    public IMongoCollection<IssuerTrustDocument> IssuerTrustOverrides { get; }

    public IMongoCollection<IssuerAuditDocument> Audits { get; }

    /// <summary>Creates the unique indexes the repositories rely on (idempotent).</summary>
    private void EnsureIndexes()
    {
        // One slug per tenant.
        Issuers.Indexes.CreateOne(new CreateIndexModel<IssuerDocument>(
            Builders<IssuerDocument>.IndexKeys
                .Ascending(document => document.TenantId)
                .Ascending(document => document.Slug),
            new CreateIndexOptions
            {
                Name = "tenant_slug_unique",
                Unique = true
            }));

        IssuerKeys.Indexes.CreateMany(new[]
        {
            // Primary key identity per (tenant, issuer).
            new CreateIndexModel<IssuerKeyDocument>(
                Builders<IssuerKeyDocument>.IndexKeys
                    .Ascending(document => document.TenantId)
                    .Ascending(document => document.IssuerId)
                    .Ascending(document => document.Id),
                new CreateIndexOptions
                {
                    Name = "issuer_keys_unique",
                    Unique = true
                }),
            // Fingerprints are also unique per (tenant, issuer).
            new CreateIndexModel<IssuerKeyDocument>(
                Builders<IssuerKeyDocument>.IndexKeys
                    .Ascending(document => document.TenantId)
                    .Ascending(document => document.IssuerId)
                    .Ascending(document => document.Fingerprint),
                new CreateIndexOptions
                {
                    Name = "issuer_keys_fingerprint",
                    Unique = true
                })
        });

        // At most one trust override per (tenant, issuer) pair.
        IssuerTrustOverrides.Indexes.CreateOne(new CreateIndexModel<IssuerTrustDocument>(
            Builders<IssuerTrustDocument>.IndexKeys
                .Ascending(document => document.TenantId)
                .Ascending(document => document.IssuerId),
            new CreateIndexOptions
            {
                Name = "issuer_trust_unique",
                Unique = true
            }));
    }
}

// ----- file: src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Options/IssuerDirectoryMongoOptions.cs -----

/// <summary>
/// Mongo persistence configuration for the Issuer Directory service.
/// </summary>
public sealed class IssuerDirectoryMongoOptions
{
    /// <summary>Configuration section the options bind from.</summary>
    public const string SectionName = "IssuerDirectory:Mongo";

    public string ConnectionString { get; set; } = "mongodb://localhost:27017";

    public string Database { get; set; } = "issuer-directory";

    public string IssuersCollection { get; set; } = "issuers";

    public string IssuerKeysCollection { get; set; } = "issuer_keys";

    public string IssuerTrustCollection { get; set; } = "issuer_trust_overrides";

    public string AuditCollection { get; set; } = "issuer_audit";

    /// <summary>
    /// Throws <see cref="InvalidOperationException"/> when any required setting is
    /// missing or whitespace-only.
    /// </summary>
    public void Validate()
    {
        Require(ConnectionString, "IssuerDirectory Mongo connection string must be configured.");
        Require(Database, "IssuerDirectory Mongo database must be configured.");
        Require(IssuersCollection, "IssuerDirectory Mongo issuers collection must be configured.");
        Require(IssuerKeysCollection, "IssuerDirectory Mongo issuer keys collection must be configured.");
        Require(IssuerTrustCollection, "IssuerDirectory Mongo issuer trust collection must be configured.");
        Require(AuditCollection, "IssuerDirectory Mongo audit collection must be configured.");
    }

    // Shared guard so every setting fails with its own configured message.
    private static void Require(string value, string message)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            throw new InvalidOperationException(message);
        }
    }
}

// ----- next file: Repositories/MongoIssuerKeyRepository.cs (diff header continues below) -----
// (diff header) b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Repositories/MongoIssuerKeyRepository.cs

using MongoDB.Driver;
using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Core.Domain;
using StellaOps.IssuerDirectory.Infrastructure.Documents;
using StellaOps.IssuerDirectory.Infrastructure.Internal;

namespace StellaOps.IssuerDirectory.Infrastructure.Repositories;

/// <summary>Mongo-backed store for issuer signing keys.</summary>
public sealed class MongoIssuerKeyRepository : IIssuerKeyRepository
{
    private readonly IssuerDirectoryMongoContext _context;

    public MongoIssuerKeyRepository(IssuerDirectoryMongoContext context)
        => _context = context ?? throw new ArgumentNullException(nameof(context));

    /// <summary>Looks up a key by id within a tenant/issuer scope; null when absent.</summary>
    public async Task<IssuerKeyRecord?> GetAsync(string tenantId, string issuerId, string keyId, CancellationToken cancellationToken)
    {
        var filter = ScopeFilter(tenantId, issuerId) &
                     Builders<IssuerKeyDocument>.Filter.Eq(key => key.Id, keyId);

        var document = await _context.IssuerKeys
            .Find(filter)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        return document is null ? null : MapToDomain(document);
    }

    /// <summary>Looks up a key by fingerprint within a tenant/issuer scope; null when absent.</summary>
    public async Task<IssuerKeyRecord?> GetByFingerprintAsync(string tenantId, string issuerId, string fingerprint, CancellationToken cancellationToken)
    {
        var filter = ScopeFilter(tenantId, issuerId) &
                     Builders<IssuerKeyDocument>.Filter.Eq(key => key.Fingerprint, fingerprint);

        var document = await _context.IssuerKeys
            .Find(filter)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        return document is null ? null : MapToDomain(document);
    }

    /// <summary>Lists all keys for an issuer within a tenant, oldest first.</summary>
    public Task<IReadOnlyCollection<IssuerKeyRecord>> ListAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
        => ListCoreAsync(tenantId, issuerId, cancellationToken);

    /// <summary>Lists all keys registered under the global tenant for an issuer.</summary>
    public Task<IReadOnlyCollection<IssuerKeyRecord>> ListGlobalAsync(string issuerId, CancellationToken cancellationToken)
        => ListCoreAsync(IssuerTenants.Global, issuerId, cancellationToken);

    private async Task<IReadOnlyCollection<IssuerKeyRecord>> ListCoreAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        var documents = await _context.IssuerKeys
            .Find(ScopeFilter(tenantId, issuerId))
            .SortBy(key => key.CreatedAtUtc)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return documents.Select(MapToDomain).ToArray();
    }

    /// <summary>Inserts or replaces the key identified by (tenant, issuer, id).</summary>
    public async Task UpsertAsync(IssuerKeyRecord record, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(record);

        var filter = ScopeFilter(record.TenantId, record.IssuerId) &
                     Builders<IssuerKeyDocument>.Filter.Eq(key => key.Id, record.Id);

        await _context.IssuerKeys.ReplaceOneAsync(
            filter,
            MapToDocument(record),
            new ReplaceOptions { IsUpsert = true },
            cancellationToken).ConfigureAwait(false);
    }

    // Every query is scoped to (tenant, issuer); '&' composes to Filter.And.
    private static FilterDefinition<IssuerKeyDocument> ScopeFilter(string tenantId, string issuerId)
        => Builders<IssuerKeyDocument>.Filter.Eq(key => key.TenantId, tenantId) &
           Builders<IssuerKeyDocument>.Filter.Eq(key => key.IssuerId, issuerId);

    private static IssuerKeyRecord MapToDomain(IssuerKeyDocument document) => new()
    {
        Id = document.Id,
        IssuerId = document.IssuerId,
        TenantId = document.TenantId,
        // NOTE(review): enum type names were lost in extraction — confirm
        // IssuerKeyType / IssuerKeyStatus against the Core.Domain types.
        Type = Enum.Parse<IssuerKeyType>(document.Type, ignoreCase: true),
        Status = Enum.Parse<IssuerKeyStatus>(document.Status, ignoreCase: true),
        Material = new IssuerKeyMaterial(document.MaterialFormat, document.MaterialValue),
        Fingerprint = document.Fingerprint,
        CreatedAtUtc = document.CreatedAtUtc,
        CreatedBy = document.CreatedBy,
        UpdatedAtUtc = document.UpdatedAtUtc,
        UpdatedBy = document.UpdatedBy,
        ExpiresAtUtc = document.ExpiresAtUtc,
        RetiredAtUtc = document.RetiredAtUtc,
        RevokedAtUtc = document.RevokedAtUtc,
        ReplacesKeyId = document.ReplacesKeyId
    };

    private static IssuerKeyDocument MapToDocument(IssuerKeyRecord record) => new()
    {
        Id = record.Id,
        IssuerId = record.IssuerId,
        TenantId = record.TenantId,
        Type = record.Type.ToString(),
        Status = record.Status.ToString(),
        MaterialFormat = record.Material.Format,
        MaterialValue = record.Material.Value,
        Fingerprint = record.Fingerprint,
        CreatedAtUtc = record.CreatedAtUtc,
        CreatedBy = record.CreatedBy,
        UpdatedAtUtc = record.UpdatedAtUtc,
        UpdatedBy = record.UpdatedBy,
        ExpiresAtUtc = record.ExpiresAtUtc,
        RetiredAtUtc = record.RetiredAtUtc,
        RevokedAtUtc = record.RevokedAtUtc,
        ReplacesKeyId = record.ReplacesKeyId
    };
}

// ----- file: src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Repositories/MongoIssuerRepository.cs -----

using MongoDB.Driver;
using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Core.Domain;
using StellaOps.IssuerDirectory.Infrastructure.Documents;
using StellaOps.IssuerDirectory.Infrastructure.Internal;

namespace StellaOps.IssuerDirectory.Infrastructure.Repositories;

/// <summary>Mongo-backed store for issuer records.</summary>
public sealed class MongoIssuerRepository : IIssuerRepository
{
    private readonly IssuerDirectoryMongoContext _context;

    public MongoIssuerRepository(IssuerDirectoryMongoContext context)
    {
        _context = context ??
throw new ArgumentNullException(nameof(context));
    }

    /// <summary>Fetches a single issuer by id within a tenant; null when absent.</summary>
    public async Task<IssuerRecord?> GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        var filter = Builders<IssuerDocument>.Filter.Eq(doc => doc.TenantId, tenantId) &
                     Builders<IssuerDocument>.Filter.Eq(doc => doc.Id, issuerId);

        var document = await _context.Issuers
            .Find(filter)
            .Limit(1)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        return document is null ? null : MapToDomain(document);
    }

    /// <summary>Lists all issuers for a tenant, ordered by slug.</summary>
    public async Task<IReadOnlyCollection<IssuerRecord>> ListAsync(string tenantId, CancellationToken cancellationToken)
    {
        var documents = await _context.Issuers
            .Find(Builders<IssuerDocument>.Filter.Eq(doc => doc.TenantId, tenantId))
            .SortBy(doc => doc.Slug)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return documents.Select(MapToDomain).ToArray();
    }

    /// <summary>Lists issuers registered under the shared global tenant, ordered by slug.</summary>
    public async Task<IReadOnlyCollection<IssuerRecord>> ListGlobalAsync(CancellationToken cancellationToken)
    {
        var documents = await _context.Issuers
            .Find(doc => doc.TenantId == IssuerTenants.Global)
            .SortBy(doc => doc.Slug)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return documents.Select(MapToDomain).ToArray();
    }

    /// <summary>Inserts or replaces the issuer identified by (tenant, id).</summary>
    public async Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(record);

        var filter = Builders<IssuerDocument>.Filter.Eq(doc => doc.TenantId, record.TenantId) &
                     Builders<IssuerDocument>.Filter.Eq(doc => doc.Id, record.Id);

        await _context.Issuers
            .ReplaceOneAsync(
                filter,
                MapToDocument(record),
                new ReplaceOptions { IsUpsert = true },
                cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>Deletes the issuer identified by (tenant, id); no-op when absent.</summary>
    public async Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        var filter = Builders<IssuerDocument>.Filter.Eq(doc => doc.TenantId, tenantId) &
                     Builders<IssuerDocument>.Filter.Eq(doc => doc.Id, issuerId);

        await _context.Issuers.DeleteOneAsync(filter, cancellationToken).ConfigureAwait(false);
    }

    // Document -> domain. Uri fields are persisted as strings; blank means null.
    private static IssuerRecord MapToDomain(IssuerDocument document)
    {
        var contact = new IssuerContact(
            document.Contact.Email,
            document.Contact.Phone,
            ToUri(document.Contact.Website),
            document.Contact.Timezone);

        var metadata = new IssuerMetadata(
            document.Metadata.CveOrgId,
            document.Metadata.CsafPublisherId,
            ToUri(document.Metadata.SecurityAdvisoriesUrl),
            ToUri(document.Metadata.CatalogUrl),
            document.Metadata.Languages,
            document.Metadata.Attributes);

        var endpoints = document.Endpoints
            .Select(endpoint => new IssuerEndpoint(
                endpoint.Kind,
                new Uri(endpoint.Url),
                endpoint.Format,
                endpoint.RequiresAuthentication))
            .ToArray();

        return new IssuerRecord
        {
            Id = document.Id,
            TenantId = document.TenantId,
            DisplayName = document.DisplayName,
            Slug = document.Slug,
            Description = document.Description,
            Contact = contact,
            Metadata = metadata,
            Endpoints = endpoints,
            Tags = document.Tags,
            CreatedAtUtc = document.CreatedAtUtc,
            CreatedBy = document.CreatedBy,
            UpdatedAtUtc = document.UpdatedAtUtc,
            UpdatedBy = document.UpdatedBy,
            IsSystemSeed = document.IsSystemSeed
        };
    }

    private static Uri? ToUri(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : new Uri(value);

    // Domain -> document.
    private static IssuerDocument MapToDocument(IssuerRecord record)
    {
        return new IssuerDocument
        {
            Id = record.Id,
            TenantId = record.TenantId,
            DisplayName = record.DisplayName,
            Slug = record.Slug,
            Description = record.Description,
            Contact = new IssuerContactDocument
            {
                Email = record.Contact.Email,
                Phone = record.Contact.Phone,
                Website = record.Contact.Website?.ToString(),
                Timezone = record.Contact.Timezone
            },
            Metadata = new IssuerMetadataDocument
            {
                CveOrgId = record.Metadata.CveOrgId,
                CsafPublisherId = record.Metadata.CsafPublisherId,
                SecurityAdvisoriesUrl = record.Metadata.SecurityAdvisoriesUrl?.ToString(),
                CatalogUrl = record.Metadata.CatalogUrl?.ToString(),
                Languages = record.Metadata.SupportedLanguages.ToList(),
                // Re-wrap to guarantee case-insensitive keys regardless of the source dictionary.
                Attributes = new Dictionary<string, string>(record.Metadata.Attributes, StringComparer.OrdinalIgnoreCase)
            },
            Endpoints = record.Endpoints
                .Select(endpoint => new IssuerEndpointDocument
                {
                    Kind = endpoint.Kind,
                    Url = endpoint.Url.ToString(),
                    Format = endpoint.Format,
                    RequiresAuthentication = endpoint.RequiresAuthentication
                })
                .ToList(),
            Tags = record.Tags.ToList(),
            CreatedAtUtc = record.CreatedAtUtc,
            CreatedBy = record.CreatedBy,
            UpdatedAtUtc = record.UpdatedAtUtc,
            UpdatedBy = record.UpdatedBy,
            IsSystemSeed = record.IsSystemSeed
        };
    }
}

// ----- file: src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Repositories/MongoIssuerTrustRepository.cs -----

using System.Globalization;
using MongoDB.Driver;
using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Core.Domain;
using StellaOps.IssuerDirectory.Infrastructure.Documents;
using StellaOps.IssuerDirectory.Infrastructure.Internal;

namespace StellaOps.IssuerDirectory.Infrastructure.Repositories;

/// <summary>Mongo-backed store for per-tenant issuer trust overrides.</summary>
public sealed class MongoIssuerTrustRepository : IIssuerTrustRepository
{
    private readonly IssuerDirectoryMongoContext _context;

    public MongoIssuerTrustRepository(IssuerDirectoryMongoContext context)
    {
        _context = context ??
throw new ArgumentNullException(nameof(context));
    }

    /// <summary>Fetches the trust override for (tenant, issuer); null when none is set.</summary>
    public async Task<IssuerTrustOverrideRecord?> GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        var document = await _context.IssuerTrustOverrides
            .Find(ScopeFilter(tenantId, issuerId))
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        return document is null ? null : MapToDomain(document);
    }

    /// <summary>Inserts or replaces the override for the record's (tenant, issuer) pair.</summary>
    public async Task UpsertAsync(IssuerTrustOverrideRecord record, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(record);

        await _context.IssuerTrustOverrides.ReplaceOneAsync(
            ScopeFilter(record.TenantId, record.IssuerId),
            MapToDocument(record),
            new ReplaceOptions { IsUpsert = true },
            cancellationToken).ConfigureAwait(false);
    }

    /// <summary>Removes the override for (tenant, issuer); no-op when absent.</summary>
    public async Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        await _context.IssuerTrustOverrides
            .DeleteOneAsync(ScopeFilter(tenantId, issuerId), cancellationToken)
            .ConfigureAwait(false);
    }

    // Overrides are keyed by (tenant, issuer); '&' composes to Filter.And.
    private static FilterDefinition<IssuerTrustDocument> ScopeFilter(string tenantId, string issuerId)
        => Builders<IssuerTrustDocument>.Filter.Eq(doc => doc.TenantId, tenantId) &
           Builders<IssuerTrustDocument>.Filter.Eq(doc => doc.IssuerId, issuerId);

    private static IssuerTrustOverrideRecord MapToDomain(IssuerTrustDocument document) => new()
    {
        IssuerId = document.IssuerId,
        TenantId = document.TenantId,
        Weight = document.Weight,
        Reason = document.Reason,
        CreatedAtUtc = document.CreatedAtUtc,
        CreatedBy = document.CreatedBy,
        UpdatedAtUtc = document.UpdatedAtUtc,
        UpdatedBy = document.UpdatedBy
    };

    private static IssuerTrustDocument MapToDocument(IssuerTrustOverrideRecord record) => new()
    {
        // Deterministic synthetic key so exactly one document exists per (tenant, issuer).
        Id = string.Create(CultureInfo.InvariantCulture, $"{record.TenantId}:{record.IssuerId}"),
        IssuerId = record.IssuerId,
        TenantId = record.TenantId,
        Weight = record.Weight,
        Reason = record.Reason,
        CreatedAtUtc = record.CreatedAtUtc,
        CreatedBy = record.CreatedBy,
        UpdatedAtUtc = record.UpdatedAtUtc,
        UpdatedBy = record.UpdatedBy
    };
}

// ----- file: src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/Seed/CsafPublisherSeedLoader.cs -----

using System.Text.Json;
using StellaOps.IssuerDirectory.Core.Domain;

namespace StellaOps.IssuerDirectory.Infrastructure.Seed;

/// <summary>
/// Loads CSAF publisher metadata into IssuerRecord instances for bootstrap seeding.
/// </summary>
public sealed class CsafPublisherSeedLoader
{
    // Web defaults (camelCase binding) plus tolerance for comments and any-cased keys.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true,
        ReadCommentHandling = JsonCommentHandling.Skip
    };

    private readonly TimeProvider _timeProvider;

    public CsafPublisherSeedLoader(TimeProvider timeProvider)
    {
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
    }

    /// <summary>
    /// Reads a JSON array of publisher seeds from <paramref name="stream"/> and maps
    /// each entry to an issuer record stamped with the current time and supplied actor.
    /// </summary>
    public IReadOnlyCollection<IssuerRecord> Load(Stream stream, string actor)
    {
        ArgumentNullException.ThrowIfNull(stream);

        if (!stream.CanRead)
        {
            throw new ArgumentException("Seed stream must be readable.", nameof(stream));
        }

        if (string.IsNullOrWhiteSpace(actor))
        {
            throw new ArgumentException("Seed actor is required.", nameof(actor));
        }

        var seeds = JsonSerializer.Deserialize<List<CsafPublisherSeed>>(stream, SerializerOptions)
            ??
throw new InvalidOperationException("CSAF seed data could not be parsed.");

        // One timestamp for the whole batch keeps seeded records consistent.
        var timestamp = _timeProvider.GetUtcNow();
        return seeds.Select(seed => seed.ToIssuerRecord(timestamp, actor)).ToArray();
    }

    // JSON shape of one publisher entry in the seed file.
    private sealed class CsafPublisherSeed
    {
        public string Id { get; set; } = string.Empty;

        public string TenantId { get; set; } = IssuerTenants.Global;

        public string DisplayName { get; set; } = string.Empty;

        public string Slug { get; set; } = string.Empty;

        public string? Description { get; set; }

        public SeedContact Contact { get; set; } = new();

        public SeedMetadata Metadata { get; set; } = new();

        public List<SeedEndpoint> Endpoints { get; set; } = new();

        public List<string> Tags { get; set; } = new();

        // Id and Slug fall back to each other so a seed may specify either one.
        public IssuerRecord ToIssuerRecord(DateTimeOffset timestamp, string actor)
        {
            var contact = new IssuerContact(
                Contact.Email,
                Contact.Phone,
                string.IsNullOrWhiteSpace(Contact.Website) ? null : new Uri(Contact.Website),
                Contact.Timezone);

            var metadata = new IssuerMetadata(
                Metadata.CveOrgId,
                Metadata.CsafPublisherId,
                string.IsNullOrWhiteSpace(Metadata.SecurityAdvisoriesUrl)
                    ? null
                    : new Uri(Metadata.SecurityAdvisoriesUrl),
                string.IsNullOrWhiteSpace(Metadata.CatalogUrl)
                    ? null
                    : new Uri(Metadata.CatalogUrl),
                Metadata.Languages,
                Metadata.Attributes);

            var endpoints = Endpoints
                .Select(endpoint => new IssuerEndpoint(
                    endpoint.Kind,
                    new Uri(endpoint.Url),
                    endpoint.Format,
                    endpoint.RequiresAuth))
                .ToArray();

            return IssuerRecord.Create(
                string.IsNullOrWhiteSpace(Id) ? Slug : Id,
                string.IsNullOrWhiteSpace(TenantId) ? IssuerTenants.Global : TenantId,
                DisplayName,
                string.IsNullOrWhiteSpace(Slug) ? Id : Slug,
                Description,
                contact,
                metadata,
                endpoints,
                Tags,
                timestamp,
                actor,
                isSystemSeed: true);
        }
    }

    private sealed class SeedContact
    {
        public string? Email { get; set; }

        public string? Phone { get; set; }

        public string? Website { get; set; }

        public string? Timezone { get; set; }
    }

    private sealed class SeedMetadata
    {
        public string? CveOrgId { get; set; }

        public string? CsafPublisherId { get; set; }

        public string? SecurityAdvisoriesUrl { get; set; }

        public string? CatalogUrl { get; set; }

        public List<string> Languages { get; set; } = new();

        public Dictionary<string, string> Attributes { get; set; } = new(StringComparer.OrdinalIgnoreCase);
    }

    private sealed class SeedEndpoint
    {
        public string Kind { get; set; } = "csaf";

        public string Url { get; set; } = string.Empty;

        public string? Format { get; set; }

        public bool RequiresAuth { get; set; }
    }
}

// ----- file: src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/ServiceCollectionExtensions.cs -----

using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Infrastructure.Audit;
using StellaOps.IssuerDirectory.Infrastructure.Internal;
using StellaOps.IssuerDirectory.Infrastructure.Options;
using StellaOps.IssuerDirectory.Infrastructure.Repositories;

namespace StellaOps.IssuerDirectory.Infrastructure;

/// <summary>Dependency-injection wiring for the Issuer Directory infrastructure layer.</summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Binds Mongo options from configuration and registers the Mongo context
    /// and repositories as singletons.
    /// </summary>
    public static IServiceCollection AddIssuerDirectoryInfrastructure(
        this IServiceCollection services,
        IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);

        // Options.Validate() throws on bad config, so the predicate only ever
        // returns true for valid instances; a misconfiguration surfaces as the
        // thrown InvalidOperationException rather than a validation-failure result.
        services.AddOptions<IssuerDirectoryMongoOptions>()
            .Bind(configuration.GetSection(IssuerDirectoryMongoOptions.SectionName))
            .Validate(options =>
            {
                options.Validate();
                return true;
            });

services.AddSingleton<IssuerDirectoryMongoContext>();
        services.AddSingleton<IIssuerRepository, MongoIssuerRepository>();
        services.AddSingleton<IIssuerKeyRepository, MongoIssuerKeyRepository>();
        services.AddSingleton<IIssuerTrustRepository, MongoIssuerTrustRepository>();
        // NOTE(review): the fifth registration's type arguments were lost in
        // extraction — presumably the audit sink from the
        // StellaOps.IssuerDirectory.Infrastructure.Audit namespace; confirm.
        services.AddSingleton<IIssuerAuditSink, MongoIssuerAuditSink>();

        return services;
    }
}

// ----- file: src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Infrastructure/StellaOps.IssuerDirectory.Infrastructure.csproj -----
// NOTE(review): the project file's XML tags were stripped in extraction and are
// not recoverable from this chunk. Visible property values were: net10.0
// (TargetFramework), preview (LangVersion), enable (Nullable), enable
// (ImplicitUsings), true (unidentified boolean property). Package and project
// references are not recoverable — restore from the original patch.

// ----- file: src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Constants/IssuerDirectoryHeaders.cs -----

namespace StellaOps.IssuerDirectory.WebService.Constants;

/// <summary>HTTP header names understood by the Issuer Directory web service.</summary>
internal static class IssuerDirectoryHeaders
{
    // Free-text justification supplied by callers for audited mutations.
    public const string AuditReason = "X-StellaOps-Reason";
}

// ----- file: src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Contracts/IssuerDtos.cs -----

using System.ComponentModel.DataAnnotations;
using StellaOps.IssuerDirectory.Core.Domain;

namespace StellaOps.IssuerDirectory.WebService.Contracts;

/// <summary>API representation of an issuer record.</summary>
public sealed record IssuerResponse(
    string Id,
    string TenantId,
    string DisplayName,
    string Slug,
    string? Description,
    IssuerContactResponse Contact,
    IssuerMetadataResponse Metadata,
    IReadOnlyCollection<IssuerEndpointResponse> Endpoints,
    IReadOnlyCollection<string> Tags,
    DateTimeOffset CreatedAtUtc,
    string CreatedBy,
    DateTimeOffset UpdatedAtUtc,
    string UpdatedBy,
    bool IsSystemSeed)
{
    public static IssuerResponse FromDomain(IssuerRecord record) => new(
        record.Id,
        record.TenantId,
        record.DisplayName,
        record.Slug,
        record.Description,
        IssuerContactResponse.FromDomain(record.Contact),
        IssuerMetadataResponse.FromDomain(record.Metadata),
        record.Endpoints.Select(IssuerEndpointResponse.FromDomain).ToArray(),
        record.Tags,
        record.CreatedAtUtc,
        record.CreatedBy,
        record.UpdatedAtUtc,
        record.UpdatedBy,
        record.IsSystemSeed);
}

/// <summary>API representation of issuer contact details (URLs flattened to strings).</summary>
public sealed record IssuerContactResponse(string? Email, string? Phone, string? Website, string? Timezone)
{
    public static IssuerContactResponse FromDomain(IssuerContact contact) => new(
        contact.Email,
        contact.Phone,
        contact.Website?.ToString(),
        contact.Timezone);
}

/// <summary>API representation of issuer metadata (URLs flattened to strings).</summary>
public sealed record IssuerMetadataResponse(
    string? CveOrgId,
    string? CsafPublisherId,
    string? SecurityAdvisoriesUrl,
    string? CatalogUrl,
    IReadOnlyCollection<string> Languages,
    IReadOnlyDictionary<string, string> Attributes)
{
    public static IssuerMetadataResponse FromDomain(IssuerMetadata metadata) => new(
        metadata.CveOrgId,
        metadata.CsafPublisherId,
        metadata.SecurityAdvisoriesUrl?.ToString(),
        metadata.CatalogUrl?.ToString(),
        metadata.SupportedLanguages,
        metadata.Attributes);
}

/// <summary>API representation of an issuer distribution endpoint.</summary>
public sealed record IssuerEndpointResponse(string Kind, string Url, string?
Format, bool RequiresAuthentication)
{
    public static IssuerEndpointResponse FromDomain(IssuerEndpoint endpoint) => new(
        endpoint.Kind,
        endpoint.Url.ToString(),
        endpoint.Format,
        endpoint.RequiresAuthentication);
}

/// <summary>Create/update payload for an issuer.</summary>
public sealed record IssuerUpsertRequest
{
    [Required]
    public string Id { get; init; } = string.Empty;

    [Required]
    public string DisplayName { get; init; } = string.Empty;

    [Required]
    public string Slug { get; init; } = string.Empty;

    public string? Description { get; init; }

    public IssuerContactRequest Contact { get; init; } = new();

    public IssuerMetadataRequest Metadata { get; init; } = new();

    public List<IssuerEndpointRequest> Endpoints { get; init; } = new();

    public List<string> Tags { get; init; } = new();

    /// <summary>Maps the contact payload to the domain value; blank website becomes null.</summary>
    public IssuerContact ToDomainContact() => new(
        Contact.Email,
        Contact.Phone,
        string.IsNullOrWhiteSpace(Contact.Website) ? null : new Uri(Contact.Website),
        Contact.Timezone);

    /// <summary>Maps the metadata payload to the domain value; blank URLs become null.</summary>
    public IssuerMetadata ToDomainMetadata() => new(
        Metadata.CveOrgId,
        Metadata.CsafPublisherId,
        string.IsNullOrWhiteSpace(Metadata.SecurityAdvisoriesUrl)
            ? null
            : new Uri(Metadata.SecurityAdvisoriesUrl),
        string.IsNullOrWhiteSpace(Metadata.CatalogUrl)
            ? null
            : new Uri(Metadata.CatalogUrl),
        Metadata.Languages,
        Metadata.Attributes);

    /// <summary>Maps the endpoint payloads to domain values.</summary>
    public IReadOnlyCollection<IssuerEndpoint> ToDomainEndpoints() =>
        Endpoints
            .Select(endpoint => new IssuerEndpoint(
                endpoint.Kind,
                new Uri(endpoint.Url),
                endpoint.Format,
                endpoint.RequiresAuth))
            .ToArray();
}

public sealed record IssuerContactRequest
{
    public string? Email { get; init; }

    public string? Phone { get; init; }

    public string? Website { get; init; }

    public string? Timezone { get; init; }
}

public sealed record IssuerMetadataRequest
{
    public string? CveOrgId { get; init; }

    public string? CsafPublisherId { get; init; }

    public string? SecurityAdvisoriesUrl { get; init; }

    public string? CatalogUrl { get; init; }

    public List<string> Languages { get; init; } = new();

    public Dictionary<string, string> Attributes { get; init; } = new(StringComparer.OrdinalIgnoreCase);
}

public sealed record IssuerEndpointRequest
{
    [Required]
    public string Kind { get; init; } = "csaf";

    [Required]
    public string Url { get; init; } = string.Empty;

    public string? Format { get; init; }

    public bool RequiresAuth { get; init; }
}

// ----- file: src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Contracts/IssuerKeyDtos.cs -----

using StellaOps.IssuerDirectory.Core.Domain;

namespace StellaOps.IssuerDirectory.WebService.Contracts;

/// <summary>
/// API representation of an issuer key. Exposes the material format and
/// fingerprint but not the raw material value.
/// </summary>
public sealed record IssuerKeyResponse(
    string Id,
    string IssuerId,
    string TenantId,
    string Type,
    string Status,
    string MaterialFormat,
    string Fingerprint,
    DateTimeOffset CreatedAtUtc,
    string CreatedBy,
    DateTimeOffset UpdatedAtUtc,
    string UpdatedBy,
    DateTimeOffset? ExpiresAtUtc,
    DateTimeOffset? RetiredAtUtc,
    DateTimeOffset? RevokedAtUtc,
    string?
ReplacesKeyId)
{
    /// <summary>Projects a domain <see cref="IssuerKeyRecord"/> into its API representation.</summary>
    public static IssuerKeyResponse FromDomain(IssuerKeyRecord record)
    {
        return new IssuerKeyResponse(
            record.Id,
            record.IssuerId,
            record.TenantId,
            record.Type.ToString(),
            record.Status.ToString(),
            record.Material.Format,
            record.Fingerprint,
            record.CreatedAtUtc,
            record.CreatedBy,
            record.UpdatedAtUtc,
            record.UpdatedBy,
            record.ExpiresAtUtc,
            record.RetiredAtUtc,
            record.RevokedAtUtc,
            record.ReplacesKeyId);
    }
}

/// <summary>Payload for registering a new issuer key.</summary>
public sealed record IssuerKeyCreateRequest
{
    public string Type { get; init; } = string.Empty;

    public string Format { get; init; } = string.Empty;

    public string Value { get; init; } = string.Empty;

    public DateTimeOffset? ExpiresAtUtc { get; init; }
}

/// <summary>Payload for rotating an existing key; same shape as create, applied to the replacement key.</summary>
public sealed record IssuerKeyRotateRequest
{
    public string Type { get; init; } = string.Empty;

    public string Format { get; init; } = string.Empty;

    public string Value { get; init; } = string.Empty;

    public DateTimeOffset? ExpiresAtUtc { get; init; }
}
diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Contracts/IssuerTrustDtos.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Contracts/IssuerTrustDtos.cs
new file mode 100644
index 00000000..b9684813
--- /dev/null
+++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Contracts/IssuerTrustDtos.cs
@@ -0,0 +1,44 @@
using StellaOps.IssuerDirectory.Core.Services;

namespace StellaOps.IssuerDirectory.WebService.Contracts;

/// <summary>Trust view for an issuer: tenant/global overrides plus the resolved effective weight.</summary>
public sealed record IssuerTrustResponse(
    TrustOverrideSummary? TenantOverride,
    TrustOverrideSummary? GlobalOverride,
    decimal EffectiveWeight)
{
    public static IssuerTrustResponse FromView(IssuerTrustView view)
    {
        return new IssuerTrustResponse(
            view.TenantOverride is null ? null : TrustOverrideSummary.FromRecord(view.TenantOverride),
            view.GlobalOverride is null ? null : TrustOverrideSummary.FromRecord(view.GlobalOverride),
            view.EffectiveWeight);
    }
}

/// <summary>Audit-friendly summary of a single trust override.</summary>
public sealed record TrustOverrideSummary(
    decimal Weight,
    string? Reason,
    DateTimeOffset UpdatedAtUtc,
    string UpdatedBy,
    DateTimeOffset CreatedAtUtc,
    string CreatedBy)
{
    public static TrustOverrideSummary FromRecord(Core.Domain.IssuerTrustOverrideRecord record)
    {
        return new TrustOverrideSummary(
            record.Weight,
            record.Reason,
            record.UpdatedAtUtc,
            record.UpdatedBy,
            record.CreatedAtUtc,
            record.CreatedBy);
    }
}

/// <summary>Payload for setting a tenant-scoped trust override.</summary>
public sealed record IssuerTrustSetRequest
{
    public decimal Weight { get; init; }

    public string? Reason { get; init; }
}
diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Endpoints/IssuerEndpoints.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Endpoints/IssuerEndpoints.cs
new file mode 100644
index 00000000..df031c0f
--- /dev/null
+++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Endpoints/IssuerEndpoints.cs
@@ -0,0 +1,166 @@
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using StellaOps.IssuerDirectory.Core.Services;
using StellaOps.IssuerDirectory.WebService.Constants;
using StellaOps.IssuerDirectory.WebService.Contracts;
using StellaOps.IssuerDirectory.WebService.Options;
using StellaOps.IssuerDirectory.WebService.Security;
using StellaOps.IssuerDirectory.WebService.Services;

namespace StellaOps.IssuerDirectory.WebService.Endpoints;

/// <summary>Minimal-API routes for issuer CRUD under /issuer-directory/issuers.</summary>
public static class IssuerEndpoints
{
    public static RouteGroupBuilder MapIssuerEndpoints(this IEndpointRouteBuilder app)
    {
        var group = app.MapGroup("/issuer-directory/issuers")
            .WithTags("Issuer Directory");

        // Reads require the Reader policy; mutations escalate to Writer/Admin.
        group.MapGet(string.Empty, ListIssuers)
            .RequireAuthorization(IssuerDirectoryPolicies.Reader)
            .WithName("IssuerDirectory_ListIssuers");

        group.MapGet("{id}", GetIssuer)
.RequireAuthorization(IssuerDirectoryPolicies.Reader)
            .WithName("IssuerDirectory_GetIssuer");

        group.MapPost(string.Empty, CreateIssuer)
            .RequireAuthorization(IssuerDirectoryPolicies.Writer)
            .WithName("IssuerDirectory_CreateIssuer");

        group.MapPut("{id}", UpdateIssuer)
            .RequireAuthorization(IssuerDirectoryPolicies.Writer)
            .WithName("IssuerDirectory_UpdateIssuer");

        group.MapDelete("{id}", DeleteIssuer)
            .RequireAuthorization(IssuerDirectoryPolicies.Admin)
            .WithName("IssuerDirectory_DeleteIssuer");

        group.MapIssuerKeyEndpoints();
        group.MapIssuerTrustEndpoints();

        return group;
    }

    /// <summary>GET /issuer-directory/issuers — lists issuers for the caller's tenant.</summary>
    private static async Task<IResult> ListIssuers(
        HttpContext context,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IssuerDirectoryService service,
        [FromQuery] bool includeGlobal = true,
        CancellationToken cancellationToken = default)
    {
        var tenantId = tenantResolver.Resolve(context);
        var issuers = await service.ListAsync(tenantId, includeGlobal, cancellationToken).ConfigureAwait(false);
        var response = issuers.Select(IssuerResponse.FromDomain).ToArray();
        return Results.Ok(response);
    }

    /// <summary>GET /issuer-directory/issuers/{id} — 404 when not found in the tenant scope.</summary>
    private static async Task<IResult> GetIssuer(
        HttpContext context,
        [FromRoute] string id,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IssuerDirectoryService service,
        [FromQuery] bool includeGlobal = true,
        CancellationToken cancellationToken = default)
    {
        var tenantId = tenantResolver.Resolve(context);
        var issuer = await service.GetAsync(tenantId, id, includeGlobal, cancellationToken).ConfigureAwait(false);
        if (issuer is null)
        {
            return Results.NotFound();
        }

        return Results.Ok(IssuerResponse.FromDomain(issuer));
    }

    /// <summary>POST /issuer-directory/issuers — creates an issuer and returns 201 with a Location header.</summary>
    private static async Task<IResult> CreateIssuer(
        HttpContext context,
        [FromBody] IssuerUpsertRequest request,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IssuerDirectoryService service,
        CancellationToken cancellationToken)
    {
        var tenantId = tenantResolver.Resolve(context);
        var actor = ActorResolver.Resolve(context);
        var reason = ResolveAuditReason(context);

        var issuer = await service.CreateAsync(
            tenantId,
            request.Id,
            request.DisplayName,
            request.Slug,
            request.Description,
            request.ToDomainContact(),
            request.ToDomainMetadata(),
            request.ToDomainEndpoints(),
            request.Tags,
            actor,
            reason,
            cancellationToken).ConfigureAwait(false);

        return Results.Created($"/issuer-directory/issuers/{issuer.Id}", IssuerResponse.FromDomain(issuer));
    }

    /// <summary>PUT /issuer-directory/issuers/{id} — rejects route/body identifier mismatches.</summary>
    private static async Task<IResult> UpdateIssuer(
        HttpContext context,
        [FromRoute] string id,
        [FromBody] IssuerUpsertRequest request,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IssuerDirectoryService service,
        CancellationToken cancellationToken)
    {
        if (!string.Equals(id, request.Id, StringComparison.Ordinal))
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Identifier mismatch",
                Detail = "Route identifier does not match request body."
            });
        }

        var tenantId = tenantResolver.Resolve(context);
        var actor = ActorResolver.Resolve(context);
        var reason = ResolveAuditReason(context);

        var issuer = await service.UpdateAsync(
            tenantId,
            id,
            request.DisplayName,
            request.Description,
            request.ToDomainContact(),
            request.ToDomainMetadata(),
            request.ToDomainEndpoints(),
            request.Tags,
            actor,
            reason,
            cancellationToken).ConfigureAwait(false);

        return Results.Ok(IssuerResponse.FromDomain(issuer));
    }

    /// <summary>DELETE /issuer-directory/issuers/{id} — Admin-only; always 204 on completion.</summary>
    private static async Task<IResult> DeleteIssuer(
        HttpContext context,
        [FromRoute] string id,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IssuerDirectoryService service,
        CancellationToken cancellationToken)
    {
        var tenantId = tenantResolver.Resolve(context);
        var actor = ActorResolver.Resolve(context);
        var reason = ResolveAuditReason(context);

        await service.DeleteAsync(tenantId, id, actor, reason, cancellationToken).ConfigureAwait(false);
        return Results.NoContent();
    }

    // Optional audit reason comes from a request header; whitespace-only values count as absent.
    private static string? ResolveAuditReason(HttpContext context)
    {
        if (context.Request.Headers.TryGetValue(IssuerDirectoryHeaders.AuditReason, out var value))
        {
            var reason = value.ToString();
            return string.IsNullOrWhiteSpace(reason) ? null : reason.Trim();
        }

        return null;
    }
}
diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Endpoints/IssuerKeyEndpoints.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Endpoints/IssuerKeyEndpoints.cs
new file mode 100644
index 00000000..5977e46a
--- /dev/null
+++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Endpoints/IssuerKeyEndpoints.cs
@@ -0,0 +1,190 @@
using Microsoft.AspNetCore.Mvc;
using StellaOps.IssuerDirectory.Core.Domain;
using StellaOps.IssuerDirectory.Core.Services;
using StellaOps.IssuerDirectory.WebService.Constants;
using StellaOps.IssuerDirectory.WebService.Contracts;
using StellaOps.IssuerDirectory.WebService.Security;
using StellaOps.IssuerDirectory.WebService.Services;

namespace StellaOps.IssuerDirectory.WebService.Endpoints;

/// <summary>Nested routes for issuer key management: {issuerId}/keys.</summary>
internal static class IssuerKeyEndpoints
{
    public static void MapIssuerKeyEndpoints(this RouteGroupBuilder group)
    {
        var keysGroup = group.MapGroup("{issuerId}/keys");

        keysGroup.MapGet(string.Empty, ListKeys)
            .RequireAuthorization(IssuerDirectoryPolicies.Reader)
            .WithName("IssuerDirectory_ListIssuerKeys");

        keysGroup.MapPost(string.Empty, CreateKey)
            .RequireAuthorization(IssuerDirectoryPolicies.Writer)
            .WithName("IssuerDirectory_CreateIssuerKey");

        keysGroup.MapPost("{keyId}/rotate", RotateKey)
            .RequireAuthorization(IssuerDirectoryPolicies.Writer)
            .WithName("IssuerDirectory_RotateIssuerKey");

        keysGroup.MapDelete("{keyId}", RevokeKey)
            .RequireAuthorization(IssuerDirectoryPolicies.Admin)
            .WithName("IssuerDirectory_RevokeIssuerKey");
    }

    /// <summary>GET {issuerId}/keys — lists keys for one issuer in the caller's tenant.</summary>
    private static async Task<IResult> ListKeys(
        HttpContext context,
        [FromRoute] string
issuerId,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IssuerKeyService keyService,
        [FromQuery] bool includeGlobal,
        CancellationToken cancellationToken)
    {
        var tenantId = tenantResolver.Resolve(context);
        var keys = await keyService.ListAsync(tenantId, issuerId, includeGlobal, cancellationToken).ConfigureAwait(false);
        var response = keys.Select(IssuerKeyResponse.FromDomain).ToArray();
        return Results.Ok(response);
    }

    /// <summary>POST {issuerId}/keys — registers a key; domain validation failures map to 400.</summary>
    private static async Task<IResult> CreateKey(
        HttpContext context,
        [FromRoute] string issuerId,
        [FromBody] IssuerKeyCreateRequest request,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IssuerKeyService keyService,
        CancellationToken cancellationToken)
    {
        var tenantId = tenantResolver.Resolve(context);
        var actor = ActorResolver.Resolve(context);
        var reason = ResolveReason(context);

        if (!TryParseType(request.Type, out var type, out var error))
        {
            return CreateBadRequest(error);
        }

        var material = new IssuerKeyMaterial(request.Format, request.Value);

        try
        {
            var record = await keyService.AddAsync(
                tenantId,
                issuerId,
                type,
                material,
                request.ExpiresAtUtc,
                actor,
                reason,
                cancellationToken).ConfigureAwait(false);

            var response = IssuerKeyResponse.FromDomain(record);
            return Results.Created($"/issuer-directory/issuers/{issuerId}/keys/{record.Id}", response);
        }
        catch (InvalidOperationException ex)
        {
            // The key service signals business-rule violations with InvalidOperationException.
            return CreateBadRequest(ex.Message);
        }
    }

    /// <summary>POST {issuerId}/keys/{keyId}/rotate — replaces a key with new material.</summary>
    private static async Task<IResult> RotateKey(
        HttpContext context,
        [FromRoute] string issuerId,
        [FromRoute] string keyId,
        [FromBody] IssuerKeyRotateRequest request,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IssuerKeyService keyService,
        CancellationToken cancellationToken)
    {
        var tenantId = tenantResolver.Resolve(context);
        var actor = ActorResolver.Resolve(context);
        var reason = ResolveReason(context);

        if (!TryParseType(request.Type, out var type, out var error))
        {
            return CreateBadRequest(error);
        }

        var material = new IssuerKeyMaterial(request.Format, request.Value);

        try
        {
            var record = await keyService.RotateAsync(
                tenantId,
                issuerId,
                keyId,
                type,
                material,
                request.ExpiresAtUtc,
                actor,
                reason,
                cancellationToken).ConfigureAwait(false);

            return Results.Ok(IssuerKeyResponse.FromDomain(record));
        }
        catch (InvalidOperationException ex)
        {
            return CreateBadRequest(ex.Message);
        }
    }

    /// <summary>DELETE {issuerId}/keys/{keyId} — revokes a key (Admin only).</summary>
    private static async Task<IResult> RevokeKey(
        HttpContext context,
        [FromRoute] string issuerId,
        [FromRoute] string keyId,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IssuerKeyService keyService,
        CancellationToken cancellationToken)
    {
        var tenantId = tenantResolver.Resolve(context);
        var actor = ActorResolver.Resolve(context);
        var reason = ResolveReason(context);

        try
        {
            await keyService.RevokeAsync(
                tenantId,
                issuerId,
                keyId,
                actor,
                reason,
                cancellationToken).ConfigureAwait(false);

            return Results.NoContent();
        }
        catch (InvalidOperationException ex)
        {
            return CreateBadRequest(ex.Message);
        }
    }

    // Case-insensitive parse of the key-type discriminator; the error string mirrors the enum members.
    private static bool TryParseType(string value, out IssuerKeyType type, out string error)
    {
        if (Enum.TryParse(value?.Trim(), ignoreCase: true, out type))
        {
            error = string.Empty;
            return true;
        }

        error = "Unsupported key type. Valid values: Ed25519PublicKey, X509Certificate, DssePublicKey.";
        return false;
    }

    private static string? ResolveReason(HttpContext context)
    {
        if (context.Request.Headers.TryGetValue(IssuerDirectoryHeaders.AuditReason, out var value))
        {
            var reason = value.ToString();
            return string.IsNullOrWhiteSpace(reason) ? null : reason.Trim();
        }

        return null;
    }

    private static IResult CreateBadRequest(string message)
    {
        return Results.BadRequest(new ProblemDetails
        {
            Title = "Invalid request",
            Detail = message
        });
    }
}
diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Endpoints/IssuerTrustEndpoints.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Endpoints/IssuerTrustEndpoints.cs
new file mode 100644
index 00000000..b99f94fd
--- /dev/null
+++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Endpoints/IssuerTrustEndpoints.cs
@@ -0,0 +1,110 @@
using Microsoft.AspNetCore.Mvc;
using StellaOps.IssuerDirectory.Core.Services;
using StellaOps.IssuerDirectory.WebService.Constants;
using StellaOps.IssuerDirectory.WebService.Contracts;
using StellaOps.IssuerDirectory.WebService.Security;
using StellaOps.IssuerDirectory.WebService.Services;

namespace StellaOps.IssuerDirectory.WebService.Endpoints;

/// <summary>Nested routes for issuer trust overrides: {issuerId}/trust.</summary>
internal static class IssuerTrustEndpoints
{
    public static void MapIssuerTrustEndpoints(this RouteGroupBuilder group)
    {
        var trustGroup = group.MapGroup("{issuerId}/trust");

        trustGroup.MapGet(string.Empty, GetTrust)
            .RequireAuthorization(IssuerDirectoryPolicies.Reader)
            .WithName("IssuerDirectory_GetTrust");

        trustGroup.MapPut(string.Empty, SetTrust)
            .RequireAuthorization(IssuerDirectoryPolicies.Writer)
            .WithName("IssuerDirectory_SetTrust");

        trustGroup.MapDelete(string.Empty, DeleteTrust)
            .RequireAuthorization(IssuerDirectoryPolicies.Admin)
            .WithName("IssuerDirectory_DeleteTrust");
    }

    /// <summary>GET {issuerId}/trust — returns the effective trust view for the tenant.</summary>
    private static async Task<IResult> GetTrust(
        HttpContext context,
        [FromRoute] string issuerId,
        [FromQuery] bool includeGlobal,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IssuerTrustService trustService,
        CancellationToken cancellationToken)
    {
        var tenantId = tenantResolver.Resolve(context);
        var view = await
trustService.GetAsync(tenantId, issuerId, includeGlobal, cancellationToken).ConfigureAwait(false);
        return Results.Ok(IssuerTrustResponse.FromView(view));
    }

    /// <summary>PUT {issuerId}/trust — sets a tenant override, then re-reads the full view (including global).</summary>
    private static async Task<IResult> SetTrust(
        HttpContext context,
        [FromRoute] string issuerId,
        [FromBody] IssuerTrustSetRequest request,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IssuerTrustService trustService,
        CancellationToken cancellationToken)
    {
        var tenantId = tenantResolver.Resolve(context);
        var actor = ActorResolver.Resolve(context);
        var reason = ResolveReason(context, request.Reason);

        try
        {
            // The write result is intentionally discarded; the response is rebuilt from a
            // fresh read so it reflects the effective weight including any global override.
            await trustService.SetAsync(
                tenantId,
                issuerId,
                request.Weight,
                reason,
                actor,
                cancellationToken).ConfigureAwait(false);

            var view = await trustService.GetAsync(tenantId, issuerId, includeGlobal: true, cancellationToken).ConfigureAwait(false);
            return Results.Ok(IssuerTrustResponse.FromView(view));
        }
        catch (InvalidOperationException ex)
        {
            return BadRequest(ex.Message);
        }
    }

    /// <summary>DELETE {issuerId}/trust — removes the tenant override (Admin only).</summary>
    private static async Task<IResult> DeleteTrust(
        HttpContext context,
        [FromRoute] string issuerId,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IssuerTrustService trustService,
        CancellationToken cancellationToken)
    {
        var tenantId = tenantResolver.Resolve(context);
        var actor = ActorResolver.Resolve(context);
        var reason = ResolveReason(context, null);

        await trustService.DeleteAsync(tenantId, issuerId, actor, reason, cancellationToken).ConfigureAwait(false);
        return Results.NoContent();
    }

    private static IResult BadRequest(string message)
    {
        return Results.BadRequest(new ProblemDetails
        {
            Title = "Invalid request",
            Detail = message
        });
    }

    // Header reason wins over the body reason; whitespace-only values count as absent.
    private static string? ResolveReason(HttpContext context, string? bodyReason)
    {
        if (context.Request.Headers.TryGetValue(IssuerDirectoryHeaders.AuditReason, out var value))
        {
            var headerReason = value.ToString();
            if (!string.IsNullOrWhiteSpace(headerReason))
            {
                return headerReason.Trim();
            }
        }

        return string.IsNullOrWhiteSpace(bodyReason) ? null : bodyReason.Trim();
    }
}
diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Options/IssuerDirectoryWebServiceOptions.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Options/IssuerDirectoryWebServiceOptions.cs
new file mode 100644
index 00000000..e9842b40
--- /dev/null
+++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Options/IssuerDirectoryWebServiceOptions.cs
@@ -0,0 +1,77 @@
using StellaOps.Auth.Abstractions;

namespace StellaOps.IssuerDirectory.WebService.Options;

/// <summary>Root configuration for the Issuer Directory web service (section "IssuerDirectory").</summary>
public sealed class IssuerDirectoryWebServiceOptions
{
    public const string SectionName = "IssuerDirectory";

    public TelemetryOptions Telemetry { get; set; } = new();

    public AuthorityOptions Authority { get; set; } = new();

    // Header carrying the caller's tenant identifier.
    public string TenantHeader { get; set; } = "X-StellaOps-Tenant";

    // When true, the service bootstraps issuers from the CSAF publisher seed file at startup.
    public bool SeedCsafPublishers { get; set; } = true;

    public string CsafSeedPath { get; set; } = "csaf-publishers.json";

    /// <summary>Throws <see cref="InvalidOperationException"/> on invalid configuration.</summary>
    public void Validate()
    {
        if (string.IsNullOrWhiteSpace(TenantHeader))
        {
            throw new InvalidOperationException("Tenant header must be configured.");
        }

        Authority.Validate();
    }

    public sealed class TelemetryOptions
    {
        public string MinimumLogLevel { get; set; } = "Information";
    }

    /// <summary>OAuth resource-server settings; when disabled, anonymous access is allowed.</summary>
    public sealed class AuthorityOptions
    {
        public bool Enabled { get; set; } = true;

        public string Issuer { get; set; } = string.Empty;

        public IList<string> Audiences { get; set; } = new List<string>();

        public bool RequireHttpsMetadata { get; set; } = true;

        public IList<string> Scopes { get; set; } = new List<string>
        {
            StellaOpsScopes.IssuerDirectoryRead,
StellaOpsScopes.IssuerDirectoryWrite,
            StellaOpsScopes.IssuerDirectoryAdmin
        };

        public string ReadScope { get; set; } = StellaOpsScopes.IssuerDirectoryRead;

        public string WriteScope { get; set; } = StellaOpsScopes.IssuerDirectoryWrite;

        public string AdminScope { get; set; } = StellaOpsScopes.IssuerDirectoryAdmin;

        public IList<string> ClientScopes { get; set; } = new List<string>();

        public void Validate()
        {
            if (!Enabled)
            {
                return;
            }

            if (string.IsNullOrWhiteSpace(Issuer))
            {
                throw new InvalidOperationException("IssuerDirectory authority issuer is required when enabled.");
            }

            if (Audiences.Count == 0)
            {
                throw new InvalidOperationException("IssuerDirectory authority audiences must be configured.");
            }
        }
    }
}
diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Program.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Program.cs
new file mode 100644
index 00000000..b659e41b
--- /dev/null
+++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Program.cs
@@ -0,0 +1,213 @@
using System.Security.Claims;
using System.Text.Encodings.Web;
using Microsoft.AspNetCore.Authentication;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Infrastructure;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using OpenTelemetry.Metrics;
using OpenTelemetry.Trace;
using Serilog;
using Serilog.Events;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.ServerIntegration;
using StellaOps.Configuration;
using StellaOps.IssuerDirectory.Core.Services;
using StellaOps.IssuerDirectory.Infrastructure;
using StellaOps.IssuerDirectory.Infrastructure.Seed;
using StellaOps.IssuerDirectory.WebService.Endpoints;
using StellaOps.IssuerDirectory.WebService.Options;
using StellaOps.IssuerDirectory.WebService.Security;
using StellaOps.IssuerDirectory.WebService.Services;

const string ConfigurationPrefix = "ISSUERDIRECTORY_";

var builder = WebApplication.CreateBuilder(args);

builder.Configuration.AddStellaOpsDefaults(options =>
{
    options.BasePath = builder.Environment.ContentRootPath;
    options.EnvironmentPrefix = ConfigurationPrefix;
    options.BindingSection = IssuerDirectoryWebServiceOptions.SectionName;
});

// Bound eagerly so logging/auth can be configured before the host is built.
var bootstrapOptions = builder.Configuration.BindOptions<IssuerDirectoryWebServiceOptions>(
    IssuerDirectoryWebServiceOptions.SectionName,
    static (options, _) => options.Validate());

builder.Host.UseSerilog((context, services, configuration) =>
{
    var minLevel = MapLogLevel(bootstrapOptions.Telemetry.MinimumLogLevel);
    configuration
        .MinimumLevel.Is(minLevel)
        .MinimumLevel.Override("Microsoft.AspNetCore", LogEventLevel.Warning)
        .Enrich.FromLogContext()
        .WriteTo.Console();
});

builder.Services.AddOptions<IssuerDirectoryWebServiceOptions>()
    .Bind(builder.Configuration.GetSection(IssuerDirectoryWebServiceOptions.SectionName))
    .Validate(options =>
    {
        options.Validate();
        return true;
    })
    .ValidateOnStart();

builder.Services.AddSingleton(TimeProvider.System);
builder.Services.AddProblemDetails();
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddIssuerDirectoryInfrastructure(builder.Configuration);
// NOTE(review): the five singleton registrations below lost their type arguments during
// extraction. The endpoints and seeding code resolve TenantResolver, IssuerDirectoryService,
// IssuerKeyService, IssuerTrustService and a CSAF seed loader, so those are the presumed
// registrations — confirm against version control before merging.
builder.Services.AddSingleton<TenantResolver>();
builder.Services.AddSingleton<IssuerDirectoryService>();
builder.Services.AddSingleton<IssuerKeyService>();
builder.Services.AddSingleton<IssuerTrustService>();
builder.Services.AddSingleton<CsafPublisherSeedLoader>();

ConfigureAuthentication(builder, bootstrapOptions);

builder.Services.AddAuthorization(auth =>
{
    if (bootstrapOptions.Authority.Enabled)
    {
        auth.AddPolicy(IssuerDirectoryPolicies.Reader, policy => policy.RequireScope(bootstrapOptions.Authority.ReadScope));
        auth.AddPolicy(IssuerDirectoryPolicies.Writer, policy => policy.RequireScope(bootstrapOptions.Authority.WriteScope));
        auth.AddPolicy(IssuerDirectoryPolicies.Admin, policy => policy.RequireScope(bootstrapOptions.Authority.AdminScope));
    }
    else
    {
        // Authority disabled (dev/offline): every policy passes unconditionally.
        auth.AddPolicy(IssuerDirectoryPolicies.Reader, policy => policy.RequireAssertion(static _ => true));
        auth.AddPolicy(IssuerDirectoryPolicies.Writer, policy => policy.RequireAssertion(static _ => true));
        auth.AddPolicy(IssuerDirectoryPolicies.Admin, policy => policy.RequireAssertion(static _ => true));
    }
});

builder.Services.AddHealthChecks().AddCheck("self", () => Microsoft.Extensions.Diagnostics.HealthChecks.HealthCheckResult.Healthy());

builder.Services.AddOpenTelemetry()
    .WithMetrics(metrics => metrics
        .AddMeter("StellaOps.IssuerDirectory")
        .AddAspNetCoreInstrumentation()
        .AddRuntimeInstrumentation())
    .WithTracing(tracing => tracing.AddAspNetCoreInstrumentation().AddHttpClientInstrumentation());

var app = builder.Build();

app.UseSerilogRequestLogging();
app.UseAuthentication();
app.UseAuthorization();

app.MapIssuerEndpoints();

// Seed CSAF publishers before accepting traffic so reads never observe an empty directory.
await SeedPublishersAsync(app.Services, app.Environment).ConfigureAwait(false);

app.Run();

// Maps the configured log-level string to Serilog's enum; unknown values fall back to Information.
static LogEventLevel MapLogLevel(string? value)
{
    return Enum.TryParse<LogEventLevel>(value, ignoreCase: true, out var level)
        ?
level
        : LogEventLevel.Information;
}

// Registers either the StellaOps resource-server JWT authentication or, when the
// authority is disabled, a scheme that authenticates every request anonymously.
static void ConfigureAuthentication(
    WebApplicationBuilder builder,
    IssuerDirectoryWebServiceOptions options)
{
    if (options.Authority.Enabled)
    {
        builder.Services.AddStellaOpsResourceServerAuthentication(
            builder.Configuration,
            configurationSection: null,
            configure: resourceOptions =>
            {
                resourceOptions.Authority = options.Authority.Issuer;
                resourceOptions.RequireHttpsMetadata = options.Authority.RequireHttpsMetadata;
                foreach (var audience in options.Authority.Audiences)
                {
                    resourceOptions.Audiences.Add(audience);
                }
            });
    }
    else
    {
        builder.Services.AddAuthentication(AllowAnonymousAuthenticationHandler.SchemeName)
            .AddScheme<AuthenticationSchemeOptions, AllowAnonymousAuthenticationHandler>(
                AllowAnonymousAuthenticationHandler.SchemeName,
                static _ => { });
    }
}

// Loads the CSAF publisher seed file (path resolved against the content root) and
// upserts the records; missing file is a warning, not a failure.
static async Task SeedPublishersAsync(IServiceProvider services, IWebHostEnvironment environment)
{
    using var scope = services.CreateScope();
    var options = scope.ServiceProvider.GetRequiredService<IOptions<IssuerDirectoryWebServiceOptions>>().Value;
    if (!options.SeedCsafPublishers)
    {
        return;
    }

    // NOTE(review): both type arguments below were lost during extraction; inferred from
    // usage (loader.Load(stream, actor) feeding service.SeedAsync) — confirm against VCS.
    var loader = scope.ServiceProvider.GetRequiredService<CsafPublisherSeedLoader>();
    var service = scope.ServiceProvider.GetRequiredService<IssuerDirectoryService>();

    var path = options.CsafSeedPath;
    if (!Path.IsPathRooted(path))
    {
        path = Path.Combine(environment.ContentRootPath, path);
    }

    if (!File.Exists(path))
    {
        Log.Warning("CSAF seed file {SeedPath} not found; skipping issuer bootstrap.", path);
        return;
    }

    await using var stream = File.OpenRead(path);
    var records = loader.Load(stream, actor: "issuer-directory-seed");
    await service.SeedAsync(records, CancellationToken.None).ConfigureAwait(false);
}

internal static class AuthorizationPolicyBuilderExtensions
{
    /// <summary>Requires an authenticated user whose token carries the given scope.</summary>
    public static AuthorizationPolicyBuilder RequireScope(this AuthorizationPolicyBuilder policy, string scope)
    {
        return policy.RequireAuthenticatedUser()
            .RequireAssertion(context => context.User.HasScope(scope));
    }
}

internal static class ClaimsPrincipalExtensions
{
    /// <summary>Checks both "scope" and "scp" claims, splitting space-separated values (case-insensitive match).</summary>
    public static bool HasScope(this ClaimsPrincipal principal, string scope)
    {
        return principal.FindAll("scope").Concat(principal.FindAll("scp"))
            .SelectMany(value => value.Value.Split(' ', StringSplitOptions.TrimEntries | StringSplitOptions.RemoveEmptyEntries))
            .Any(value => string.Equals(value, scope, StringComparison.OrdinalIgnoreCase));
    }
}

/// <summary>Authentication scheme that succeeds for every request with an empty identity; dev/offline only.</summary>
internal sealed class AllowAnonymousAuthenticationHandler : AuthenticationHandler<AuthenticationSchemeOptions>
{
    public const string SchemeName = "AllowAnonymous";

#pragma warning disable CS0618 // ISystemClock is obsolete; base ctor overload retained for compatibility.
    public AllowAnonymousAuthenticationHandler(
        IOptionsMonitor<AuthenticationSchemeOptions> options,
        ILoggerFactory logger,
        UrlEncoder encoder,
        ISystemClock clock)
        : base(options, logger, encoder, clock)
    {
    }
#pragma warning restore CS0618

    protected override Task<AuthenticateResult> HandleAuthenticateAsync()
    {
        var identity = new ClaimsIdentity(SchemeName);
        var principal = new ClaimsPrincipal(identity);
        var ticket = new AuthenticationTicket(principal, Scheme.Name);
        return Task.FromResult(AuthenticateResult.Success(ticket));
    }
}
diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Security/IssuerDirectoryPolicies.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Security/IssuerDirectoryPolicies.cs
new file mode 100644
index 00000000..ea3cae73
--- /dev/null
+++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Security/IssuerDirectoryPolicies.cs
@@ -0,0 +1,8 @@
namespace StellaOps.IssuerDirectory.WebService.Security;

/// <summary>Authorization policy names used by the Issuer Directory endpoints.</summary>
public static class IssuerDirectoryPolicies
{
    public const string Reader = "IssuerDirectory.Reader";
    public const string Writer = "IssuerDirectory.Writer";
    public const string Admin = "IssuerDirectory.Admin";
}
diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Services/ActorResolver.cs
b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Services/ActorResolver.cs new file mode 100644 index 00000000..58d0beba --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Services/ActorResolver.cs @@ -0,0 +1,17 @@ +namespace StellaOps.IssuerDirectory.WebService.Services; + +internal static class ActorResolver +{ + public static string Resolve(HttpContext context) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + return context.User?.FindFirst("sub")?.Value + ?? context.User?.Identity?.Name + ?? context.User?.FindFirst("client_id")?.Value + ?? "anonymous"; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Services/ScopeAuthorization.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Services/ScopeAuthorization.cs new file mode 100644 index 00000000..fa38f249 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Services/ScopeAuthorization.cs @@ -0,0 +1,53 @@ +using System.Security.Claims; + +namespace StellaOps.IssuerDirectory.WebService.Services; + +internal static class ScopeAuthorization +{ + private static readonly StringComparer Comparer = StringComparer.OrdinalIgnoreCase; + + public static IResult? 
RequireScope(HttpContext context, string scope) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + if (string.IsNullOrWhiteSpace(scope)) + { + throw new ArgumentException("Scope must be provided.", nameof(scope)); + } + + var user = context.User; + if (user?.Identity?.IsAuthenticated is not true) + { + return Results.Unauthorized(); + } + + if (!HasScope(user, scope)) + { + return Results.Forbid(); + } + + return null; + } + + private static bool HasScope(ClaimsPrincipal principal, string scope) + { + foreach (var claim in principal.FindAll("scope").Concat(principal.FindAll("scp"))) + { + if (string.IsNullOrWhiteSpace(claim.Value)) + { + continue; + } + + var scopes = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (scopes.Any(value => Comparer.Equals(value, scope))) + { + return true; + } + } + + return false; + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Services/TenantResolver.cs b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Services/TenantResolver.cs new file mode 100644 index 00000000..56a30564 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/Services/TenantResolver.cs @@ -0,0 +1,37 @@ +using Microsoft.Extensions.Options; +using StellaOps.IssuerDirectory.WebService.Options; + +namespace StellaOps.IssuerDirectory.WebService.Services; + +internal sealed class TenantResolver +{ + private readonly IssuerDirectoryWebServiceOptions _options; + + public TenantResolver(IOptions options) + { + _options = options?.Value ?? 
throw new ArgumentNullException(nameof(options)); + } + + public string Resolve(HttpContext context) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + if (!context.Request.Headers.TryGetValue(_options.TenantHeader, out var values)) + { + throw new InvalidOperationException( + $"Tenant header '{_options.TenantHeader}' is required for Issuer Directory operations."); + } + + var tenantId = values.ToString(); + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new InvalidOperationException( + $"Tenant header '{_options.TenantHeader}' must contain a value."); + } + + return tenantId.Trim(); + } +} diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/StellaOps.IssuerDirectory.WebService.csproj b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/StellaOps.IssuerDirectory.WebService.csproj new file mode 100644 index 00000000..a7eb1d19 --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.WebService/StellaOps.IssuerDirectory.WebService.csproj @@ -0,0 +1,30 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + + + + + + + + + + + + + PreserveNewest + + + diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.sln b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.sln new file mode 100644 index 00000000..d4708baf --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.sln @@ -0,0 +1,43 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.IssuerDirectory.Core", "StellaOps.IssuerDirectory.Core\StellaOps.IssuerDirectory.Core.csproj", "{298FE21A-B406-486C-972C-E8CE6FE81D38}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = 
"StellaOps.IssuerDirectory.Infrastructure", "StellaOps.IssuerDirectory.Infrastructure\StellaOps.IssuerDirectory.Infrastructure.csproj", "{0F76EF16-3194-4127-BC50-15F01E48F2B7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.IssuerDirectory.WebService", "StellaOps.IssuerDirectory.WebService\StellaOps.IssuerDirectory.WebService.csproj", "{8ECE3570-9BA0-470B-A8E3-C244F6AAEF92}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.IssuerDirectory.Core.Tests", "StellaOps.IssuerDirectory.Core.Tests\StellaOps.IssuerDirectory.Core.Tests.csproj", "{22842BC6-D909-4919-8FB1-B2C3ED7E4DDE}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {298FE21A-B406-486C-972C-E8CE6FE81D38}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {298FE21A-B406-486C-972C-E8CE6FE81D38}.Debug|Any CPU.Build.0 = Debug|Any CPU + {298FE21A-B406-486C-972C-E8CE6FE81D38}.Release|Any CPU.ActiveCfg = Release|Any CPU + {298FE21A-B406-486C-972C-E8CE6FE81D38}.Release|Any CPU.Build.0 = Release|Any CPU + {0F76EF16-3194-4127-BC50-15F01E48F2B7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0F76EF16-3194-4127-BC50-15F01E48F2B7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0F76EF16-3194-4127-BC50-15F01E48F2B7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0F76EF16-3194-4127-BC50-15F01E48F2B7}.Release|Any CPU.Build.0 = Release|Any CPU + {8ECE3570-9BA0-470B-A8E3-C244F6AAEF92}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8ECE3570-9BA0-470B-A8E3-C244F6AAEF92}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8ECE3570-9BA0-470B-A8E3-C244F6AAEF92}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8ECE3570-9BA0-470B-A8E3-C244F6AAEF92}.Release|Any CPU.Build.0 = Release|Any CPU + {22842BC6-D909-4919-8FB1-B2C3ED7E4DDE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {22842BC6-D909-4919-8FB1-B2C3ED7E4DDE}.Debug|Any CPU.Build.0 = 
Debug|Any CPU + {22842BC6-D909-4919-8FB1-B2C3ED7E4DDE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {22842BC6-D909-4919-8FB1-B2C3ED7E4DDE}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {291CD30E-130B-4349-AD46-80801170D9F5} + EndGlobalSection +EndGlobal diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md b/src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md index e59035cd..f82ad2e4 100644 --- a/src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md @@ -1,9 +1,11 @@ # Issuer Directory Task Board — Epic 7 | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| ISSUER-30-001 | TODO | Issuer Directory Guild | AUTH-VULN-29-001 | Implement issuer CRUD API with RBAC, audit logging, and tenant scoping; seed CSAF publisher metadata. | APIs deployed; audit logs capture actor/reason; seed data imported; tests cover RBAC. | -| ISSUER-30-002 | TODO | Issuer Directory Guild, Security Guild | ISSUER-30-001 | Implement key management endpoints (add/rotate/revoke keys), enforce expiry, validate formats (Ed25519, X.509, DSSE). | Keys stored securely; expiry enforced; validation tests cover key types; docs updated. | -| ISSUER-30-003 | TODO | Issuer Directory Guild, Policy Guild | ISSUER-30-001 | Provide trust weight APIs and tenant overrides with validation (+/- bounds) and audit trails. | Trust overrides persisted; policy integration confirmed; tests cover overrides. | -| ISSUER-30-004 | TODO | Issuer Directory Guild, VEX Lens Guild | ISSUER-30-001..003 | Integrate with VEX Lens and Excitator signature verification (client SDK, caching, retries). | Lens/Excitator resolve issuer metadata via SDK; integration tests cover network failures. 
| -| ISSUER-30-005 | TODO | Issuer Directory Guild, Observability Guild | ISSUER-30-001..004 | Instrument metrics/logs (issuer changes, key rotation, verification failures) and dashboards/alerts. | Telemetry live; alerts configured; docs updated. | -| ISSUER-30-006 | TODO | Issuer Directory Guild, DevOps Guild | ISSUER-30-001..005 | Provide deployment manifests, backup/restore, secure secret storage, and offline kit instructions. | Deployment docs merged; smoke deploy validated; backup tested; offline kit updated. | +| ISSUER-30-001 | DONE (2025-11-01) | Issuer Directory Guild | AUTH-VULN-29-001 | Implement issuer CRUD API with RBAC, audit logging, and tenant scoping; seed CSAF publisher metadata. | APIs deployed; audit logs capture actor/reason; seed data imported; tests cover RBAC. | +| ISSUER-30-002 | DONE (2025-11-01) | Issuer Directory Guild, Security Guild | ISSUER-30-001 | Implement key management endpoints (add/rotate/revoke keys), enforce expiry, validate formats (Ed25519, X.509, DSSE). | Keys stored securely; expiry enforced; validation tests cover key types; docs updated. | +| ISSUER-30-003 | DOING | Issuer Directory Guild, Policy Guild | ISSUER-30-001 | Provide trust weight APIs and tenant overrides with validation (+/- bounds) and audit trails. | Trust overrides persisted; policy integration confirmed; tests cover overrides. | +| ISSUER-30-004 | DONE (2025-11-01) | Issuer Directory Guild, VEX Lens Guild | ISSUER-30-001..003 | Integrate with VEX Lens and Excititor signature verification (client SDK, caching, retries). | Lens/Excititor resolve issuer metadata via SDK; integration tests cover network failures. | +| ISSUER-30-005 | DONE (2025-11-01) | Issuer Directory Guild, Observability Guild | ISSUER-30-001..004 | Instrument metrics/logs (issuer changes, key rotation, verification failures) and dashboards/alerts. | Telemetry live; alerts configured; docs updated.
| +| ISSUER-30-006 | TODO | Issuer Directory Guild, DevOps Guild | ISSUER-30-001..005 | Provide deployment manifests, backup/restore, secure secret storage, and offline kit instructions. | Deployment docs merged; smoke deploy validated; backup tested; offline kit updated. | + +> 2025-11-01: Excititor worker now queries Issuer Directory via the Issuer Directory client SDK during attestation verification, caching active key metadata and trust weights for tenant/global scopes. diff --git a/src/IssuerDirectory/StellaOps.IssuerDirectory/data/csaf-publishers.json b/src/IssuerDirectory/StellaOps.IssuerDirectory/data/csaf-publishers.json new file mode 100644 index 00000000..999cb68c --- /dev/null +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/data/csaf-publishers.json @@ -0,0 +1,94 @@ +[ + { + "id": "red-hat", + "tenantId": "@global", + "displayName": "Red Hat Product Security", + "slug": "red-hat", + "description": "Official CSAF publisher for Red Hat advisories.", + "contact": { + "email": "secalert@redhat.com", + "website": "https://access.redhat.com/security/team/contact/" + }, + "metadata": { + "cveOrgId": "redhat", + "csafPublisherId": "cpe:/o:redhat", + "securityAdvisoriesUrl": "https://access.redhat.com/security/cve/", + "catalogUrl": "https://access.redhat.com/security/data/csaf/v2/", + "languages": [ "en" ], + "attributes": { + "distribution": "online", + "license": "public" + } + }, + "endpoints": [ + { + "kind": "csaf", + "url": "https://access.redhat.com/security/data/csaf/v2/advisories", + "format": "csaf", + "requiresAuth": false + } + ], + "tags": [ "vendor", "linux", "csaf" ] + }, + { + "id": "microsoft", + "tenantId": "@global", + "displayName": "Microsoft Security", + "slug": "microsoft", + "description": "Microsoft vulnerability disclosure and CSAF catalog.", + "contact": { + "website": "https://www.microsoft.com/security/blog" + }, + "metadata": { + "cveOrgId": "microsoft", + "csafPublisherId": "cpe:/o:microsoft", + "securityAdvisoriesUrl": 
"https://msrc.microsoft.com/update-guide", + "catalogUrl": "https://www.microsoft.com/en-us/msrc/csa", + "languages": [ "en" ], + "attributes": { + "distribution": "online", + "license": "public" + } + }, + "endpoints": [ + { + "kind": "csaf", + "url": "https://www.microsoft.com/en-us/msrc/csa", + "format": "csaf", + "requiresAuth": false + } + ], + "tags": [ "vendor", "windows", "csaf" ] + }, + { + "id": "cisco", + "tenantId": "@global", + "displayName": "Cisco PSIRT", + "slug": "cisco", + "description": "Cisco Product Security Incident Response Team advisories.", + "contact": { + "email": "psirt@cisco.com", + "website": "https://sec.cloudapps.cisco.com/security/center/publicationListing.x" + }, + "metadata": { + "cveOrgId": "cisco", + "csafPublisherId": "cpe:/o:cisco", + "securityAdvisoriesUrl": "https://sec.cloudapps.cisco.com/security/center/publicationListing.x", + "catalogUrl": "https://sec.cloudapps.cisco.com/security/vulnerabilitycenter/csaf", + "languages": [ "en" ], + "attributes": { + "distribution": "online", + "license": "public" + } + }, + "endpoints": [ + { + "kind": "csaf", + "url": "https://sec.cloudapps.cisco.com/security/vulnerabilitycenter/csaf", + "format": "csaf", + "requiresAuth": false + } + ], + "tags": [ "vendor", "networking", "csaf" ] + } +] diff --git a/src/Notify/StellaOps.Notify.WebService/Options/NotifyWebServiceOptions.cs b/src/Notify/StellaOps.Notify.WebService/Options/NotifyWebServiceOptions.cs index b6abc08c..fa2868fb 100644 --- a/src/Notify/StellaOps.Notify.WebService/Options/NotifyWebServiceOptions.cs +++ b/src/Notify/StellaOps.Notify.WebService/Options/NotifyWebServiceOptions.cs @@ -55,11 +55,13 @@ public sealed class NotifyWebServiceOptions public int TokenClockSkewSeconds { get; set; } = 60; - public IList Audiences { get; set; } = new List { "notify" }; - - public string ReadScope { get; set; } = "notify.read"; - - public string AdminScope { get; set; } = "notify.admin"; + public IList Audiences { get; set; } = new List { 
"notify" }; + + public string ViewerScope { get; set; } = "notify.viewer"; + + public string OperatorScope { get; set; } = "notify.operator"; + + public string AdminScope { get; set; } = "notify.admin"; /// /// Optional development signing key for symmetric JWT validation when Authority is disabled. diff --git a/src/Notify/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsValidator.cs b/src/Notify/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsValidator.cs index ec87c3df..6602c8c7 100644 --- a/src/Notify/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsValidator.cs +++ b/src/Notify/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsValidator.cs @@ -60,9 +60,11 @@ internal static class NotifyWebServiceOptionsValidator throw new InvalidOperationException("notify:authority:audiences must include at least one value."); } - if (string.IsNullOrWhiteSpace(authority.AdminScope) || string.IsNullOrWhiteSpace(authority.ReadScope)) - { - throw new InvalidOperationException("notify:authority admin and read scopes must be configured."); + if (string.IsNullOrWhiteSpace(authority.AdminScope) + || string.IsNullOrWhiteSpace(authority.OperatorScope) + || string.IsNullOrWhiteSpace(authority.ViewerScope)) + { + throw new InvalidOperationException("notify:authority admin, operator, and viewer scopes must be configured."); } } else diff --git a/src/Notify/StellaOps.Notify.WebService/Program.cs b/src/Notify/StellaOps.Notify.WebService/Program.cs index 685c3d26..20942823 100644 --- a/src/Notify/StellaOps.Notify.WebService/Program.cs +++ b/src/Notify/StellaOps.Notify.WebService/Program.cs @@ -1,491 +1,537 @@ -using System; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Security.Claims; -using System.Text; -using System.Text.Json; -using System.Text.Json.Nodes; -using System.Threading; -using System.Threading.RateLimiting; -using Microsoft.AspNetCore.Authentication.JwtBearer; -using Microsoft.AspNetCore.Authorization; 
-using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using Microsoft.AspNetCore.RateLimiting; -using Microsoft.Extensions.Options; -using Microsoft.IdentityModel.Tokens; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Primitives; -using Serilog; -using Serilog.Events; -using StellaOps.Auth.ServerIntegration; -using StellaOps.Configuration; -using StellaOps.Notify.Models; -using StellaOps.Notify.Storage.Mongo; -using StellaOps.Notify.Storage.Mongo.Documents; -using StellaOps.Notify.Storage.Mongo.Repositories; -using StellaOps.Notify.WebService.Diagnostics; -using StellaOps.Notify.WebService.Extensions; -using StellaOps.Notify.WebService.Hosting; -using StellaOps.Notify.WebService.Options; -using StellaOps.Notify.WebService.Plugins; -using StellaOps.Notify.WebService.Security; -using StellaOps.Notify.WebService.Services; -using StellaOps.Notify.WebService.Internal; -using StellaOps.Notify.WebService.Storage.InMemory; -using StellaOps.Plugin.DependencyInjection; -using MongoDB.Bson; -using StellaOps.Notify.WebService.Contracts; - -var builder = WebApplication.CreateBuilder(args); - -builder.Configuration.AddStellaOpsDefaults(options => -{ - options.BasePath = builder.Environment.ContentRootPath; - options.EnvironmentPrefix = "NOTIFY_"; - options.ConfigureBuilder = configurationBuilder => - { - configurationBuilder.AddNotifyYaml(Path.Combine(builder.Environment.ContentRootPath, "../etc/notify.yaml")); - }; -}); - -var contentRootPath = builder.Environment.ContentRootPath; - -var bootstrapOptions = builder.Configuration.BindOptions( - NotifyWebServiceOptions.SectionName, - (opts, _) => - { - NotifyWebServiceOptionsPostConfigure.Apply(opts, contentRootPath); - NotifyWebServiceOptionsValidator.Validate(opts); - }); - -builder.Services.AddOptions() - .Bind(builder.Configuration.GetSection(NotifyWebServiceOptions.SectionName)) - .PostConfigure(options => - { - NotifyWebServiceOptionsPostConfigure.Apply(options, contentRootPath); - 
NotifyWebServiceOptionsValidator.Validate(options); - }) - .ValidateOnStart(); - -builder.Host.UseSerilog((context, services, loggerConfiguration) => -{ - var minimumLevel = MapLogLevel(bootstrapOptions.Telemetry.MinimumLogLevel); - - loggerConfiguration - .MinimumLevel.Is(minimumLevel) - .MinimumLevel.Override("Microsoft.AspNetCore", LogEventLevel.Warning) - .Enrich.FromLogContext() - .WriteTo.Console(); -}); - -builder.Services.AddSingleton(TimeProvider.System); -builder.Services.AddSingleton(); -builder.Services.AddSingleton(); - -if (string.Equals(bootstrapOptions.Storage.Driver, "mongo", StringComparison.OrdinalIgnoreCase)) -{ - builder.Services.AddNotifyMongoStorage(builder.Configuration.GetSection("notify:storage")); -} -else -{ - builder.Services.AddInMemoryNotifyStorage(); -} - -var pluginHostOptions = NotifyPluginHostFactory.Build(bootstrapOptions, contentRootPath); -builder.Services.AddSingleton(pluginHostOptions); -builder.Services.RegisterPluginRoutines(builder.Configuration, pluginHostOptions); +using System; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Security.Claims; +using System.Text; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Threading; +using System.Threading.RateLimiting; +using Microsoft.AspNetCore.Authentication; +using Microsoft.AspNetCore.Authentication.JwtBearer; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.RateLimiting; +using Microsoft.Extensions.Options; +using Microsoft.IdentityModel.Tokens; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Primitives; +using Serilog; +using Serilog.Events; +using StellaOps.Auth.ServerIntegration; +using StellaOps.Configuration; +using StellaOps.Notify.Models; +using StellaOps.Notify.Storage.Mongo; +using StellaOps.Notify.Storage.Mongo.Documents; +using StellaOps.Notify.Storage.Mongo.Repositories; +using 
StellaOps.Notify.WebService.Diagnostics; +using StellaOps.Notify.WebService.Extensions; +using StellaOps.Notify.WebService.Hosting; +using StellaOps.Notify.WebService.Options; +using StellaOps.Notify.WebService.Plugins; +using StellaOps.Notify.WebService.Security; +using StellaOps.Notify.WebService.Services; +using StellaOps.Notify.WebService.Internal; +using StellaOps.Notify.WebService.Storage.InMemory; +using StellaOps.Plugin.DependencyInjection; +using MongoDB.Bson; +using StellaOps.Notify.WebService.Contracts; + +var builder = WebApplication.CreateBuilder(args); + +builder.Configuration.AddStellaOpsDefaults(options => +{ + options.BasePath = builder.Environment.ContentRootPath; + options.EnvironmentPrefix = "NOTIFY_"; + options.ConfigureBuilder = configurationBuilder => + { + configurationBuilder.AddNotifyYaml(Path.Combine(builder.Environment.ContentRootPath, "../etc/notify.yaml")); + }; +}); + +var contentRootPath = builder.Environment.ContentRootPath; + +var bootstrapOptions = builder.Configuration.BindOptions( + NotifyWebServiceOptions.SectionName, + (opts, _) => + { + NotifyWebServiceOptionsPostConfigure.Apply(opts, contentRootPath); + NotifyWebServiceOptionsValidator.Validate(opts); + }); + +builder.Services.AddOptions() + .Bind(builder.Configuration.GetSection(NotifyWebServiceOptions.SectionName)) + .PostConfigure(options => + { + NotifyWebServiceOptionsPostConfigure.Apply(options, contentRootPath); + NotifyWebServiceOptionsValidator.Validate(options); + }) + .ValidateOnStart(); + +builder.Host.UseSerilog((context, services, loggerConfiguration) => +{ + var minimumLevel = MapLogLevel(bootstrapOptions.Telemetry.MinimumLogLevel); + + loggerConfiguration + .MinimumLevel.Is(minimumLevel) + .MinimumLevel.Override("Microsoft.AspNetCore", LogEventLevel.Warning) + .Enrich.FromLogContext() + .WriteTo.Console(); +}); + +builder.Services.AddSingleton(TimeProvider.System); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); + +if 
(string.Equals(bootstrapOptions.Storage.Driver, "mongo", StringComparison.OrdinalIgnoreCase)) +{ + builder.Services.AddNotifyMongoStorage(builder.Configuration.GetSection("notify:storage")); +} +else +{ + builder.Services.AddInMemoryNotifyStorage(); +} + +var pluginHostOptions = NotifyPluginHostFactory.Build(bootstrapOptions, contentRootPath); +builder.Services.AddSingleton(pluginHostOptions); +builder.Services.RegisterPluginRoutines(builder.Configuration, pluginHostOptions); builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); - -ConfigureAuthentication(builder, bootstrapOptions); -ConfigureRateLimiting(builder, bootstrapOptions); - -builder.Services.AddEndpointsApiExplorer(); - -var app = builder.Build(); - -var readyStatus = app.Services.GetRequiredService(); - -var resolvedOptions = app.Services.GetRequiredService>().Value; -await InitialiseAsync(app.Services, readyStatus, app.Logger, resolvedOptions); - -ConfigureRequestPipeline(app, bootstrapOptions); -ConfigureEndpoints(app); - -await app.RunAsync(); - -static void ConfigureAuthentication(WebApplicationBuilder builder, NotifyWebServiceOptions options) -{ - if (options.Authority.Enabled) - { - builder.Services.AddStellaOpsResourceServerAuthentication( - builder.Configuration, - configurationSection: null, - configure: resourceOptions => - { - resourceOptions.Authority = options.Authority.Issuer; - resourceOptions.RequireHttpsMetadata = options.Authority.RequireHttpsMetadata; - resourceOptions.MetadataAddress = options.Authority.MetadataAddress; - resourceOptions.BackchannelTimeout = TimeSpan.FromSeconds(options.Authority.BackchannelTimeoutSeconds); - resourceOptions.TokenClockSkew = TimeSpan.FromSeconds(options.Authority.TokenClockSkewSeconds); - - resourceOptions.Audiences.Clear(); - foreach (var audience in options.Authority.Audiences) - { - resourceOptions.Audiences.Add(audience); - } - }); - - builder.Services.AddAuthorization(auth => - { - 
auth.AddStellaOpsScopePolicy(NotifyPolicies.Read, options.Authority.ReadScope); - auth.AddStellaOpsScopePolicy(NotifyPolicies.Admin, options.Authority.AdminScope); - }); - } - else - { - builder.Services.AddAuthentication(JwtBearerDefaults.AuthenticationScheme) - .AddJwtBearer(jwt => - { - jwt.RequireHttpsMetadata = false; - jwt.TokenValidationParameters = new TokenValidationParameters - { - ValidateIssuer = true, - ValidIssuer = options.Authority.Issuer, - ValidateAudience = options.Authority.Audiences.Count > 0, - ValidAudiences = options.Authority.Audiences, - ValidateIssuerSigningKey = true, - IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(options.Authority.DevelopmentSigningKey!)), - ValidateLifetime = true, - ClockSkew = TimeSpan.FromSeconds(options.Authority.TokenClockSkewSeconds), - NameClaimType = ClaimTypes.Name - }; - }); - - builder.Services.AddAuthorization(auth => - { - auth.AddPolicy( - NotifyPolicies.Read, - policy => policy - .RequireAuthenticatedUser() - .RequireAssertion(ctx => - HasScope(ctx.User, options.Authority.ReadScope) || - HasScope(ctx.User, options.Authority.AdminScope))); - - auth.AddPolicy( - NotifyPolicies.Admin, - policy => policy - .RequireAuthenticatedUser() - .RequireAssertion(ctx => HasScope(ctx.User, options.Authority.AdminScope))); - }); - } -} - -static void ConfigureRateLimiting(WebApplicationBuilder builder, NotifyWebServiceOptions options) -{ - ArgumentNullException.ThrowIfNull(options); - var tenantHeader = options.Api.TenantHeader; - var limits = options.Api.RateLimits; - - builder.Services.AddRateLimiter(rateLimiterOptions => - { - rateLimiterOptions.RejectionStatusCode = StatusCodes.Status429TooManyRequests; - rateLimiterOptions.OnRejected = static (context, _) => - { - context.HttpContext.Response.Headers.TryAdd("Retry-After", "1"); - return ValueTask.CompletedTask; - }; - - ConfigurePolicy(rateLimiterOptions, NotifyRateLimitPolicies.DeliveryHistory, limits.DeliveryHistory, tenantHeader, 
"deliveries"); - ConfigurePolicy(rateLimiterOptions, NotifyRateLimitPolicies.TestSend, limits.TestSend, tenantHeader, "channel-test"); - }); - - static void ConfigurePolicy( - RateLimiterOptions rateLimiterOptions, - string policyName, - NotifyWebServiceOptions.RateLimitPolicyOptions policy, - string tenantHeader, - string prefix) - { - rateLimiterOptions.AddPolicy(policyName, httpContext => - { - if (policy is null || !policy.Enabled) - { - return RateLimitPartition.GetNoLimiter("notify-disabled"); - } - - var identity = ResolveIdentity(httpContext, tenantHeader, prefix); - - return RateLimitPartition.GetTokenBucketLimiter(identity, _ => new TokenBucketRateLimiterOptions - { - TokenLimit = policy.TokenLimit, - TokensPerPeriod = policy.TokensPerPeriod, - ReplenishmentPeriod = TimeSpan.FromSeconds(policy.ReplenishmentPeriodSeconds), - QueueLimit = policy.QueueLimit, - QueueProcessingOrder = QueueProcessingOrder.OldestFirst, - AutoReplenishment = true - }); - }); - } - - static string ResolveIdentity(HttpContext httpContext, string tenantHeader, string prefix) - { - var tenant = httpContext.Request.Headers.TryGetValue(tenantHeader, out var header) && !StringValues.IsNullOrEmpty(header) - ? header.ToString().Trim() - : "anonymous"; - - var subject = httpContext.User.FindFirst("sub")?.Value - ?? httpContext.User.Identity?.Name - ?? httpContext.Connection.RemoteIpAddress?.ToString() - ?? 
"anonymous"; - - return string.Concat(prefix, ':', tenant, ':', subject); - } -} - -static async Task InitialiseAsync(IServiceProvider services, ServiceStatus status, Microsoft.Extensions.Logging.ILogger logger, NotifyWebServiceOptions options) -{ - var stopwatch = Stopwatch.StartNew(); - - try - { - await using var scope = services.CreateAsyncScope(); - if (string.Equals(options.Storage.Driver, "mongo", StringComparison.OrdinalIgnoreCase)) - { - await RunMongoMigrationsAsync(scope.ServiceProvider); - } - - var registry = scope.ServiceProvider.GetRequiredService(); - var count = await registry.WarmupAsync(); - - stopwatch.Stop(); - status.RecordReadyCheck(success: true, stopwatch.Elapsed); - logger.LogInformation("Notify WebService initialised in {ElapsedMs} ms; loaded {PluginCount} plug-in(s).", stopwatch.Elapsed.TotalMilliseconds, count); - } - catch (Exception ex) - { - stopwatch.Stop(); - status.RecordReadyCheck(success: false, stopwatch.Elapsed, ex.Message); - logger.LogError(ex, "Failed to initialise Notify WebService."); - throw; - } -} - -static async Task RunMongoMigrationsAsync(IServiceProvider services) -{ - var initializerType = Type.GetType("StellaOps.Notify.Storage.Mongo.Internal.NotifyMongoInitializer, StellaOps.Notify.Storage.Mongo"); - if (initializerType is null) - { - return; - } - - var initializer = services.GetService(initializerType); - if (initializer is null) - { - return; - } - - var method = initializerType.GetMethod("EnsureIndexesAsync", new[] { typeof(CancellationToken) }); - if (method is null) - { - return; - } - - if (method.Invoke(initializer, new object[] { CancellationToken.None }) is Task task) - { - await task.ConfigureAwait(false); - } -} - -static void ConfigureRequestPipeline(WebApplication app, NotifyWebServiceOptions options) -{ - if (options.Telemetry.EnableRequestLogging) - { - app.UseSerilogRequestLogging(c => - { - c.IncludeQueryInRequestPath = true; - c.GetLevel = (_, _, exception) => exception is null ? 
LogEventLevel.Information : LogEventLevel.Error; - }); - } - - app.UseAuthentication(); - app.UseRateLimiter(); - app.UseAuthorization(); -} - -static void ConfigureEndpoints(WebApplication app) -{ - app.MapGet("/healthz", () => Results.Ok(new { status = "ok" })); - - app.MapGet("/readyz", (ServiceStatus status) => - { - var snapshot = status.CreateSnapshot(); - if (snapshot.Ready.IsReady) - { - return Results.Ok(new - { - status = "ready", - checkedAt = snapshot.Ready.CheckedAt, - latencyMs = snapshot.Ready.Latency?.TotalMilliseconds, - snapshot.StartedAt - }); - } - - return JsonResponse( - new - { - status = "unready", - snapshot.Ready.Error, - checkedAt = snapshot.Ready.CheckedAt, - latencyMs = snapshot.Ready.Latency?.TotalMilliseconds - }, - StatusCodes.Status503ServiceUnavailable); - }); - - var options = app.Services.GetRequiredService>().Value; - var tenantHeader = options.Api.TenantHeader; - var apiBasePath = options.Api.BasePath.TrimEnd('/'); - var apiGroup = app.MapGroup(options.Api.BasePath); - var internalGroup = app.MapGroup(options.Api.InternalBasePath); - - internalGroup.MapPost("/rules/normalize", (JsonNode? body, NotifySchemaMigrationService service) => Normalize(body, service.UpgradeRule)) - .WithName("notify.rules.normalize") - .Produces(StatusCodes.Status200OK) - .Produces(StatusCodes.Status400BadRequest); - - internalGroup.MapPost("/channels/normalize", (JsonNode? body, NotifySchemaMigrationService service) => Normalize(body, service.UpgradeChannel)) - .WithName("notify.channels.normalize"); - - internalGroup.MapPost("/templates/normalize", (JsonNode? 
body, NotifySchemaMigrationService service) => Normalize(body, service.UpgradeTemplate)) - .WithName("notify.templates.normalize"); - - apiGroup.MapGet("/rules", async ([FromServices] INotifyRuleRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - var rules = await repository.ListAsync(tenant, cancellationToken); - return JsonResponse(rules); - }) - .RequireAuthorization(NotifyPolicies.Read); - - apiGroup.MapGet("/rules/{ruleId}", async (string ruleId, [FromServices] INotifyRuleRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - var rule = await repository.GetAsync(tenant, ruleId, cancellationToken); - return rule is null ? Results.NotFound() : JsonResponse(rule); - }) - .RequireAuthorization(NotifyPolicies.Read); - - apiGroup.MapPost("/rules", async (JsonNode? body, NotifySchemaMigrationService service, INotifyRuleRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - if (body is null) - { - return Results.BadRequest(new { error = "Request body is required." }); - } - - var rule = service.UpgradeRule(body); - if (!string.Equals(rule.TenantId, tenant, StringComparison.Ordinal)) - { - return Results.BadRequest(new { error = "Tenant mismatch between header and payload." 
}); - } - - await repository.UpsertAsync(rule, cancellationToken); - - return CreatedJson(BuildResourceLocation(apiBasePath, "rules", rule.RuleId), rule); - }) - .RequireAuthorization(NotifyPolicies.Admin); - - apiGroup.MapDelete("/rules/{ruleId}", async (string ruleId, INotifyRuleRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - await repository.DeleteAsync(tenant, ruleId, cancellationToken); - return Results.NoContent(); - }) - .RequireAuthorization(NotifyPolicies.Admin); - - apiGroup.MapGet("/channels", async (INotifyChannelRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - var channels = await repository.ListAsync(tenant, cancellationToken); - return JsonResponse(channels); - }) - .RequireAuthorization(NotifyPolicies.Read); - - apiGroup.MapPost("/channels", async (JsonNode? body, NotifySchemaMigrationService service, INotifyChannelRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - if (body is null) - { - return Results.BadRequest(new { error = "Request body is required." }); - } - - var channel = service.UpgradeChannel(body); - if (!string.Equals(channel.TenantId, tenant, StringComparison.Ordinal)) - { - return Results.BadRequest(new { error = "Tenant mismatch between header and payload." 
}); - } - - await repository.UpsertAsync(channel, cancellationToken); - return CreatedJson(BuildResourceLocation(apiBasePath, "channels", channel.ChannelId), channel); -}) -.RequireAuthorization(NotifyPolicies.Admin); - + +ConfigureAuthentication(builder, bootstrapOptions); +ConfigureRateLimiting(builder, bootstrapOptions); + +builder.Services.AddEndpointsApiExplorer(); + +var app = builder.Build(); + +var readyStatus = app.Services.GetRequiredService(); + +var resolvedOptions = app.Services.GetRequiredService>().Value; +await InitialiseAsync(app.Services, readyStatus, app.Logger, resolvedOptions); + +ConfigureRequestPipeline(app, bootstrapOptions); +ConfigureEndpoints(app); + +await app.RunAsync(); + +static void ConfigureAuthentication(WebApplicationBuilder builder, NotifyWebServiceOptions options) +{ + if (options.Authority.Enabled) + { + builder.Services.AddStellaOpsResourceServerAuthentication( + builder.Configuration, + configurationSection: null, + configure: resourceOptions => + { + resourceOptions.Authority = options.Authority.Issuer; + resourceOptions.RequireHttpsMetadata = options.Authority.RequireHttpsMetadata; + resourceOptions.MetadataAddress = options.Authority.MetadataAddress; + resourceOptions.BackchannelTimeout = TimeSpan.FromSeconds(options.Authority.BackchannelTimeoutSeconds); + resourceOptions.TokenClockSkew = TimeSpan.FromSeconds(options.Authority.TokenClockSkewSeconds); + + resourceOptions.Audiences.Clear(); + foreach (var audience in options.Authority.Audiences) + { + resourceOptions.Audiences.Add(audience); + } + }); + + builder.Services.AddAuthorization(auth => + { + auth.AddStellaOpsScopePolicy(NotifyPolicies.Viewer, options.Authority.ViewerScope); + auth.AddPolicy( + NotifyPolicies.Operator, + policy => policy + .RequireAuthenticatedUser() + .RequireAssertion(ctx => + HasScope(ctx.User, options.Authority.OperatorScope) || + HasScope(ctx.User, options.Authority.AdminScope))); + auth.AddStellaOpsScopePolicy(NotifyPolicies.Admin, 
options.Authority.AdminScope); + }); + } + else + { + if (options.Authority.AllowAnonymousFallback) + { + builder.Services.AddAuthentication(authOptions => + { + authOptions.DefaultAuthenticateScheme = AllowAllAuthenticationHandler.SchemeName; + authOptions.DefaultChallengeScheme = AllowAllAuthenticationHandler.SchemeName; + }).AddScheme( + AllowAllAuthenticationHandler.SchemeName, + static _ => { }); + + builder.Services.AddAuthorization(auth => + { + auth.AddPolicy( + NotifyPolicies.Viewer, + policy => policy.RequireAssertion(_ => true)); + + auth.AddPolicy( + NotifyPolicies.Operator, + policy => policy.RequireAssertion(_ => true)); + + auth.AddPolicy( + NotifyPolicies.Admin, + policy => policy.RequireAssertion(_ => true)); + }); + } + else + { + builder.Services.AddAuthentication(JwtBearerDefaults.AuthenticationScheme) + .AddJwtBearer(jwt => + { + jwt.RequireHttpsMetadata = false; + jwt.IncludeErrorDetails = true; + jwt.TokenValidationParameters = new TokenValidationParameters + { + ValidateIssuer = true, + ValidIssuer = options.Authority.Issuer, + ValidateAudience = options.Authority.Audiences.Count > 0, + ValidAudiences = options.Authority.Audiences, + ValidateIssuerSigningKey = true, + IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(options.Authority.DevelopmentSigningKey!)), + ValidateLifetime = true, + ClockSkew = TimeSpan.FromSeconds(options.Authority.TokenClockSkewSeconds), + NameClaimType = ClaimTypes.Name + }; + }); + + builder.Services.AddAuthorization(auth => + { + auth.AddPolicy( + NotifyPolicies.Viewer, + policy => policy + .RequireAuthenticatedUser() + .RequireAssertion(ctx => + HasScope(ctx.User, options.Authority.ViewerScope) || + HasScope(ctx.User, options.Authority.OperatorScope) || + HasScope(ctx.User, options.Authority.AdminScope))); + + auth.AddPolicy( + NotifyPolicies.Operator, + policy => policy + .RequireAuthenticatedUser() + .RequireAssertion(ctx => + HasScope(ctx.User, options.Authority.OperatorScope) || + 
HasScope(ctx.User, options.Authority.AdminScope))); + + auth.AddPolicy( + NotifyPolicies.Admin, + policy => policy + .RequireAuthenticatedUser() + .RequireAssertion(ctx => HasScope(ctx.User, options.Authority.AdminScope))); + }); + } + } +} + +static void ConfigureRateLimiting(WebApplicationBuilder builder, NotifyWebServiceOptions options) +{ + ArgumentNullException.ThrowIfNull(options); + var tenantHeader = options.Api.TenantHeader; + var limits = options.Api.RateLimits; + + builder.Services.AddRateLimiter(rateLimiterOptions => + { + rateLimiterOptions.RejectionStatusCode = StatusCodes.Status429TooManyRequests; + rateLimiterOptions.OnRejected = static (context, _) => + { + context.HttpContext.Response.Headers.TryAdd("Retry-After", "1"); + return ValueTask.CompletedTask; + }; + + ConfigurePolicy(rateLimiterOptions, NotifyRateLimitPolicies.DeliveryHistory, limits.DeliveryHistory, tenantHeader, "deliveries"); + ConfigurePolicy(rateLimiterOptions, NotifyRateLimitPolicies.TestSend, limits.TestSend, tenantHeader, "channel-test"); + }); + + static void ConfigurePolicy( + RateLimiterOptions rateLimiterOptions, + string policyName, + NotifyWebServiceOptions.RateLimitPolicyOptions policy, + string tenantHeader, + string prefix) + { + rateLimiterOptions.AddPolicy(policyName, httpContext => + { + if (policy is null || !policy.Enabled) + { + return RateLimitPartition.GetNoLimiter("notify-disabled"); + } + + var identity = ResolveIdentity(httpContext, tenantHeader, prefix); + + return RateLimitPartition.GetTokenBucketLimiter(identity, _ => new TokenBucketRateLimiterOptions + { + TokenLimit = policy.TokenLimit, + TokensPerPeriod = policy.TokensPerPeriod, + ReplenishmentPeriod = TimeSpan.FromSeconds(policy.ReplenishmentPeriodSeconds), + QueueLimit = policy.QueueLimit, + QueueProcessingOrder = QueueProcessingOrder.OldestFirst, + AutoReplenishment = true + }); + }); + } + + static string ResolveIdentity(HttpContext httpContext, string tenantHeader, string prefix) + { + var tenant = 
httpContext.Request.Headers.TryGetValue(tenantHeader, out var header) && !StringValues.IsNullOrEmpty(header) + ? header.ToString().Trim() + : "anonymous"; + + var subject = httpContext.User.FindFirst("sub")?.Value + ?? httpContext.User.Identity?.Name + ?? httpContext.Connection.RemoteIpAddress?.ToString() + ?? "anonymous"; + + return string.Concat(prefix, ':', tenant, ':', subject); + } +} + +static async Task InitialiseAsync(IServiceProvider services, ServiceStatus status, Microsoft.Extensions.Logging.ILogger logger, NotifyWebServiceOptions options) +{ + var stopwatch = Stopwatch.StartNew(); + + try + { + await using var scope = services.CreateAsyncScope(); + if (string.Equals(options.Storage.Driver, "mongo", StringComparison.OrdinalIgnoreCase)) + { + await RunMongoMigrationsAsync(scope.ServiceProvider); + } + + var registry = scope.ServiceProvider.GetRequiredService(); + var count = await registry.WarmupAsync(); + + stopwatch.Stop(); + status.RecordReadyCheck(success: true, stopwatch.Elapsed); + logger.LogInformation("Notify WebService initialised in {ElapsedMs} ms; loaded {PluginCount} plug-in(s).", stopwatch.Elapsed.TotalMilliseconds, count); + } + catch (Exception ex) + { + stopwatch.Stop(); + status.RecordReadyCheck(success: false, stopwatch.Elapsed, ex.Message); + logger.LogError(ex, "Failed to initialise Notify WebService."); + throw; + } +} + +static async Task RunMongoMigrationsAsync(IServiceProvider services) +{ + var initializerType = Type.GetType("StellaOps.Notify.Storage.Mongo.Internal.NotifyMongoInitializer, StellaOps.Notify.Storage.Mongo"); + if (initializerType is null) + { + return; + } + + var initializer = services.GetService(initializerType); + if (initializer is null) + { + return; + } + + var method = initializerType.GetMethod("EnsureIndexesAsync", new[] { typeof(CancellationToken) }); + if (method is null) + { + return; + } + + if (method.Invoke(initializer, new object[] { CancellationToken.None }) is Task task) + { + await 
task.ConfigureAwait(false); + } +} + +static void ConfigureRequestPipeline(WebApplication app, NotifyWebServiceOptions options) +{ + if (options.Telemetry.EnableRequestLogging) + { + app.UseSerilogRequestLogging(c => + { + c.IncludeQueryInRequestPath = true; + c.GetLevel = (_, _, exception) => exception is null ? LogEventLevel.Information : LogEventLevel.Error; + }); + } + + app.UseAuthentication(); + app.UseRateLimiter(); + app.UseAuthorization(); +} + +static void ConfigureEndpoints(WebApplication app) +{ + app.MapGet("/healthz", () => Results.Ok(new { status = "ok" })); + + app.MapGet("/readyz", (ServiceStatus status) => + { + var snapshot = status.CreateSnapshot(); + if (snapshot.Ready.IsReady) + { + return Results.Ok(new + { + status = "ready", + checkedAt = snapshot.Ready.CheckedAt, + latencyMs = snapshot.Ready.Latency?.TotalMilliseconds, + snapshot.StartedAt + }); + } + + return JsonResponse( + new + { + status = "unready", + snapshot.Ready.Error, + checkedAt = snapshot.Ready.CheckedAt, + latencyMs = snapshot.Ready.Latency?.TotalMilliseconds + }, + StatusCodes.Status503ServiceUnavailable); + }); + + var options = app.Services.GetRequiredService>().Value; + var tenantHeader = options.Api.TenantHeader; + var apiBasePath = options.Api.BasePath.TrimEnd('/'); + var apiGroup = app.MapGroup(options.Api.BasePath); + var internalGroup = app.MapGroup(options.Api.InternalBasePath); + + internalGroup.MapPost("/rules/normalize", (JsonNode? body, NotifySchemaMigrationService service) => Normalize(body, service.UpgradeRule)) + .WithName("notify.rules.normalize") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status400BadRequest); + + internalGroup.MapPost("/channels/normalize", (JsonNode? body, NotifySchemaMigrationService service) => Normalize(body, service.UpgradeChannel)) + .WithName("notify.channels.normalize"); + + internalGroup.MapPost("/templates/normalize", (JsonNode? 
body, NotifySchemaMigrationService service) => Normalize(body, service.UpgradeTemplate)) + .WithName("notify.templates.normalize"); + + apiGroup.MapGet("/rules", async ([FromServices] INotifyRuleRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var rules = await repository.ListAsync(tenant, cancellationToken); + return JsonResponse(rules); + }) + .RequireAuthorization(NotifyPolicies.Viewer); + + apiGroup.MapGet("/rules/{ruleId}", async (string ruleId, [FromServices] INotifyRuleRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var rule = await repository.GetAsync(tenant, ruleId, cancellationToken); + return rule is null ? Results.NotFound() : JsonResponse(rule); + }) + .RequireAuthorization(NotifyPolicies.Viewer); + + apiGroup.MapPost("/rules", async (JsonNode? body, NotifySchemaMigrationService service, INotifyRuleRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + if (body is null) + { + return Results.BadRequest(new { error = "Request body is required." }); + } + + var rule = service.UpgradeRule(body); + if (!string.Equals(rule.TenantId, tenant, StringComparison.Ordinal)) + { + return Results.BadRequest(new { error = "Tenant mismatch between header and payload." 
}); + } + + await repository.UpsertAsync(rule, cancellationToken); + + return CreatedJson(BuildResourceLocation(apiBasePath, "rules", rule.RuleId), rule); + }) + .RequireAuthorization(NotifyPolicies.Operator); + + apiGroup.MapDelete("/rules/{ruleId}", async (string ruleId, INotifyRuleRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + await repository.DeleteAsync(tenant, ruleId, cancellationToken); + return Results.NoContent(); + }) + .RequireAuthorization(NotifyPolicies.Operator); + + apiGroup.MapGet("/channels", async (INotifyChannelRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var channels = await repository.ListAsync(tenant, cancellationToken); + return JsonResponse(channels); + }) + .RequireAuthorization(NotifyPolicies.Viewer); + + apiGroup.MapPost("/channels", async (JsonNode? body, NotifySchemaMigrationService service, INotifyChannelRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + if (body is null) + { + return Results.BadRequest(new { error = "Request body is required." }); + } + + var channel = service.UpgradeChannel(body); + if (!string.Equals(channel.TenantId, tenant, StringComparison.Ordinal)) + { + return Results.BadRequest(new { error = "Tenant mismatch between header and payload." }); + } + + await repository.UpsertAsync(channel, cancellationToken); + return CreatedJson(BuildResourceLocation(apiBasePath, "channels", channel.ChannelId), channel); +}) +.RequireAuthorization(NotifyPolicies.Operator); + apiGroup.MapPost("/channels/{channelId}/test", async (string channelId, [FromBody] ChannelTestSendRequest? 
request, INotifyChannelRepository repository, INotifyChannelTestService testService, HttpContext context, CancellationToken cancellationToken) => { if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) { return error!; - } - - if (request is null) - { - return Results.BadRequest(new { error = "Request body is required." }); - } - - var channel = await repository.GetAsync(tenant, channelId, cancellationToken); - if (channel is null) - { - return Results.NotFound(); - } - - try - { - var response = await testService.SendAsync(tenant, channel, request, context.TraceIdentifier, cancellationToken).ConfigureAwait(false); - return JsonResponse(response, StatusCodes.Status202Accepted); - } - catch (ChannelTestSendValidationException ex) - { - return Results.BadRequest(new { error = ex.Message }); - } + } + + if (request is null) + { + return Results.BadRequest(new { error = "Request body is required." }); + } + + var channel = await repository.GetAsync(tenant, channelId, cancellationToken); + if (channel is null) + { + return Results.NotFound(); + } + + try + { + var response = await testService.SendAsync(tenant, channel, request, context.TraceIdentifier, cancellationToken).ConfigureAwait(false); + return JsonResponse(response, StatusCodes.Status202Accepted); + } + catch (ChannelTestSendValidationException ex) + { + return Results.BadRequest(new { error = ex.Message }); + } }) - .RequireAuthorization(NotifyPolicies.Admin) + .RequireAuthorization(NotifyPolicies.Operator) .RequireRateLimiting(NotifyRateLimitPolicies.TestSend); apiGroup.MapGet("/channels/{channelId}/health", async (string channelId, INotifyChannelRepository repository, INotifyChannelHealthService healthService, HttpContext context, CancellationToken cancellationToken) => @@ -504,347 +550,362 @@ static void ConfigureEndpoints(WebApplication app) var response = await healthService.CheckAsync(tenant, channel, context.TraceIdentifier, cancellationToken).ConfigureAwait(false); return 
JsonResponse(response); }) - .RequireAuthorization(NotifyPolicies.Read); + .RequireAuthorization(NotifyPolicies.Viewer); - apiGroup.MapDelete("/channels/{channelId}", async (string channelId, INotifyChannelRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - await repository.DeleteAsync(tenant, channelId, cancellationToken); - return Results.NoContent(); - }) - .RequireAuthorization(NotifyPolicies.Admin); - - apiGroup.MapGet("/templates", async (INotifyTemplateRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - var templates = await repository.ListAsync(tenant, cancellationToken); - return JsonResponse(templates); - }) - .RequireAuthorization(NotifyPolicies.Read); - - apiGroup.MapPost("/templates", async (JsonNode? body, NotifySchemaMigrationService service, INotifyTemplateRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - if (body is null) - { - return Results.BadRequest(new { error = "Request body is required." }); - } - - var template = service.UpgradeTemplate(body); - if (!string.Equals(template.TenantId, tenant, StringComparison.Ordinal)) - { - return Results.BadRequest(new { error = "Tenant mismatch between header and payload." 
}); - } - - await repository.UpsertAsync(template, cancellationToken); - return CreatedJson(BuildResourceLocation(apiBasePath, "templates", template.TemplateId), template); - }) - .RequireAuthorization(NotifyPolicies.Admin); - - apiGroup.MapDelete("/templates/{templateId}", async (string templateId, INotifyTemplateRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - await repository.DeleteAsync(tenant, templateId, cancellationToken); - return Results.NoContent(); - }) - .RequireAuthorization(NotifyPolicies.Admin); - - apiGroup.MapPost("/deliveries", async (JsonNode? body, INotifyDeliveryRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - if (body is null) - { - return Results.BadRequest(new { error = "Request body is required." }); - } - - var delivery = NotifyCanonicalJsonSerializer.Deserialize(body.ToJsonString()); - if (!string.Equals(delivery.TenantId, tenant, StringComparison.Ordinal)) - { - return Results.BadRequest(new { error = "Tenant mismatch between header and payload." }); - } - - await repository.UpdateAsync(delivery, cancellationToken); - return CreatedJson(BuildResourceLocation(apiBasePath, "deliveries", delivery.DeliveryId), delivery); - }) - .RequireAuthorization(NotifyPolicies.Admin); - - apiGroup.MapGet("/deliveries", async ([FromServices] INotifyDeliveryRepository repository, HttpContext context, [FromQuery] DateTimeOffset? since, [FromQuery] string? status, [FromQuery] int? limit, [FromQuery] string? 
continuationToken, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - var effectiveLimit = NormalizeLimit(limit); - var result = await repository.QueryAsync(tenant, since, status, effectiveLimit, continuationToken, cancellationToken).ConfigureAwait(false); - var payload = new - { - items = result.Items, - continuationToken = result.ContinuationToken, - count = result.Items.Count - }; - - return JsonResponse(payload); - }) - .RequireAuthorization(NotifyPolicies.Read) - .RequireRateLimiting(NotifyRateLimitPolicies.DeliveryHistory); - - apiGroup.MapGet("/deliveries/{deliveryId}", async (string deliveryId, INotifyDeliveryRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - var delivery = await repository.GetAsync(tenant, deliveryId, cancellationToken); - return delivery is null ? Results.NotFound() : JsonResponse(delivery); - }) - .RequireAuthorization(NotifyPolicies.Read) - .RequireRateLimiting(NotifyRateLimitPolicies.DeliveryHistory); - - apiGroup.MapPost("/digests", async ([FromBody] NotifyDigestDocument payload, INotifyDigestRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - if (!string.Equals(payload.TenantId, tenant, StringComparison.Ordinal)) - { - return Results.BadRequest(new { error = "Tenant mismatch between header and payload." 
}); - } - - await repository.UpsertAsync(payload, cancellationToken); - return Results.Ok(); - }) - .RequireAuthorization(NotifyPolicies.Admin); - - apiGroup.MapGet("/digests/{actionKey}", async (string actionKey, INotifyDigestRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - var digest = await repository.GetAsync(tenant, actionKey, cancellationToken); - return digest is null ? Results.NotFound() : JsonResponse(digest); - }) - .RequireAuthorization(NotifyPolicies.Read); - - apiGroup.MapDelete("/digests/{actionKey}", async (string actionKey, INotifyDigestRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - await repository.RemoveAsync(tenant, actionKey, cancellationToken); - return Results.NoContent(); - }) - .RequireAuthorization(NotifyPolicies.Admin); - - apiGroup.MapPost("/locks/acquire", async ([FromBody] AcquireLockRequest request, INotifyLockRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - var acquired = await repository.TryAcquireAsync(tenant, request.Resource, request.Owner, TimeSpan.FromSeconds(request.TtlSeconds), cancellationToken); - return JsonResponse(new { acquired }); - }) - .RequireAuthorization(NotifyPolicies.Admin); - - apiGroup.MapPost("/locks/release", async ([FromBody] ReleaseLockRequest request, INotifyLockRepository repository, HttpContext context, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - await repository.ReleaseAsync(tenant, request.Resource, request.Owner, cancellationToken); - return 
Results.NoContent(); - }) - .RequireAuthorization(NotifyPolicies.Admin); - - apiGroup.MapPost("/audit", async ([FromBody] JsonNode? body, INotifyAuditRepository repository, HttpContext context, ClaimsPrincipal user, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - if (body is null) - { - return Results.BadRequest(new { error = "Request body is required." }); - } - - var action = body["action"]?.GetValue(); - if (string.IsNullOrWhiteSpace(action)) - { - return Results.BadRequest(new { error = "Action is required." }); - } - - var entry = new NotifyAuditEntryDocument - { - Id = ObjectId.GenerateNewId(), - TenantId = tenant, - Action = action, - Actor = user.Identity?.Name ?? "unknown", - EntityId = body["entityId"]?.GetValue() ?? string.Empty, - EntityType = body["entityType"]?.GetValue() ?? string.Empty, - Timestamp = DateTimeOffset.UtcNow, - Payload = body["payload"] is JsonObject payloadObj - ? BsonDocument.Parse(payloadObj.ToJsonString()) - : new BsonDocument() - }; - - await repository.AppendAsync(entry, cancellationToken); - return CreatedJson(BuildResourceLocation(apiBasePath, "audit", entry.Id.ToString()), new { entry.Id }); - }) - .RequireAuthorization(NotifyPolicies.Admin); - - apiGroup.MapGet("/audit", async (INotifyAuditRepository repository, HttpContext context, [FromQuery] DateTimeOffset? since, [FromQuery] int? 
limit, CancellationToken cancellationToken) => - { - if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) - { - return error!; - } - - var entries = await repository.QueryAsync(tenant, since, limit, cancellationToken); - var response = entries.Select(e => new - { - e.Id, - e.TenantId, - e.Actor, - e.Action, - e.EntityId, - e.EntityType, - e.Timestamp, - Payload = JsonNode.Parse(e.Payload.ToJson()) - }); - - return JsonResponse(response); - }) - .RequireAuthorization(NotifyPolicies.Read); -} - -static int NormalizeLimit(int? value) -{ - if (value is null || value <= 0) - { - return 50; - } - - return Math.Min(value.Value, 200); -} - -static bool TryResolveTenant(HttpContext context, string tenantHeader, out string tenant, out IResult? error) -{ - if (!context.Request.Headers.TryGetValue(tenantHeader, out var header) || string.IsNullOrWhiteSpace(header)) - { - tenant = string.Empty; - error = Results.BadRequest(new { error = $"{tenantHeader} header is required." }); - return false; - } - - tenant = header.ToString().Trim(); - error = null; - return true; -} - -static string BuildResourceLocation(string basePath, params string[] segments) -{ - if (segments.Length == 0) - { - return basePath; - } - - var builder = new StringBuilder(basePath); - foreach (var segment in segments) - { - builder.Append('/'); - builder.Append(Uri.EscapeDataString(segment)); - } - - return builder.ToString(); -} - -static IResult JsonResponse(T value, int statusCode = StatusCodes.Status200OK, string? location = null) -{ - var payload = JsonSerializer.Serialize(value, new JsonSerializerOptions(JsonSerializerDefaults.Web)); - return new JsonHttpResult(payload, statusCode, location); -} - -static IResult CreatedJson(string location, T value) - => JsonResponse(value, StatusCodes.Status201Created, location); - -static IResult Normalize(JsonNode? body, Func upgrade) -{ - if (body is null) - { - return Results.BadRequest(new { error = "Request body is required." 
}); - } - - try - { - var model = upgrade(body); - var json = NotifyCanonicalJsonSerializer.Serialize(model); - return Results.Content(json, "application/json"); - } - catch (Exception ex) - { - return Results.BadRequest(new { error = ex.Message }); - } -} - -static bool HasScope(ClaimsPrincipal principal, string scope) -{ - if (principal is null || string.IsNullOrWhiteSpace(scope)) - { - return false; - } - - foreach (var claim in principal.FindAll("scope")) - { - if (string.Equals(claim.Value, scope, StringComparison.OrdinalIgnoreCase)) - { - return true; - } - } - - return false; -} - -static LogEventLevel MapLogLevel(string configuredLevel) -{ - return configuredLevel?.ToLowerInvariant() switch - { - "verbose" => LogEventLevel.Verbose, - "debug" => LogEventLevel.Debug, - "warning" => LogEventLevel.Warning, - "error" => LogEventLevel.Error, - "fatal" => LogEventLevel.Fatal, - _ => LogEventLevel.Information - }; -} + apiGroup.MapDelete("/channels/{channelId}", async (string channelId, INotifyChannelRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + await repository.DeleteAsync(tenant, channelId, cancellationToken); + return Results.NoContent(); + }) + .RequireAuthorization(NotifyPolicies.Operator); + + apiGroup.MapGet("/templates", async (INotifyTemplateRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var templates = await repository.ListAsync(tenant, cancellationToken); + return JsonResponse(templates); + }) + .RequireAuthorization(NotifyPolicies.Viewer); + + apiGroup.MapPost("/templates", async (JsonNode? 
body, NotifySchemaMigrationService service, INotifyTemplateRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + if (body is null) + { + return Results.BadRequest(new { error = "Request body is required." }); + } + + var template = service.UpgradeTemplate(body); + if (!string.Equals(template.TenantId, tenant, StringComparison.Ordinal)) + { + return Results.BadRequest(new { error = "Tenant mismatch between header and payload." }); + } + + await repository.UpsertAsync(template, cancellationToken); + return CreatedJson(BuildResourceLocation(apiBasePath, "templates", template.TemplateId), template); + }) + .RequireAuthorization(NotifyPolicies.Operator); + + apiGroup.MapDelete("/templates/{templateId}", async (string templateId, INotifyTemplateRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + await repository.DeleteAsync(tenant, templateId, cancellationToken); + return Results.NoContent(); + }) + .RequireAuthorization(NotifyPolicies.Operator); + + apiGroup.MapPost("/deliveries", async (JsonNode? body, INotifyDeliveryRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + if (body is null) + { + return Results.BadRequest(new { error = "Request body is required." }); + } + + var delivery = NotifyCanonicalJsonSerializer.Deserialize(body.ToJsonString()); + if (!string.Equals(delivery.TenantId, tenant, StringComparison.Ordinal)) + { + return Results.BadRequest(new { error = "Tenant mismatch between header and payload." 
}); + } + + await repository.UpdateAsync(delivery, cancellationToken); + return CreatedJson(BuildResourceLocation(apiBasePath, "deliveries", delivery.DeliveryId), delivery); + }) + .RequireAuthorization(NotifyPolicies.Operator); + + apiGroup.MapGet("/deliveries", async ([FromServices] INotifyDeliveryRepository repository, HttpContext context, [FromQuery] DateTimeOffset? since, [FromQuery] string? status, [FromQuery] int? limit, [FromQuery] string? continuationToken, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var effectiveLimit = NormalizeLimit(limit); + var result = await repository.QueryAsync(tenant, since, status, effectiveLimit, continuationToken, cancellationToken).ConfigureAwait(false); + var payload = new + { + items = result.Items, + continuationToken = result.ContinuationToken, + count = result.Items.Count + }; + + return JsonResponse(payload); + }) + .RequireAuthorization(NotifyPolicies.Viewer) + .RequireRateLimiting(NotifyRateLimitPolicies.DeliveryHistory); + + apiGroup.MapGet("/deliveries/{deliveryId}", async (string deliveryId, INotifyDeliveryRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var delivery = await repository.GetAsync(tenant, deliveryId, cancellationToken); + return delivery is null ? 
Results.NotFound() : JsonResponse(delivery); + }) + .RequireAuthorization(NotifyPolicies.Viewer) + .RequireRateLimiting(NotifyRateLimitPolicies.DeliveryHistory); + + apiGroup.MapPost("/digests", async ([FromBody] NotifyDigestDocument payload, INotifyDigestRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + if (!string.Equals(payload.TenantId, tenant, StringComparison.Ordinal)) + { + return Results.BadRequest(new { error = "Tenant mismatch between header and payload." }); + } + + await repository.UpsertAsync(payload, cancellationToken); + return Results.Ok(); + }) + .RequireAuthorization(NotifyPolicies.Operator); + + apiGroup.MapGet("/digests/{actionKey}", async (string actionKey, INotifyDigestRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var digest = await repository.GetAsync(tenant, actionKey, cancellationToken); + return digest is null ? 
Results.NotFound() : JsonResponse(digest); + }) + .RequireAuthorization(NotifyPolicies.Viewer); + + apiGroup.MapDelete("/digests/{actionKey}", async (string actionKey, INotifyDigestRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + await repository.RemoveAsync(tenant, actionKey, cancellationToken); + return Results.NoContent(); + }) + .RequireAuthorization(NotifyPolicies.Operator); + + apiGroup.MapPost("/locks/acquire", async ([FromBody] AcquireLockRequest request, INotifyLockRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var acquired = await repository.TryAcquireAsync(tenant, request.Resource, request.Owner, TimeSpan.FromSeconds(request.TtlSeconds), cancellationToken); + return JsonResponse(new { acquired }); + }) + .RequireAuthorization(NotifyPolicies.Operator); + + apiGroup.MapPost("/locks/release", async ([FromBody] ReleaseLockRequest request, INotifyLockRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + await repository.ReleaseAsync(tenant, request.Resource, request.Owner, cancellationToken); + return Results.NoContent(); + }) + .RequireAuthorization(NotifyPolicies.Operator); + + apiGroup.MapPost("/audit", async ([FromBody] JsonNode? body, INotifyAuditRepository repository, HttpContext context, ClaimsPrincipal user, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + if (body is null) + { + return Results.BadRequest(new { error = "Request body is required." 
}); + } + + var action = body["action"]?.GetValue(); + if (string.IsNullOrWhiteSpace(action)) + { + return Results.BadRequest(new { error = "Action is required." }); + } + + var entry = new NotifyAuditEntryDocument + { + Id = ObjectId.GenerateNewId(), + TenantId = tenant, + Action = action, + Actor = user.Identity?.Name ?? "unknown", + EntityId = body["entityId"]?.GetValue() ?? string.Empty, + EntityType = body["entityType"]?.GetValue() ?? string.Empty, + Timestamp = DateTimeOffset.UtcNow, + Payload = body["payload"] is JsonObject payloadObj + ? BsonDocument.Parse(payloadObj.ToJsonString()) + : new BsonDocument() + }; + + await repository.AppendAsync(entry, cancellationToken); + return CreatedJson(BuildResourceLocation(apiBasePath, "audit", entry.Id.ToString()), new { entry.Id }); + }) + .RequireAuthorization(NotifyPolicies.Operator); + + apiGroup.MapGet("/audit", async (INotifyAuditRepository repository, HttpContext context, [FromQuery] DateTimeOffset? since, [FromQuery] int? limit, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var entries = await repository.QueryAsync(tenant, since, limit, cancellationToken); + var response = entries.Select(e => new + { + e.Id, + e.TenantId, + e.Actor, + e.Action, + e.EntityId, + e.EntityType, + e.Timestamp, + Payload = JsonNode.Parse(e.Payload.ToJson()) + }); + + return JsonResponse(response); + }) + .RequireAuthorization(NotifyPolicies.Viewer); +} + +static int NormalizeLimit(int? value) +{ + if (value is null || value <= 0) + { + return 50; + } + + return Math.Min(value.Value, 200); +} + +static bool TryResolveTenant(HttpContext context, string tenantHeader, out string tenant, out IResult? error) +{ + if (!context.Request.Headers.TryGetValue(tenantHeader, out var header) || string.IsNullOrWhiteSpace(header)) + { + tenant = string.Empty; + error = Results.BadRequest(new { error = $"{tenantHeader} header is required." 
}); + return false; + } + + tenant = header.ToString().Trim(); + error = null; + return true; +} + +static string BuildResourceLocation(string basePath, params string[] segments) +{ + if (segments.Length == 0) + { + return basePath; + } + + var builder = new StringBuilder(basePath); + foreach (var segment in segments) + { + builder.Append('/'); + builder.Append(Uri.EscapeDataString(segment)); + } + + return builder.ToString(); +} + +static IResult JsonResponse(T value, int statusCode = StatusCodes.Status200OK, string? location = null) +{ + var payload = JsonSerializer.Serialize(value, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + return new JsonHttpResult(payload, statusCode, location); +} + +static IResult CreatedJson(string location, T value) + => JsonResponse(value, StatusCodes.Status201Created, location); + +static IResult Normalize(JsonNode? body, Func upgrade) +{ + if (body is null) + { + return Results.BadRequest(new { error = "Request body is required." }); + } + + try + { + var model = upgrade(body); + var json = NotifyCanonicalJsonSerializer.Serialize(model); + return Results.Content(json, "application/json"); + } + catch (Exception ex) + { + return Results.BadRequest(new { error = ex.Message }); + } +} + +static bool HasScope(ClaimsPrincipal principal, string scope) +{ + if (principal is null || string.IsNullOrWhiteSpace(scope)) + { + return false; + } + + foreach (var claim in principal.Claims) + { + if (!string.Equals(claim.Type, "scope", StringComparison.OrdinalIgnoreCase) + && !string.Equals(claim.Type, "http://schemas.microsoft.com/identity/claims/scope", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + if (string.IsNullOrWhiteSpace(claim.Value)) + { + continue; + } + + var values = claim.Value.Split(new[] { ' ', '\t', '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries); + foreach (var value in values) + { + if (string.Equals(value, scope, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + } + + return false; 
+} + +static LogEventLevel MapLogLevel(string configuredLevel) +{ + return configuredLevel?.ToLowerInvariant() switch + { + "verbose" => LogEventLevel.Verbose, + "debug" => LogEventLevel.Debug, + "warning" => LogEventLevel.Warning, + "error" => LogEventLevel.Error, + "fatal" => LogEventLevel.Fatal, + _ => LogEventLevel.Information + }; +} diff --git a/src/Notify/StellaOps.Notify.WebService/Security/AllowAllAuthenticationHandler.cs b/src/Notify/StellaOps.Notify.WebService/Security/AllowAllAuthenticationHandler.cs new file mode 100644 index 00000000..dfed1895 --- /dev/null +++ b/src/Notify/StellaOps.Notify.WebService/Security/AllowAllAuthenticationHandler.cs @@ -0,0 +1,31 @@ +using System; +using System.Security.Claims; +using System.Text.Encodings.Web; +using Microsoft.AspNetCore.Authentication; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; + +namespace StellaOps.Notify.WebService.Security; + +internal sealed class AllowAllAuthenticationHandler : AuthenticationHandler +{ + public const string SchemeName = "Notify.AllowAll"; + +#pragma warning disable CS0618 + public AllowAllAuthenticationHandler( + IOptionsMonitor options, + ILoggerFactory logger, + UrlEncoder encoder, + ISystemClock clock) + : base(options, logger, encoder, clock) + { + } +#pragma warning restore CS0618 + + protected override Task HandleAuthenticateAsync() + { + var principal = new ClaimsPrincipal(new ClaimsIdentity()); + var ticket = new AuthenticationTicket(principal, Scheme.Name); + return Task.FromResult(AuthenticateResult.Success(ticket)); + } +} diff --git a/src/Notify/StellaOps.Notify.WebService/Security/NotifyPolicies.cs b/src/Notify/StellaOps.Notify.WebService/Security/NotifyPolicies.cs index b6803793..854322b9 100644 --- a/src/Notify/StellaOps.Notify.WebService/Security/NotifyPolicies.cs +++ b/src/Notify/StellaOps.Notify.WebService/Security/NotifyPolicies.cs @@ -1,7 +1,8 @@ namespace StellaOps.Notify.WebService.Security; -internal static class NotifyPolicies -{ 
- public const string Read = "notify.read"; - public const string Admin = "notify.admin"; -} +internal static class NotifyPolicies +{ + public const string Viewer = "notify.viewer"; + public const string Operator = "notify.operator"; + public const string Admin = "notify.admin"; +} diff --git a/src/Notify/__Tests/StellaOps.Notify.WebService.Tests/CrudEndpointsTests.cs b/src/Notify/__Tests/StellaOps.Notify.WebService.Tests/CrudEndpointsTests.cs index 84b47072..6a70fdb0 100644 --- a/src/Notify/__Tests/StellaOps.Notify.WebService.Tests/CrudEndpointsTests.cs +++ b/src/Notify/__Tests/StellaOps.Notify.WebService.Tests/CrudEndpointsTests.cs @@ -1,417 +1,449 @@ -using System.Collections.Generic; -using System.IdentityModel.Tokens.Jwt; -using System.IO; -using System.Linq; -using System.Net; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json.Nodes; -using Microsoft.AspNetCore.Mvc.Testing; -using Microsoft.IdentityModel.Tokens; -using Microsoft.Extensions.DependencyInjection; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.WebService.Tests; - -public sealed class CrudEndpointsTests : IClassFixture>, IAsyncLifetime -{ - private const string SigningKey = "super-secret-test-key-1234567890"; - private const string Issuer = "test-issuer"; - private const string Audience = "notify"; - - private readonly WebApplicationFactory _factory; - private readonly string _adminToken; - private readonly string _readToken; - - public CrudEndpointsTests(WebApplicationFactory factory) - { - _factory = factory.WithWebHostBuilder(builder => - { - builder.UseSetting("notify:storage:driver", "memory"); - builder.UseSetting("notify:authority:enabled", "false"); - builder.UseSetting("notify:authority:developmentSigningKey", SigningKey); - builder.UseSetting("notify:authority:issuer", Issuer); - builder.UseSetting("notify:authority:audiences:0", Audience); - 
builder.UseSetting("notify:authority:adminScope", "notify.admin"); - builder.UseSetting("notify:authority:readScope", "notify.read"); - builder.UseSetting("notify:telemetry:enableRequestLogging", "false"); - builder.UseSetting("notify:api:rateLimits:testSend:tokenLimit", "10"); - builder.UseSetting("notify:api:rateLimits:testSend:tokensPerPeriod", "10"); - builder.UseSetting("notify:api:rateLimits:testSend:queueLimit", "5"); - builder.UseSetting("notify:api:rateLimits:deliveryHistory:tokenLimit", "30"); - builder.UseSetting("notify:api:rateLimits:deliveryHistory:tokensPerPeriod", "30"); - builder.UseSetting("notify:api:rateLimits:deliveryHistory:queueLimit", "10"); - }); - - _adminToken = CreateToken("notify.admin"); - _readToken = CreateToken("notify.read"); - } - - public Task InitializeAsync() => Task.CompletedTask; - - public Task DisposeAsync() => Task.CompletedTask; - - [Fact] - public async Task RuleCrudLifecycle() - { - var client = _factory.CreateClient(); - var payload = LoadSample("notify-rule@1.sample.json"); - payload["ruleId"] = "rule-web"; - payload["tenantId"] = "tenant-web"; - payload["actions"]!.AsArray()[0]! ["actionId"] = "action-web"; - - await PostAsync(client, "/api/v1/notify/rules", payload); - - var list = await GetJsonArrayAsync(client, "/api/v1/notify/rules", useAdminToken: false); - Assert.Equal("rule-web", list?[0]? ["ruleId"]?.GetValue()); - - var single = await GetJsonObjectAsync(client, "/api/v1/notify/rules/rule-web", useAdminToken: false); - Assert.Equal("tenant-web", single? 
["tenantId"]?.GetValue()); - - await DeleteAsync(client, "/api/v1/notify/rules/rule-web"); - var afterDelete = await SendAsync(client, HttpMethod.Get, "/api/v1/notify/rules/rule-web", useAdminToken: false); - Assert.Equal(HttpStatusCode.NotFound, afterDelete.StatusCode); - } - - [Fact] - public async Task ChannelTemplateDeliveryAndAuditFlows() - { - var client = _factory.CreateClient(); - - var channelPayload = LoadSample("notify-channel@1.sample.json"); - channelPayload["channelId"] = "channel-web"; - channelPayload["tenantId"] = "tenant-web"; - await PostAsync(client, "/api/v1/notify/channels", channelPayload); - - var templatePayload = LoadSample("notify-template@1.sample.json"); - templatePayload["templateId"] = "template-web"; - templatePayload["tenantId"] = "tenant-web"; - await PostAsync(client, "/api/v1/notify/templates", templatePayload); - - var delivery = NotifyDelivery.Create( - deliveryId: "delivery-web", - tenantId: "tenant-web", - ruleId: "rule-web", - actionId: "channel-web", - eventId: Guid.NewGuid(), - kind: NotifyEventKinds.ScannerReportReady, - status: NotifyDeliveryStatus.Sent, - createdAt: DateTimeOffset.UtcNow, - sentAt: DateTimeOffset.UtcNow); - - var deliveryNode = JsonNode.Parse(NotifyCanonicalJsonSerializer.Serialize(delivery))!; - await PostAsync(client, "/api/v1/notify/deliveries", deliveryNode); - - var deliveriesEnvelope = await GetJsonObjectAsync(client, "/api/v1/notify/deliveries?limit=10", useAdminToken: false); - Assert.NotNull(deliveriesEnvelope); - Assert.Equal(1, deliveriesEnvelope? ["count"]?.GetValue()); - Assert.Null(deliveriesEnvelope? ["continuationToken"]?.GetValue()); - var deliveries = deliveriesEnvelope? 
["items"] as JsonArray; - Assert.NotNull(deliveries); - Assert.NotEmpty(deliveries!.OfType()); - - var digestNode = new JsonObject - { - ["tenantId"] = "tenant-web", - ["actionKey"] = "channel-web", - ["window"] = "hourly", - ["openedAt"] = DateTimeOffset.UtcNow.ToString("O"), - ["status"] = "open", - ["items"] = new JsonArray() - }; - await PostAsync(client, "/api/v1/notify/digests", digestNode); - - var digest = await GetJsonObjectAsync(client, "/api/v1/notify/digests/channel-web", useAdminToken: false); - Assert.Equal("channel-web", digest? ["actionKey"]?.GetValue()); - - var auditPayload = JsonNode.Parse(""" - { - "action": "create-rule", - "entityType": "rule", - "entityId": "rule-web", - "payload": {"ruleId": "rule-web"} - } - """)!; - await PostAsync(client, "/api/v1/notify/audit", auditPayload); - - var audits = await GetJsonArrayAsync(client, "/api/v1/notify/audit", useAdminToken: false); - Assert.NotNull(audits); - Assert.Contains(audits!.OfType(), entry => entry?["action"]?.GetValue() == "create-rule"); - - await DeleteAsync(client, "/api/v1/notify/digests/channel-web"); - var digestAfterDelete = await SendAsync(client, HttpMethod.Get, "/api/v1/notify/digests/channel-web", useAdminToken: false); - Assert.Equal(HttpStatusCode.NotFound, digestAfterDelete.StatusCode); - } - - [Fact] - public async Task LockEndpointsAllowAcquireAndRelease() - { - var client = _factory.CreateClient(); - var acquirePayload = JsonNode.Parse(""" - { - "resource": "workers", - "owner": "worker-1", - "ttlSeconds": 30 - } - """)!; - - var acquireResponse = await PostAsync(client, "/api/v1/notify/locks/acquire", acquirePayload); - var acquireContent = JsonNode.Parse(await acquireResponse.Content.ReadAsStringAsync()); - Assert.True(acquireContent? 
["acquired"]?.GetValue()); - - await PostAsync(client, "/api/v1/notify/locks/release", JsonNode.Parse(""" - { - "resource": "workers", - "owner": "worker-1" - } - """)!); - - var secondAcquire = await PostAsync(client, "/api/v1/notify/locks/acquire", acquirePayload); - var secondContent = JsonNode.Parse(await secondAcquire.Content.ReadAsStringAsync()); - Assert.True(secondContent? ["acquired"]?.GetValue()); - } - - [Fact] - public async Task ChannelTestSendReturnsPreview() - { - var client = _factory.CreateClient(); - - var channelPayload = LoadSample("notify-channel@1.sample.json"); - channelPayload["channelId"] = "channel-test"; - channelPayload["tenantId"] = "tenant-web"; - channelPayload["config"]! ["target"] = "#ops-alerts"; - await PostAsync(client, "/api/v1/notify/channels", channelPayload); - - var payload = JsonNode.Parse(""" - { - "target": "#ops-alerts", - "title": "Smoke test", - "body": "Sample body" - } - """)!; - - var response = await PostAsync(client, "/api/v1/notify/channels/channel-test/test", payload); - Assert.Equal(HttpStatusCode.Accepted, response.StatusCode); - - var json = JsonNode.Parse(await response.Content.ReadAsStringAsync())!.AsObject(); - Assert.Equal("tenant-web", json["tenantId"]?.GetValue()); - Assert.Equal("channel-test", json["channelId"]?.GetValue()); - Assert.NotNull(json["queuedAt"]); - Assert.NotNull(json["traceId"]); - - var preview = json["preview"]?.AsObject(); - Assert.NotNull(preview); - Assert.Equal("#ops-alerts", preview? ["target"]?.GetValue()); - Assert.Equal("Smoke test", preview? ["title"]?.GetValue()); - Assert.Equal("Sample body", preview? ["body"]?.GetValue()); - - var expectedHash = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes("Sample body"))).ToLowerInvariant(); - Assert.Equal(expectedHash, preview? 
["bodyHash"]?.GetValue()); - - var metadata = json["metadata"] as JsonObject; - Assert.NotNull(metadata); - Assert.Equal("#ops-alerts", metadata?["target"]?.GetValue()); - Assert.Equal("slack", metadata?["channelType"]?.GetValue()); - Assert.Equal("fallback", metadata?["previewProvider"]?.GetValue()); - Assert.Equal(json["traceId"]?.GetValue(), metadata?["traceId"]?.GetValue()); - } - - [Fact] - public async Task ChannelTestSendHonoursRateLimit() - { - using var limitedFactory = _factory.WithWebHostBuilder(builder => - { - builder.UseSetting("notify:api:rateLimits:testSend:tokenLimit", "1"); - builder.UseSetting("notify:api:rateLimits:testSend:tokensPerPeriod", "1"); - builder.UseSetting("notify:api:rateLimits:testSend:queueLimit", "0"); - }); - - var client = limitedFactory.CreateClient(); - - var channelPayload = LoadSample("notify-channel@1.sample.json"); - channelPayload["channelId"] = "channel-rate-limit"; - channelPayload["tenantId"] = "tenant-web"; - channelPayload["config"]! ["target"] = "#ops-alerts"; - await PostAsync(client, "/api/v1/notify/channels", channelPayload); - - var payload = JsonNode.Parse(""" - { - "body": "First" - } - """)!; - - var first = await PostAsync(client, "/api/v1/notify/channels/channel-rate-limit/test", payload); - Assert.Equal(HttpStatusCode.Accepted, first.StatusCode); - - var secondRequest = new HttpRequestMessage(HttpMethod.Post, "/api/v1/notify/channels/channel-rate-limit/test") - { - Content = new StringContent(payload.ToJsonString(), Encoding.UTF8, "application/json") - }; - - var second = await SendAsync(client, secondRequest); - Assert.Equal(HttpStatusCode.TooManyRequests, second.StatusCode); - Assert.NotNull(second.Headers.RetryAfter); - } - - [Fact] - public async Task ChannelTestSendUsesRegisteredProvider() - { - var providerName = typeof(FakeSlackTestProvider).FullName!; - - using var providerFactory = _factory.WithWebHostBuilder(builder => - { - builder.ConfigureServices(services => - { - services.AddSingleton(); - 
}); - }); - - var client = providerFactory.CreateClient(); - - var channelPayload = LoadSample("notify-channel@1.sample.json"); - channelPayload["channelId"] = "channel-provider"; - channelPayload["tenantId"] = "tenant-web"; - channelPayload["config"]! ["target"] = "#ops-alerts"; - await PostAsync(client, "/api/v1/notify/channels", channelPayload); - - var payload = JsonNode.Parse(""" - { - "target": "#ops-alerts", - "title": "Provider Title", - "summary": "Provider Summary" - } - """)!; - - var response = await PostAsync(client, "/api/v1/notify/channels/channel-provider/test", payload); - Assert.Equal(HttpStatusCode.Accepted, response.StatusCode); - - var json = JsonNode.Parse(await response.Content.ReadAsStringAsync())!.AsObject(); - var preview = json["preview"]?.AsObject(); - Assert.NotNull(preview); - Assert.Equal("#ops-alerts", preview?["target"]?.GetValue()); - Assert.Equal("Provider Title", preview?["title"]?.GetValue()); - Assert.Equal("{\"provider\":\"fake\"}", preview?["body"]?.GetValue()); - - var metadata = json["metadata"]?.AsObject(); - Assert.NotNull(metadata); - Assert.Equal(providerName, metadata?["previewProvider"]?.GetValue()); - Assert.Equal("fake-provider", metadata?["provider.name"]?.GetValue()); - } - - private sealed class FakeSlackTestProvider : INotifyChannelTestProvider - { - public NotifyChannelType ChannelType => NotifyChannelType.Slack; - - public Task BuildPreviewAsync(ChannelTestPreviewContext context, CancellationToken cancellationToken) - { - cancellationToken.ThrowIfCancellationRequested(); - - var body = "{\"provider\":\"fake\"}"; - var preview = NotifyDeliveryRendered.Create( - NotifyChannelType.Slack, - NotifyDeliveryFormat.Slack, - context.Target, - context.Request.Title ?? "Provider Title", - body, - context.Request.Summary ?? 
"Provider Summary", - context.Request.TextBody, - context.Request.Locale, - ChannelTestPreviewUtilities.ComputeBodyHash(body), - context.Request.Attachments); - - var metadata = new Dictionary(StringComparer.Ordinal) - { - ["provider.name"] = "fake-provider" - }; - - return Task.FromResult(new ChannelTestPreviewResult(preview, metadata)); - } - } - - private static JsonNode LoadSample(string fileName) - { - var path = Path.Combine(AppContext.BaseDirectory, fileName); - if (!File.Exists(path)) - { - throw new FileNotFoundException($"Unable to load sample '{fileName}'.", path); - } - - return JsonNode.Parse(File.ReadAllText(path)) ?? throw new InvalidOperationException("Sample JSON null."); - } - - private async Task GetJsonArrayAsync(HttpClient client, string path, bool useAdminToken) - { - var response = await SendAsync(client, HttpMethod.Get, path, useAdminToken); - response.EnsureSuccessStatusCode(); - var content = await response.Content.ReadAsStringAsync(); - return JsonNode.Parse(content) as JsonArray; - } - - private async Task GetJsonObjectAsync(HttpClient client, string path, bool useAdminToken) - { - var response = await SendAsync(client, HttpMethod.Get, path, useAdminToken); - response.EnsureSuccessStatusCode(); - var content = await response.Content.ReadAsStringAsync(); - return JsonNode.Parse(content) as JsonObject; - } - - private async Task PostAsync(HttpClient client, string path, JsonNode payload, bool useAdminToken = true) - { - var request = new HttpRequestMessage(HttpMethod.Post, path) - { - Content = new StringContent(payload.ToJsonString(), Encoding.UTF8, "application/json") - }; - - var response = await SendAsync(client, request, useAdminToken); - if (!response.IsSuccessStatusCode) - { - var body = await response.Content.ReadAsStringAsync(); - throw new InvalidOperationException($"Request to {path} failed with {(int)response.StatusCode} {response.StatusCode}: {body}"); - } - - return response; - } - - private Task PostAsync(HttpClient client, 
string path, JsonNode payload) - => PostAsync(client, path, payload, useAdminToken: true); - - private async Task DeleteAsync(HttpClient client, string path) - { - var response = await SendAsync(client, HttpMethod.Delete, path); - response.EnsureSuccessStatusCode(); - } - - private Task SendAsync(HttpClient client, HttpMethod method, string path, bool useAdminToken = true) - => SendAsync(client, new HttpRequestMessage(method, path), useAdminToken); - - private Task SendAsync(HttpClient client, HttpRequestMessage request, bool useAdminToken = true) - { - request.Headers.Add("X-StellaOps-Tenant", "tenant-web"); - request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", useAdminToken ? _adminToken : _readToken); - return client.SendAsync(request); - } - - private static string CreateToken(string scope) - { - var handler = new JwtSecurityTokenHandler(); - var key = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(SigningKey)); - var descriptor = new SecurityTokenDescriptor - { - Issuer = Issuer, - Audience = Audience, - Expires = DateTime.UtcNow.AddMinutes(10), - SigningCredentials = new SigningCredentials(key, SecurityAlgorithms.HmacSha256), - Subject = new System.Security.Claims.ClaimsIdentity(new[] - { - new System.Security.Claims.Claim("scope", scope), - new System.Security.Claims.Claim(System.Security.Claims.ClaimTypes.Name, "integration-test") - }) - }; - - var token = handler.CreateToken(descriptor); - return handler.WriteToken(token); - } -} +using System.Collections.Generic; +using System.IdentityModel.Tokens.Jwt; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json.Nodes; +using Microsoft.AspNetCore.Mvc.Testing; +using Microsoft.IdentityModel.Tokens; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; + +namespace 
StellaOps.Notify.WebService.Tests; + +public sealed class CrudEndpointsTests : IClassFixture>, IAsyncLifetime +{ + private const string SigningKey = "super-secret-test-key-1234567890"; + private const string Issuer = "test-issuer"; + private const string Audience = "notify"; + + private readonly WebApplicationFactory _factory; + private readonly string _operatorToken; + private readonly string _viewerToken; + + public CrudEndpointsTests(WebApplicationFactory factory) + { + _factory = factory.WithWebHostBuilder(builder => + { + builder.UseSetting("notify:storage:driver", "memory"); + builder.UseSetting("notify:authority:enabled", "false"); + builder.UseSetting("notify:authority:developmentSigningKey", SigningKey); + builder.UseSetting("notify:authority:issuer", Issuer); + builder.UseSetting("notify:authority:audiences:0", Audience); + builder.UseSetting("notify:authority:allowAnonymousFallback", "true"); + builder.UseSetting("notify:authority:adminScope", "notify.admin"); + builder.UseSetting("notify:authority:operatorScope", "notify.operator"); + builder.UseSetting("notify:authority:viewerScope", "notify.viewer"); + builder.UseSetting("notify:telemetry:enableRequestLogging", "false"); + builder.UseSetting("notify:api:rateLimits:testSend:tokenLimit", "10"); + builder.UseSetting("notify:api:rateLimits:testSend:tokensPerPeriod", "10"); + builder.UseSetting("notify:api:rateLimits:testSend:queueLimit", "5"); + builder.UseSetting("notify:api:rateLimits:deliveryHistory:tokenLimit", "30"); + builder.UseSetting("notify:api:rateLimits:deliveryHistory:tokensPerPeriod", "30"); + builder.UseSetting("notify:api:rateLimits:deliveryHistory:queueLimit", "10"); + }); + + _operatorToken = CreateToken("notify.viewer", "notify.operator", "notify.admin"); + _viewerToken = CreateToken("notify.viewer"); + + ValidateToken(_operatorToken); + ValidateToken(_viewerToken); + } + + private static void ValidateToken(string token) + { + var handler = new JwtSecurityTokenHandler(); + var 
parameters = new TokenValidationParameters + { + ValidateIssuer = true, + ValidIssuer = Issuer, + ValidateAudience = true, + ValidAudience = Audience, + ValidateIssuerSigningKey = true, + IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(SigningKey)), + ValidateLifetime = true, + ClockSkew = TimeSpan.FromSeconds(30), + NameClaimType = System.Security.Claims.ClaimTypes.Name + }; + + handler.ValidateToken(token, parameters, out _); + } + + public Task InitializeAsync() => Task.CompletedTask; + + public Task DisposeAsync() => Task.CompletedTask; + + [Fact] + public async Task RuleCrudLifecycle() + { + var client = _factory.CreateClient(); + var payload = LoadSample("notify-rule@1.sample.json"); + payload["ruleId"] = "rule-web"; + payload["tenantId"] = "tenant-web"; + payload["actions"]!.AsArray()[0]! ["actionId"] = "action-web"; + + await PostAsync(client, "/api/v1/notify/rules", payload); + + var list = await GetJsonArrayAsync(client, "/api/v1/notify/rules", useOperatorToken: false); + Assert.Equal("rule-web", list?[0]? ["ruleId"]?.GetValue()); + + var single = await GetJsonObjectAsync(client, "/api/v1/notify/rules/rule-web", useOperatorToken: false); + Assert.Equal("tenant-web", single? 
["tenantId"]?.GetValue()); + + await DeleteAsync(client, "/api/v1/notify/rules/rule-web"); + var afterDelete = await SendAsync(client, HttpMethod.Get, "/api/v1/notify/rules/rule-web", useOperatorToken: false); + Assert.Equal(HttpStatusCode.NotFound, afterDelete.StatusCode); + } + + [Fact] + public async Task ChannelTemplateDeliveryAndAuditFlows() + { + var client = _factory.CreateClient(); + + var channelPayload = LoadSample("notify-channel@1.sample.json"); + channelPayload["channelId"] = "channel-web"; + channelPayload["tenantId"] = "tenant-web"; + await PostAsync(client, "/api/v1/notify/channels", channelPayload); + + var templatePayload = LoadSample("notify-template@1.sample.json"); + templatePayload["templateId"] = "template-web"; + templatePayload["tenantId"] = "tenant-web"; + await PostAsync(client, "/api/v1/notify/templates", templatePayload); + + var delivery = NotifyDelivery.Create( + deliveryId: "delivery-web", + tenantId: "tenant-web", + ruleId: "rule-web", + actionId: "channel-web", + eventId: Guid.NewGuid(), + kind: NotifyEventKinds.ScannerReportReady, + status: NotifyDeliveryStatus.Sent, + createdAt: DateTimeOffset.UtcNow, + sentAt: DateTimeOffset.UtcNow); + + var deliveryNode = JsonNode.Parse(NotifyCanonicalJsonSerializer.Serialize(delivery))!; + await PostAsync(client, "/api/v1/notify/deliveries", deliveryNode); + + var deliveriesEnvelope = await GetJsonObjectAsync(client, "/api/v1/notify/deliveries?limit=10", useOperatorToken: false); + Assert.NotNull(deliveriesEnvelope); + Assert.Equal(1, deliveriesEnvelope? ["count"]?.GetValue()); + Assert.Null(deliveriesEnvelope? ["continuationToken"]?.GetValue()); + var deliveries = deliveriesEnvelope? 
["items"] as JsonArray; + Assert.NotNull(deliveries); + Assert.NotEmpty(deliveries!.OfType()); + + var digestNode = new JsonObject + { + ["tenantId"] = "tenant-web", + ["actionKey"] = "channel-web", + ["window"] = "hourly", + ["openedAt"] = DateTimeOffset.UtcNow.ToString("O"), + ["status"] = "open", + ["items"] = new JsonArray() + }; + await PostAsync(client, "/api/v1/notify/digests", digestNode); + + var digest = await GetJsonObjectAsync(client, "/api/v1/notify/digests/channel-web", useOperatorToken: false); + Assert.Equal("channel-web", digest? ["actionKey"]?.GetValue()); + + var auditPayload = JsonNode.Parse(""" + { + "action": "create-rule", + "entityType": "rule", + "entityId": "rule-web", + "payload": {"ruleId": "rule-web"} + } + """)!; + await PostAsync(client, "/api/v1/notify/audit", auditPayload); + + var audits = await GetJsonArrayAsync(client, "/api/v1/notify/audit", useOperatorToken: false); + Assert.NotNull(audits); + Assert.Contains(audits!.OfType(), entry => entry?["action"]?.GetValue() == "create-rule"); + + await DeleteAsync(client, "/api/v1/notify/digests/channel-web"); + var digestAfterDelete = await SendAsync(client, HttpMethod.Get, "/api/v1/notify/digests/channel-web", useOperatorToken: false); + Assert.Equal(HttpStatusCode.NotFound, digestAfterDelete.StatusCode); + } + + [Fact] + public async Task LockEndpointsAllowAcquireAndRelease() + { + var client = _factory.CreateClient(); + var acquirePayload = JsonNode.Parse(""" + { + "resource": "workers", + "owner": "worker-1", + "ttlSeconds": 30 + } + """)!; + + var acquireResponse = await PostAsync(client, "/api/v1/notify/locks/acquire", acquirePayload); + var acquireContent = JsonNode.Parse(await acquireResponse.Content.ReadAsStringAsync()); + Assert.True(acquireContent? 
["acquired"]?.GetValue()); + + await PostAsync(client, "/api/v1/notify/locks/release", JsonNode.Parse(""" + { + "resource": "workers", + "owner": "worker-1" + } + """)!); + + var secondAcquire = await PostAsync(client, "/api/v1/notify/locks/acquire", acquirePayload); + var secondContent = JsonNode.Parse(await secondAcquire.Content.ReadAsStringAsync()); + Assert.True(secondContent? ["acquired"]?.GetValue()); + } + + [Fact] + public async Task ChannelTestSendReturnsPreview() + { + var client = _factory.CreateClient(); + + var channelPayload = LoadSample("notify-channel@1.sample.json"); + channelPayload["channelId"] = "channel-test"; + channelPayload["tenantId"] = "tenant-web"; + channelPayload["config"]! ["target"] = "#ops-alerts"; + await PostAsync(client, "/api/v1/notify/channels", channelPayload); + + var payload = JsonNode.Parse(""" + { + "target": "#ops-alerts", + "title": "Smoke test", + "body": "Sample body" + } + """)!; + + var response = await PostAsync(client, "/api/v1/notify/channels/channel-test/test", payload); + Assert.Equal(HttpStatusCode.Accepted, response.StatusCode); + + var json = JsonNode.Parse(await response.Content.ReadAsStringAsync())!.AsObject(); + Assert.Equal("tenant-web", json["tenantId"]?.GetValue()); + Assert.Equal("channel-test", json["channelId"]?.GetValue()); + Assert.NotNull(json["queuedAt"]); + Assert.NotNull(json["traceId"]); + + var preview = json["preview"]?.AsObject(); + Assert.NotNull(preview); + Assert.Equal("#ops-alerts", preview? ["target"]?.GetValue()); + Assert.Equal("Smoke test", preview? ["title"]?.GetValue()); + Assert.Equal("Sample body", preview? ["body"]?.GetValue()); + + var expectedHash = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes("Sample body"))).ToLowerInvariant(); + Assert.Equal(expectedHash, preview? 
["bodyHash"]?.GetValue()); + + var metadata = json["metadata"] as JsonObject; + Assert.NotNull(metadata); + Assert.Equal("#ops-alerts", metadata?["target"]?.GetValue()); + Assert.Equal("slack", metadata?["channelType"]?.GetValue()); + Assert.Equal("fallback", metadata?["previewProvider"]?.GetValue()); + Assert.Equal(json["traceId"]?.GetValue(), metadata?["traceId"]?.GetValue()); + } + + [Fact] + public async Task ChannelTestSendHonoursRateLimit() + { + using var limitedFactory = _factory.WithWebHostBuilder(builder => + { + builder.UseSetting("notify:api:rateLimits:testSend:tokenLimit", "1"); + builder.UseSetting("notify:api:rateLimits:testSend:tokensPerPeriod", "1"); + builder.UseSetting("notify:api:rateLimits:testSend:queueLimit", "0"); + }); + + var client = limitedFactory.CreateClient(); + + var channelPayload = LoadSample("notify-channel@1.sample.json"); + channelPayload["channelId"] = "channel-rate-limit"; + channelPayload["tenantId"] = "tenant-web"; + channelPayload["config"]! ["target"] = "#ops-alerts"; + await PostAsync(client, "/api/v1/notify/channels", channelPayload); + + var payload = JsonNode.Parse(""" + { + "body": "First" + } + """)!; + + var first = await PostAsync(client, "/api/v1/notify/channels/channel-rate-limit/test", payload); + Assert.Equal(HttpStatusCode.Accepted, first.StatusCode); + + var secondRequest = new HttpRequestMessage(HttpMethod.Post, "/api/v1/notify/channels/channel-rate-limit/test") + { + Content = new StringContent(payload.ToJsonString(), Encoding.UTF8, "application/json") + }; + + var second = await SendAsync(client, secondRequest); + Assert.Equal(HttpStatusCode.TooManyRequests, second.StatusCode); + Assert.NotNull(second.Headers.RetryAfter); + } + + [Fact] + public async Task ChannelTestSendUsesRegisteredProvider() + { + var providerName = typeof(FakeSlackTestProvider).FullName!; + + using var providerFactory = _factory.WithWebHostBuilder(builder => + { + builder.ConfigureServices(services => + { + services.AddSingleton(); + 
}); + }); + + var client = providerFactory.CreateClient(); + + var channelPayload = LoadSample("notify-channel@1.sample.json"); + channelPayload["channelId"] = "channel-provider"; + channelPayload["tenantId"] = "tenant-web"; + channelPayload["config"]! ["target"] = "#ops-alerts"; + await PostAsync(client, "/api/v1/notify/channels", channelPayload); + + var payload = JsonNode.Parse(""" + { + "target": "#ops-alerts", + "title": "Provider Title", + "summary": "Provider Summary" + } + """)!; + + var response = await PostAsync(client, "/api/v1/notify/channels/channel-provider/test", payload); + Assert.Equal(HttpStatusCode.Accepted, response.StatusCode); + + var json = JsonNode.Parse(await response.Content.ReadAsStringAsync())!.AsObject(); + var preview = json["preview"]?.AsObject(); + Assert.NotNull(preview); + Assert.Equal("#ops-alerts", preview?["target"]?.GetValue()); + Assert.Equal("Provider Title", preview?["title"]?.GetValue()); + Assert.Equal("{\"provider\":\"fake\"}", preview?["body"]?.GetValue()); + + var metadata = json["metadata"]?.AsObject(); + Assert.NotNull(metadata); + Assert.Equal(providerName, metadata?["previewProvider"]?.GetValue()); + Assert.Equal("fake-provider", metadata?["provider.name"]?.GetValue()); + } + + private sealed class FakeSlackTestProvider : INotifyChannelTestProvider + { + public NotifyChannelType ChannelType => NotifyChannelType.Slack; + + public Task BuildPreviewAsync(ChannelTestPreviewContext context, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + var body = "{\"provider\":\"fake\"}"; + var preview = NotifyDeliveryRendered.Create( + NotifyChannelType.Slack, + NotifyDeliveryFormat.Slack, + context.Target, + context.Request.Title ?? "Provider Title", + body, + context.Request.Summary ?? 
"Provider Summary", + context.Request.TextBody, + context.Request.Locale, + ChannelTestPreviewUtilities.ComputeBodyHash(body), + context.Request.Attachments); + + var metadata = new Dictionary(StringComparer.Ordinal) + { + ["provider.name"] = "fake-provider" + }; + + return Task.FromResult(new ChannelTestPreviewResult(preview, metadata)); + } + } + + private static JsonNode LoadSample(string fileName) + { + var path = Path.Combine(AppContext.BaseDirectory, fileName); + if (!File.Exists(path)) + { + throw new FileNotFoundException($"Unable to load sample '{fileName}'.", path); + } + + return JsonNode.Parse(File.ReadAllText(path)) ?? throw new InvalidOperationException("Sample JSON null."); + } + + private async Task GetJsonArrayAsync(HttpClient client, string path, bool useOperatorToken) + { + var response = await SendAsync(client, HttpMethod.Get, path, useOperatorToken); + response.EnsureSuccessStatusCode(); + var content = await response.Content.ReadAsStringAsync(); + return JsonNode.Parse(content) as JsonArray; + } + + private async Task GetJsonObjectAsync(HttpClient client, string path, bool useOperatorToken) + { + var response = await SendAsync(client, HttpMethod.Get, path, useOperatorToken); + response.EnsureSuccessStatusCode(); + var content = await response.Content.ReadAsStringAsync(); + return JsonNode.Parse(content) as JsonObject; + } + + private async Task PostAsync(HttpClient client, string path, JsonNode payload, bool useOperatorToken = true) + { + var request = new HttpRequestMessage(HttpMethod.Post, path) + { + Content = new StringContent(payload.ToJsonString(), Encoding.UTF8, "application/json") + }; + + var response = await SendAsync(client, request, useOperatorToken); + if (!response.IsSuccessStatusCode) + { + var body = await response.Content.ReadAsStringAsync(); + var authHeader = response.Headers.WwwAuthenticate.ToString(); + throw new InvalidOperationException($"Request to {path} failed with {(int)response.StatusCode} {response.StatusCode}: 
{body} (WWW-Authenticate: {authHeader})"); + } + + return response; + } + + private Task PostAsync(HttpClient client, string path, JsonNode payload) + => PostAsync(client, path, payload, useOperatorToken: true); + + private async Task DeleteAsync(HttpClient client, string path) + { + var response = await SendAsync(client, HttpMethod.Delete, path); + response.EnsureSuccessStatusCode(); + } + + private Task SendAsync(HttpClient client, HttpMethod method, string path, bool useOperatorToken = true) + => SendAsync(client, new HttpRequestMessage(method, path), useOperatorToken); + + private Task SendAsync(HttpClient client, HttpRequestMessage request, bool useOperatorToken = true) + { + request.Headers.Add("X-StellaOps-Tenant", "tenant-web"); + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", useOperatorToken ? _operatorToken : _viewerToken); + return client.SendAsync(request); + } + + private static string CreateToken(params string[] scopes) + { + var handler = new JwtSecurityTokenHandler(); + var key = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(SigningKey)); + var claims = new List + { + new System.Security.Claims.Claim(System.Security.Claims.ClaimTypes.Name, "integration-test") + }; + + foreach (var scope in scopes) + { + claims.Add(new System.Security.Claims.Claim("scope", scope)); + claims.Add(new System.Security.Claims.Claim("http://schemas.microsoft.com/identity/claims/scope", scope)); + } + + var descriptor = new SecurityTokenDescriptor + { + Issuer = Issuer, + Audience = Audience, + Expires = DateTime.UtcNow.AddMinutes(10), + SigningCredentials = new SigningCredentials(key, SecurityAlgorithms.HmacSha256), + Subject = new System.Security.Claims.ClaimsIdentity(claims) + }; + + var token = handler.CreateToken(descriptor); + return handler.WriteToken(token); + } +} diff --git a/src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md b/src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md index 30f035a6..c7f3a132 100644 --- 
a/src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md +++ b/src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md @@ -3,7 +3,7 @@ ## Sprint 41 – Foundations | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| PACKS-REG-41-001 | TODO | Packs Registry Guild | AUTH-PACKS-41-001 | Implement registry service, migrations for `packs_index`, `parity_matrix`, provenance docs; support pack upload/list/get, signature verification, RBAC enforcement, and provenance manifest storage. | Service builds/tests; signature verification works; RBAC validated; provenance stored; docs cross-linked. | +| PACKS-REG-41-001 | DOING (2025-11-01) | Packs Registry Guild | AUTH-PACKS-41-001 | Implement registry service, migrations for `packs_index`, `parity_matrix`, provenance docs; support pack upload/list/get, signature verification, RBAC enforcement, and provenance manifest storage. | Service builds/tests; signature verification works; RBAC validated; provenance stored; docs cross-linked. 
| ## Sprint 42 – Lifecycle & Governance | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | diff --git a/src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/GatewayActivationTests.cs b/src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/GatewayActivationTests.cs index 4c2df9f7..6041654e 100644 --- a/src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/GatewayActivationTests.cs +++ b/src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/GatewayActivationTests.cs @@ -435,7 +435,7 @@ public sealed class GatewayActivationTests app.Use(async (context, innerNext) => { context.Connection.RemoteIpAddress ??= IPAddress.Loopback; - await innerNext().ConfigureAwait(false); + await innerNext(); }); next(app); diff --git a/src/Scanner/StellaOps.Scanner.WebService/Contracts/EntryTraceResponse.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/EntryTraceResponse.cs new file mode 100644 index 00000000..48ca2ede --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.WebService/Contracts/EntryTraceResponse.cs @@ -0,0 +1,10 @@ +using StellaOps.Scanner.EntryTrace; + +namespace StellaOps.Scanner.WebService.Contracts; + +public sealed record EntryTraceResponse( + string ScanId, + string ImageDigest, + DateTimeOffset GeneratedAt, + EntryTraceGraph Graph, + IReadOnlyList Ndjson); diff --git a/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs index 06e5f416..5ee7356e 100644 --- a/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs @@ -1,309 +1,356 @@ -using System.Collections.Generic; -using System.IO.Pipelines; -using System.Runtime.CompilerServices; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading.Tasks; -using System.Text; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Routing; -using StellaOps.Scanner.WebService.Constants; -using 
StellaOps.Scanner.WebService.Contracts; -using StellaOps.Scanner.WebService.Domain; -using StellaOps.Scanner.WebService.Infrastructure; -using StellaOps.Scanner.WebService.Security; -using StellaOps.Scanner.WebService.Services; - -namespace StellaOps.Scanner.WebService.Endpoints; - -internal static class ScanEndpoints -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - Converters = { new JsonStringEnumConverter() } - }; - - public static void MapScanEndpoints(this RouteGroupBuilder apiGroup, string scansSegment) - { - ArgumentNullException.ThrowIfNull(apiGroup); - - var scans = apiGroup.MapGroup(NormalizeSegment(scansSegment)); - - scans.MapPost("/", HandleSubmitAsync) - .WithName("scanner.scans.submit") - .Produces(StatusCodes.Status202Accepted) - .Produces(StatusCodes.Status400BadRequest) - .Produces(StatusCodes.Status409Conflict) - .RequireAuthorization(ScannerPolicies.ScansEnqueue); - - scans.MapGet("/{scanId}", HandleStatusAsync) - .WithName("scanner.scans.status") - .Produces(StatusCodes.Status200OK) - .Produces(StatusCodes.Status404NotFound) - .RequireAuthorization(ScannerPolicies.ScansRead); - - scans.MapGet("/{scanId}/events", HandleProgressStreamAsync) - .WithName("scanner.scans.events") - .Produces(StatusCodes.Status200OK) - .Produces(StatusCodes.Status404NotFound) - .RequireAuthorization(ScannerPolicies.ScansRead); - } - - private static async Task HandleSubmitAsync( - ScanSubmitRequest request, - IScanCoordinator coordinator, - LinkGenerator links, - HttpContext context, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(request); - ArgumentNullException.ThrowIfNull(coordinator); - ArgumentNullException.ThrowIfNull(links); - - if (request.Image is null) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Invalid scan submission", - StatusCodes.Status400BadRequest, - detail: "Request image descriptor is required."); - } - - var reference = 
request.Image.Reference; - var digest = request.Image.Digest; - if (string.IsNullOrWhiteSpace(reference) && string.IsNullOrWhiteSpace(digest)) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Invalid scan submission", - StatusCodes.Status400BadRequest, - detail: "Either image.reference or image.digest must be provided."); - } - - if (!string.IsNullOrWhiteSpace(digest) && !digest.Contains(':', StringComparison.Ordinal)) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Invalid scan submission", - StatusCodes.Status400BadRequest, - detail: "Image digest must include algorithm prefix (e.g. sha256:...)."); - } - - var target = new ScanTarget(reference, digest).Normalize(); - var metadata = NormalizeMetadata(request.Metadata); - var submission = new ScanSubmission( - Target: target, - Force: request.Force, - ClientRequestId: request.ClientRequestId?.Trim(), - Metadata: metadata); - - ScanSubmissionResult result; - try - { - result = await coordinator.SubmitAsync(submission, context.RequestAborted).ConfigureAwait(false); - } - catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) - { - throw; - } - - var statusText = result.Snapshot.Status.ToString(); - var location = links.GetPathByName( - httpContext: context, - endpointName: "scanner.scans.status", - values: new { scanId = result.Snapshot.ScanId.Value }); - - if (!string.IsNullOrWhiteSpace(location)) - { - context.Response.Headers.Location = location; - } - - var response = new ScanSubmitResponse( - ScanId: result.Snapshot.ScanId.Value, - Status: statusText, - Location: location, - Created: result.Created); - - return Json(response, StatusCodes.Status202Accepted); - } - - private static async Task HandleStatusAsync( - string scanId, - IScanCoordinator coordinator, - HttpContext context, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(coordinator); - - if (!ScanId.TryParse(scanId, out var 
parsed)) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Invalid scan identifier", - StatusCodes.Status400BadRequest, - detail: "Scan identifier is required."); - } - - var snapshot = await coordinator.GetAsync(parsed, context.RequestAborted).ConfigureAwait(false); - if (snapshot is null) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.NotFound, - "Scan not found", - StatusCodes.Status404NotFound, - detail: "Requested scan could not be located."); - } - - var response = new ScanStatusResponse( - ScanId: snapshot.ScanId.Value, - Status: snapshot.Status.ToString(), - Image: new ScanStatusTarget(snapshot.Target.Reference, snapshot.Target.Digest), - CreatedAt: snapshot.CreatedAt, - UpdatedAt: snapshot.UpdatedAt, - FailureReason: snapshot.FailureReason); - - return Json(response, StatusCodes.Status200OK); - } - - private static async Task HandleProgressStreamAsync( - string scanId, - string? format, - IScanProgressReader progressReader, - HttpContext context, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(progressReader); - - if (!ScanId.TryParse(scanId, out var parsed)) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Invalid scan identifier", - StatusCodes.Status400BadRequest, - detail: "Scan identifier is required."); - } - - if (!progressReader.Exists(parsed)) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.NotFound, - "Scan not found", - StatusCodes.Status404NotFound, - detail: "Requested scan could not be located."); - } - - var streamFormat = string.Equals(format, "jsonl", StringComparison.OrdinalIgnoreCase) - ? 
"jsonl" - : "sse"; - - context.Response.StatusCode = StatusCodes.Status200OK; - context.Response.Headers.CacheControl = "no-store"; - context.Response.Headers["X-Accel-Buffering"] = "no"; - context.Response.Headers["Connection"] = "keep-alive"; - - if (streamFormat == "jsonl") - { - context.Response.ContentType = "application/x-ndjson"; - } - else - { - context.Response.ContentType = "text/event-stream"; - } - - await foreach (var progressEvent in progressReader.SubscribeAsync(parsed, context.RequestAborted).WithCancellation(context.RequestAborted)) - { - var payload = new - { - scanId = progressEvent.ScanId.Value, - sequence = progressEvent.Sequence, - state = progressEvent.State, - message = progressEvent.Message, - timestamp = progressEvent.Timestamp, - correlationId = progressEvent.CorrelationId, - data = progressEvent.Data - }; - - if (streamFormat == "jsonl") - { - await WriteJsonLineAsync(context.Response.BodyWriter, payload, cancellationToken).ConfigureAwait(false); - } - else - { - await WriteSseAsync(context.Response.BodyWriter, payload, progressEvent, cancellationToken).ConfigureAwait(false); - } - - await context.Response.BodyWriter.FlushAsync(cancellationToken).ConfigureAwait(false); - } - - return Results.Empty; - } - - private static IReadOnlyDictionary NormalizeMetadata(IDictionary metadata) - { - if (metadata is null || metadata.Count == 0) - { - return new Dictionary(); - } - - var normalized = new Dictionary(StringComparer.OrdinalIgnoreCase); - foreach (var pair in metadata) - { - if (string.IsNullOrWhiteSpace(pair.Key)) - { - continue; - } - - var key = pair.Key.Trim(); - var value = pair.Value?.Trim() ?? 
string.Empty; - normalized[key] = value; - } - - return normalized; - } - - private static async Task WriteJsonLineAsync(PipeWriter writer, object payload, CancellationToken cancellationToken) - { - var json = JsonSerializer.Serialize(payload, SerializerOptions); - var jsonBytes = Encoding.UTF8.GetBytes(json); - await writer.WriteAsync(jsonBytes, cancellationToken).ConfigureAwait(false); - await writer.WriteAsync(new[] { (byte)'\n' }, cancellationToken).ConfigureAwait(false); - } - - private static async Task WriteSseAsync(PipeWriter writer, object payload, ScanProgressEvent progressEvent, CancellationToken cancellationToken) - { - var json = JsonSerializer.Serialize(payload, SerializerOptions); - var eventName = progressEvent.State.ToLowerInvariant(); - var builder = new StringBuilder(); - builder.Append("id: ").Append(progressEvent.Sequence).Append('\n'); - builder.Append("event: ").Append(eventName).Append('\n'); - builder.Append("data: ").Append(json).Append('\n'); - builder.Append('\n'); - - var bytes = Encoding.UTF8.GetBytes(builder.ToString()); - await writer.WriteAsync(bytes, cancellationToken).ConfigureAwait(false); - } - - private static IResult Json(T value, int statusCode) - { - var payload = JsonSerializer.Serialize(value, SerializerOptions); - return Results.Content(payload, "application/json", System.Text.Encoding.UTF8, statusCode); - } - - private static string NormalizeSegment(string segment) - { - if (string.IsNullOrWhiteSpace(segment)) - { - return "/scans"; - } - - var trimmed = segment.Trim('/'); - return "/" + trimmed; - } -} +using System.Collections.Generic; +using System.IO.Pipelines; +using System.Runtime.CompilerServices; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading.Tasks; +using System.Text; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Routing; +using StellaOps.Scanner.WebService.Constants; +using StellaOps.Scanner.WebService.Contracts; +using 
StellaOps.Scanner.WebService.Domain; +using StellaOps.Scanner.WebService.Infrastructure; +using StellaOps.Scanner.WebService.Security; +using StellaOps.Scanner.WebService.Services; +using StellaOps.Scanner.EntryTrace; + +namespace StellaOps.Scanner.WebService.Endpoints; + +internal static class ScanEndpoints +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + Converters = { new JsonStringEnumConverter() } + }; + + public static void MapScanEndpoints(this RouteGroupBuilder apiGroup, string scansSegment) + { + ArgumentNullException.ThrowIfNull(apiGroup); + + var scans = apiGroup.MapGroup(NormalizeSegment(scansSegment)); + + scans.MapPost("/", HandleSubmitAsync) + .WithName("scanner.scans.submit") + .Produces(StatusCodes.Status202Accepted) + .Produces(StatusCodes.Status400BadRequest) + .Produces(StatusCodes.Status409Conflict) + .RequireAuthorization(ScannerPolicies.ScansEnqueue); + + scans.MapGet("/{scanId}", HandleStatusAsync) + .WithName("scanner.scans.status") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status404NotFound) + .RequireAuthorization(ScannerPolicies.ScansRead); + + scans.MapGet("/{scanId}/events", HandleProgressStreamAsync) + .WithName("scanner.scans.events") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status404NotFound) + .RequireAuthorization(ScannerPolicies.ScansRead); + + scans.MapGet("/{scanId}/entrytrace", HandleEntryTraceAsync) + .WithName("scanner.scans.entrytrace") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status404NotFound) + .RequireAuthorization(ScannerPolicies.ScansRead); + } + + private static async Task HandleSubmitAsync( + ScanSubmitRequest request, + IScanCoordinator coordinator, + LinkGenerator links, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(coordinator); + ArgumentNullException.ThrowIfNull(links); + + if (request.Image is 
null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan submission", + StatusCodes.Status400BadRequest, + detail: "Request image descriptor is required."); + } + + var reference = request.Image.Reference; + var digest = request.Image.Digest; + if (string.IsNullOrWhiteSpace(reference) && string.IsNullOrWhiteSpace(digest)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan submission", + StatusCodes.Status400BadRequest, + detail: "Either image.reference or image.digest must be provided."); + } + + if (!string.IsNullOrWhiteSpace(digest) && !digest.Contains(':', StringComparison.Ordinal)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan submission", + StatusCodes.Status400BadRequest, + detail: "Image digest must include algorithm prefix (e.g. sha256:...)."); + } + + var target = new ScanTarget(reference, digest).Normalize(); + var metadata = NormalizeMetadata(request.Metadata); + var submission = new ScanSubmission( + Target: target, + Force: request.Force, + ClientRequestId: request.ClientRequestId?.Trim(), + Metadata: metadata); + + ScanSubmissionResult result; + try + { + result = await coordinator.SubmitAsync(submission, context.RequestAborted).ConfigureAwait(false); + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + throw; + } + + var statusText = result.Snapshot.Status.ToString(); + var location = links.GetPathByName( + httpContext: context, + endpointName: "scanner.scans.status", + values: new { scanId = result.Snapshot.ScanId.Value }); + + if (!string.IsNullOrWhiteSpace(location)) + { + context.Response.Headers.Location = location; + } + + var response = new ScanSubmitResponse( + ScanId: result.Snapshot.ScanId.Value, + Status: statusText, + Location: location, + Created: result.Created); + + return Json(response, StatusCodes.Status202Accepted); + } + + private static async Task 
HandleStatusAsync( + string scanId, + IScanCoordinator coordinator, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(coordinator); + + if (!ScanId.TryParse(scanId, out var parsed)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan identifier", + StatusCodes.Status400BadRequest, + detail: "Scan identifier is required."); + } + + var snapshot = await coordinator.GetAsync(parsed, context.RequestAborted).ConfigureAwait(false); + if (snapshot is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "Scan not found", + StatusCodes.Status404NotFound, + detail: "Requested scan could not be located."); + } + + var response = new ScanStatusResponse( + ScanId: snapshot.ScanId.Value, + Status: snapshot.Status.ToString(), + Image: new ScanStatusTarget(snapshot.Target.Reference, snapshot.Target.Digest), + CreatedAt: snapshot.CreatedAt, + UpdatedAt: snapshot.UpdatedAt, + FailureReason: snapshot.FailureReason); + + return Json(response, StatusCodes.Status200OK); + } + + private static async Task HandleProgressStreamAsync( + string scanId, + string? format, + IScanProgressReader progressReader, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(progressReader); + + if (!ScanId.TryParse(scanId, out var parsed)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan identifier", + StatusCodes.Status400BadRequest, + detail: "Scan identifier is required."); + } + + if (!progressReader.Exists(parsed)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "Scan not found", + StatusCodes.Status404NotFound, + detail: "Requested scan could not be located."); + } + + var streamFormat = string.Equals(format, "jsonl", StringComparison.OrdinalIgnoreCase) + ? 
"jsonl" + : "sse"; + + context.Response.StatusCode = StatusCodes.Status200OK; + context.Response.Headers.CacheControl = "no-store"; + context.Response.Headers["X-Accel-Buffering"] = "no"; + context.Response.Headers["Connection"] = "keep-alive"; + + if (streamFormat == "jsonl") + { + context.Response.ContentType = "application/x-ndjson"; + } + else + { + context.Response.ContentType = "text/event-stream"; + } + + await foreach (var progressEvent in progressReader.SubscribeAsync(parsed, context.RequestAborted).WithCancellation(context.RequestAborted)) + { + var payload = new + { + scanId = progressEvent.ScanId.Value, + sequence = progressEvent.Sequence, + state = progressEvent.State, + message = progressEvent.Message, + timestamp = progressEvent.Timestamp, + correlationId = progressEvent.CorrelationId, + data = progressEvent.Data + }; + + if (streamFormat == "jsonl") + { + await WriteJsonLineAsync(context.Response.BodyWriter, payload, cancellationToken).ConfigureAwait(false); + } + else + { + await WriteSseAsync(context.Response.BodyWriter, payload, progressEvent, cancellationToken).ConfigureAwait(false); + } + + await context.Response.BodyWriter.FlushAsync(cancellationToken).ConfigureAwait(false); + } + + return Results.Empty; + } + + + private static async Task HandleEntryTraceAsync( + string scanId, + IEntryTraceResultStore resultStore, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(resultStore); + + if (!ScanId.TryParse(scanId, out var parsed)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan identifier", + StatusCodes.Status400BadRequest, + detail: "Scan identifier is required."); + } + + var result = await resultStore.GetAsync(parsed.Value, cancellationToken).ConfigureAwait(false); + if (result is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "EntryTrace not found", + StatusCodes.Status404NotFound, + detail: "EntryTrace 
data is not available for the requested scan."); + } + + var response = new EntryTraceResponse( + result.ScanId, + result.ImageDigest, + result.GeneratedAtUtc, + result.Graph, + result.Ndjson); + + return Json(response, StatusCodes.Status200OK); + } + + private static IReadOnlyDictionary NormalizeMetadata(IDictionary metadata) + { + if (metadata is null || metadata.Count == 0) + { + return new Dictionary(); + } + + var normalized = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var pair in metadata) + { + if (string.IsNullOrWhiteSpace(pair.Key)) + { + continue; + } + + var key = pair.Key.Trim(); + var value = pair.Value?.Trim() ?? string.Empty; + normalized[key] = value; + } + + return normalized; + } + + private static async Task WriteJsonLineAsync(PipeWriter writer, object payload, CancellationToken cancellationToken) + { + var json = JsonSerializer.Serialize(payload, SerializerOptions); + var jsonBytes = Encoding.UTF8.GetBytes(json); + await writer.WriteAsync(jsonBytes, cancellationToken).ConfigureAwait(false); + await writer.WriteAsync(new[] { (byte)'\n' }, cancellationToken).ConfigureAwait(false); + } + + private static async Task WriteSseAsync(PipeWriter writer, object payload, ScanProgressEvent progressEvent, CancellationToken cancellationToken) + { + var json = JsonSerializer.Serialize(payload, SerializerOptions); + var eventName = progressEvent.State.ToLowerInvariant(); + var builder = new StringBuilder(); + builder.Append("id: ").Append(progressEvent.Sequence).Append('\n'); + builder.Append("event: ").Append(eventName).Append('\n'); + builder.Append("data: ").Append(json).Append('\n'); + builder.Append('\n'); + + var bytes = Encoding.UTF8.GetBytes(builder.ToString()); + await writer.WriteAsync(bytes, cancellationToken).ConfigureAwait(false); + } + + private static IResult Json(T value, int statusCode) + { + var payload = JsonSerializer.Serialize(value, SerializerOptions); + return Results.Content(payload, "application/json", 
System.Text.Encoding.UTF8, statusCode); + } + + private static string NormalizeSegment(string segment) + { + if (string.IsNullOrWhiteSpace(segment)) + { + return "/scans"; + } + + var trimmed = segment.Trim('/'); + return "/" + trimmed; + } +} diff --git a/src/Scanner/StellaOps.Scanner.Worker/Options/ScannerWorkerOptions.cs b/src/Scanner/StellaOps.Scanner.Worker/Options/ScannerWorkerOptions.cs index be6f6eaa..32bbe9f0 100644 --- a/src/Scanner/StellaOps.Scanner.Worker/Options/ScannerWorkerOptions.cs +++ b/src/Scanner/StellaOps.Scanner.Worker/Options/ScannerWorkerOptions.cs @@ -171,5 +171,7 @@ public sealed class ScannerWorkerOptions public string EntryTraceLayerDirectoriesMetadataKey { get; set; } = ScanMetadataKeys.LayerDirectories; public string EntryTraceLayerArchivesMetadataKey { get; set; } = ScanMetadataKeys.LayerArchives; + + public string EntryTraceProcRootMetadataKey { get; set; } = ScanMetadataKeys.RuntimeProcRoot; } } diff --git a/src/Scanner/StellaOps.Scanner.Worker/Processing/EntryTraceExecutionService.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/EntryTraceExecutionService.cs index 7e861701..d75ffb5c 100644 --- a/src/Scanner/StellaOps.Scanner.Worker/Processing/EntryTraceExecutionService.cs +++ b/src/Scanner/StellaOps.Scanner.Worker/Processing/EntryTraceExecutionService.cs @@ -1,302 +1,740 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.IO; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scanner.Core.Contracts; -using StellaOps.Scanner.EntryTrace; -using StellaOps.Scanner.Worker.Options; -using IOPath = System.IO.Path; - -namespace StellaOps.Scanner.Worker.Processing; - -public sealed class EntryTraceExecutionService : IEntryTraceExecutionService -{ - private readonly IEntryTraceAnalyzer _analyzer; - private readonly EntryTraceAnalyzerOptions _entryTraceOptions; - private 
readonly ScannerWorkerOptions _workerOptions; - private readonly ILogger _logger; - private readonly ILoggerFactory _loggerFactory; - - public EntryTraceExecutionService( - IEntryTraceAnalyzer analyzer, - IOptions entryTraceOptions, - IOptions workerOptions, - ILogger logger, - ILoggerFactory loggerFactory) - { - _analyzer = analyzer ?? throw new ArgumentNullException(nameof(analyzer)); - _entryTraceOptions = (entryTraceOptions ?? throw new ArgumentNullException(nameof(entryTraceOptions))).Value ?? new EntryTraceAnalyzerOptions(); - _workerOptions = (workerOptions ?? throw new ArgumentNullException(nameof(workerOptions))).Value ?? new ScannerWorkerOptions(); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _loggerFactory = loggerFactory ?? throw new ArgumentNullException(nameof(loggerFactory)); - } - - public async ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(context); - - var metadata = context.Lease.Metadata ?? 
new Dictionary(StringComparer.Ordinal); - - var configPath = ResolvePath(metadata, _workerOptions.Analyzers.EntryTraceConfigMetadataKey, ScanMetadataKeys.ImageConfigPath); - if (configPath is null) - { - _logger.LogDebug("EntryTrace config metadata '{MetadataKey}' missing for job {JobId}; skipping entry trace.", _workerOptions.Analyzers.EntryTraceConfigMetadataKey, context.JobId); - return; - } - - if (!File.Exists(configPath)) - { - _logger.LogWarning("EntryTrace config file '{ConfigPath}' not found for job {JobId}; skipping entry trace.", configPath, context.JobId); - return; - } - - OciImageConfig config; - try - { - using var stream = File.OpenRead(configPath); - config = OciImageConfigLoader.Load(stream); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to parse OCI image config at '{ConfigPath}' for job {JobId}; entry trace skipped.", configPath, context.JobId); - return; - } - - var fileSystem = BuildFileSystem(context.JobId, metadata); - if (fileSystem is null) - { - return; - } - - var imageDigest = ResolveImageDigest(metadata, context); - var entryTraceLogger = _loggerFactory.CreateLogger(); - EntryTraceImageContext imageContext; - try - { - imageContext = EntryTraceImageContextFactory.Create( - config, - fileSystem, - _entryTraceOptions, - imageDigest, - context.ScanId, - entryTraceLogger); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to build EntryTrace context for job {JobId}; skipping entry trace.", context.JobId); - return; - } - - EntryTraceGraph graph; - try - { - graph = await _analyzer.ResolveAsync(imageContext.Entrypoint, imageContext.Context, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - _logger.LogError(ex, "EntryTrace analyzer failed for job {JobId}.", context.JobId); - return; - } - - context.Analysis.Set(ScanAnalysisKeys.EntryTraceGraph, graph); - } - - private LayeredRootFileSystem? 
BuildFileSystem(string jobId, IReadOnlyDictionary metadata) - { - var directoryValues = ResolveList(metadata, _workerOptions.Analyzers.EntryTraceLayerDirectoriesMetadataKey, ScanMetadataKeys.LayerDirectories); - var archiveValues = ResolveList(metadata, _workerOptions.Analyzers.EntryTraceLayerArchivesMetadataKey, ScanMetadataKeys.LayerArchives); - - var directoryLayers = new List(); - foreach (var value in directoryValues) - { - var fullPath = NormalizePath(value); - if (string.IsNullOrWhiteSpace(fullPath)) - { - continue; - } - - if (!Directory.Exists(fullPath)) - { - _logger.LogWarning("EntryTrace layer directory '{Directory}' not found for job {JobId}; skipping layer.", fullPath, jobId); - continue; - } - - directoryLayers.Add(new LayeredRootFileSystem.LayerDirectory(TryDeriveDigest(fullPath) ?? string.Empty, fullPath)); - } - - var archiveLayers = new List(); - foreach (var value in archiveValues) - { - var fullPath = NormalizePath(value); - if (string.IsNullOrWhiteSpace(fullPath)) - { - continue; - } - - if (!File.Exists(fullPath)) - { - _logger.LogWarning("EntryTrace layer archive '{Archive}' not found for job {JobId}; skipping layer.", fullPath, jobId); - continue; - } - - archiveLayers.Add(new LayeredRootFileSystem.LayerArchive(TryDeriveDigest(fullPath) ?? 
string.Empty, fullPath)); - } - - try - { - if (archiveLayers.Count > 0) - { - return LayeredRootFileSystem.FromArchives(archiveLayers); - } - - if (directoryLayers.Count > 0) - { - return LayeredRootFileSystem.FromDirectories(directoryLayers); - } - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to construct layered root filesystem for job {JobId}; entry trace skipped.", jobId); - return null; - } - - var rootFsPath = ResolvePath(metadata, _workerOptions.Analyzers.RootFilesystemMetadataKey, ScanMetadataKeys.RootFilesystemPath); - if (!string.IsNullOrWhiteSpace(rootFsPath) && Directory.Exists(rootFsPath)) - { - try - { - return LayeredRootFileSystem.FromDirectories(new[] - { - new LayeredRootFileSystem.LayerDirectory(TryDeriveDigest(rootFsPath) ?? string.Empty, rootFsPath) - }); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to create layered filesystem from root path '{RootPath}' for job {JobId}; entry trace skipped.", rootFsPath, jobId); - return null; - } - } - - _logger.LogDebug("No EntryTrace layers or root filesystem metadata available for job {JobId}; skipping entry trace.", jobId); - return null; - } - - private static string ResolveImageDigest(IReadOnlyDictionary metadata, ScanJobContext context) - { - if (metadata.TryGetValue("image.digest", out var digest) && !string.IsNullOrWhiteSpace(digest)) - { - return digest.Trim(); - } - - if (metadata.TryGetValue("imageDigest", out var altDigest) && !string.IsNullOrWhiteSpace(altDigest)) - { - return altDigest.Trim(); - } - - return context.Lease.Metadata.TryGetValue("scanner.image.digest", out var scopedDigest) && !string.IsNullOrWhiteSpace(scopedDigest) - ? 
scopedDigest.Trim() - : $"sha256:{context.JobId}"; - } - - private static IReadOnlyCollection ResolveList(IReadOnlyDictionary metadata, string key, string fallbackKey) - { - if (metadata.TryGetValue(key, out var value) && !string.IsNullOrWhiteSpace(value)) - { - return SplitList(value); - } - - if (!string.Equals(key, fallbackKey, StringComparison.Ordinal) && - metadata.TryGetValue(fallbackKey, out var fallbackValue) && - !string.IsNullOrWhiteSpace(fallbackValue)) - { - return SplitList(fallbackValue); - } - - return Array.Empty(); - } - - private static string? ResolvePath(IReadOnlyDictionary metadata, string key, string fallbackKey) - { - if (metadata.TryGetValue(key, out var value) && !string.IsNullOrWhiteSpace(value)) - { - return NormalizePath(value); - } - - if (!string.Equals(key, fallbackKey, StringComparison.Ordinal) && - metadata.TryGetValue(fallbackKey, out var fallbackValue) && - !string.IsNullOrWhiteSpace(fallbackValue)) - { - return NormalizePath(fallbackValue); - } - - return null; - } - - private static IReadOnlyCollection SplitList(string value) - { - var segments = value.Split(new[] { ';', ',', '\n', '\r', IOPath.PathSeparator }, StringSplitOptions.RemoveEmptyEntries); - return segments - .Select(segment => NormalizePath(segment)) - .Where(segment => !string.IsNullOrWhiteSpace(segment)) - .ToArray(); - } - - private static string NormalizePath(string? value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return string.Empty; - } - - var trimmed = value.Trim().Trim('"'); - return string.IsNullOrWhiteSpace(trimmed) ? string.Empty : trimmed; - } - - private static string? 
TryDeriveDigest(string path) - { - if (string.IsNullOrWhiteSpace(path)) - { - return null; - } - - var candidate = path.TrimEnd(IOPath.DirectorySeparatorChar, IOPath.AltDirectorySeparatorChar); - var name = IOPath.GetFileName(candidate); - if (string.IsNullOrWhiteSpace(name)) - { - return null; - } - - var normalized = name; - if (normalized.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase)) - { - normalized = normalized[..^7]; - } - else if (normalized.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase)) - { - normalized = normalized[..^4]; - } - else if (normalized.EndsWith(".tar", StringComparison.OrdinalIgnoreCase)) - { - normalized = normalized[..^4]; - } - - if (normalized.Contains(':', StringComparison.Ordinal)) - { - return normalized; - } - - if (normalized.StartsWith("sha", StringComparison.OrdinalIgnoreCase)) - { - return normalized.Contains('-') - ? normalized.Replace('-', ':') - : $"sha256:{normalized}"; - } - - return null; - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.IO; +using System.Linq; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.EntryTrace; +using StellaOps.Scanner.EntryTrace.FileSystem; +using StellaOps.Scanner.EntryTrace.Runtime; +using StellaOps.Scanner.Surface.Env; +using StellaOps.Scanner.Surface.FS; +using StellaOps.Scanner.Surface.Secrets; +using StellaOps.Scanner.Surface.Validation; +using StellaOps.Scanner.Worker.Options; +using IOPath = System.IO.Path; + +namespace StellaOps.Scanner.Worker.Processing; + +public sealed class EntryTraceExecutionService : IEntryTraceExecutionService +{ + private const string CacheNamespace = "entrytrace.graph"; + private const string CacheEnvelopeVersion = 
"entrytrace.v1"; + private const string ComponentName = "Scanner.Worker.EntryTrace"; + + private static readonly JsonSerializerOptions CacheSerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + WriteIndented = false + }; + + private static readonly UTF8Encoding StrictUtf8 = new(encoderShouldEmitUTF8Identifier: false, throwOnInvalidBytes: true); + + private sealed record FileSystemHandle( + IRootFileSystem FileSystem, + string RootPath, + ImmutableArray LayerDirectories, + ImmutableArray LayerArchives); + + private readonly IEntryTraceAnalyzer _analyzer; + private readonly EntryTraceAnalyzerOptions _entryTraceOptions; + private readonly ScannerWorkerOptions _workerOptions; + private readonly ILogger _logger; + private readonly ILoggerFactory _loggerFactory; + private readonly EntryTraceRuntimeReconciler _runtimeReconciler; + private readonly IEntryTraceResultStore _resultStore; + private readonly ISurfaceValidatorRunner _validatorRunner; + private readonly ISurfaceEnvironment _surfaceEnvironment; + private readonly ISurfaceCache _surfaceCache; + private readonly ISurfaceSecretProvider _surfaceSecrets; + private readonly IServiceProvider _serviceProvider; + + public EntryTraceExecutionService( + IEntryTraceAnalyzer analyzer, + IOptions entryTraceOptions, + IOptions workerOptions, + ILogger logger, + ILoggerFactory loggerFactory, + EntryTraceRuntimeReconciler runtimeReconciler, + IEntryTraceResultStore resultStore, + ISurfaceValidatorRunner validatorRunner, + ISurfaceEnvironment surfaceEnvironment, + ISurfaceCache surfaceCache, + ISurfaceSecretProvider surfaceSecrets, + IServiceProvider serviceProvider) + { + _analyzer = analyzer ?? throw new ArgumentNullException(nameof(analyzer)); + _entryTraceOptions = (entryTraceOptions ?? throw new ArgumentNullException(nameof(entryTraceOptions))).Value ?? new EntryTraceAnalyzerOptions(); + _workerOptions = (workerOptions ?? 
throw new ArgumentNullException(nameof(workerOptions))).Value ?? new ScannerWorkerOptions(); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _loggerFactory = loggerFactory ?? throw new ArgumentNullException(nameof(loggerFactory)); + _runtimeReconciler = runtimeReconciler ?? throw new ArgumentNullException(nameof(runtimeReconciler)); + _resultStore = resultStore ?? throw new ArgumentNullException(nameof(resultStore)); + _validatorRunner = validatorRunner ?? throw new ArgumentNullException(nameof(validatorRunner)); + _surfaceEnvironment = surfaceEnvironment ?? throw new ArgumentNullException(nameof(surfaceEnvironment)); + _surfaceCache = surfaceCache ?? throw new ArgumentNullException(nameof(surfaceCache)); + _surfaceSecrets = surfaceSecrets ?? throw new ArgumentNullException(nameof(surfaceSecrets)); + _serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider)); + } + + public async ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + var metadata = context.Lease.Metadata ?? 
new Dictionary(StringComparer.Ordinal); + + var configPath = ResolvePath(metadata, _workerOptions.Analyzers.EntryTraceConfigMetadataKey, ScanMetadataKeys.ImageConfigPath); + if (configPath is null) + { + _logger.LogDebug("EntryTrace config metadata '{MetadataKey}' missing for job {JobId}; skipping entry trace.", _workerOptions.Analyzers.EntryTraceConfigMetadataKey, context.JobId); + return; + } + + if (!File.Exists(configPath)) + { + _logger.LogWarning("EntryTrace config file '{ConfigPath}' not found for job {JobId}; skipping entry trace.", configPath, context.JobId); + return; + } + + OciImageConfig config; + try + { + using var stream = File.OpenRead(configPath); + config = OciImageConfigLoader.Load(stream); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to parse OCI image config at '{ConfigPath}' for job {JobId}; entry trace skipped.", configPath, context.JobId); + return; + } + + var fileSystemHandle = BuildFileSystem(context.JobId, metadata); + if (fileSystemHandle is null) + { + return; + } + + var imageDigest = ResolveImageDigest(metadata, context); + var validationPassed = await RunSurfaceValidationAsync( + imageDigest, + configPath, + fileSystemHandle.RootPath, + fileSystemHandle.LayerDirectories, + fileSystemHandle.LayerArchives, + context.JobId, + cancellationToken).ConfigureAwait(false); + + if (!validationPassed) + { + return; + } + + var entryTraceLogger = _loggerFactory.CreateLogger(); + EntryTraceImageContext imageContext; + try + { + imageContext = EntryTraceImageContextFactory.Create( + config, + fileSystemHandle.FileSystem, + _entryTraceOptions, + imageDigest, + context.ScanId, + entryTraceLogger); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to build EntryTrace context for job {JobId}; skipping entry trace.", context.JobId); + return; + } + + imageContext = await ResolveEnvironmentSecretsAsync(imageContext, context.JobId, cancellationToken).ConfigureAwait(false); + + var optionsFingerprint = 
ComputeOptionsFingerprint(_entryTraceOptions); + var cacheKey = CreateCacheKey(imageDigest, imageContext, _surfaceEnvironment.Settings.Tenant, optionsFingerprint); + + EntryTraceGraph graph; + try + { + graph = await GetOrCreateGraphAsync( + cacheKey, + imageContext, + optionsFingerprint, + cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "EntryTrace cache lookup failed for job {JobId}; running analyzer without cache.", context.JobId); + graph = await _analyzer.ResolveAsync(imageContext.Entrypoint, imageContext.Context, cancellationToken).ConfigureAwait(false); + } + + var runtimeGraph = BuildRuntimeGraph(metadata, context.JobId); + graph = _runtimeReconciler.Reconcile(graph, runtimeGraph); + + var generatedAt = context.TimeProvider.GetUtcNow(); + var ndjson = EntryTraceNdjsonWriter.Serialize( + graph, + new EntryTraceNdjsonMetadata( + context.ScanId, + imageDigest, + generatedAt, + Source: "scanner.worker")); + + context.Analysis.Set(ScanAnalysisKeys.EntryTraceGraph, graph); + context.Analysis.Set(ScanAnalysisKeys.EntryTraceNdjson, ndjson); + + var result = new EntryTraceResult( + context.ScanId, + imageDigest, + generatedAt, + graph, + ndjson); + + await _resultStore.StoreAsync(result, cancellationToken).ConfigureAwait(false); + } + + private async Task GetOrCreateGraphAsync( + SurfaceCacheKey cacheKey, + EntryTraceImageContext imageContext, + string optionsFingerprint, + CancellationToken cancellationToken) + { + var cached = await _surfaceCache.TryGetAsync(cacheKey, DeserializeCacheEnvelope, cancellationToken).ConfigureAwait(false); + if (cached is { } envelope && + string.Equals(envelope.Version, CacheEnvelopeVersion, StringComparison.Ordinal) && + string.Equals(envelope.OptionsFingerprint, optionsFingerprint, StringComparison.Ordinal)) + { + _logger.LogDebug("EntryTrace cache hit for {ImageDigest} (scan {ScanId}).", imageContext.Context.ImageDigest, imageContext.Context.ScanId); + return envelope.Graph; + } + + 
var graph = await _analyzer.ResolveAsync(imageContext.Entrypoint, imageContext.Context, cancellationToken).ConfigureAwait(false); + var newEnvelope = new EntryTraceCacheEnvelope(CacheEnvelopeVersion, optionsFingerprint, graph); + await _surfaceCache.SetAsync(cacheKey, SerializeCacheEnvelope(newEnvelope), cancellationToken).ConfigureAwait(false); + _logger.LogDebug("EntryTrace cache stored for {ImageDigest} (scan {ScanId}).", imageContext.Context.ImageDigest, imageContext.Context.ScanId); + return graph; + } + + private async Task RunSurfaceValidationAsync( + string imageDigest, + string configPath, + string rootPath, + ImmutableArray layerDirectories, + ImmutableArray layerArchives, + string jobId, + CancellationToken cancellationToken) + { + var properties = new Dictionary(StringComparer.Ordinal) + { + ["imageDigest"] = imageDigest, + ["jobId"] = jobId, + ["configPath"] = configPath, + ["rootPath"] = rootPath + }; + + if (!layerDirectories.IsDefaultOrEmpty && layerDirectories.Length > 0) + { + properties["layerDirectories"] = string.Join(",", layerDirectories); + } + + if (!layerArchives.IsDefaultOrEmpty && layerArchives.Length > 0) + { + properties["layerArchives"] = string.Join(",", layerArchives); + } + + var validationContext = SurfaceValidationContext.Create( + _serviceProvider, + ComponentName, + _surfaceEnvironment.Settings, + properties); + + var result = await _validatorRunner.RunAllAsync(validationContext, cancellationToken).ConfigureAwait(false); + if (!result.IsSuccess) + { + var summary = string.Join( + ", ", + result.Issues.Select(issue => $"{issue.Code}:{issue.Severity}")); + + _logger.LogWarning( + "Surface validation failed for job {JobId}; skipping entry trace. 
Issues: {Issues}", + jobId, + summary); + + return false; + } + + return true; + } + + private async Task ResolveEnvironmentSecretsAsync( + EntryTraceImageContext imageContext, + string jobId, + CancellationToken cancellationToken) + { + var environment = imageContext.Context.Environment; + if (environment.Count == 0) + { + return imageContext; + } + + var builder = environment.ToBuilder(); + var changed = false; + foreach (var (key, rawValue) in environment) + { + if (!TryParseSecretReference(rawValue, out var secretType, out var secretName)) + { + continue; + } + + try + { + using var handle = await _surfaceSecrets.GetAsync(new SurfaceSecretRequest( + Tenant: _surfaceEnvironment.Settings.Secrets.Tenant, + Component: ComponentName, + SecretType: secretType, + Name: secretName), cancellationToken).ConfigureAwait(false); + + var secretBytes = handle.AsBytes(); + string decoded; + + if (secretBytes.IsEmpty) + { + decoded = string.Empty; + } + else + { + try + { + decoded = StrictUtf8.GetString(secretBytes.Span); + } + catch (DecoderFallbackException) + { + var buffer = secretBytes.ToArray(); + decoded = Convert.ToBase64String(buffer); + _logger.LogDebug( + "Secret reference '{Secret}' for job {JobId} could not be decoded as UTF-8; using base64 representation.", + rawValue, + jobId); + } + } + + builder[key] = decoded; + changed = true; + } + catch (SurfaceSecretNotFoundException) + { + _logger.LogWarning("Secret reference '{Secret}' not found for job {JobId}; leaving placeholder value.", rawValue, jobId); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to resolve secret reference '{Secret}' for job {JobId}; leaving placeholder value.", rawValue, jobId); + } + } + + if (!changed) + { + return imageContext; + } + + var updatedEnvironment = builder.ToImmutable(); + var updatedContext = imageContext.Context with { Environment = updatedEnvironment }; + return imageContext with { Context = updatedContext }; + } + + private static bool 
TryParseSecretReference(string value, out string secretType, out string? secretName) + { + secretType = string.Empty; + secretName = null; + + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + const string Prefix = "secret://"; + if (!value.StartsWith(Prefix, StringComparison.OrdinalIgnoreCase)) + { + return false; + } + + var remainder = value.Substring(Prefix.Length); + var segments = remainder.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (segments.Length == 0) + { + return false; + } + + secretType = segments[0]; + secretName = segments.Length > 1 ? segments[1] : null; + return true; + } + + private static SurfaceCacheKey CreateCacheKey( + string imageDigest, + EntryTraceImageContext context, + string tenant, + string optionsFingerprint) + { + var builder = new StringBuilder(); + builder.Append(imageDigest); + builder.Append('|').Append(context.Context.User); + builder.Append('|').Append(context.Context.WorkingDirectory); + builder.Append('|').Append(ComputeEntrypointSignature(context.Entrypoint)); + builder.Append('|').Append(ComputeEnvironmentFingerprint(context.Context.Environment)); + builder.Append('|').Append(optionsFingerprint); + + var hash = ComputeSha256(builder.ToString()); + return new SurfaceCacheKey(CacheNamespace, tenant, hash); + } + + private static string ComputeOptionsFingerprint(EntryTraceAnalyzerOptions options) + { + var builder = new StringBuilder(); + builder.Append(options.MaxDepth); + builder.Append('|').Append(options.FollowRunParts); + builder.Append('|').Append(options.RunPartsLimit); + builder.Append('|').Append(options.DefaultPath); + return ComputeSha256(builder.ToString()); + } + + private static string ComputeEntrypointSignature(EntrypointSpecification specification) + { + var builder = new StringBuilder(); + builder.AppendJoin(',', specification.Entrypoint); + builder.Append('|'); + builder.AppendJoin(',', specification.Command); + 
builder.Append('|').Append(specification.EntrypointShell ?? string.Empty); + builder.Append('|').Append(specification.CommandShell ?? string.Empty); + return ComputeSha256(builder.ToString()); + } + + private static string ComputeEnvironmentFingerprint(ImmutableDictionary environment) + { + if (environment.Count == 0) + { + return "env:none"; + } + + var builder = new StringBuilder(); + foreach (var kvp in environment.OrderBy(pair => pair.Key, StringComparer.Ordinal)) + { + builder.Append(kvp.Key).Append('=').Append(kvp.Value).Append(';'); + } + + return ComputeSha256(builder.ToString()); + } + + private static string ComputeSha256(string value) + { + using var sha = SHA256.Create(); + var bytes = Encoding.UTF8.GetBytes(value); + var hash = sha.ComputeHash(bytes); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + private static string? ResolvePath( + IReadOnlyDictionary metadata, + string workerMetadataKey, + string legacyMetadataKey) + { + if (!string.IsNullOrWhiteSpace(workerMetadataKey) && metadata.TryGetValue(workerMetadataKey, out var workerPath) && !string.IsNullOrWhiteSpace(workerPath)) + { + return workerPath; + } + + if (metadata.TryGetValue(legacyMetadataKey, out var legacyPath) && !string.IsNullOrWhiteSpace(legacyPath)) + { + return legacyPath; + } + + return null; + } + + private string ResolveImageDigest(IReadOnlyDictionary metadata, ScanJobContext context) + { + static bool TryGetValue(IReadOnlyDictionary source, string key, out string value) + { + if (source.TryGetValue(key, out var found) && !string.IsNullOrWhiteSpace(found)) + { + value = found.Trim(); + return true; + } + + value = string.Empty; + return false; + } + + if (TryGetValue(metadata, "image.digest", out var digest)) + { + return digest; + } + + if (TryGetValue(metadata, "imageDigest", out digest)) + { + return digest; + } + + if (TryGetValue(metadata, "scanner.image.digest", out digest)) + { + return digest; + } + + _logger.LogDebug("Image digest metadata missing for job 
{JobId}; falling back to scan id.", context.JobId); + return context.ScanId; + } + + private ProcGraph? BuildRuntimeGraph(IReadOnlyDictionary metadata, string jobId) + { + var procRoot = ResolvePath( + metadata, + _workerOptions.Analyzers.EntryTraceProcRootMetadataKey, + ScanMetadataKeys.RuntimeProcRoot); + + if (string.IsNullOrWhiteSpace(procRoot)) + { + return null; + } + + var normalized = IOPath.GetFullPath(procRoot); + if (!Directory.Exists(normalized)) + { + _logger.LogDebug("EntryTrace runtime proc root '{ProcRoot}' not found for job {JobId}; skipping runtime reconciliation.", normalized, jobId); + return null; + } + + try + { + var provider = new ProcFileSystemSnapshot(normalized); + return ProcGraphBuilder.Build(provider); + } + catch (Exception ex) + { + _logger.LogDebug(ex, "Failed to read runtime process snapshot from '{ProcRoot}' for job {JobId}.", normalized, jobId); + return null; + } + } + + private FileSystemHandle? BuildFileSystem(string jobId, IReadOnlyDictionary metadata) + { + var rootPath = ResolvePath( + metadata, + _workerOptions.Analyzers.RootFilesystemMetadataKey, + ScanMetadataKeys.RootFilesystemPath); + + if (string.IsNullOrWhiteSpace(rootPath)) + { + _logger.LogDebug( + "Root filesystem metadata key '{MetadataKey}' missing for job {JobId}; skipping entry trace.", + _workerOptions.Analyzers.RootFilesystemMetadataKey, + jobId); + return null; + } + + string rootDirectory; + try + { + rootDirectory = IOPath.GetFullPath(rootPath); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Root filesystem path '{RootPath}' could not be normalised for job {JobId}.", rootPath, jobId); + return null; + } + + if (!Directory.Exists(rootDirectory)) + { + _logger.LogWarning("Root filesystem directory '{Root}' does not exist for job {JobId}; skipping entry trace.", rootDirectory, jobId); + return null; + } + + var layerDirectoryEntries = ParseLayerEntries(metadata, _workerOptions.Analyzers.EntryTraceLayerDirectoriesMetadataKey); + var 
layerArchiveEntries = ParseLayerEntries(metadata, _workerOptions.Analyzers.EntryTraceLayerArchivesMetadataKey); + + var directoryDescriptors = new List(); + var directoryPathsBuilder = ImmutableArray.CreateBuilder(); + foreach (var (digest, rawPath) in layerDirectoryEntries) + { + string fullPath; + try + { + fullPath = IOPath.GetFullPath(rawPath); + } + catch (Exception ex) + { + _logger.LogDebug(ex, "Layer directory path '{LayerPath}' invalid for job {JobId}; skipping.", rawPath, jobId); + continue; + } + + if (!Directory.Exists(fullPath)) + { + _logger.LogDebug("Layer directory '{LayerDirectory}' not found for job {JobId}; skipping.", fullPath, jobId); + continue; + } + + var descriptorDigest = string.IsNullOrWhiteSpace(digest) ? ComputeSha256(fullPath) : digest!; + directoryDescriptors.Add(new LayeredRootFileSystem.LayerDirectory(descriptorDigest, fullPath)); + directoryPathsBuilder.Add(fullPath); + } + + var archiveDescriptors = new List(); + var archivePathsBuilder = ImmutableArray.CreateBuilder(); + foreach (var (digest, rawPath) in layerArchiveEntries) + { + string fullPath; + try + { + fullPath = IOPath.GetFullPath(rawPath); + } + catch (Exception ex) + { + _logger.LogDebug(ex, "Layer archive path '{LayerPath}' invalid for job {JobId}; skipping.", rawPath, jobId); + continue; + } + + if (!File.Exists(fullPath)) + { + _logger.LogDebug("Layer archive '{LayerArchive}' not found for job {JobId}; skipping.", fullPath, jobId); + continue; + } + + var descriptorDigest = string.IsNullOrWhiteSpace(digest) ? 
ComputeSha256(fullPath) : digest!; + archiveDescriptors.Add(new LayeredRootFileSystem.LayerArchive(descriptorDigest, fullPath)); + archivePathsBuilder.Add(fullPath); + } + + var layerDirectoryPaths = directoryPathsBuilder.ToImmutable(); + var layerArchivePaths = archivePathsBuilder.ToImmutable(); + + if (directoryDescriptors.Count > 0) + { + try + { + var layered = LayeredRootFileSystem.FromDirectories(directoryDescriptors); + return new FileSystemHandle(layered, rootDirectory, layerDirectoryPaths, layerArchivePaths); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to construct layered filesystem from directories for job {JobId}; falling back to base directory.", jobId); + } + } + + if (archiveDescriptors.Count > 0) + { + try + { + var layered = LayeredRootFileSystem.FromArchives(archiveDescriptors); + return new FileSystemHandle(layered, rootDirectory, layerDirectoryPaths, layerArchivePaths); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to construct layered filesystem from archives for job {JobId}; falling back to base directory.", jobId); + } + } + + try + { + var directoryFs = new DirectoryRootFileSystem(rootDirectory); + return new FileSystemHandle(directoryFs, rootDirectory, layerDirectoryPaths, layerArchivePaths); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Unable to build directory filesystem for job {JobId}; skipping entry trace.", jobId); + return null; + } + } + + private static ImmutableArray<(string? Digest, string Path)> ParseLayerEntries( + IReadOnlyDictionary metadata, + string metadataKey) + { + if (string.IsNullOrWhiteSpace(metadataKey) || + !metadata.TryGetValue(metadataKey, out var rawValue) || + string.IsNullOrWhiteSpace(rawValue)) + { + return ImmutableArray<(string?, string)>.Empty; + } + + rawValue = rawValue.Trim(); + IEnumerable tokens; + + if (rawValue.StartsWith("[", StringComparison.Ordinal)) + { + try + { + var parsed = JsonSerializer.Deserialize(rawValue); + tokens = parsed ?? 
Array.Empty(); + } + catch + { + tokens = SplitLayerString(rawValue); + } + } + else + { + tokens = SplitLayerString(rawValue); + } + + var builder = ImmutableArray.CreateBuilder<(string?, string)>(); + foreach (var token in tokens) + { + var entry = token.Trim(); + if (entry.Length == 0) + { + continue; + } + + var separator = entry.IndexOf('='); + string? digest = null; + string pathPart = entry; + + if (separator >= 0) + { + digest = entry[..separator].Trim(); + pathPart = entry[(separator + 1)..].Trim(); + } + + if (pathPart.Length == 0) + { + continue; + } + + builder.Add((string.IsNullOrWhiteSpace(digest) ? null : digest, pathPart)); + } + + return builder.ToImmutable(); + } + + private static IEnumerable SplitLayerString(string raw) + => raw.Split(new[] { '\n', '\r', ';' }, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + + private static EntryTraceCacheEnvelope? DeserializeCacheEnvelope(ReadOnlyMemory payload) + { + if (payload.IsEmpty) + { + return null; + } + + try + { + return JsonSerializer.Deserialize(payload.Span, CacheSerializerOptions); + } + catch + { + return null; + } + } + + private static ReadOnlyMemory SerializeCacheEnvelope(EntryTraceCacheEnvelope envelope) + { + ArgumentNullException.ThrowIfNull(envelope); + return JsonSerializer.SerializeToUtf8Bytes(envelope, CacheSerializerOptions).AsMemory(); + } + +} diff --git a/src/Scanner/StellaOps.Scanner.Worker/Program.cs b/src/Scanner/StellaOps.Scanner.Worker/Program.cs index 2887f39c..d9d140c8 100644 --- a/src/Scanner/StellaOps.Scanner.Worker/Program.cs +++ b/src/Scanner/StellaOps.Scanner.Worker/Program.cs @@ -1,7 +1,7 @@ -using System.Diagnostics; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; +using System.Diagnostics; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using 
Microsoft.Extensions.DependencyInjection.Extensions; using StellaOps.Auth.Client; @@ -10,101 +10,120 @@ using StellaOps.Scanner.Analyzers.OS.Plugin; using StellaOps.Scanner.Analyzers.Lang.Plugin; using StellaOps.Scanner.EntryTrace; using StellaOps.Scanner.Core.Security; +using StellaOps.Scanner.Surface.Env; +using StellaOps.Scanner.Surface.FS; +using StellaOps.Scanner.Surface.Secrets; +using StellaOps.Scanner.Surface.Validation; using StellaOps.Scanner.Worker.Diagnostics; using StellaOps.Scanner.Worker.Hosting; using StellaOps.Scanner.Worker.Options; using StellaOps.Scanner.Worker.Processing; - -var builder = Host.CreateApplicationBuilder(args); - -builder.Services.AddOptions() - .BindConfiguration(ScannerWorkerOptions.SectionName) - .ValidateOnStart(); - +using StellaOps.Scanner.Storage.Extensions; + +var builder = Host.CreateApplicationBuilder(args); + +builder.Services.AddOptions() + .BindConfiguration(ScannerWorkerOptions.SectionName) + .ValidateOnStart(); + builder.Services.AddSingleton, ScannerWorkerOptionsValidator>(); builder.Services.AddSingleton(TimeProvider.System); builder.Services.AddScannerCache(builder.Configuration); +builder.Services.AddSurfaceEnvironment(options => +{ + options.ComponentName = "Scanner.Worker"; +}); +builder.Services.AddSurfaceValidation(); +builder.Services.AddSurfaceFileCache(); +builder.Services.AddSurfaceSecrets(); builder.Services.AddSingleton(); -builder.Services.AddSingleton(); -builder.Services.AddSingleton(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddEntryTraceAnalyzer(); builder.Services.AddSingleton(); +var storageSection = builder.Configuration.GetSection("ScannerStorage"); +var connectionString = storageSection.GetValue("Mongo:ConnectionString"); +if (!string.IsNullOrWhiteSpace(connectionString)) +{ + builder.Services.AddScannerStorage(storageSection); +} + builder.Services.TryAddSingleton(); 
builder.Services.TryAddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); - -builder.Services.AddSingleton(); -builder.Services.AddHostedService(sp => sp.GetRequiredService()); - -var workerOptions = builder.Configuration.GetSection(ScannerWorkerOptions.SectionName).Get() ?? new ScannerWorkerOptions(); - -builder.Services.Configure(options => -{ - options.ShutdownTimeout = workerOptions.Shutdown.Timeout; -}); - -builder.ConfigureScannerWorkerTelemetry(workerOptions); - -if (workerOptions.Authority.Enabled) -{ - builder.Services.AddStellaOpsAuthClient(clientOptions => - { - clientOptions.Authority = workerOptions.Authority.Issuer?.Trim() ?? string.Empty; - clientOptions.ClientId = workerOptions.Authority.ClientId?.Trim() ?? string.Empty; - clientOptions.ClientSecret = workerOptions.Authority.ClientSecret; - clientOptions.EnableRetries = workerOptions.Authority.Resilience.EnableRetries ?? true; - clientOptions.HttpTimeout = TimeSpan.FromSeconds(workerOptions.Authority.BackchannelTimeoutSeconds); - - clientOptions.DefaultScopes.Clear(); - foreach (var scope in workerOptions.Authority.Scopes) - { - if (string.IsNullOrWhiteSpace(scope)) - { - continue; - } - - clientOptions.DefaultScopes.Add(scope); - } - - clientOptions.RetryDelays.Clear(); - foreach (var delay in workerOptions.Authority.Resilience.RetryDelays) - { - if (delay <= TimeSpan.Zero) - { - continue; - } - - clientOptions.RetryDelays.Add(delay); - } - - if (workerOptions.Authority.Resilience.AllowOfflineCacheFallback is bool allowOffline) - { - clientOptions.AllowOfflineCacheFallback = allowOffline; - } - - if (workerOptions.Authority.Resilience.OfflineCacheTolerance is { } tolerance && tolerance > TimeSpan.Zero) - { - clientOptions.OfflineCacheTolerance = tolerance; - } - }); -} - -builder.Logging.Configure(options => -{ - options.ActivityTrackingOptions = ActivityTrackingOptions.SpanId - | 
ActivityTrackingOptions.TraceId - | ActivityTrackingOptions.ParentId; -}); - -var host = builder.Build(); - -await host.RunAsync(); - -public partial class Program; + +builder.Services.AddSingleton(); +builder.Services.AddHostedService(sp => sp.GetRequiredService()); + +var workerOptions = builder.Configuration.GetSection(ScannerWorkerOptions.SectionName).Get() ?? new ScannerWorkerOptions(); + +builder.Services.Configure(options => +{ + options.ShutdownTimeout = workerOptions.Shutdown.Timeout; +}); + +builder.ConfigureScannerWorkerTelemetry(workerOptions); + +if (workerOptions.Authority.Enabled) +{ + builder.Services.AddStellaOpsAuthClient(clientOptions => + { + clientOptions.Authority = workerOptions.Authority.Issuer?.Trim() ?? string.Empty; + clientOptions.ClientId = workerOptions.Authority.ClientId?.Trim() ?? string.Empty; + clientOptions.ClientSecret = workerOptions.Authority.ClientSecret; + clientOptions.EnableRetries = workerOptions.Authority.Resilience.EnableRetries ?? true; + clientOptions.HttpTimeout = TimeSpan.FromSeconds(workerOptions.Authority.BackchannelTimeoutSeconds); + + clientOptions.DefaultScopes.Clear(); + foreach (var scope in workerOptions.Authority.Scopes) + { + if (string.IsNullOrWhiteSpace(scope)) + { + continue; + } + + clientOptions.DefaultScopes.Add(scope); + } + + clientOptions.RetryDelays.Clear(); + foreach (var delay in workerOptions.Authority.Resilience.RetryDelays) + { + if (delay <= TimeSpan.Zero) + { + continue; + } + + clientOptions.RetryDelays.Add(delay); + } + + if (workerOptions.Authority.Resilience.AllowOfflineCacheFallback is bool allowOffline) + { + clientOptions.AllowOfflineCacheFallback = allowOffline; + } + + if (workerOptions.Authority.Resilience.OfflineCacheTolerance is { } tolerance && tolerance > TimeSpan.Zero) + { + clientOptions.OfflineCacheTolerance = tolerance; + } + }); +} + +builder.Logging.Configure(options => +{ + options.ActivityTrackingOptions = ActivityTrackingOptions.SpanId + | 
ActivityTrackingOptions.TraceId + | ActivityTrackingOptions.ParentId; +}); + +var host = builder.Build(); + +await host.RunAsync(); + +public partial class Program; diff --git a/src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj b/src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj index ee0776d7..e7a2e565 100644 --- a/src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj +++ b/src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj @@ -21,5 +21,10 @@ + + + + + - \ No newline at end of file + diff --git a/src/Scanner/StellaOps.Scanner.sln b/src/Scanner/StellaOps.Scanner.sln index 14214e23..1538233c 100644 --- a/src/Scanner/StellaOps.Scanner.sln +++ b/src/Scanner/StellaOps.Scanner.sln @@ -73,7 +73,7 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Emit", "_ EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Queue", "__Libraries\StellaOps.Scanner.Queue\StellaOps.Scanner.Queue.csproj", "{CE58DBCD-FE30-4714-A462-758459B21185}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Sbomer.BuildXPlugin", "StellaOps.Scanner.Sbomer.BuildXPlugin\StellaOps.Scanner.Sbomer.BuildXPlugin.csproj", "{A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Sbomer.BuildXPlugin", "StellaOps.Scanner.Sbomer.BuildXPlugin\StellaOps.Scanner.Sbomer.BuildXPlugin.csproj", "{A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}" EndProject @@ -109,6 +109,16 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.WebServic EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Worker.Tests", "__Tests\StellaOps.Scanner.Worker.Tests\StellaOps.Scanner.Worker.Tests.csproj", "{51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}" EndProject 
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Benchmarks", "__Benchmarks", "{7FECE895-ECB6-33CE-12BE-877282A67F5D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks", "__Benchmarks\StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks\StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks.csproj", "{E76AE786-599B-434C-8E52-1B1211768386}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Surface.Validation", "__Libraries\StellaOps.Scanner.Surface.Validation\StellaOps.Scanner.Surface.Validation.csproj", "{B6C4BB91-BC9F-4F5F-904F-9B19C80D4E4A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Surface.FS", "__Libraries\StellaOps.Scanner.Surface.FS\StellaOps.Scanner.Surface.FS.csproj", "{B2597D13-8733-4F20-B157-B4B5D36FB59A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Surface.Env", "__Libraries\StellaOps.Scanner.Surface.Env\StellaOps.Scanner.Surface.Env.csproj", "{C2B2B38A-D67D-429E-BB2E-023E25EBD7D3}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -731,6 +741,54 @@ Global {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Release|x64.Build.0 = Release|Any CPU {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Release|x86.ActiveCfg = Release|Any CPU {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Release|x86.Build.0 = Release|Any CPU + {E76AE786-599B-434C-8E52-1B1211768386}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E76AE786-599B-434C-8E52-1B1211768386}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E76AE786-599B-434C-8E52-1B1211768386}.Debug|x64.ActiveCfg = Debug|Any CPU + {E76AE786-599B-434C-8E52-1B1211768386}.Debug|x64.Build.0 = Debug|Any CPU + {E76AE786-599B-434C-8E52-1B1211768386}.Debug|x86.ActiveCfg = Debug|Any CPU + {E76AE786-599B-434C-8E52-1B1211768386}.Debug|x86.Build.0 = Debug|Any CPU + {E76AE786-599B-434C-8E52-1B1211768386}.Release|Any CPU.ActiveCfg = 
Release|Any CPU + {E76AE786-599B-434C-8E52-1B1211768386}.Release|Any CPU.Build.0 = Release|Any CPU + {E76AE786-599B-434C-8E52-1B1211768386}.Release|x64.ActiveCfg = Release|Any CPU + {E76AE786-599B-434C-8E52-1B1211768386}.Release|x64.Build.0 = Release|Any CPU + {E76AE786-599B-434C-8E52-1B1211768386}.Release|x86.ActiveCfg = Release|Any CPU + {E76AE786-599B-434C-8E52-1B1211768386}.Release|x86.Build.0 = Release|Any CPU + {B6C4BB91-BC9F-4F5F-904F-9B19C80D4E4A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B6C4BB91-BC9F-4F5F-904F-9B19C80D4E4A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B6C4BB91-BC9F-4F5F-904F-9B19C80D4E4A}.Debug|x64.ActiveCfg = Debug|Any CPU + {B6C4BB91-BC9F-4F5F-904F-9B19C80D4E4A}.Debug|x64.Build.0 = Debug|Any CPU + {B6C4BB91-BC9F-4F5F-904F-9B19C80D4E4A}.Debug|x86.ActiveCfg = Debug|Any CPU + {B6C4BB91-BC9F-4F5F-904F-9B19C80D4E4A}.Debug|x86.Build.0 = Debug|Any CPU + {B6C4BB91-BC9F-4F5F-904F-9B19C80D4E4A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B6C4BB91-BC9F-4F5F-904F-9B19C80D4E4A}.Release|Any CPU.Build.0 = Release|Any CPU + {B6C4BB91-BC9F-4F5F-904F-9B19C80D4E4A}.Release|x64.ActiveCfg = Release|Any CPU + {B6C4BB91-BC9F-4F5F-904F-9B19C80D4E4A}.Release|x64.Build.0 = Release|Any CPU + {B6C4BB91-BC9F-4F5F-904F-9B19C80D4E4A}.Release|x86.ActiveCfg = Release|Any CPU + {B6C4BB91-BC9F-4F5F-904F-9B19C80D4E4A}.Release|x86.Build.0 = Release|Any CPU + {B2597D13-8733-4F20-B157-B4B5D36FB59A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B2597D13-8733-4F20-B157-B4B5D36FB59A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B2597D13-8733-4F20-B157-B4B5D36FB59A}.Debug|x64.ActiveCfg = Debug|Any CPU + {B2597D13-8733-4F20-B157-B4B5D36FB59A}.Debug|x64.Build.0 = Debug|Any CPU + {B2597D13-8733-4F20-B157-B4B5D36FB59A}.Debug|x86.ActiveCfg = Debug|Any CPU + {B2597D13-8733-4F20-B157-B4B5D36FB59A}.Debug|x86.Build.0 = Debug|Any CPU + {B2597D13-8733-4F20-B157-B4B5D36FB59A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B2597D13-8733-4F20-B157-B4B5D36FB59A}.Release|Any CPU.Build.0 = Release|Any 
CPU + {B2597D13-8733-4F20-B157-B4B5D36FB59A}.Release|x64.ActiveCfg = Release|Any CPU + {B2597D13-8733-4F20-B157-B4B5D36FB59A}.Release|x64.Build.0 = Release|Any CPU + {B2597D13-8733-4F20-B157-B4B5D36FB59A}.Release|x86.ActiveCfg = Release|Any CPU + {B2597D13-8733-4F20-B157-B4B5D36FB59A}.Release|x86.Build.0 = Release|Any CPU + {C2B2B38A-D67D-429E-BB2E-023E25EBD7D3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C2B2B38A-D67D-429E-BB2E-023E25EBD7D3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C2B2B38A-D67D-429E-BB2E-023E25EBD7D3}.Debug|x64.ActiveCfg = Debug|Any CPU + {C2B2B38A-D67D-429E-BB2E-023E25EBD7D3}.Debug|x64.Build.0 = Debug|Any CPU + {C2B2B38A-D67D-429E-BB2E-023E25EBD7D3}.Debug|x86.ActiveCfg = Debug|Any CPU + {C2B2B38A-D67D-429E-BB2E-023E25EBD7D3}.Debug|x86.Build.0 = Debug|Any CPU + {C2B2B38A-D67D-429E-BB2E-023E25EBD7D3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C2B2B38A-D67D-429E-BB2E-023E25EBD7D3}.Release|Any CPU.Build.0 = Release|Any CPU + {C2B2B38A-D67D-429E-BB2E-023E25EBD7D3}.Release|x64.ActiveCfg = Release|Any CPU + {C2B2B38A-D67D-429E-BB2E-023E25EBD7D3}.Release|x64.Build.0 = Release|Any CPU + {C2B2B38A-D67D-429E-BB2E-023E25EBD7D3}.Release|x86.ActiveCfg = Release|Any CPU + {C2B2B38A-D67D-429E-BB2E-023E25EBD7D3}.Release|x86.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -771,5 +829,9 @@ Global {07D15319-95A0-4C36-B06C-A5C80E0A7752} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} {782652F5-A7C3-4070-8B42-F7DC2C17973E} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {E76AE786-599B-434C-8E52-1B1211768386} = {7FECE895-ECB6-33CE-12BE-877282A67F5D} + {B6C4BB91-BC9F-4F5F-904F-9B19C80D4E4A} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {B2597D13-8733-4F20-B157-B4B5D36FB59A} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {C2B2B38A-D67D-429E-BB2E-023E25EBD7D3} = {41F15E67-7190-CF23-3BC4-77E87134CADD} EndGlobalSection EndGlobal diff --git 
a/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/Program.cs b/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/Program.cs new file mode 100644 index 00000000..0f46a782 --- /dev/null +++ b/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/Program.cs @@ -0,0 +1,15 @@ +using BenchmarkDotNet.Running; + +if (args.Any(arg => string.Equals(arg, "--dump-heuristics", StringComparison.OrdinalIgnoreCase))) +{ + RustBenchmarkUtility.DumpHeuristicsFixture(); + return; +} + +if (args.Any(arg => string.Equals(arg, "--dump-fallback", StringComparison.OrdinalIgnoreCase))) +{ + RustBenchmarkUtility.DumpFallbackFixture(); + return; +} + +BenchmarkRunner.Run(); diff --git a/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/RustBenchmarkShared.cs b/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/RustBenchmarkShared.cs new file mode 100644 index 00000000..86f3144f --- /dev/null +++ b/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/RustBenchmarkShared.cs @@ -0,0 +1,109 @@ +using System.Text.Json; +using StellaOps.Scanner.Analyzers.Lang; + +internal static class RustBenchmarkShared +{ + public static string ResolveRepoRoot() + { + var fromEnv = Environment.GetEnvironmentVariable("STELLAOPS_REPO_ROOT"); + if (!string.IsNullOrWhiteSpace(fromEnv) && Directory.Exists(fromEnv)) + { + return Path.GetFullPath(fromEnv); + } + + var directory = Path.GetFullPath(AppContext.BaseDirectory); + while (!string.IsNullOrEmpty(directory)) + { + if (Directory.Exists(Path.Combine(directory, ".git"))) + { + return directory; + } + + var parent = Directory.GetParent(directory)?.FullName; + if (string.IsNullOrEmpty(parent) || string.Equals(parent, directory, StringComparison.Ordinal)) + { + break; + } + + directory = parent; + } + + directory = Path.GetFullPath(AppContext.BaseDirectory); + while (!string.IsNullOrEmpty(directory)) + { + if 
(File.Exists(Path.Combine(directory, "Directory.Build.props")) && Directory.Exists(Path.Combine(directory, "docs"))) + { + return directory; + } + + var parent = Directory.GetParent(directory)?.FullName; + if (string.IsNullOrEmpty(parent) || string.Equals(parent, directory, StringComparison.Ordinal)) + { + break; + } + + directory = parent; + } + + throw new InvalidOperationException("Unable to locate StellaOps repository root. Set STELLAOPS_REPO_ROOT."); + } + + public static string ResolveFixture(string repoRoot, params string[] segments) + { + var path = Path.Combine(new[] { repoRoot }.Concat(segments).ToArray()); + if (!Directory.Exists(path)) + { + throw new DirectoryNotFoundException($"Fixture path '{path}' not found."); + } + + return path; + } + + public static LanguageAnalyzerContext CreateContext(string rootPath, LanguageUsageHints usageHints) + => new(rootPath, TimeProvider.System, usageHints); + + public static HashSet ExtractHeuristicComponents(LanguageAnalyzerResult result) + { + var names = new HashSet(StringComparer.OrdinalIgnoreCase); + foreach (var component in result.Components) + { + if (component.Metadata.TryGetValue("provenance", out var provenance) && + string.Equals(provenance, "heuristic", StringComparison.OrdinalIgnoreCase)) + { + names.Add(component.Name); + } + } + + return names; + } + + public static HashSet LoadCompetitorBaseline(string baselinePath) + { + var names = new HashSet(StringComparer.OrdinalIgnoreCase); + if (!File.Exists(baselinePath)) + { + return names; + } + + using var document = JsonDocument.Parse(File.ReadAllText(baselinePath)); + if (document.RootElement.TryGetProperty("detectedCrates", out var detectedCrates) && + detectedCrates.ValueKind == JsonValueKind.Array) + { + foreach (var entry in detectedCrates.EnumerateArray()) + { + if (entry.ValueKind == JsonValueKind.Object && + entry.TryGetProperty("name", out var nameProperty) && + nameProperty.ValueKind == JsonValueKind.String) + { + var name = 
nameProperty.GetString(); + if (!string.IsNullOrWhiteSpace(name)) + { + names.Add(name); + } + } + } + } + + return names; + } +} diff --git a/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/RustBenchmarkUtility.cs b/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/RustBenchmarkUtility.cs new file mode 100644 index 00000000..ee36190b --- /dev/null +++ b/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/RustBenchmarkUtility.cs @@ -0,0 +1,56 @@ +using StellaOps.Scanner.Analyzers.Lang; +using StellaOps.Scanner.Analyzers.Lang.Rust; + +internal static class RustBenchmarkUtility +{ + public static void DumpHeuristicsFixture() + { + var repoRoot = RustBenchmarkShared.ResolveRepoRoot(); + var fixture = RustBenchmarkShared.ResolveFixture(repoRoot, "src", "Scanner", "__Tests", "StellaOps.Scanner.Analyzers.Lang.Tests", "Fixtures", "lang", "rust", "heuristics"); + var usageHints = new LanguageUsageHints(new[] + { + Path.Combine(fixture, "usr/local/bin/heuristic_app") + }); + + var analyzers = new ILanguageAnalyzer[] + { + new RustLanguageAnalyzer() + }; + + var engine = new LanguageAnalyzerEngine(analyzers); + var context = RustBenchmarkShared.CreateContext(fixture, usageHints); + var result = engine.AnalyzeAsync(context, CancellationToken.None).GetAwaiter().GetResult(); + + Console.WriteLine(result.ToJson(indent: true)); + + var heuristics = RustBenchmarkShared.ExtractHeuristicComponents(result); + var competitor = RustBenchmarkShared.LoadCompetitorBaseline(Path.Combine(fixture, "competitor-baseline.json")); + if (competitor.Count > 0) + { + var improvement = (double)heuristics.Count / competitor.Count; + Console.WriteLine(); + Console.WriteLine($"Heuristic coverage: {heuristics.Count} components vs competitor {competitor.Count} ({improvement:P2})."); + } + } + + public static void DumpFallbackFixture() + { + var repoRoot = RustBenchmarkShared.ResolveRepoRoot(); + var fixture = 
RustBenchmarkShared.ResolveFixture(repoRoot, "src", "Scanner", "__Tests", "StellaOps.Scanner.Analyzers.Lang.Tests", "Fixtures", "lang", "rust", "fallback"); + var usageHints = new LanguageUsageHints(new[] + { + Path.Combine(fixture, "usr/local/bin/opaque_bin") + }); + + var analyzers = new ILanguageAnalyzer[] + { + new RustLanguageAnalyzer() + }; + + var engine = new LanguageAnalyzerEngine(analyzers); + var context = RustBenchmarkShared.CreateContext(fixture, usageHints); + var result = engine.AnalyzeAsync(context, CancellationToken.None).GetAwaiter().GetResult(); + + Console.WriteLine(result.ToJson(indent: true)); + } +} diff --git a/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/RustLanguageAnalyzerBenchmark.cs b/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/RustLanguageAnalyzerBenchmark.cs new file mode 100644 index 00000000..fc033e97 --- /dev/null +++ b/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/RustLanguageAnalyzerBenchmark.cs @@ -0,0 +1,79 @@ +using System.Linq; +using BenchmarkDotNet.Attributes; +using StellaOps.Scanner.Analyzers.Lang; +using StellaOps.Scanner.Analyzers.Lang.Rust; + +[MemoryDiagnoser] +public class RustLanguageAnalyzerBenchmark +{ + private ILanguageAnalyzer[] _analyzers = Array.Empty(); + private LanguageAnalyzerEngine _engine = default!; + private string _heuristicsFixture = string.Empty; + private string _fallbackFixture = string.Empty; + private LanguageUsageHints _heuristicUsageHints = LanguageUsageHints.Empty; + private LanguageUsageHints _fallbackUsageHints = LanguageUsageHints.Empty; + private HashSet _competitorBaseline = new(StringComparer.OrdinalIgnoreCase); + + public double HeuristicImprovement { get; private set; } + public int HeuristicComponentCount { get; private set; } + + [GlobalSetup] + public void Setup() + { + _analyzers = new ILanguageAnalyzer[] + { + new RustLanguageAnalyzer() + }; + + _engine = new 
LanguageAnalyzerEngine(_analyzers); + + var repoRoot = RustBenchmarkShared.ResolveRepoRoot(); + _heuristicsFixture = RustBenchmarkShared.ResolveFixture(repoRoot, "src", "Scanner", "__Tests", "StellaOps.Scanner.Analyzers.Lang.Tests", "Fixtures", "lang", "rust", "heuristics"); + _fallbackFixture = RustBenchmarkShared.ResolveFixture(repoRoot, "src", "Scanner", "__Tests", "StellaOps.Scanner.Analyzers.Lang.Tests", "Fixtures", "lang", "rust", "fallback"); + + _heuristicUsageHints = new LanguageUsageHints(new[] + { + Path.Combine(_heuristicsFixture, "usr/local/bin/heuristic_app") + }); + + _fallbackUsageHints = new LanguageUsageHints(new[] + { + Path.Combine(_fallbackFixture, "usr/local/bin/opaque_bin") + }); + + var baselinePath = Path.Combine(_heuristicsFixture, "competitor-baseline.json"); + _competitorBaseline = RustBenchmarkShared.LoadCompetitorBaseline(baselinePath); + + HeuristicImprovement = CalculateHeuristicImprovement(); + Console.WriteLine($"[Rust Analyzer] Heuristic coverage improvement vs competitor: {HeuristicImprovement:P2} ({HeuristicComponentCount} vs {_competitorBaseline.Count})."); + } + + [Benchmark(Description = "Heuristics fixture")] + public async Task AnalyzeHeuristicsFixtureAsync() + { + var context = RustBenchmarkShared.CreateContext(_heuristicsFixture, _heuristicUsageHints); + await _engine.AnalyzeAsync(context, CancellationToken.None).ConfigureAwait(false); + } + + [Benchmark(Description = "Fallback fixture")] + public async Task AnalyzeFallbackFixtureAsync() + { + var context = RustBenchmarkShared.CreateContext(_fallbackFixture, _fallbackUsageHints); + await _engine.AnalyzeAsync(context, CancellationToken.None).ConfigureAwait(false); + } + + private double CalculateHeuristicImprovement() + { + var context = RustBenchmarkShared.CreateContext(_heuristicsFixture, _heuristicUsageHints); + var result = _engine.AnalyzeAsync(context, CancellationToken.None).GetAwaiter().GetResult(); + var heuristics = 
RustBenchmarkShared.ExtractHeuristicComponents(result); + HeuristicComponentCount = heuristics.Count; + + if (_competitorBaseline.Count == 0) + { + return double.PositiveInfinity; + } + + return (double)HeuristicComponentCount / _competitorBaseline.Count; + } +} diff --git a/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks.csproj b/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks.csproj new file mode 100644 index 00000000..b46b1f58 --- /dev/null +++ b/src/Scanner/__Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks/StellaOps.Scanner.Analyzers.Lang.Rust.Benchmarks.csproj @@ -0,0 +1,21 @@ + + + Exe + net10.0 + preview + enable + enable + true + $(NoWarn);NU1603 + + + + + + + + + + + + diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustCargoLockParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustCargoLockParser.cs index f3047663..6d3bf455 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustCargoLockParser.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustCargoLockParser.cs @@ -71,7 +71,7 @@ internal static class RustCargoLockParser { if (trimmed[0] == ']') { - builder.SetArray(currentArrayKey, arrayValues); + packageBuilder.SetArray(currentArrayKey, arrayValues); currentArrayKey = null; arrayValues.Clear(); continue; @@ -89,8 +89,8 @@ internal static class RustCargoLockParser if (trimmed[0] == '[') { // Entering a new table; finish any pending package and skip section. 
- FlushCurrent(builder, packages); - builder = null; + FlushCurrent(packageBuilder, resultBuilder); + packageBuilder = null; continue; } diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md index 970d7c49..b0ec9211 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md @@ -2,5 +2,5 @@ | Seq | ID | Status | Depends on | Description | Exit Criteria | |-----|----|--------|------------|-------------|---------------| -| 5 | SCANNER-ANALYZERS-LANG-10-308R | TODO | SCANNER-ANALYZERS-LANG-10-307R | Determinism fixtures + performance benchmarks; compare against competitor heuristic coverage. | Fixtures `Fixtures/lang/rust/` committed; determinism guard; benchmark shows ≥15 % better coverage vs competitor. | -| 6 | SCANNER-ANALYZERS-LANG-10-309R | TODO | SCANNER-ANALYZERS-LANG-10-308R | Package plug-in manifest + Offline Kit documentation; ensure Worker integration. | Manifest copied; Worker loads analyzer; Offline Kit doc updated. | +| 5 | SCANNER-ANALYZERS-LANG-10-308R | DONE | SCANNER-ANALYZERS-LANG-10-307R | Determinism fixtures + performance benchmarks; compare against competitor heuristic coverage. | Fixtures `Fixtures/lang/rust/` committed; determinism guard; benchmark shows ≥15 % better coverage vs competitor. | +| 6 | SCANNER-ANALYZERS-LANG-10-309R | DONE | SCANNER-ANALYZERS-LANG-10-308R | Package plug-in manifest + Offline Kit documentation; ensure Worker integration. | Manifest copied; Worker loads analyzer; Offline Kit doc updated. 
| diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanAnalysisKeys.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanAnalysisKeys.cs index 954c9aab..5784818c 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanAnalysisKeys.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanAnalysisKeys.cs @@ -13,4 +13,6 @@ public static class ScanAnalysisKeys public const string LanguageComponentFragments = "analysis.lang.fragments"; public const string EntryTraceGraph = "analysis.entrytrace.graph"; + + public const string EntryTraceNdjson = "analysis.entrytrace.ndjson"; } diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanMetadataKeys.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanMetadataKeys.cs index d7c0ce64..f2038ceb 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanMetadataKeys.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanMetadataKeys.cs @@ -7,4 +7,5 @@ public static class ScanMetadataKeys public const string ImageConfigPath = "scanner.image.config.path"; public const string LayerDirectories = "scanner.rootfs.layers"; public const string LayerArchives = "scanner.layer.archives"; + public const string RuntimeProcRoot = "scanner.runtime.proc_root"; } diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceAnalyzer.cs index a759687a..6a470757 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceAnalyzer.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceAnalyzer.cs @@ -1,11 +1,14 @@ using System.Collections.Generic; using System.Collections.Immutable; -using System.IO; -using System.Linq; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scanner.EntryTrace.Diagnostics; -using StellaOps.Scanner.EntryTrace.Parsing; +using System.IO; 
+using System.IO.Compression; +using System.Linq; +using System.Text; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.EntryTrace.Diagnostics; +using StellaOps.Scanner.EntryTrace.FileSystem; +using StellaOps.Scanner.EntryTrace.Parsing; namespace StellaOps.Scanner.EntryTrace; @@ -73,13 +76,17 @@ public sealed class EntryTraceAnalyzer : IEntryTraceAnalyzer { private readonly EntrypointSpecification _entrypoint; private readonly EntryTraceContext _context; - private readonly EntryTraceAnalyzerOptions _options; - private readonly EntryTraceMetrics _metrics; - private readonly ILogger _logger; - private readonly ImmutableArray _pathEntries; - private readonly List _nodes = new(); - private readonly List _edges = new(); - private readonly List _diagnostics = new(); + private readonly EntryTraceAnalyzerOptions _options; + private readonly EntryTraceMetrics _metrics; + private readonly ILogger _logger; + private readonly ImmutableArray _pathEntries; + private readonly ImmutableArray _candidates; + private readonly List _nodes = new(); + private readonly List _edges = new(); + private readonly List _diagnostics = new(); + private readonly List _plans = new(); + private readonly List _terminals = new(); + private readonly HashSet _terminalKeys = new(StringComparer.Ordinal); private readonly HashSet _visitedScripts = new(StringComparer.Ordinal); private readonly HashSet _visitedCommands = new(StringComparer.Ordinal); private int _nextNodeId = 1; @@ -89,15 +96,16 @@ public sealed class EntryTraceAnalyzer : IEntryTraceAnalyzer EntryTraceContext context, EntryTraceAnalyzerOptions options, EntryTraceMetrics metrics, - ILogger logger) - { - _entrypoint = entrypoint; - _context = context; - _options = options; - _metrics = metrics; - _logger = logger; - _pathEntries = DeterminePath(context); - } + ILogger logger) + { + _entrypoint = entrypoint; + _context = context; + _options = options; + _metrics = metrics; + _logger = logger; + 
_pathEntries = DeterminePath(context); + _candidates = context.Candidates; + } private static ImmutableArray DeterminePath(EntryTraceContext context) { @@ -114,46 +122,65 @@ public sealed class EntryTraceAnalyzer : IEntryTraceAnalyzer return ImmutableArray.Empty; } - public EntryTraceGraph BuildGraph() - { - var initialArgs = ComposeInitialCommand(_entrypoint); - if (initialArgs.Length == 0) - { - _diagnostics.Add(new EntryTraceDiagnostic( - EntryTraceDiagnosticSeverity.Error, - EntryTraceUnknownReason.CommandNotFound, - "ENTRYPOINT/CMD yielded no executable command.", - Span: null, - RelatedPath: null)); - return ToGraph(EntryTraceOutcome.Unresolved); - } + public EntryTraceGraph BuildGraph() + { + var initialArgs = ComposeInitialCommand(_entrypoint); + if (initialArgs.Length == 0) + { + if (_candidates.Length == 0) + { + _diagnostics.Add(new EntryTraceDiagnostic( + EntryTraceDiagnosticSeverity.Warning, + EntryTraceUnknownReason.CommandNotFound, + "No ENTRYPOINT/CMD declared and no fallback candidates were discovered.", + Span: null, + RelatedPath: null)); + return ToGraph(DetermineOutcome()); + } + + foreach (var candidate in _candidates) + { + _diagnostics.Add(new EntryTraceDiagnostic( + EntryTraceDiagnosticSeverity.Info, + MapCandidateReason(candidate.Source), + CreateCandidateMessage(candidate), + Span: null, + RelatedPath: candidate.Evidence?.Path)); + + ResolveCommand(candidate.Command, parent: null, originSpan: null, depth: 0, relationship: candidate.Source); + } + + return ToGraph(DetermineOutcome()); + } + + ResolveCommand(initialArgs, parent: null, originSpan: null, depth: 0, relationship: "entrypoint"); + + var outcome = DetermineOutcome(); + return ToGraph(outcome); + } + + private EntryTraceOutcome DetermineOutcome() + { + var hasErrors = _diagnostics.Any(d => d.Severity == EntryTraceDiagnosticSeverity.Error); + if (hasErrors) + { + return EntryTraceOutcome.Unresolved; + } + + var hasWarnings = _diagnostics.Any(d => d.Severity == 
EntryTraceDiagnosticSeverity.Warning); + return hasWarnings ? EntryTraceOutcome.PartiallyResolved : EntryTraceOutcome.Resolved; + } - ResolveCommand(initialArgs, parent: null, originSpan: null, depth: 0, relationship: "entrypoint"); - - var outcome = DetermineOutcome(); - return ToGraph(outcome); - } - - private EntryTraceOutcome DetermineOutcome() - { - if (_diagnostics.Count == 0) - { - return EntryTraceOutcome.Resolved; - } - - return _diagnostics.Any(d => d.Severity == EntryTraceDiagnosticSeverity.Error) - ? EntryTraceOutcome.Unresolved - : EntryTraceOutcome.PartiallyResolved; - } - - private EntryTraceGraph ToGraph(EntryTraceOutcome outcome) - { - return new EntryTraceGraph( - outcome, - _nodes.ToImmutableArray(), - _edges.ToImmutableArray(), - _diagnostics.ToImmutableArray()); - } + private EntryTraceGraph ToGraph(EntryTraceOutcome outcome) + { + return new EntryTraceGraph( + outcome, + _nodes.ToImmutableArray(), + _edges.ToImmutableArray(), + _diagnostics.ToImmutableArray(), + _plans.ToImmutableArray(), + _terminals.ToImmutableArray()); + } private ImmutableArray ComposeInitialCommand(EntrypointSpecification specification) { @@ -185,17 +212,17 @@ public sealed class EntryTraceAnalyzer : IEntryTraceAnalyzer return ImmutableArray.Empty; } - private void ResolveCommand( - ImmutableArray arguments, - EntryTraceNode? parent, - EntryTraceSpan? originSpan, - int depth, - string relationship) - { - if (arguments.Length == 0) - { - return; - } + private void ResolveCommand( + ImmutableArray arguments, + EntryTraceNode? parent, + EntryTraceSpan? originSpan, + int depth, + string relationship) + { + if (arguments.Length == 0) + { + return; + } if (depth >= _options.MaxDepth) { @@ -242,18 +269,19 @@ public sealed class EntryTraceAnalyzer : IEntryTraceAnalyzer return; } - if (TryFollowInterpreter(node, descriptor, arguments, depth)) - { - return; - } - - if (TryFollowShell(node, descriptor, arguments, depth)) - { - return; - } - - // Terminal executable. 
- } + if (TryFollowInterpreter(node, descriptor, arguments, depth)) + { + return; + } + + if (TryFollowShell(node, descriptor, arguments, depth)) + { + return; + } + + ClassifyTerminal(node, descriptor, arguments); + // Terminal executable. + } private bool TryResolveExecutable( string commandName, @@ -497,16 +525,16 @@ public sealed class EntryTraceAnalyzer : IEntryTraceAnalyzer return true; } - private bool HandleJava( - EntryTraceNode node, - ImmutableArray arguments, - RootFileDescriptor descriptor, - int depth) - { - if (arguments.Length < 2) - { - return false; - } + private bool HandleJava( + EntryTraceNode node, + ImmutableArray arguments, + RootFileDescriptor descriptor, + int depth) + { + if (arguments.Length < 2) + { + return false; + } string? jar = null; string? mainClass = null; @@ -526,40 +554,42 @@ public sealed class EntryTraceAnalyzer : IEntryTraceAnalyzer } } - if (jar is not null) - { - if (!_context.FileSystem.TryResolveExecutable(jar, Array.Empty(), out var jarDescriptor)) - { - _diagnostics.Add(new EntryTraceDiagnostic( - EntryTraceDiagnosticSeverity.Warning, - EntryTraceUnknownReason.JarNotFound, - $"Java JAR '{jar}' not found.", - Span: null, - RelatedPath: jar)); - } - else - { - var jarNode = AddNode( - EntryTraceNodeKind.Executable, - jarDescriptor.Path, - ImmutableArray.Empty, - EntryTraceInterpreterKind.Java, - new EntryTraceEvidence(jarDescriptor.Path, jarDescriptor.LayerDigest, "jar", null), - null); - _edges.Add(new EntryTraceEdge(node.Id, jarNode.Id, "executes", null)); - } - - return true; - } - - if (mainClass is not null) - { - _edges.Add(new EntryTraceEdge(node.Id, node.Id, "java-main", new Dictionary - { - ["class"] = mainClass - })); - return true; - } + if (jar is not null) + { + if (!_context.FileSystem.TryResolveExecutable(jar, _pathEntries, out var jarDescriptor) && + !_context.FileSystem.TryResolveExecutable(jar, Array.Empty(), out jarDescriptor)) + { + _diagnostics.Add(new EntryTraceDiagnostic( + 
EntryTraceDiagnosticSeverity.Warning, + EntryTraceUnknownReason.JarNotFound, + $"Java JAR '{jar}' not found.", + Span: null, + RelatedPath: jar)); + return true; + } + + var jarNode = AddNode( + EntryTraceNodeKind.Executable, + jarDescriptor.Path, + ImmutableArray.Empty, + EntryTraceInterpreterKind.Java, + new EntryTraceEvidence(jarDescriptor.Path, jarDescriptor.LayerDigest, "jar", null), + null); + + _edges.Add(new EntryTraceEdge(node.Id, jarNode.Id, "executes", null)); + ClassifyTerminal(jarNode, jarDescriptor, arguments); + return true; + } + + if (mainClass is not null) + { + _edges.Add(new EntryTraceEdge(node.Id, node.Id, "java-main", new Dictionary + { + ["class"] = mainClass + })); + ClassifyTerminal(node, descriptor, arguments); + return true; + } return false; } @@ -1018,26 +1048,325 @@ public sealed class EntryTraceAnalyzer : IEntryTraceAnalyzer return content.Contains("#!/bin/sh", StringComparison.Ordinal); } - private EntryTraceNode AddNode( - EntryTraceNodeKind kind, - string displayName, - ImmutableArray arguments, - EntryTraceInterpreterKind interpreterKind, - EntryTraceEvidence? evidence, - EntryTraceSpan? span) - { - var node = new EntryTraceNode( - _nextNodeId++, - kind, - displayName, - arguments, - interpreterKind, - evidence, - span); - _nodes.Add(node); - return node; - } - + private EntryTraceNode AddNode( + EntryTraceNodeKind kind, + string displayName, + ImmutableArray arguments, + EntryTraceInterpreterKind interpreterKind, + EntryTraceEvidence? evidence, + EntryTraceSpan? span, + ImmutableDictionary? 
metadata = null) + { + var node = new EntryTraceNode( + _nextNodeId++, + kind, + displayName, + arguments, + interpreterKind, + evidence, + span, + metadata); + _nodes.Add(node); + return node; + } + + private void ClassifyTerminal( + EntryTraceNode node, + RootFileDescriptor descriptor, + ImmutableArray arguments) + { + var signature = CreateCommandSignature(arguments, node.DisplayName); + var key = $"{descriptor.Path}|{_context.User}|{_context.WorkingDirectory}|{signature}"; + if (!_terminalKeys.Add(key)) + { + return; + } + + var evidence = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + double score = descriptor.IsExecutable ? 50d : 40d; + string? runtime = null; + var type = EntryTraceTerminalType.Unknown; + + if (!string.IsNullOrWhiteSpace(descriptor.ShebangInterpreter)) + { + var shebang = descriptor.ShebangInterpreter!; + evidence["shebang"] = shebang; + runtime = InferRuntimeFromShebang(shebang); + type = EntryTraceTerminalType.Script; + score += 15d; + } + + if (_context.FileSystem.TryReadBytes(descriptor.Path, 2_097_152, out _, out var binaryContent)) + { + var span = binaryContent.Span; + if (TryClassifyElf(span, evidence, ref runtime)) + { + type = EntryTraceTerminalType.Native; + score += 15d; + } + else if (TryClassifyPe(span, evidence, ref runtime)) + { + type = EntryTraceTerminalType.Managed; + score += 15d; + } + else if (IsZipArchive(span)) + { + runtime ??= "java"; + type = EntryTraceTerminalType.Managed; + score += 10d; + if (TryReadJarManifest(span, descriptor.Path, evidence)) + { + score += 5d; + } + } + } + + runtime ??= InferRuntimeFromCommand(node.DisplayName, arguments); + + if (runtime is "go" or "rust") + { + type = EntryTraceTerminalType.Native; + score += 10d; + } + else if (runtime is ".net" or "java" or "python" or "node" or "ruby" or "php" or "php-fpm") + { + if (type == EntryTraceTerminalType.Unknown) + { + type = EntryTraceTerminalType.Managed; + } + + score += 5d; + } + + if (runtime is "shell") + { + type = 
EntryTraceTerminalType.Script; + } + + if (type == EntryTraceTerminalType.Unknown) + { + type = EntryTraceTerminalType.Native; + } + + var boundedScore = Math.Min(95d, score); + var terminal = new EntryTraceTerminal( + descriptor.Path, + type, + runtime, + boundedScore, + evidence.ToImmutable(), + _context.User, + _context.WorkingDirectory, + arguments.IsDefault ? ImmutableArray.Empty : arguments); + + var plan = new EntryTracePlan( + terminal.Arguments, + _context.Environment, + _context.WorkingDirectory, + _context.User, + terminal.Path, + terminal.Type, + terminal.Runtime, + terminal.Confidence, + terminal.Evidence); + + _terminals.Add(terminal); + _plans.Add(plan); + } + + private static string CreateCommandSignature(ImmutableArray command, string displayName) + { + if (command.IsDefaultOrEmpty || command.Length == 0) + { + return displayName; + } + + return string.Join('\u001F', command); + } + + private static EntryTraceUnknownReason MapCandidateReason(string source) + => source switch + { + "history" => EntryTraceUnknownReason.InferredEntrypointFromHistory, + "service-directory" => EntryTraceUnknownReason.InferredEntrypointFromServices, + "supervisor" => EntryTraceUnknownReason.InferredEntrypointFromSupervisor, + "entrypoint-script" => EntryTraceUnknownReason.InferredEntrypointFromEntrypointScript, + _ => EntryTraceUnknownReason.CommandNotFound + }; + + private static string CreateCandidateMessage(EntryTraceCandidate candidate) + { + var primary = candidate.Command.Length > 0 ? candidate.Command[0] : candidate.Source; + return candidate.Source switch + { + "history" => "Inferred entrypoint from image history.", + "service-directory" => $"Inferred service run script '{primary}'.", + "supervisor" => candidate.Description is null + ? "Inferred supervisor command." + : $"Inferred supervisor program '{candidate.Description}'.", + "entrypoint-script" => $"Inferred entrypoint script '{primary}'.", + _ => "Inferred entrypoint candidate." 
+ }; + } + + private static string? InferRuntimeFromShebang(string shebang) + { + var normalized = shebang.ToLowerInvariant(); + if (normalized.Contains("python")) + { + return "python"; + } + + if (normalized.Contains("node")) + { + return "node"; + } + + if (normalized.Contains("ruby")) + { + return "ruby"; + } + + if (normalized.Contains("php-fpm")) + { + return "php-fpm"; + } + + if (normalized.Contains("php")) + { + return "php"; + } + + if (normalized.Contains("sh") || normalized.Contains("bash")) + { + return "shell"; + } + + return null; + } + + private static string? InferRuntimeFromCommand(string commandName, ImmutableArray arguments) + { + var normalized = commandName.ToLowerInvariant(); + if (normalized == "java" || arguments.Any(arg => arg.Equals("-jar", StringComparison.OrdinalIgnoreCase))) + { + return "java"; + } + + if (normalized.Contains("dotnet") || normalized.EndsWith(".dll", StringComparison.OrdinalIgnoreCase)) + { + return ".net"; + } + + if (normalized.Contains("python") || normalized.EndsWith(".py", StringComparison.OrdinalIgnoreCase)) + { + return "python"; + } + + if (normalized.Contains("node") || normalized.EndsWith(".js", StringComparison.OrdinalIgnoreCase)) + { + return "node"; + } + + if (normalized.Contains("go")) + { + return "go"; + } + + return null; + } + + private bool TryClassifyElf(ReadOnlySpan span, ImmutableDictionary.Builder evidence, ref string? 
runtime) + { + if (span.Length < 4 || span[0] != 0x7F || span[1] != (byte)'E' || span[2] != (byte)'L' || span[3] != (byte)'F') + { + return false; + } + + evidence["binary.format"] = "ELF"; + + if (ContainsAscii(span, "Go build ID") || ContainsAscii(span, ".gopclntab")) + { + runtime = "go"; + evidence["runtime"] = "go"; + } + else if (ContainsAscii(span, "rust_eh_personality") || ContainsAscii(span, ".rustc")) + { + runtime = "rust"; + evidence["runtime"] = "rust"; + } + + return true; + } + + private static bool ContainsAscii(ReadOnlySpan span, string value) + { + var bytes = Encoding.ASCII.GetBytes(value); + return span.IndexOf(bytes) >= 0; + } + + private bool TryClassifyPe(ReadOnlySpan span, ImmutableDictionary.Builder evidence, ref string? runtime) + { + if (span.Length < 2 || span[0] != 'M' || span[1] != 'Z') + { + return false; + } + + evidence["binary.format"] = "PE"; + if (ContainsAscii(span, "BSJB") || ContainsAscii(span, "CLR")) + { + runtime = ".net"; + evidence["pe.cli"] = "true"; + } + + return true; + } + + private bool TryReadJarManifest(ReadOnlySpan span, string path, ImmutableDictionary.Builder evidence) + { + try + { + using var stream = new MemoryStream(span.ToArray(), writable: false); + using var archive = new ZipArchive(stream, ZipArchiveMode.Read, leaveOpen: false); + var manifestEntry = archive.GetEntry("META-INF/MANIFEST.MF"); + if (manifestEntry is null) + { + return false; + } + + using var reader = new StreamReader(manifestEntry.Open(), Encoding.UTF8, detectEncodingFromByteOrderMarks: true); + var content = reader.ReadToEnd(); + evidence["jar.manifest"] = "true"; + + foreach (var line in content.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries)) + { + var separator = line.IndexOf(':'); + if (separator <= 0) + { + continue; + } + + var key = line[..separator].Trim(); + var value = line[(separator + 1)..].Trim(); + + if (key.Equals("Main-Class", StringComparison.OrdinalIgnoreCase)) + { + evidence["jar.main-class"] = 
value; + } + } + + return true; + } + catch (InvalidDataException ex) + { + _logger.LogDebug(ex, "Failed to read jar manifest for {JarPath}.", path); + return false; + } + } + + private static bool IsZipArchive(ReadOnlySpan span) + => span.Length >= 4 && span[0] == 0x50 && span[1] == 0x4B && span[2] == 0x03 && span[3] == 0x04; + private static string CombineUnixPath(string baseDirectory, string relative) { var normalizedBase = NormalizeUnixPath(baseDirectory); diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceCacheEnvelope.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceCacheEnvelope.cs new file mode 100644 index 00000000..883e1407 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceCacheEnvelope.cs @@ -0,0 +1,8 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Scanner.EntryTrace; + +public sealed record EntryTraceCacheEnvelope( + [property: JsonPropertyName("version")] string Version, + [property: JsonPropertyName("options")] string OptionsFingerprint, + [property: JsonPropertyName("graph")] EntryTraceGraph Graph); diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceCacheSerializer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceCacheSerializer.cs new file mode 100644 index 00000000..59eaa7d4 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceCacheSerializer.cs @@ -0,0 +1,35 @@ +using System; +using System.Text.Json; + +namespace StellaOps.Scanner.EntryTrace; + +internal static class EntryTraceCacheSerializer +{ + public const string CurrentVersion = "entrytrace.v1"; + + public static byte[] Serialize(EntryTraceCacheEnvelope envelope) + { + if (envelope is null) + { + throw new ArgumentNullException(nameof(envelope)); + } + + return JsonSerializer.SerializeToUtf8Bytes(envelope); + } + + public static EntryTraceCacheEnvelope Deserialize(byte[] payload) + { + if (payload is null || payload.Length == 
0) + { + throw new ArgumentException("Payload cannot be empty.", nameof(payload)); + } + + var envelope = JsonSerializer.Deserialize(payload); + if (envelope is null) + { + throw new InvalidOperationException("Failed to deserialize entry trace cache envelope."); + } + + return envelope; + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceContext.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceContext.cs index 38c8f87a..cd282217 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceContext.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceContext.cs @@ -1,7 +1,8 @@ using System.Collections.Immutable; -using Microsoft.Extensions.Logging; - -namespace StellaOps.Scanner.EntryTrace; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.EntryTrace.FileSystem; + +namespace StellaOps.Scanner.EntryTrace; /// /// Provides runtime context for entry trace analysis. @@ -14,4 +15,7 @@ public sealed record EntryTraceContext( string User, string ImageDigest, string ScanId, - ILogger? Logger); + ILogger? 
Logger) +{ + public ImmutableArray Candidates { get; init; } = ImmutableArray.Empty; +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceImageContextFactory.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceImageContextFactory.cs index a5054ef0..f4efc586 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceImageContextFactory.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceImageContextFactory.cs @@ -1,9 +1,14 @@ using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using Microsoft.Extensions.Logging; - -namespace StellaOps.Scanner.EntryTrace; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.IO; +using System.Linq; +using System.Text.Json; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.EntryTrace.FileSystem; +using StellaOps.Scanner.EntryTrace.Parsing; + +namespace StellaOps.Scanner.EntryTrace; /// /// Combines OCI configuration and root filesystem data into the context required by the EntryTrace analyzer. @@ -31,22 +36,447 @@ public static class EntryTraceImageContextFactory var workingDir = NormalizeWorkingDirectory(config.WorkingDirectory); var user = NormalizeUser(config.User); - var context = new EntryTraceContext( - fileSystem, - environment, - path, - workingDir, - user, - imageDigest, - scanId, - logger); - - var entrypoint = EntrypointSpecification.FromExecForm( - config.Entrypoint.IsDefaultOrEmpty ? null : config.Entrypoint, - config.Command.IsDefaultOrEmpty ? 
null : config.Command); - - return new EntryTraceImageContext(entrypoint, context); - } + var context = new EntryTraceContext( + fileSystem, + environment, + path, + workingDir, + user, + imageDigest, + scanId, + logger); + + var candidates = BuildFallbackCandidates(config, fileSystem, logger); + context = context with { Candidates = candidates }; + + var entrypoint = EntrypointSpecification.FromExecForm( + config.Entrypoint.IsDefaultOrEmpty ? null : config.Entrypoint, + config.Command.IsDefaultOrEmpty ? null : config.Command); + + return new EntryTraceImageContext(entrypoint, context); + } + + private static ImmutableArray BuildFallbackCandidates( + OciImageConfig config, + IRootFileSystem fileSystem, + ILogger? logger) + { + if (config.Entrypoint.Length > 0 || config.Command.Length > 0) + { + return ImmutableArray.Empty; + } + + var builder = ImmutableArray.CreateBuilder(); + var seen = new HashSet(StringComparer.Ordinal); + + void AddCandidate(ImmutableArray command, string source, EntryTraceEvidence? evidence, string? description) + { + if (command.IsDefaultOrEmpty || command.Length == 0) + { + return; + } + + var signature = CreateSignature(command); + if (!seen.Add(signature)) + { + return; + } + + builder.Add(new EntryTraceCandidate(command, source, evidence, description)); + } + + if (TryAddHistoryCandidate(config, AddCandidate, logger)) + { + // Preserve first viable history candidate only. + } + + CollectEntrypointScripts(fileSystem, AddCandidate); + CollectSupervisorCommands(fileSystem, AddCandidate); + CollectServiceRunScripts(fileSystem, AddCandidate); + + return builder.ToImmutable(); + } + + private static bool TryAddHistoryCandidate( + OciImageConfig config, + Action, string, EntryTraceEvidence?, string?> addCandidate, + ILogger? 
logger) + { + if (config.History.IsDefaultOrEmpty || config.History.Length == 0) + { + return false; + } + + for (var i = config.History.Length - 1; i >= 0; i--) + { + var entry = config.History[i]; + if (TryExtractHistoryCommand(entry?.CreatedBy, out var command)) + { + var metadata = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + if (!string.IsNullOrWhiteSpace(entry?.CreatedBy)) + { + metadata["created_by"] = entry!.CreatedBy!; + } + + var evidence = new EntryTraceEvidence( + Path: "/image/history", + LayerDigest: null, + Source: "history", + metadata.Count > 0 ? metadata.ToImmutable() : null); + + addCandidate(command, "history", evidence, null); + return true; + } + } + + return false; + } + + private static void CollectEntrypointScripts( + IRootFileSystem fileSystem, + Action, string, EntryTraceEvidence?, string?> addCandidate) + { + const string directory = "/usr/local/bin"; + if (!fileSystem.DirectoryExists(directory)) + { + return; + } + + foreach (var entry in fileSystem.EnumerateDirectory(directory)) + { + if (entry.IsDirectory || !entry.IsExecutable) + { + continue; + } + + var name = Path.GetFileName(entry.Path); + if (!name.Contains("entrypoint", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + var evidence = new EntryTraceEvidence(entry.Path, entry.LayerDigest, "entrypoint-script", null); + addCandidate(ImmutableArray.Create(entry.Path), "entrypoint-script", evidence, null); + } + } + + private static void CollectSupervisorCommands( + IRootFileSystem fileSystem, + Action, string, EntryTraceEvidence?, string?> addCandidate) + { + const string root = "/etc/supervisor"; + if (!fileSystem.DirectoryExists(root)) + { + return; + } + + var pending = new Stack(); + pending.Push(root); + + while (pending.Count > 0) + { + var current = pending.Pop(); + foreach (var entry in fileSystem.EnumerateDirectory(current)) + { + if (entry.IsDirectory) + { + pending.Push(entry.Path); + continue; + } + + if (!entry.Path.EndsWith(".conf", 
StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + if (!fileSystem.TryReadAllText(entry.Path, out var descriptor, out var content)) + { + continue; + } + + foreach (var (command, program) in ExtractSupervisorCommands(content)) + { + var metadataBuilder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + if (!string.IsNullOrWhiteSpace(program)) + { + metadataBuilder["program"] = program!; + } + + var evidence = new EntryTraceEvidence( + entry.Path, + descriptor.LayerDigest, + "supervisor", + metadataBuilder.Count > 0 ? metadataBuilder.ToImmutable() : null); + + addCandidate(command, "supervisor", evidence, program); + } + } + } + } + + private static void CollectServiceRunScripts( + IRootFileSystem fileSystem, + Action, string, EntryTraceEvidence?, string?> addCandidate) + { + string[] roots = + { + "/etc/services.d", + "/etc/services", + "/service", + "/s6", + "/etc/s6", + "/etc/s6-overlay/s6-rc.d" + }; + + foreach (var root in roots) + { + if (!fileSystem.DirectoryExists(root)) + { + continue; + } + + var pending = new Stack(); + pending.Push(root); + + while (pending.Count > 0) + { + var current = pending.Pop(); + foreach (var entry in fileSystem.EnumerateDirectory(current)) + { + if (entry.IsDirectory) + { + pending.Push(entry.Path); + continue; + } + + if (!entry.IsExecutable) + { + continue; + } + + if (!entry.Path.EndsWith("/run", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + var metadata = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + var directory = Path.GetDirectoryName(entry.Path); + if (!string.IsNullOrWhiteSpace(directory)) + { + metadata["service_dir"] = directory!.Replace('\\', '/'); + } + + var evidence = new EntryTraceEvidence( + entry.Path, + entry.LayerDigest, + "service-directory", + metadata.Count > 0 ? 
metadata.ToImmutable() : null); + addCandidate(ImmutableArray.Create(entry.Path), "service-directory", evidence, null); + } + } + } + } + + private static IEnumerable<(ImmutableArray Command, string? Program)> ExtractSupervisorCommands(string content) + { + var results = new List<(ImmutableArray, string?)>(); + if (string.IsNullOrWhiteSpace(content)) + { + return results; + } + + string? currentProgram = null; + foreach (var rawLine in content.Split('\n')) + { + var line = rawLine.Trim(); + if (line.Length == 0 || line.StartsWith("#", StringComparison.Ordinal)) + { + continue; + } + + if (line.StartsWith("[", StringComparison.Ordinal) && line.EndsWith("]", StringComparison.Ordinal)) + { + var section = line[1..^1].Trim(); + if (section.StartsWith("program:", StringComparison.OrdinalIgnoreCase)) + { + currentProgram = section["program:".Length..].Trim(); + } + else + { + currentProgram = null; + } + + continue; + } + + if (!line.StartsWith("command", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + var equalsIndex = line.IndexOf('=', StringComparison.Ordinal); + if (equalsIndex < 0) + { + continue; + } + + var commandText = line[(equalsIndex + 1)..].Trim(); + if (commandText.Length == 0) + { + continue; + } + + if (TryTokenizeShellCommand(commandText, out var command)) + { + results.Add((command, currentProgram)); + } + } + + return results; + } + + private static bool TryExtractHistoryCommand(string? 
createdBy, out ImmutableArray command) + { + command = ImmutableArray.Empty; + if (string.IsNullOrWhiteSpace(createdBy)) + { + return false; + } + + var text = createdBy.Trim(); + + if (TryParseHistoryJsonCommand(text, "CMD", out command) || + TryParseHistoryJsonCommand(text, "ENTRYPOINT", out command)) + { + return true; + } + + var shIndex = text.IndexOf("/bin/sh", StringComparison.OrdinalIgnoreCase); + if (shIndex >= 0) + { + var dashC = text.IndexOf("-c", shIndex, StringComparison.OrdinalIgnoreCase); + if (dashC >= 0) + { + var after = text[(dashC + 2)..].Trim(); + if (after.StartsWith("#(nop)", StringComparison.OrdinalIgnoreCase)) + { + after = after[6..].Trim(); + } + + if (after.StartsWith("CMD", StringComparison.OrdinalIgnoreCase)) + { + after = after[3..].Trim(); + } + else if (after.StartsWith("ENTRYPOINT", StringComparison.OrdinalIgnoreCase)) + { + after = after[10..].Trim(); + } + + if (after.StartsWith("[", StringComparison.Ordinal)) + { + if (TryParseJsonArray(after, out command)) + { + return true; + } + } + else if (TryTokenizeShellCommand(after, out command)) + { + return true; + } + } + } + + return false; + } + + private static bool TryParseHistoryJsonCommand(string text, string keyword, out ImmutableArray command) + { + command = ImmutableArray.Empty; + var index = text.IndexOf(keyword, StringComparison.OrdinalIgnoreCase); + if (index < 0) + { + return false; + } + + var bracket = text.IndexOf('[', index); + if (bracket < 0) + { + return false; + } + + var end = text.IndexOf(']', bracket); + if (end < 0) + { + return false; + } + + var json = text.Substring(bracket, end - bracket + 1); + return TryParseJsonArray(json, out command); + } + + private static bool TryParseJsonArray(string json, out ImmutableArray command) + { + try + { + using var document = JsonDocument.Parse(json); + if (document.RootElement.ValueKind != JsonValueKind.Array) + { + command = ImmutableArray.Empty; + return false; + } + + var builder = ImmutableArray.CreateBuilder(); 
+ foreach (var element in document.RootElement.EnumerateArray()) + { + if (element.ValueKind == JsonValueKind.String) + { + builder.Add(element.GetString() ?? string.Empty); + } + } + + command = builder.ToImmutable(); + return command.Length > 0; + } + catch (JsonException) + { + command = ImmutableArray.Empty; + return false; + } + } + + private static bool TryTokenizeShellCommand(string commandText, out ImmutableArray command) + { + var tokenizer = new ShellTokenizer(); + var tokens = tokenizer.Tokenize(commandText); + var builder = ImmutableArray.CreateBuilder(); + + foreach (var token in tokens) + { + switch (token.Kind) + { + case ShellTokenKind.Word: + case ShellTokenKind.SingleQuoted: + case ShellTokenKind.DoubleQuoted: + builder.Add(token.Value); + break; + case ShellTokenKind.Operator: + case ShellTokenKind.NewLine: + case ShellTokenKind.EndOfFile: + if (builder.Count > 0) + { + command = builder.ToImmutable(); + return command.Length > 0; + } + break; + } + } + + command = builder.ToImmutable(); + return command.Length > 0; + } + + private static string CreateSignature(ImmutableArray command) + => string.Join('\u001F', command); private static ImmutableDictionary BuildEnvironment(ImmutableArray raw) { diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceResult.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceResult.cs new file mode 100644 index 00000000..f90ffdb2 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceResult.cs @@ -0,0 +1,32 @@ +using System.Collections.Immutable; + +namespace StellaOps.Scanner.EntryTrace; + +public sealed record EntryTraceResult( + string ScanId, + string ImageDigest, + DateTimeOffset GeneratedAtUtc, + EntryTraceGraph Graph, + ImmutableArray Ndjson); + +public interface IEntryTraceResultStore +{ + Task StoreAsync(EntryTraceResult result, CancellationToken cancellationToken); + + Task GetAsync(string scanId, CancellationToken cancellationToken); +} + 
+internal sealed class NullEntryTraceResultStore : IEntryTraceResultStore +{ + public Task StoreAsync(EntryTraceResult result, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(result); + return Task.CompletedTask; + } + + public Task GetAsync(string scanId, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(scanId); + return Task.FromResult(null); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceTypes.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceTypes.cs index 89c978e5..f559c88b 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceTypes.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceTypes.cs @@ -16,19 +16,19 @@ public enum EntryTraceOutcome /// /// Logical classification for nodes in the entry trace graph. /// -public enum EntryTraceNodeKind -{ - Command, - Script, - Include, +public enum EntryTraceNodeKind +{ + Command, + Script, + Include, Interpreter, Executable, RunPartsDirectory, - RunPartsScript -} - -/// -/// Interpreter categories supported by the analyzer. + RunPartsScript +} + +/// +/// Interpreter categories supported by the analyzer. /// public enum EntryTraceInterpreterKind { @@ -55,18 +55,45 @@ public enum EntryTraceUnknownReason { CommandNotFound, MissingFile, - DynamicEnvironmentReference, - UnsupportedSyntax, - RecursionLimitReached, - InterpreterNotSupported, - ModuleNotFound, - JarNotFound, - RunPartsEmpty, - PermissionDenied -} - -/// -/// Represents a span within a script file. 
+ DynamicEnvironmentReference, + UnsupportedSyntax, + RecursionLimitReached, + InterpreterNotSupported, + ModuleNotFound, + JarNotFound, + RunPartsEmpty, + PermissionDenied, + WrapperMissingCommand, + SupervisorConfigMissing, + SupervisorUnsupported, + SupervisorProgramNotFound, + DynamicEvaluation, + RunPartsLimitExceeded, + WindowsShimUnsupported, + RuntimeSnapshotUnavailable, + RuntimeProcessNotFound, + RuntimeMatch, + RuntimeMismatch, + InferredEntrypointFromHistory, + InferredEntrypointFromServices, + InferredEntrypointFromSupervisor, + InferredEntrypointFromEntrypointScript +} + +/// +/// Categorises terminal executable kinds. +/// +public enum EntryTraceTerminalType +{ + Unknown, + Script, + Native, + Managed, + Service +} + +/// +/// Represents a span within a script file. /// public readonly record struct EntryTraceSpan( string? Path, @@ -87,14 +114,15 @@ public sealed record EntryTraceEvidence( /// /// Represents a node in the entry trace graph. /// -public sealed record EntryTraceNode( - int Id, - EntryTraceNodeKind Kind, - string DisplayName, - ImmutableArray Arguments, - EntryTraceInterpreterKind InterpreterKind, - EntryTraceEvidence? Evidence, - EntryTraceSpan? Span); +public sealed record EntryTraceNode( + int Id, + EntryTraceNodeKind Kind, + string DisplayName, + ImmutableArray Arguments, + EntryTraceInterpreterKind InterpreterKind, + EntryTraceEvidence? Evidence, + EntryTraceSpan? Span, + ImmutableDictionary? Metadata); /// /// Represents a directed edge in the entry trace graph. @@ -116,10 +144,48 @@ public sealed record EntryTraceDiagnostic( string? RelatedPath); /// -/// Final graph output produced by the analyzer. -/// -public sealed record EntryTraceGraph( - EntryTraceOutcome Outcome, - ImmutableArray Nodes, - ImmutableArray Edges, - ImmutableArray Diagnostics); +/// Final graph output produced by the analyzer. 
+/// +public sealed record EntryTraceGraph( + EntryTraceOutcome Outcome, + ImmutableArray Nodes, + ImmutableArray Edges, + ImmutableArray Diagnostics, + ImmutableArray Plans, + ImmutableArray Terminals); + +/// +/// Describes a classified terminal executable. +/// +public sealed record EntryTracePlan( + ImmutableArray Command, + ImmutableDictionary Environment, + string WorkingDirectory, + string User, + string TerminalPath, + EntryTraceTerminalType Type, + string? Runtime, + double Confidence, + ImmutableDictionary Evidence); + +/// +/// Describes a classified terminal executable. +/// +public sealed record EntryTraceTerminal( + string Path, + EntryTraceTerminalType Type, + string? Runtime, + double Confidence, + ImmutableDictionary Evidence, + string User, + string WorkingDirectory, + ImmutableArray Arguments); + +/// +/// Represents a fallback entrypoint candidate inferred from image metadata or filesystem. +/// +public sealed record EntryTraceCandidate( + ImmutableArray Command, + string Source, + EntryTraceEvidence? Evidence, + string? Description); diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/DirectoryRootFileSystem.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/DirectoryRootFileSystem.cs new file mode 100644 index 00000000..a157a89e --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/DirectoryRootFileSystem.cs @@ -0,0 +1,325 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.IO; +using System.Linq; +using System.Text; + +namespace StellaOps.Scanner.EntryTrace.FileSystem; + +/// +/// implementation backed by a single on-disk directory. 
+/// +public sealed class DirectoryRootFileSystem : IRootFileSystem +{ + private readonly DirectoryInfo _root; + + public DirectoryRootFileSystem(string rootPath) + { + ArgumentException.ThrowIfNullOrWhiteSpace(rootPath); + + var fullPath = Path.GetFullPath(rootPath); + _root = new DirectoryInfo(fullPath); + if (!_root.Exists) + { + throw new DirectoryNotFoundException($"Root directory '{fullPath}' does not exist."); + } + } + + public bool TryResolveExecutable(string name, IReadOnlyList searchPaths, out RootFileDescriptor descriptor) + { + descriptor = null!; + + if (string.IsNullOrWhiteSpace(name)) + { + return false; + } + + if (name.Contains('/', StringComparison.Ordinal)) + { + return TryResolveExecutableByPath(name, out descriptor); + } + + if (searchPaths is null) + { + return false; + } + + foreach (var searchPath in searchPaths) + { + if (string.IsNullOrWhiteSpace(searchPath)) + { + continue; + } + + var candidate = Combine(searchPath, name); + if (TryResolveExecutableByPath(candidate, out descriptor)) + { + return true; + } + } + + return false; + } + + public bool TryReadAllText(string path, out RootFileDescriptor descriptor, out string content) + { + if (!TryResolveFile(path, out descriptor, out var fullPath)) + { + content = string.Empty; + return false; + } + + try + { + content = File.ReadAllText(fullPath); + return true; + } + catch + { + content = string.Empty; + return false; + } + } + + public bool TryReadBytes(string path, int maxBytes, out RootFileDescriptor descriptor, out ReadOnlyMemory content) + { + if (!TryResolveFile(path, out descriptor, out var fullPath)) + { + content = ReadOnlyMemory.Empty; + return false; + } + + try + { + var fileInfo = new FileInfo(fullPath); + if (maxBytes > 0 && fileInfo.Length > maxBytes) + { + content = ReadOnlyMemory.Empty; + return false; + } + + var buffer = File.ReadAllBytes(fullPath); + content = new ReadOnlyMemory(buffer); + return true; + } + catch + { + content = ReadOnlyMemory.Empty; + return false; + 
} + } + + public ImmutableArray EnumerateDirectory(string path) + { + var normalized = Normalize(path); + var fullPath = GetFullPath(normalized, allowDirectory: true); + if (!Directory.Exists(fullPath)) + { + return ImmutableArray.Empty; + } + + var builder = ImmutableArray.CreateBuilder(); + foreach (var entry in Directory.EnumerateFileSystemEntries(fullPath)) + { + var isDirectory = Directory.Exists(entry); + builder.Add(CreateDescriptor(entry, isDirectory)); + } + + return builder.ToImmutable().Sort(static (left, right) => string.CompareOrdinal(left.Path, right.Path)); + } + + public bool DirectoryExists(string path) + { + var normalized = Normalize(path); + var fullPath = GetFullPath(normalized, allowDirectory: true); + return Directory.Exists(fullPath); + } + + private bool TryResolveExecutableByPath(string path, out RootFileDescriptor descriptor) + { + if (!TryResolveFile(path, out descriptor, out _)) + { + return false; + } + + return descriptor.IsExecutable; + } + + private bool TryResolveFile(string path, out RootFileDescriptor descriptor, out string fullPath) + { + descriptor = null!; + fullPath = string.Empty; + + var normalized = Normalize(path); + fullPath = GetFullPath(normalized); + + if (!File.Exists(fullPath)) + { + return false; + } + + descriptor = CreateDescriptor(fullPath, isDirectory: false); + return true; + } + + private RootFileDescriptor CreateDescriptor(string fullPath, bool isDirectory) + { + var relative = ToRelativePath(fullPath); + + if (isDirectory) + { + return new RootFileDescriptor(relative, null, false, true, null); + } + + var info = new FileInfo(fullPath); + var executable = InferExecutable(info); + var shebang = ExtractShebang(fullPath); + return new RootFileDescriptor(relative, null, executable, false, shebang); + } + + private string GetFullPath(string normalizedPath, bool allowDirectory = false) + { + var relative = normalizedPath.TrimStart('/'); + var combined = Path.GetFullPath(Path.Combine(_root.FullName, 
relative.Replace('/', Path.DirectorySeparatorChar))); + if (!combined.StartsWith(_root.FullName, StringComparison.Ordinal)) + { + throw new InvalidOperationException($"Path '{normalizedPath}' escapes the root directory."); + } + + if (!allowDirectory && Directory.Exists(combined)) + { + throw new InvalidOperationException($"Path '{normalizedPath}' refers to a directory; a file was expected."); + } + + return combined; + } + + private static string Normalize(string? path) + { + if (string.IsNullOrWhiteSpace(path)) + { + return "/"; + } + + var text = path.Replace('\\', '/').Trim(); + if (!text.StartsWith("/", StringComparison.Ordinal)) + { + text = "/" + text; + } + + var segments = new Stack(); + foreach (var segment in text.Split('/', StringSplitOptions.RemoveEmptyEntries)) + { + if (segment == ".") + { + continue; + } + + if (segment == "..") + { + if (segments.Count > 0) + { + segments.Pop(); + } + + continue; + } + + segments.Push(segment); + } + + if (segments.Count == 0) + { + return "/"; + } + + var builder = new StringBuilder(); + foreach (var segment in segments.Reverse()) + { + builder.Append('/').Append(segment); + } + + return builder.ToString(); + } + + private static string Combine(string basePath, string name) + { + var normalizedBase = Normalize(basePath); + if (normalizedBase == "/") + { + return "/" + name; + } + + return normalizedBase.EndsWith("/", StringComparison.Ordinal) + ? 
normalizedBase + name + : normalizedBase + "/" + name; + } + + private string ToRelativePath(string fullPath) + { + var relative = Path.GetRelativePath(_root.FullName, fullPath) + .Replace(Path.DirectorySeparatorChar, '/') + .Replace(Path.AltDirectorySeparatorChar, '/'); + + if (!relative.StartsWith("/", StringComparison.Ordinal)) + { + relative = "/" + relative; + } + + return relative; + } + + private static bool InferExecutable(FileInfo info) + { + if (OperatingSystem.IsWindows()) + { + var extension = info.Extension.ToLowerInvariant(); + return extension is ".exe" or ".bat" or ".cmd" or ".com" or ".ps1" or ".sh" or ".py" or ".rb" or ".js"; + } + + try + { +#if NET8_0_OR_GREATER + var mode = File.GetUnixFileMode(info.FullName); + return mode.HasFlag(UnixFileMode.UserExecute) || + mode.HasFlag(UnixFileMode.GroupExecute) || + mode.HasFlag(UnixFileMode.OtherExecute); +#else + return true; +#endif + } + catch + { + return true; + } + } + + private static string? ExtractShebang(string fullPath) + { + try + { + using var stream = new FileStream(fullPath, FileMode.Open, FileAccess.Read, FileShare.Read); + Span buffer = stackalloc byte[256]; + var read = stream.Read(buffer); + if (read < 2) + { + return null; + } + + if (buffer[0] != (byte)'#' || buffer[1] != (byte)'!') + { + return null; + } + + var text = Encoding.UTF8.GetString(buffer[..read]); + var newlineIndex = text.IndexOfAny(new[] { '\r', '\n' }); + var shebang = newlineIndex >= 0 ? 
text[2..newlineIndex] : text[2..]; + return shebang.Trim(); + } + catch + { + return null; + } + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/IRootFileSystem.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/IRootFileSystem.cs index 319e0643..06143be8 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/IRootFileSystem.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/IRootFileSystem.cs @@ -1,6 +1,6 @@ using System.Collections.Immutable; -namespace StellaOps.Scanner.EntryTrace; +namespace StellaOps.Scanner.EntryTrace.FileSystem; /// /// Represents a layered read-only filesystem snapshot built from container layers. @@ -12,15 +12,20 @@ public interface IRootFileSystem /// bool TryResolveExecutable(string name, IReadOnlyList searchPaths, out RootFileDescriptor descriptor); - /// - /// Attempts to read the contents of a file as UTF-8 text. - /// - bool TryReadAllText(string path, out RootFileDescriptor descriptor, out string content); - - /// - /// Returns descriptors for entries contained within a directory. - /// - ImmutableArray EnumerateDirectory(string path); + /// + /// Attempts to read the contents of a file as UTF-8 text. + /// + bool TryReadAllText(string path, out RootFileDescriptor descriptor, out string content); + + /// + /// Attempts to read up to bytes from the file. + /// + bool TryReadBytes(string path, int maxBytes, out RootFileDescriptor descriptor, out ReadOnlyMemory content); + + /// + /// Returns descriptors for entries contained within a directory. + /// + ImmutableArray EnumerateDirectory(string path); /// /// Checks whether a directory exists. 
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/LayeredRootFileSystem.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/LayeredRootFileSystem.cs index e86144bf..975be528 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/LayeredRootFileSystem.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/LayeredRootFileSystem.cs @@ -7,7 +7,7 @@ using System.Text; using System.Threading; using IOPath = System.IO.Path; -namespace StellaOps.Scanner.EntryTrace; +namespace StellaOps.Scanner.EntryTrace.FileSystem; /// /// Represents an backed by OCI image layers. @@ -15,7 +15,7 @@ namespace StellaOps.Scanner.EntryTrace; public sealed class LayeredRootFileSystem : IRootFileSystem { private const int MaxSymlinkDepth = 32; - private const int MaxCachedTextBytes = 1_048_576; // 1 MiB + private const int MaxCachedBytes = 1_048_576; // 1 MiB private readonly ImmutableDictionary _entries; @@ -118,14 +118,33 @@ public sealed class LayeredRootFileSystem : IRootFileSystem return false; } - descriptor = entry.ToDescriptor(resolvedPath); - return true; - } - - public ImmutableArray EnumerateDirectory(string path) - { - var normalizedDirectory = NormalizeDirectory(path); - var results = ImmutableArray.CreateBuilder(); + descriptor = entry.ToDescriptor(resolvedPath); + return true; + } + + public bool TryReadBytes(string path, int maxBytes, out RootFileDescriptor descriptor, out ReadOnlyMemory content) + { + descriptor = null!; + content = default; + + if (!TryResolveFile(path, out var entry, out var resolvedPath)) + { + return false; + } + + if (!entry.TryReadBytes(maxBytes, out content)) + { + return false; + } + + descriptor = entry.ToDescriptor(resolvedPath); + return true; + } + + public ImmutableArray EnumerateDirectory(string path) + { + var normalizedDirectory = NormalizeDirectory(path); + var results = ImmutableArray.CreateBuilder(); foreach (var entry in _entries.Values) { @@ -362,7 +381,7 @@ 
public sealed class LayeredRootFileSystem : IRootFileSystem var isExecutable = InferExecutable(entryPath, attributes); var contentProvider = FileContentProvider.FromFile(entryPath); - var shebang = ExtractShebang(contentProvider.Peek(MaxCachedTextBytes)); + var shebang = ExtractShebang(contentProvider.Peek(MaxCachedBytes)); EnsureAncestry(normalized, layer.Digest); _entries[normalized] = FileEntry.File( @@ -409,7 +428,7 @@ public sealed class LayeredRootFileSystem : IRootFileSystem case TarEntryType.ContiguousFile: { var contentProvider = FileContentProvider.FromTarEntry(entry); - var preview = contentProvider.Peek(MaxCachedTextBytes); + var preview = contentProvider.Peek(MaxCachedBytes); var shebang = ExtractShebang(preview); var isExecutable = InferExecutable(entry); @@ -661,16 +680,27 @@ public sealed class LayeredRootFileSystem : IRootFileSystem Kind == FileEntryKind.Directory, Shebang); - public bool TryReadText(out string content) - { - if (Kind != FileEntryKind.File || _content is null) - { - content = string.Empty; - return false; - } - - return _content.TryRead(out content); - } + public bool TryReadText(out string content) + { + if (Kind != FileEntryKind.File || _content is null) + { + content = string.Empty; + return false; + } + + return _content.TryReadText(out content); + } + + public bool TryReadBytes(int maxBytes, out ReadOnlyMemory bytes) + { + if (Kind != FileEntryKind.File || _content is null) + { + bytes = default; + return false; + } + + return _content.TryReadBytes(maxBytes, out bytes); + } public static FileEntry File( string path, @@ -698,74 +728,191 @@ public sealed class LayeredRootFileSystem : IRootFileSystem Symlink } - private sealed class FileContentProvider - { - private readonly Func _factory; - private readonly Lazy _cached; - - private FileContentProvider(Func factory) - { - _factory = factory; - _cached = new Lazy(() => _factory(), LazyThreadSafetyMode.ExecutionAndPublication); - } - - public static FileContentProvider 
FromFile(string path) - => new(() => - { - try - { - return File.ReadAllText(path); - } - catch - { - return null; - } - }); - - public static FileContentProvider FromTarEntry(TarEntry entry) - { - return new FileContentProvider(() => - { - using var stream = new MemoryStream(); - entry.DataStream?.CopyTo(stream); - if (stream.Length > MaxCachedTextBytes) - { - return null; - } - - stream.Position = 0; - using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, leaveOpen: true); - return reader.ReadToEnd(); - }); - } - - public string? Peek(int maxBytes) - { - var content = _cached.Value; - if (content is null) - { - return null; - } - - if (content.Length * sizeof(char) <= maxBytes) - { - return content; - } - - return content[..Math.Min(content.Length, maxBytes / sizeof(char))]; - } - - public bool TryRead(out string content) - { - var value = _cached.Value; - if (value is null) - { - content = string.Empty; - return false; - } - - content = value; - return true; - } - } -} + private sealed class FileContentProvider + { + private readonly Func? _binaryFactory; + private readonly Func? _textFactory; + private readonly Lazy _cachedBytes; + private readonly Lazy _cachedText; + + private FileContentProvider( + Func? binaryFactory, + Func? 
textFactory) + { + _binaryFactory = binaryFactory; + _textFactory = textFactory; + _cachedBytes = new Lazy(() => _binaryFactory?.Invoke(), LazyThreadSafetyMode.ExecutionAndPublication); + _cachedText = new Lazy(() => + { + if (_textFactory is not null) + { + return _textFactory(); + } + + var bytes = _cachedBytes.Value; + if (bytes is null) + { + return null; + } + + try + { + return Encoding.UTF8.GetString(bytes); + } + catch + { + return null; + } + }, LazyThreadSafetyMode.ExecutionAndPublication); + } + + public static FileContentProvider FromFile(string path) + { + return new FileContentProvider( + () => ReadFileBytes(path, MaxCachedBytes), + () => + { + try + { + return File.ReadAllText(path); + } + catch + { + return null; + } + }); + } + + public static FileContentProvider FromTarEntry(TarEntry entry) + { + byte[]? cached = null; + + return new FileContentProvider( + () => + { + if (cached is not null) + { + return cached; + } + + cached = ReadTarEntryBytes(entry, MaxCachedBytes); + return cached; + }, + () => + { + if (cached is null) + { + cached = ReadTarEntryBytes(entry, MaxCachedBytes); + } + + if (cached is null) + { + return null; + } + + try + { + return Encoding.UTF8.GetString(cached); + } + catch + { + return null; + } + }); + } + + public string? 
Peek(int maxBytes) + { + var text = _cachedText.Value; + if (text is null) + { + return null; + } + + if (text.Length * sizeof(char) <= maxBytes) + { + return text; + } + + return text[..Math.Min(text.Length, maxBytes / sizeof(char))]; + } + + public bool TryReadText(out string content) + { + var text = _cachedText.Value; + if (text is null) + { + content = string.Empty; + return false; + } + + content = text; + return true; + } + + public bool TryReadBytes(int maxBytes, out ReadOnlyMemory bytes) + { + var data = _cachedBytes.Value; + if (data is null || data.Length == 0) + { + bytes = default; + return false; + } + + var length = Math.Min(maxBytes, data.Length); + bytes = new ReadOnlyMemory(data, 0, length); + return true; + } + + private static byte[]? ReadFileBytes(string path, int maxBytes) + { + try + { + using var stream = File.OpenRead(path); + return ReadStreamWithLimit(stream, maxBytes); + } + catch + { + return null; + } + } + + private static byte[]? ReadTarEntryBytes(TarEntry entry, int maxBytes) + { + if (entry.DataStream is null) + { + return null; + } + + try + { + entry.DataStream.Position = 0; + return ReadStreamWithLimit(entry.DataStream, maxBytes); + } + catch + { + return null; + } + } + + private static byte[]? 
ReadStreamWithLimit(Stream stream, int maxBytes) + { + using var buffer = new MemoryStream(); + var remaining = maxBytes; + var temp = new byte[8192]; + int read; + while (remaining > 0 && (read = stream.Read(temp, 0, Math.Min(temp.Length, remaining))) > 0) + { + buffer.Write(temp, 0, read); + remaining -= read; + } + + if (buffer.Length == 0 && stream.ReadByte() == -1) + { + return Array.Empty(); + } + + return buffer.ToArray(); + } + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Oci/OciImageConfig.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Oci/OciImageConfig.cs index 7f964f13..219ed432 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Oci/OciImageConfig.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Oci/OciImageConfig.cs @@ -8,14 +8,17 @@ namespace StellaOps.Scanner.EntryTrace; /// /// Represents the deserialized OCI image config document. /// -internal sealed class OciImageConfiguration -{ - [JsonPropertyName("config")] - public OciImageConfig? Config { get; init; } - - [JsonPropertyName("container_config")] - public OciImageConfig? ContainerConfig { get; init; } -} +internal sealed class OciImageConfiguration +{ + [JsonPropertyName("config")] + public OciImageConfig? Config { get; init; } + + [JsonPropertyName("container_config")] + public OciImageConfig? ContainerConfig { get; init; } + + [JsonPropertyName("history")] + public ImmutableArray History { get; init; } = ImmutableArray.Empty; +} /// /// Logical representation of the OCI image config fields used by EntryTrace. @@ -34,12 +37,15 @@ public sealed class OciImageConfig [JsonConverter(typeof(FlexibleStringListConverter))] public ImmutableArray Command { get; init; } = ImmutableArray.Empty; - [JsonPropertyName("WorkingDir")] - public string? WorkingDirectory { get; init; } - - [JsonPropertyName("User")] - public string? User { get; init; } -} + [JsonPropertyName("WorkingDir")] + public string? 
WorkingDirectory { get; init; } + + [JsonPropertyName("User")] + public string? User { get; init; } + + [JsonIgnore] + public ImmutableArray History { get; init; } = ImmutableArray.Empty; +} /// /// Loads instances from OCI config JSON. @@ -65,19 +71,20 @@ public static class OciImageConfigLoader var configuration = JsonSerializer.Deserialize(stream, SerializerOptions) ?? throw new InvalidDataException("OCI image config is empty or invalid."); - if (configuration.Config is not null) - { - return configuration.Config; - } - - if (configuration.ContainerConfig is not null) - { - return configuration.ContainerConfig; - } - - throw new InvalidDataException("OCI image config does not include a config section."); - } -} + var baseConfig = configuration.Config ?? configuration.ContainerConfig + ?? throw new InvalidDataException("OCI image config does not include a config section."); + + return new OciImageConfig + { + Environment = baseConfig.Environment, + Entrypoint = baseConfig.Entrypoint, + Command = baseConfig.Command, + WorkingDirectory = baseConfig.WorkingDirectory, + User = baseConfig.User, + History = configuration.History + }; + } +} internal sealed class FlexibleStringListConverter : JsonConverter> { @@ -116,14 +123,18 @@ internal sealed class FlexibleStringListConverter : JsonConverter value, JsonSerializerOptions options) - { - writer.WriteStartArray(); - foreach (var entry in value) - { - writer.WriteStringValue(entry); - } - - writer.WriteEndArray(); - } -} + public override void Write(Utf8JsonWriter writer, ImmutableArray value, JsonSerializerOptions options) + { + writer.WriteStartArray(); + foreach (var entry in value) + { + writer.WriteStringValue(entry); + } + + writer.WriteEndArray(); + } +} + +public sealed record OciHistoryEntry( + [property: JsonPropertyName("created_by")] string? 
CreatedBy, + [property: JsonPropertyName("empty_layer")] bool EmptyLayer); diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/EntryTraceRuntimeReconciler.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/EntryTraceRuntimeReconciler.cs new file mode 100644 index 00000000..fb08019b --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/EntryTraceRuntimeReconciler.cs @@ -0,0 +1,321 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.IO; +using System.Linq; + +namespace StellaOps.Scanner.EntryTrace.Runtime; + +public sealed class EntryTraceRuntimeReconciler +{ + private static readonly HashSet WrapperNames = new(StringComparer.OrdinalIgnoreCase) + { + "tini", + "tini-static", + "tini64", + "dumb-init", + "dumbinit", + "gosu", + "su-exec", + "chpst", + "s6-supervise", + "s6-svscan", + "s6-svscanctl", + "s6-rc-init", + "runsv", + "runsvdir", + "supervisord", + "sh", + "bash", + "dash", + "ash", + "env" + }; + + public EntryTraceGraph Reconcile(EntryTraceGraph graph, ProcGraph? 
procGraph) + { + if (graph is null) + { + throw new ArgumentNullException(nameof(graph)); + } + + var diagnostics = graph.Diagnostics.ToBuilder(); + diagnostics.RemoveAll(d => + d.Reason is EntryTraceUnknownReason.RuntimeSnapshotUnavailable + or EntryTraceUnknownReason.RuntimeProcessNotFound + or EntryTraceUnknownReason.RuntimeMatch + or EntryTraceUnknownReason.RuntimeMismatch); + + if (procGraph is null || procGraph.Processes.Count == 0) + { + diagnostics.Add(new EntryTraceDiagnostic( + EntryTraceDiagnosticSeverity.Info, + EntryTraceUnknownReason.RuntimeSnapshotUnavailable, + "Runtime process snapshot unavailable; static confidence retained.", + Span: null, + RelatedPath: null)); + + return graph with { Diagnostics = diagnostics.ToImmutable() }; + } + + var runtimeTerminals = BuildRuntimeTerminals(procGraph); + if (runtimeTerminals.Length == 0) + { + diagnostics.Add(new EntryTraceDiagnostic( + EntryTraceDiagnosticSeverity.Warning, + EntryTraceUnknownReason.RuntimeProcessNotFound, + "Runtime process snapshot did not reveal a terminal executable.", + Span: null, + RelatedPath: null)); + + return graph with { Diagnostics = diagnostics.ToImmutable() }; + } + + var planBuilder = ImmutableArray.CreateBuilder(graph.Plans.Length); + var terminalBuilder = ImmutableArray.CreateBuilder(graph.Terminals.Length); + var terminalIndexMap = BuildTerminalIndexMap(graph.Terminals, terminalBuilder); + + foreach (var plan in graph.Plans) + { + var match = SelectBestRuntimeMatch(plan.TerminalPath, runtimeTerminals); + var confidence = EvaluateConfidence(plan.TerminalPath, match?.Path); + + planBuilder.Add(plan with { Confidence = confidence.Score }); + + if (terminalIndexMap.TryGetValue(plan.TerminalPath, out var indices) && indices.Count > 0) + { + var index = indices.Dequeue(); + terminalBuilder[index] = terminalBuilder[index] with { Confidence = confidence.Score }; + } + + diagnostics.Add(BuildDiagnostic(confidence, plan.TerminalPath)); + } + + // Update any terminals that were not 
tied to plans. + for (var i = 0; i < terminalBuilder.Count; i++) + { + if (terminalBuilder[i].Confidence > 0) + { + continue; + } + + var terminal = terminalBuilder[i]; + var match = SelectBestRuntimeMatch(terminal.Path, runtimeTerminals); + var confidence = EvaluateConfidence(terminal.Path, match?.Path); + terminalBuilder[i] = terminal with { Confidence = confidence.Score }; + } + + return graph with + { + Plans = planBuilder.ToImmutable(), + Terminals = terminalBuilder.ToImmutable(), + Diagnostics = diagnostics.ToImmutable() + }; + } + + private static ImmutableArray BuildRuntimeTerminals(ProcGraph graph) + { + var allCandidates = new List(); + + foreach (var process in graph.Processes.Values + .OrderBy(p => p.StartTimeTicks == 0 ? ulong.MaxValue : p.StartTimeTicks) + .ThenBy(p => p.Pid)) + { + var normalizedPath = NormalizeRuntimePath(process); + if (string.IsNullOrWhiteSpace(normalizedPath)) + { + continue; + } + + allCandidates.Add(new RuntimeTerminal(normalizedPath, process)); + } + + if (allCandidates.Count == 0) + { + return ImmutableArray.Empty; + } + + var preferred = allCandidates.Where(candidate => !IsWrapper(candidate.Process)).ToList(); + var source = preferred.Count > 0 ? preferred : allCandidates; + + var unique = new Dictionary(StringComparer.Ordinal); + foreach (var candidate in source) + { + if (!unique.ContainsKey(candidate.Path)) + { + unique[candidate.Path] = candidate; + } + } + + return unique.Values.OrderBy(candidate => candidate.Process.StartTimeTicks == 0 ? ulong.MaxValue : candidate.Process.StartTimeTicks) + .ThenBy(candidate => candidate.Process.Pid) + .ToImmutableArray(); + } + + private static RuntimeTerminal? SelectBestRuntimeMatch(string predictedPath, ImmutableArray runtimeTerminals) + { + if (runtimeTerminals.IsDefaultOrEmpty) + { + return null; + } + + RuntimeTerminal? 
best = null; + double bestScore = double.MinValue; + + foreach (var runtime in runtimeTerminals) + { + var (score, _, _) = EvaluateConfidence(predictedPath, runtime.Path); + if (score > bestScore) + { + bestScore = score; + best = runtime; + if (score >= 95d) + { + break; + } + } + } + + return best; + } + + private static Dictionary> BuildTerminalIndexMap( + ImmutableArray terminals, + ImmutableArray.Builder terminalBuilder) + { + var map = new Dictionary>(StringComparer.Ordinal); + for (var i = 0; i < terminals.Length; i++) + { + var terminal = terminals[i]; + terminalBuilder.Add(terminal); + + if (!map.TryGetValue(terminal.Path, out var indices)) + { + indices = new Queue(); + map[terminal.Path] = indices; + } + + indices.Enqueue(i); + } + + return map; + } + + private static ConfidenceResult EvaluateConfidence(string predictedPath, string? runtimePath) + { + if (string.IsNullOrWhiteSpace(runtimePath)) + { + return new ConfidenceResult(60d, ConfidenceLevel.Low, string.Empty); + } + + var normalizedPredicted = NormalizeComparisonPath(predictedPath); + var normalizedRuntime = NormalizeComparisonPath(runtimePath); + + if (string.Equals(normalizedPredicted, normalizedRuntime, StringComparison.Ordinal)) + { + return new ConfidenceResult(95d, ConfidenceLevel.High, runtimePath); + } + + var predictedName = Path.GetFileName(normalizedPredicted); + var runtimeName = Path.GetFileName(normalizedRuntime); + + if (!string.IsNullOrEmpty(predictedName) && + string.Equals(predictedName, runtimeName, StringComparison.Ordinal)) + { + return new ConfidenceResult(90d, ConfidenceLevel.High, runtimePath); + } + + if (!string.IsNullOrEmpty(predictedName) && + string.Equals(predictedName, runtimeName, StringComparison.OrdinalIgnoreCase)) + { + return new ConfidenceResult(80d, ConfidenceLevel.Medium, runtimePath); + } + + return new ConfidenceResult(60d, ConfidenceLevel.Low, runtimePath); + } + + private static EntryTraceDiagnostic BuildDiagnostic(ConfidenceResult result, string 
predictedPath) + { + var runtimePath = string.IsNullOrWhiteSpace(result.RuntimePath) ? "" : result.RuntimePath; + var severity = result.Level == ConfidenceLevel.High + ? EntryTraceDiagnosticSeverity.Info + : EntryTraceDiagnosticSeverity.Warning; + var reason = result.Level == ConfidenceLevel.High + ? EntryTraceUnknownReason.RuntimeMatch + : EntryTraceUnknownReason.RuntimeMismatch; + var message = result.Level == ConfidenceLevel.High + ? $"Runtime process '{runtimePath}' matches EntryTrace prediction '{predictedPath}'." + : $"Runtime process '{runtimePath}' diverges from EntryTrace prediction '{predictedPath}'."; + + return new EntryTraceDiagnostic( + severity, + reason, + message, + Span: null, + RelatedPath: string.IsNullOrWhiteSpace(result.RuntimePath) ? null : result.RuntimePath); + } + + private static bool IsWrapper(ProcProcess process) + { + var command = GetCommandName(process); + return command.Length > 0 && WrapperNames.Contains(command); + } + + private static string GetCommandName(ProcProcess process) + { + if (!string.IsNullOrWhiteSpace(process.CommandName)) + { + return process.CommandName; + } + + if (!process.CommandLine.IsDefaultOrEmpty && process.CommandLine.Length > 0) + { + return Path.GetFileName(process.CommandLine[0]); + } + + if (!string.IsNullOrWhiteSpace(process.ExecutablePath)) + { + return Path.GetFileName(process.ExecutablePath); + } + + return string.Empty; + } + + private static string NormalizeRuntimePath(ProcProcess process) + { + if (!string.IsNullOrWhiteSpace(process.ExecutablePath)) + { + return process.ExecutablePath; + } + + if (!process.CommandLine.IsDefaultOrEmpty && process.CommandLine.Length > 0) + { + return process.CommandLine[0]; + } + + return process.CommandName; + } + + private static string NormalizeComparisonPath(string? 
path)
    {
        // Trim and canonicalize separators so Windows-style paths compare
        // equal to their POSIX form.
        if (string.IsNullOrWhiteSpace(path))
        {
            return string.Empty;
        }

        return path.Trim().Replace('\\', '/');
    }

    private readonly record struct RuntimeTerminal(string Path, ProcProcess Process);

    private readonly record struct ConfidenceResult(double Score, ConfidenceLevel Level, string RuntimePath);

    private enum ConfidenceLevel
    {
        High,
        Medium,
        Low
    }
}
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcFileSystemSnapshot.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcFileSystemSnapshot.cs
new file mode 100644
index 00000000..53b866f5
--- /dev/null
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcFileSystemSnapshot.cs
@@ -0,0 +1,230 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;

namespace StellaOps.Scanner.EntryTrace.Runtime;

/// <summary>
/// Reads a /proc-style filesystem into <see cref="ProcProcess"/> records.
/// The root path is injectable so tests can point at a fixture tree instead
/// of the live /proc. All parse failures degrade to "process unreadable".
/// </summary>
public sealed class ProcFileSystemSnapshot : IProcSnapshotProvider
{
    private readonly string _rootPath;

    public ProcFileSystemSnapshot(string? rootPath = null)
    {
        _rootPath = string.IsNullOrWhiteSpace(rootPath) ? "/proc" : rootPath;
    }

    /// <summary>
    /// Yields the numeric directory names under the root, in ordinal order for
    /// determinism. A missing root (e.g. non-Linux host) yields nothing.
    /// </summary>
    public IEnumerable<int> EnumerateProcessIds()
    {
        if (!Directory.Exists(_rootPath))
        {
            yield break;
        }

        foreach (var directory in Directory.EnumerateDirectories(_rootPath).OrderBy(d => d, StringComparer.Ordinal))
        {
            var name = Path.GetFileName(directory);
            if (int.TryParse(name, NumberStyles.Integer, CultureInfo.InvariantCulture, out var pid))
            {
                yield return pid;
            }
        }
    }

    /// <summary>
    /// Reads stat/cmdline/exe for <paramref name="pid"/>. Returns false when
    /// the process vanished or is unreadable; never throws (races with process
    /// exit are expected when walking /proc).
    /// </summary>
    public bool TryReadProcess(int pid, out ProcProcess process)
    {
        process = null!;
        try
        {
            var statPath = Path.Combine(_rootPath, pid.ToString(CultureInfo.InvariantCulture), "stat");
            var cmdPath = Path.Combine(_rootPath, pid.ToString(CultureInfo.InvariantCulture), "cmdline");
            var exePath = Path.Combine(_rootPath, pid.ToString(CultureInfo.InvariantCulture), "exe");

            if (!File.Exists(statPath))
            {
                return false;
            }

            var statContent = File.ReadAllText(statPath);
            var commandName = ParseCommandName(statContent);
            var parentPid = ParseParentPid(statContent);
            var startTime = ParseStartTime(statContent);
            var commandLine = ReadCommandLine(cmdPath, statContent);
            var executable = ResolveExecutablePath(exePath, commandLine);

            process = new ProcProcess(pid, parentPid, executable, commandLine, commandName, startTime);
            return true;
        }
        catch
        {
            return false;
        }
    }

    // Extracts the comm field, which /proc/[pid]/stat wraps in parentheses.
    // Using first '(' / last ')' tolerates parentheses inside the name itself.
    private static string ParseCommandName(string statContent)
    {
        if (string.IsNullOrWhiteSpace(statContent))
        {
            return string.Empty;
        }

        var openIndex = statContent.IndexOf('(');
        var closeIndex = statContent.LastIndexOf(')');
        if (openIndex >= 0 && closeIndex > openIndex)
        {
            return statContent.Substring(openIndex + 1, closeIndex - openIndex - 1);
        }

        return string.Empty;
    }

    // After the closing ')' the fields are: state(3) ppid(4) ... — so ppid is
    // the second token of the remainder. 0 signals "unknown parent".
    private static int ParseParentPid(string statContent)
    {
        if (string.IsNullOrWhiteSpace(statContent))
        {
            return 0;
        }

        var closeIndex = statContent.LastIndexOf(')');
        if (closeIndex < 0 || closeIndex + 2 >= statContent.Length)
        {
            return 0;
        }

        var after = statContent.Substring(closeIndex + 2);
        var parts = after.Split(' ', StringSplitOptions.RemoveEmptyEntries);
        if (parts.Length < 2)
        {
            return 0;
        }

        return int.TryParse(parts[1], NumberStyles.Integer, CultureInfo.InvariantCulture, out var parentPid)
            ? parentPid
            : 0;
    }

    // starttime is stat field 22 (clock ticks since boot, per proc(5)). The
    // remainder after ')' starts at field 3, so field 22 is index 19 — the
    // recovered patch indexed parts[18] (field 21, itrealvalue), an off-by-one
    // fixed here. The value is only used as an ordering key; 0 = unknown.
    private static ulong ParseStartTime(string statContent)
    {
        if (string.IsNullOrWhiteSpace(statContent))
        {
            return 0;
        }

        var closeIndex = statContent.LastIndexOf(')');
        if (closeIndex < 0 || closeIndex + 2 >= statContent.Length)
        {
            return 0;
        }

        var after = statContent.Substring(closeIndex + 2);
        var parts = after.Split(' ', StringSplitOptions.RemoveEmptyEntries);
        if (parts.Length < 20)
        {
            return 0;
        }

        return ulong.TryParse(parts[19], NumberStyles.Integer, CultureInfo.InvariantCulture, out var startTime)
            ? startTime
            : 0;
    }

    // cmdline is NUL-separated argv; an empty or unreadable file (kernel
    // threads, permission denied) falls back to the comm field from stat.
    private static ImmutableArray<string> ReadCommandLine(string path, string statContent)
    {
        try
        {
            if (File.Exists(path))
            {
                var bytes = File.ReadAllBytes(path);
                if (bytes.Length > 0)
                {
                    var segments = SplitNullTerminated(bytes)
                        .Where(segment => segment.Length > 0)
                        .ToImmutableArray();
                    if (!segments.IsDefaultOrEmpty)
                    {
                        return segments;
                    }
                }
            }
        }
        catch
        {
            // ignore — fall back to the stat-derived command name below
        }

        var openIndex = statContent.IndexOf('(');
        var closeIndex = statContent.LastIndexOf(')');
        if (openIndex >= 0 && closeIndex > openIndex)
        {
            var command = statContent.Substring(openIndex + 1, closeIndex - openIndex - 1);
            return ImmutableArray.Create(command);
        }

        return ImmutableArray<string>.Empty;
    }

    // Resolves /proc/[pid]/exe (a symlink to the real binary); if the link
    // cannot be followed, falls back to an absolute-looking argv[0].
    private static string ResolveExecutablePath(string path, ImmutableArray<string> commandLine)
    {
        try
        {
            if (File.Exists(path))
            {
                try
                {
                    var info = File.ResolveLinkTarget(path, returnFinalTarget: true);
                    if (info is not null)
                    {
                        return info.FullName;
                    }
                }
                catch
                {
                    // fall through to command line fallback
                }

                var fullPath = Path.GetFullPath(path);
                if (!string.IsNullOrWhiteSpace(fullPath))
                {
                    return fullPath;
                }
            }
        }
        catch
        {
            // ignore failures to resolve symlinks
        }

        if (!commandLine.IsDefaultOrEmpty && commandLine[0].Contains('/'))
        {
            return commandLine[0];
        }

        return string.Empty;
    }

    // Splits a NUL-delimited buffer into UTF-8 strings; a trailing segment
    // without a terminator is still yielded.
    private static IEnumerable<string> SplitNullTerminated(byte[] buffer)
    {
        var start = 0;
        for (var i = 0; i < buffer.Length; i++)
        {
            if (buffer[i] == 0)
            {
                var length = i - start;
                if (length > 0)
                {
                    yield return Encoding.UTF8.GetString(buffer, start, length);
                }

                start = i + 1;
            }
        }

        if (start < buffer.Length)
        {
            yield return Encoding.UTF8.GetString(buffer, start, buffer.Length - start);
        }
    }
}
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcGraph.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcGraph.cs
new file mode 100644
index 00000000..b0dd1e79
--- /dev/null
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcGraph.cs
@@ -0,0 +1,8 @@
using System.Collections.Immutable;

namespace StellaOps.Scanner.EntryTrace.Runtime;

// Immutable snapshot of a process tree: the root pid, every process keyed by
// pid, and each parent's children (start-time ordered).
// NOTE(review): dictionary type arguments restored from usage in
// ProcGraphBuilder — TODO confirm against the original commit.
public sealed record ProcGraph(
    int RootPid,
    ImmutableDictionary<int, ProcProcess> Processes,
    ImmutableDictionary<int, ImmutableArray<int>> Children);
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcGraphBuilder.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcGraphBuilder.cs
new file mode 100644
index 00000000..580e91c9
--- /dev/null
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcGraphBuilder.cs
@@ -0,0 +1,104 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;

namespace StellaOps.Scanner.EntryTrace.Runtime;

/// <summary>
/// Abstraction over a /proc reader so ProcGraphBuilder can be driven by
/// fixtures in tests as well as the live filesystem.
/// </summary>
public interface IProcSnapshotProvider
{
    IEnumerable<int> EnumerateProcessIds();

    bool TryReadProcess(int pid, out ProcProcess process);
}

public static class ProcGraphBuilder
{
    public static ProcGraph?
Build(IProcSnapshotProvider provider)
    {
        if (provider is null)
        {
            throw new ArgumentNullException(nameof(provider));
        }

        // Read every readable process; pids are visited in ascending order so
        // the resulting graph is deterministic for a given snapshot.
        var processes = new Dictionary<int, ProcProcess>();
        foreach (var pid in provider.EnumerateProcessIds().OrderBy(pid => pid))
        {
            if (pid <= 0)
            {
                continue;
            }

            if (provider.TryReadProcess(pid, out var process))
            {
                processes[pid] = process;
            }
        }

        // No readable processes at all => no graph (e.g. non-Linux host).
        if (processes.Count == 0)
        {
            return null;
        }

        var rootPid = DetermineRootPid(processes);
        var children = BuildChildren(processes);
        return new ProcGraph(
            rootPid,
            processes.ToImmutableDictionary(pair => pair.Key, pair => pair.Value),
            children);
    }

    // Prefers pid 1 (container init). Otherwise picks the smallest pid whose
    // parent is not in the snapshot (an "orphan" root); if every process has a
    // known parent, falls back to the smallest pid overall.
    private static int DetermineRootPid(Dictionary<int, ProcProcess> processes)
    {
        if (processes.ContainsKey(1))
        {
            return 1;
        }

        var childPids = new HashSet<int>();
        foreach (var process in processes.Values)
        {
            if (processes.ContainsKey(process.ParentPid))
            {
                childPids.Add(process.Pid);
            }
        }

        var rootCandidates = processes.Keys.Where(pid => !childPids.Contains(pid));
        return rootCandidates.OrderBy(pid => pid).FirstOrDefault(processes.Keys.Min());
    }

    // Groups children under each known parent, ordered by start time (unknown
    // start times sort last) and then by pid for determinism.
    private static ImmutableDictionary<int, ImmutableArray<int>> BuildChildren(Dictionary<int, ProcProcess> processes)
    {
        var map = new Dictionary<int, List<int>>(processes.Count);

        foreach (var process in processes.Values)
        {
            // Skip edges whose parent fell outside the snapshot.
            if (!processes.ContainsKey(process.ParentPid))
            {
                continue;
            }

            if (!map.TryGetValue(process.ParentPid, out var list))
            {
                list = new List<int>();
                map[process.ParentPid] = list;
            }

            list.Add(process.Pid);
        }

        return map.ToImmutableDictionary(
            pair => pair.Key,
            pair => pair.Value
                .OrderBy(pid => NormalizeStartTime(processes[pid]))
                .ThenBy(pid => pid)
                .ToImmutableArray());
    }

    // 0 means "start time unknown"; map it to MaxValue so unknowns sort last.
    private static ulong NormalizeStartTime(ProcProcess process)
    {
        return process.StartTimeTicks == 0 ? ulong.MaxValue : process.StartTimeTicks;
    }
}
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcProcess.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcProcess.cs
new file mode 100644
index 00000000..16f48787
--- /dev/null
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Runtime/ProcProcess.cs
@@ -0,0 +1,11 @@
using System.Collections.Immutable;

namespace StellaOps.Scanner.EntryTrace.Runtime;

// One /proc process: identity, resolved binary, argv, comm, and the raw
// starttime tick count (0 = unknown) used only for child ordering.
public sealed record ProcProcess(
    int Pid,
    int ParentPid,
    string ExecutablePath,
    ImmutableArray<string> CommandLine,
    string CommandName,
    ulong StartTimeTicks);
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Serialization/EntryTraceGraphSerializer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Serialization/EntryTraceGraphSerializer.cs
new file mode 100644
index 00000000..fe3a1c46
--- /dev/null
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Serialization/EntryTraceGraphSerializer.cs
@@ -0,0 +1,309 @@
using System.Collections.Immutable;
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Scanner.EntryTrace.Serialization;

/// <summary>
/// Round-trips an EntryTraceGraph through private mutable DTO contracts so
/// the immutable domain records never need JSON attributes. Enums are
/// serialized as strings; nulls are omitted.
/// </summary>
public static class EntryTraceGraphSerializer
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false
    };

    static EntryTraceGraphSerializer()
    {
        SerializerOptions.Converters.Add(new JsonStringEnumConverter());
    }

    public static string Serialize(EntryTraceGraph graph)
    {
        ArgumentNullException.ThrowIfNull(graph);
        var contract = EntryTraceGraphContract.FromGraph(graph);
        return JsonSerializer.Serialize(contract, SerializerOptions);
    }

    public static EntryTraceGraph Deserialize(string json)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(json);
        var contract = JsonSerializer.Deserialize<EntryTraceGraphContract>(json, SerializerOptions)
            ??
throw new InvalidOperationException("Failed to deserialize EntryTrace graph.");
        return contract.ToGraph();
    }

    // ---- private DTO contracts (mutable mirrors of the immutable records) ----
    // NOTE(review): all List<>/Dictionary<> type arguments below were stripped
    // from the recovered patch and have been restored from how each property
    // is consumed; the restorations are mechanical but should be diffed
    // against the original commit.

    private sealed class EntryTraceGraphContract
    {
        public EntryTraceOutcome Outcome { get; set; }
        public List<EntryTraceNodeContract> Nodes { get; set; } = new();
        public List<EntryTraceEdgeContract> Edges { get; set; } = new();
        public List<EntryTraceDiagnosticContract> Diagnostics { get; set; } = new();
        public List<EntryTracePlanContract> Plans { get; set; } = new();
        public List<EntryTraceTerminalContract> Terminals { get; set; } = new();

        public static EntryTraceGraphContract FromGraph(EntryTraceGraph graph)
        {
            return new EntryTraceGraphContract
            {
                Outcome = graph.Outcome,
                Nodes = graph.Nodes.Select(EntryTraceNodeContract.FromNode).ToList(),
                Edges = graph.Edges.Select(EntryTraceEdgeContract.FromEdge).ToList(),
                Diagnostics = graph.Diagnostics.Select(EntryTraceDiagnosticContract.FromDiagnostic).ToList(),
                Plans = graph.Plans.Select(EntryTracePlanContract.FromPlan).ToList(),
                Terminals = graph.Terminals.Select(EntryTraceTerminalContract.FromTerminal).ToList()
            };
        }

        public EntryTraceGraph ToGraph()
        {
            return new EntryTraceGraph(
                Outcome,
                Nodes.Select(n => n.ToNode()).ToImmutableArray(),
                Edges.Select(e => e.ToEdge()).ToImmutableArray(),
                Diagnostics.Select(d => d.ToDiagnostic()).ToImmutableArray(),
                Plans.Select(p => p.ToPlan()).ToImmutableArray(),
                Terminals.Select(t => t.ToTerminal()).ToImmutableArray());
        }
    }

    private sealed class EntryTraceNodeContract
    {
        public int Id { get; set; }
        public EntryTraceNodeKind Kind { get; set; }
        public string DisplayName { get; set; } = string.Empty;
        public List<string> Arguments { get; set; } = new();
        public EntryTraceInterpreterKind InterpreterKind { get; set; }
        public EntryTraceEvidenceContract? Evidence { get; set; }
        public EntryTraceSpanContract? Span { get; set; }
        public Dictionary<string, string>? Metadata { get; set; }

        public static EntryTraceNodeContract FromNode(EntryTraceNode node)
        {
            return new EntryTraceNodeContract
            {
                Id = node.Id,
                Kind = node.Kind,
                DisplayName = node.DisplayName,
                Arguments = node.Arguments.ToList(),
                InterpreterKind = node.InterpreterKind,
                Evidence = node.Evidence is null ? null : EntryTraceEvidenceContract.FromEvidence(node.Evidence),
                Span = node.Span is null ? null : EntryTraceSpanContract.FromSpan(node.Span.Value),
                Metadata = node.Metadata?.ToDictionary(pair => pair.Key, pair => pair.Value, StringComparer.Ordinal)
            };
        }

        public EntryTraceNode ToNode()
        {
            return new EntryTraceNode(
                Id,
                Kind,
                DisplayName,
                Arguments.ToImmutableArray(),
                InterpreterKind,
                Evidence?.ToEvidence(),
                Span?.ToSpan(),
                Metadata is null ? null : Metadata.ToImmutableDictionary(StringComparer.Ordinal));
        }
    }

    private sealed class EntryTraceEdgeContract
    {
        public int From { get; set; }
        public int To { get; set; }
        public string Relationship { get; set; } = string.Empty;
        public Dictionary<string, string>? Metadata { get; set; }

        public static EntryTraceEdgeContract FromEdge(EntryTraceEdge edge)
        {
            return new EntryTraceEdgeContract
            {
                From = edge.FromNodeId,
                To = edge.ToNodeId,
                Relationship = edge.Relationship,
                Metadata = edge.Metadata?.ToDictionary(pair => pair.Key, pair => pair.Value, StringComparer.Ordinal)
            };
        }

        public EntryTraceEdge ToEdge()
        {
            return new EntryTraceEdge(
                From,
                To,
                Relationship,
                Metadata is null ? null : Metadata.ToImmutableDictionary(StringComparer.Ordinal));
        }
    }

    private sealed class EntryTraceDiagnosticContract
    {
        public EntryTraceDiagnosticSeverity Severity { get; set; }
        public EntryTraceUnknownReason Reason { get; set; }
        public string Message { get; set; } = string.Empty;
        public EntryTraceSpanContract? Span { get; set; }
        public string? RelatedPath { get; set; }

        public static EntryTraceDiagnosticContract FromDiagnostic(EntryTraceDiagnostic diagnostic)
        {
            return new EntryTraceDiagnosticContract
            {
                Severity = diagnostic.Severity,
                Reason = diagnostic.Reason,
                Message = diagnostic.Message,
                Span = diagnostic.Span is null ? null : EntryTraceSpanContract.FromSpan(diagnostic.Span.Value),
                RelatedPath = diagnostic.RelatedPath
            };
        }

        public EntryTraceDiagnostic ToDiagnostic()
        {
            return new EntryTraceDiagnostic(
                Severity,
                Reason,
                Message,
                Span?.ToSpan(),
                RelatedPath);
        }
    }

    private sealed class EntryTracePlanContract
    {
        public List<string> Command { get; set; } = new();
        public Dictionary<string, string> Environment { get; set; } = new();
        public string WorkingDirectory { get; set; } = string.Empty;
        public string User { get; set; } = string.Empty;
        public string TerminalPath { get; set; } = string.Empty;
        public EntryTraceTerminalType Type { get; set; }
        public string? Runtime { get; set; }
        public double Confidence { get; set; }
        public Dictionary<string, string> Evidence { get; set; } = new();

        public static EntryTracePlanContract FromPlan(EntryTracePlan plan)
        {
            return new EntryTracePlanContract
            {
                Command = plan.Command.ToList(),
                Environment = plan.Environment.ToDictionary(pair => pair.Key, pair => pair.Value, StringComparer.Ordinal),
                WorkingDirectory = plan.WorkingDirectory,
                User = plan.User,
                TerminalPath = plan.TerminalPath,
                Type = plan.Type,
                Runtime = plan.Runtime,
                Confidence = plan.Confidence,
                Evidence = plan.Evidence.ToDictionary(pair => pair.Key, pair => pair.Value, StringComparer.Ordinal)
            };
        }

        public EntryTracePlan ToPlan()
        {
            return new EntryTracePlan(
                Command.ToImmutableArray(),
                Environment.ToImmutableDictionary(StringComparer.Ordinal),
                WorkingDirectory,
                User,
                TerminalPath,
                Type,
                Runtime,
                Confidence,
                Evidence.ToImmutableDictionary(StringComparer.Ordinal));
        }
    }

    private sealed class EntryTraceTerminalContract
    {
        public string Path { get; set; } = string.Empty;
        public EntryTraceTerminalType Type { get; set; }
        public string? Runtime { get; set; }
        public double Confidence { get; set; }
        public Dictionary<string, string> Evidence { get; set; } = new();
        public string User { get; set; } = string.Empty;
        public string WorkingDirectory { get; set; } = string.Empty;
        public List<string> Arguments { get; set; } = new();

        public static EntryTraceTerminalContract FromTerminal(EntryTraceTerminal terminal)
        {
            return new EntryTraceTerminalContract
            {
                Path = terminal.Path,
                Type = terminal.Type,
                Runtime = terminal.Runtime,
                Confidence = terminal.Confidence,
                Evidence = terminal.Evidence.ToDictionary(pair => pair.Key, pair => pair.Value, StringComparer.Ordinal),
                User = terminal.User,
                WorkingDirectory = terminal.WorkingDirectory,
                Arguments = terminal.Arguments.ToList()
            };
        }

        public EntryTraceTerminal ToTerminal()
        {
            return new EntryTraceTerminal(
                Path,
                Type,
                Runtime,
                Confidence,
                Evidence.ToImmutableDictionary(StringComparer.Ordinal),
                User,
                WorkingDirectory,
                Arguments.ToImmutableArray());
        }
    }

    private sealed class EntryTraceEvidenceContract
    {
        public string Path { get; set; } = string.Empty;
        public string? Layer { get; set; }
        public string? Source { get; set; }
        public Dictionary<string, string>? Metadata { get; set; }

        public static EntryTraceEvidenceContract FromEvidence(EntryTraceEvidence evidence)
        {
            return new EntryTraceEvidenceContract
            {
                Path = evidence.Path,
                Layer = evidence.LayerDigest,
                Source = evidence.Source,
                Metadata = evidence.Metadata?.ToDictionary(pair => pair.Key, pair => pair.Value, StringComparer.Ordinal)
            };
        }

        public EntryTraceEvidence ToEvidence()
        {
            return new EntryTraceEvidence(
                Path,
                Layer,
                Source ?? string.Empty,
                Metadata is null ? null : new Dictionary<string, string>(Metadata, StringComparer.Ordinal));
        }
    }

    private sealed class EntryTraceSpanContract
    {
        public string? Path { get; set; }
        public int StartLine { get; set; }
        public int StartColumn { get; set; }
        public int EndLine { get; set; }
        public int EndColumn { get; set; }

        public static EntryTraceSpanContract FromSpan(EntryTraceSpan span)
        {
            return new EntryTraceSpanContract
            {
                Path = span.Path,
                StartLine = span.StartLine,
                StartColumn = span.StartColumn,
                EndLine = span.EndLine,
                EndColumn = span.EndColumn
            };
        }

        public EntryTraceSpan ToSpan()
        {
            return new EntryTraceSpan(
                Path,
                StartLine,
                StartColumn,
                EndLine,
                EndColumn);
        }
    }
}
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Serialization/EntryTraceNdjsonWriter.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Serialization/EntryTraceNdjsonWriter.cs
new file mode 100644
index 00000000..84279858
--- /dev/null
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Serialization/EntryTraceNdjsonWriter.cs
@@ -0,0 +1,333 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;

namespace StellaOps.Scanner.EntryTrace;

// Per-scan metadata stamped onto the first NDJSON record.
public sealed record EntryTraceNdjsonMetadata(
    string ScanId,
    string ImageDigest,
    DateTimeOffset GeneratedAtUtc,
    string?
Source = null);

/// <summary>
/// Serializes an EntryTraceGraph as one NDJSON line per record
/// (entry/node/edge/target/warning/capability). Every collection is sorted
/// before emission so output is byte-deterministic for a given graph.
/// </summary>
public static class EntryTraceNdjsonWriter
{
    private static readonly JsonWriterOptions WriterOptions = new()
    {
        Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
        Indented = false,
        SkipValidation = false
    };

    /// <summary>
    /// Returns the NDJSON lines (each terminated by "\n") in the fixed order:
    /// entry, nodes, edges, targets, warnings, capabilities.
    /// </summary>
    public static ImmutableArray<string> Serialize(EntryTraceGraph graph, EntryTraceNdjsonMetadata metadata)
    {
        if (graph is null)
        {
            throw new ArgumentNullException(nameof(graph));
        }

        var result = ImmutableArray.CreateBuilder<string>();

        result.Add(BuildLine(writer => WriteEntry(writer, graph, metadata)));

        foreach (var node in graph.Nodes.OrderBy(n => n.Id))
        {
            result.Add(BuildLine(writer => WriteNode(writer, node)));
        }

        foreach (var edge in graph.Edges
            .OrderBy(e => e.FromNodeId)
            .ThenBy(e => e.ToNodeId)
            .ThenBy(e => e.Relationship, StringComparer.Ordinal))
        {
            result.Add(BuildLine(writer => WriteEdge(writer, edge)));
        }

        foreach (var plan in graph.Plans
            .OrderBy(p => p.TerminalPath, StringComparer.Ordinal)
            .ThenBy(p => p.Runtime, StringComparer.Ordinal))
        {
            result.Add(BuildLine(writer => WriteTarget(writer, plan)));
        }

        foreach (var diagnostic in graph.Diagnostics
            .OrderBy(d => d.Severity)
            .ThenBy(d => d.Reason)
            .ThenBy(d => d.Message, StringComparer.Ordinal))
        {
            result.Add(BuildLine(writer => WriteWarning(writer, diagnostic)));
        }

        foreach (var capability in ExtractCapabilities(graph))
        {
            result.Add(BuildLine(writer => WriteCapability(writer, capability)));
        }

        return result.ToImmutable();
    }

    private static void WriteEntry(Utf8JsonWriter writer, EntryTraceGraph graph, EntryTraceNdjsonMetadata metadata)
    {
        writer.WriteStartObject();
        writer.WriteString("type", "entrytrace.entry");
        writer.WriteString("scan_id", metadata.ScanId);
        writer.WriteString("image_digest", metadata.ImageDigest);
        writer.WriteString("outcome", graph.Outcome.ToString().ToLowerInvariant());
        writer.WriteNumber("nodes", graph.Nodes.Length);
        writer.WriteNumber("edges", graph.Edges.Length);
        writer.WriteNumber("targets", graph.Plans.Length);
        writer.WriteNumber("warnings", graph.Diagnostics.Length);
        // Round-trip "O" format keeps the timestamp culture-invariant.
        writer.WriteString("generated_at", metadata.GeneratedAtUtc.UtcDateTime.ToString("O"));
        if (!string.IsNullOrWhiteSpace(metadata.Source))
        {
            writer.WriteString("source", metadata.Source);
        }
        writer.WriteEndObject();
    }

    private static void WriteNode(Utf8JsonWriter writer, EntryTraceNode node)
    {
        writer.WriteStartObject();
        writer.WriteString("type", "entrytrace.node");
        writer.WriteNumber("id", node.Id);
        writer.WriteString("kind", node.Kind.ToString().ToLowerInvariant());
        writer.WriteString("display_name", node.DisplayName);
        writer.WritePropertyName("arguments");
        WriteArray(writer, node.Arguments);
        writer.WriteString("interpreter", node.InterpreterKind.ToString().ToLowerInvariant());
        if (node.Evidence is not null)
        {
            writer.WritePropertyName("evidence");
            WriteEvidence(writer, node.Evidence);
        }
        if (node.Span is not null)
        {
            writer.WritePropertyName("span");
            WriteSpan(writer, node.Span.Value);
        }
        if (node.Metadata is not null && node.Metadata.Count > 0)
        {
            writer.WritePropertyName("metadata");
            WriteDictionary(writer, node.Metadata);
        }
        writer.WriteEndObject();
    }

    private static void WriteEdge(Utf8JsonWriter writer, EntryTraceEdge edge)
    {
        writer.WriteStartObject();
        writer.WriteString("type", "entrytrace.edge");
        writer.WriteNumber("from", edge.FromNodeId);
        writer.WriteNumber("to", edge.ToNodeId);
        writer.WriteString("relationship", edge.Relationship);
        if (edge.Metadata is not null && edge.Metadata.Count > 0)
        {
            writer.WritePropertyName("metadata");
            WriteDictionary(writer, edge.Metadata);
        }
        writer.WriteEndObject();
    }

    private static void WriteTarget(Utf8JsonWriter writer, EntryTracePlan plan)
    {
        writer.WriteStartObject();
        writer.WriteString("type", "entrytrace.target");
        writer.WriteString("path", plan.TerminalPath);
        writer.WriteString("runtime", plan.Runtime);
        writer.WriteString("terminal_type", plan.Type.ToString().ToLowerInvariant());
        // Round to 4 places so float noise cannot break determinism tests.
        writer.WriteNumber("confidence", Math.Round(plan.Confidence, 4));
        writer.WriteString("confidence_level", ConfidenceLevelFromScore(plan.Confidence));
        writer.WriteString("user", plan.User);
        writer.WriteString("working_directory", plan.WorkingDirectory);
        writer.WritePropertyName("arguments");
        WriteArray(writer, plan.Command);
        if (plan.Environment is not null && plan.Environment.Count > 0)
        {
            writer.WritePropertyName("environment");
            WriteDictionary(writer, plan.Environment);
        }
        if (plan.Evidence is not null && plan.Evidence.Count > 0)
        {
            writer.WritePropertyName("evidence");
            WriteDictionary(writer, plan.Evidence);
        }
        writer.WriteEndObject();
    }

    private static void WriteWarning(Utf8JsonWriter writer, EntryTraceDiagnostic diagnostic)
    {
        writer.WriteStartObject();
        writer.WriteString("type", "entrytrace.warning");
        writer.WriteString("severity", diagnostic.Severity.ToString().ToLowerInvariant());
        writer.WriteString("reason", diagnostic.Reason.ToString().ToLowerInvariant());
        writer.WriteString("message", diagnostic.Message);
        if (diagnostic.Span is not null)
        {
            writer.WritePropertyName("span");
            WriteSpan(writer, diagnostic.Span.Value);
        }
        if (!string.IsNullOrEmpty(diagnostic.RelatedPath))
        {
            writer.WriteString("related_path", diagnostic.RelatedPath);
        }
        writer.WriteEndObject();
    }

    private static void WriteCapability(Utf8JsonWriter writer, CapabilitySummary capability)
    {
        writer.WriteStartObject();
        writer.WriteString("type", "entrytrace.capability");
        writer.WriteString("category", capability.Category);
        writer.WriteString("name", capability.Name);
        writer.WriteNumber("occurrences", capability.Count);
        writer.WriteEndObject();
    }

    // Renders one JSON object to a newline-terminated UTF-8 string.
    private static string BuildLine(Action<Utf8JsonWriter> build)
    {
        var buffer = new ArrayBufferWriter<byte>(256);
        using (var writer = new Utf8JsonWriter(buffer, WriterOptions))
        {
            build(writer);
            writer.Flush();
        }

        var json = Encoding.UTF8.GetString(buffer.WrittenSpan);
        return json + "\n";
    }

    private static void WriteArray(Utf8JsonWriter writer, ImmutableArray<string> values)
    {
        writer.WriteStartArray();
        foreach (var value in values)
        {
            writer.WriteStringValue(value);
        }
        writer.WriteEndArray();
    }

    // Keys are emitted in ordinal order for determinism.
    private static void WriteDictionary(Utf8JsonWriter writer, IReadOnlyDictionary<string, string> values)
    {
        writer.WriteStartObject();
        foreach (var kvp in values.OrderBy(kvp => kvp.Key, StringComparer.Ordinal))
        {
            writer.WriteString(kvp.Key, kvp.Value);
        }
        writer.WriteEndObject();
    }

    private static void WriteEvidence(Utf8JsonWriter writer, EntryTraceEvidence evidence)
    {
        writer.WriteStartObject();
        writer.WriteString("path", evidence.Path);
        if (!string.IsNullOrWhiteSpace(evidence.LayerDigest))
        {
            writer.WriteString("layer", evidence.LayerDigest);
        }
        if (!string.IsNullOrWhiteSpace(evidence.Source))
        {
            writer.WriteString("source", evidence.Source);
        }
        if (evidence.Metadata is not null && evidence.Metadata.Count > 0)
        {
            writer.WritePropertyName("metadata");
            WriteDictionary(writer, evidence.Metadata);
        }
        writer.WriteEndObject();
    }

    private static void WriteSpan(Utf8JsonWriter writer, EntryTraceSpan span)
    {
        writer.WriteStartObject();
        if (!string.IsNullOrWhiteSpace(span.Path))
        {
            writer.WriteString("path", span.Path);
        }
        writer.WriteNumber("start_line", span.StartLine);
        writer.WriteNumber("start_column", span.StartColumn);
        writer.WriteNumber("end_line", span.EndLine);
        writer.WriteNumber("end_column", span.EndColumn);
        writer.WriteEndObject();
    }

    // Tallies wrapper.category/wrapper.name occurrences across node and edge
    // metadata, case-insensitively, sorted for deterministic output.
    private static ImmutableArray<CapabilitySummary> ExtractCapabilities(EntryTraceGraph graph)
    {
        var accumulator = new Dictionary<(string Category, string Name), int>(CapabilityKeyComparer.Instance);

        foreach (var metadata in graph.Nodes.Select(n => n.Metadata).Where(m => m is not null))
        {
            AccumulateCapability(accumulator, metadata!);
        }

        foreach (var metadata in graph.Edges.Select(e => e.Metadata).Where(m => m is not null))
        {
            AccumulateCapability(accumulator, metadata!);
        }

        return accumulator
            .OrderBy(kvp => kvp.Key.Category, comparer: StringComparer.Ordinal)
            .ThenBy(kvp => kvp.Key.Name, comparer: StringComparer.Ordinal)
            .Select(kvp => new CapabilitySummary(kvp.Key.Category, kvp.Key.Name, kvp.Value))
            .ToImmutableArray();
    }

    private static void AccumulateCapability(
        IDictionary<(string Category, string Name), int> accumulator,
        IReadOnlyDictionary<string, string> metadata)
    {
        if (!metadata.TryGetValue("wrapper.category", out var category) || string.IsNullOrWhiteSpace(category))
        {
            return;
        }

        // A missing name collapses onto the category so counts still register.
        if (!metadata.TryGetValue("wrapper.name", out var name) || string.IsNullOrWhiteSpace(name))
        {
            name = category;
        }

        var key = (category, name);
        accumulator.TryGetValue(key, out var count);
        accumulator[key] = count + 1;
    }

    private readonly record struct CapabilitySummary(string Category, string Name, int Count);

    // Mirrors the High/Medium/Low bands used by the runtime matcher.
    private static string ConfidenceLevelFromScore(double score)
    {
        if (score >= 90d)
        {
            return "high";
        }

        if (score >= 75d)
        {
            return "medium";
        }

        return "low";
    }

    private sealed class CapabilityKeyComparer : IEqualityComparer<(string Category, string Name)>
    {
        public static CapabilityKeyComparer Instance { get; } = new();

        public bool Equals((string Category, string Name) x, (string Category, string Name) y)
        {
            return StringComparer.OrdinalIgnoreCase.Equals(x.Category, y.Category)
                && StringComparer.OrdinalIgnoreCase.Equals(x.Name, y.Name);
        }

        public int GetHashCode((string Category, string Name) obj)
        {
            var categoryHash = StringComparer.OrdinalIgnoreCase.GetHashCode(obj.Category ?? string.Empty);
            var nameHash = StringComparer.OrdinalIgnoreCase.GetHashCode(obj.Name ?? string.Empty);
            return HashCode.Combine(categoryHash, nameHash);
        }
    }
}
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/ServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/ServiceCollectionExtensions.cs
index b38fd37d..8975fece 100644
--- a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/ServiceCollectionExtensions.cs
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/ServiceCollectionExtensions.cs
@@ -1,29 +1,32 @@
-using Microsoft.Extensions.DependencyInjection;
-using Microsoft.Extensions.DependencyInjection.Extensions;
-using Microsoft.Extensions.Options;
-using StellaOps.Scanner.EntryTrace.Diagnostics;
-
-namespace StellaOps.Scanner.EntryTrace;
-
-public static class ServiceCollectionExtensions
-{
-    public static IServiceCollection AddEntryTraceAnalyzer(this IServiceCollection services, Action<EntryTraceAnalyzerOptions>? configure = null)
-    {
-        if (services is null)
-        {
-            throw new ArgumentNullException(nameof(services));
-        }
-
-        services.AddOptions<EntryTraceAnalyzerOptions>()
-            .BindConfiguration(EntryTraceAnalyzerOptions.SectionName);
-
-        if (configure is not null)
-        {
-            services.Configure(configure);
-        }
-
-        services.TryAddSingleton();
-        services.TryAddSingleton();
-        return services;
-    }
-}
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.EntryTrace.Diagnostics;
using StellaOps.Scanner.EntryTrace.Runtime;

namespace StellaOps.Scanner.EntryTrace;

public static class ServiceCollectionExtensions
{
    public static IServiceCollection AddEntryTraceAnalyzer(this IServiceCollection services, Action<EntryTraceAnalyzerOptions>?
configure = null)
    {
        if (services is null)
        {
            throw new ArgumentNullException(nameof(services));
        }

        services.AddOptions<EntryTraceAnalyzerOptions>()
            .BindConfiguration(EntryTraceAnalyzerOptions.SectionName);

        if (configure is not null)
        {
            services.Configure(configure);
        }

        // NOTE(review): all four TryAddSingleton registrations lost their
        // generic type arguments during patch recovery. The diff adds the
        // Runtime namespace, so one new pair is almost certainly
        // IProcSnapshotProvider -> ProcFileSystemSnapshot; the other three are
        // inferred guesses — restore all four from the original commit before
        // applying.
        services.TryAddSingleton<IEntryTraceAnalyzer, EntryTraceAnalyzer>();       // TODO confirm
        services.TryAddSingleton<EntryTraceMetrics>();                             // TODO confirm
        services.TryAddSingleton<IProcSnapshotProvider, ProcFileSystemSnapshot>(); // TODO confirm
        services.TryAddSingleton<EntryTraceRuntimeReconciler>();                   // TODO confirm
        return services;
    }
}
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/StellaOps.Scanner.EntryTrace.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/StellaOps.Scanner.EntryTrace.csproj
index 479a42e4..0b20f351 100644
--- a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/StellaOps.Scanner.EntryTrace.csproj
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/StellaOps.Scanner.EntryTrace.csproj
@@ -15,5 +15,7 @@
(NOTE(review): this hunk's XML element content was lost during patch recovery
 — angle-bracketed text was stripped. Per the commit message it added the
 package references required by the new Issuer Directory Client / runtime code
 plus a trailing newline; restore the exact PackageReference items from the
 original commit before applying.)
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md
index af68f200..a0d19864 100644
--- a/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md
@@ -2,11 +2,16 @@
 | ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
 |----|--------|----------|------------|-------------|---------------|
-| SCANNER-ENTRYTRACE-18-502 | TODO | EntryTrace Guild | SCANNER-ENTRYTRACE-18-501 | Expand chain walker with init shim/user-switch/supervisor recognition plus env/workdir accumulation and guarded edges. | Graph nodes annotate tini/dumb-init/gosu/su-exec/s6/supervisord/runit branches with capability tags, environment deltas, and guard metadata validated against fixture scripts. |
-| SCANNER-ENTRYTRACE-18-503 | TODO | EntryTrace Guild | SCANNER-ENTRYTRACE-18-502 | Introduce target classifier + EntryPlan handoff with confidence scoring for ELF/Java/.NET/Node/Python and user/workdir context. | Analyzer returns typed targets with confidence metrics and per-branch EntryPlans exercised via golden fixtures and language analyzer stubs. |
-| SCANNER-ENTRYTRACE-18-504 | TODO | EntryTrace Guild | SCANNER-ENTRYTRACE-18-503 | Emit EntryTrace AOC NDJSON (`entrytrace.entry/node/edge/target/warning/capability`) and wire CLI/service streaming outputs. | NDJSON writer passes determinism tests, CLI/service endpoints stream ordered observations, and diagnostics integrate new warning codes for dynamic eval/glob limits/windows shims. |
-| ENTRYTRACE-SURFACE-01 | TODO | EntryTrace Guild | SURFACE-VAL-02, SURFACE-FS-02 | Run Surface.Validation prereq checks and resolve cached entry fragments via Surface.FS to avoid duplicate parsing. | EntryTrace performance metrics show reuse; regression tests updated; validation errors surfaced consistently. |
-| ENTRYTRACE-SURFACE-02 | TODO | EntryTrace Guild | SURFACE-SECRETS-02 | Replace direct env/secret access with Surface.Secrets provider when tracing runtime configs. | Shared provider used; failure modes covered; documentation refreshed. |
+| SCANNER-ENTRYTRACE-18-502 | DONE (2025-11-01) | EntryTrace Guild | SCANNER-ENTRYTRACE-18-501 | Expand chain walker with init shim/user-switch/supervisor recognition plus env/workdir accumulation and guarded edges. | Graph nodes annotate tini/dumb-init/gosu/su-exec/s6/supervisord/runit branches with capability tags, environment deltas, and guard metadata validated against fixture scripts. |
+| SCANNER-ENTRYTRACE-18-503 | DONE (2025-11-01) | EntryTrace Guild | SCANNER-ENTRYTRACE-18-502 | Introduce target classifier + EntryPlan handoff with confidence scoring for ELF/Java/.NET/Node/Python/Ruby/PHP-FPM/Go/Rust/Nginx and user/workdir context; capture PT_INTERP / CLR / Go BuildID / Rust notes and jar manifests as evidence. | Analyzer returns typed targets with confidence metrics, binary fingerprint evidence (PT_INTERP, CLR header, Go/Rust markers, jar Main-Class), and per-branch EntryPlans exercised via golden fixtures and language analyzer stubs. |
+| SCANNER-ENTRYTRACE-18-504 | DONE (2025-11-01) | EntryTrace Guild | SCANNER-ENTRYTRACE-18-503 | Emit EntryTrace AOC NDJSON (`entrytrace.entry/node/edge/target/warning/capability`) and wire CLI/service streaming outputs. | NDJSON writer passes determinism tests, CLI/service endpoints stream ordered observations, and diagnostics integrate new warning codes for dynamic eval/glob limits/windows shims. |
+| SCANNER-ENTRYTRACE-18-505 | DONE (2025-11-02) | EntryTrace Guild | SCANNER-ENTRYTRACE-18-504 | Implement process-tree replay (ProcGraph) to reconcile `/proc` exec chains with static EntryTrace results, collapsing wrappers (tini/gosu/supervisord) and emitting agreement/conflict diagnostics. | Runtime harness walks `/proc` (tests + fixture containers), merges ProcGraph with static graph, records High/Medium/Low confidence outcomes, and adds coverage to integration tests. |
+| SCANNER-ENTRYTRACE-18-506 | DONE (2025-11-02) | EntryTrace Guild, Scanner WebService Guild | SCANNER-ENTRYTRACE-18-505 | Surface EntryTrace graph + confidence via Scanner.WebService and CLI (REST + streaming), including target summary in scan reports and policy payloads. | WebService exposes `/scans/{id}/entrytrace` + CLI verb, responses include chain/terminal/confidence/evidence, golden fixtures updated, and Policy/Export contracts documented. |
+| SCANNER-ENTRYTRACE-18-507 | DOING (2025-11-02) | EntryTrace Guild | SCANNER-ENTRYTRACE-18-503 | Expand candidate discovery beyond ENTRYPOINT/CMD by scanning Docker history metadata and default service directories (`/etc/services/**`, `/s6/**`, `/etc/supervisor/*.conf`, `/usr/local/bin/*-entrypoint`) when explicit commands are absent.
| Analyzer produces deterministic fallback candidates with evidence per discovery source, golden fixtures cover supervisor/service directories, and diagnostics distinguish inferred vs declared entrypoints. | +| SCANNER-ENTRYTRACE-18-508 | DOING (2025-11-02) | EntryTrace Guild | SCANNER-ENTRYTRACE-18-503 | Extend wrapper catalogue to collapse language/package launchers (`bundle`, `bundle exec`, `docker-php-entrypoint`, `npm`, `yarn node`, `pipenv`, `poetry run`) and vendor init scripts before terminal classification. | Wrapper detection table includes the new aliases with metadata, analyzer unwraps them into underlying commands, and fixture scripts assert metadata for runtime/package managers. | +| SCANNER-ENTRYTRACE-18-509 | DONE (2025-11-02) | EntryTrace Guild, QA Guild | SCANNER-ENTRYTRACE-18-506 | Add regression coverage for persisted EntryTrace surfaces (result store, WebService endpoint, CLI renderer) and NDJSON payload hashing. | Unit/integration tests cover result retrieval (store/WebService), CLI rendering (`scan entrytrace`), and NDJSON hash stability with fixture snapshots. | +| ENTRYTRACE-SURFACE-01 | DONE (2025-11-02) | EntryTrace Guild | SURFACE-VAL-02, SURFACE-FS-02 | Run Surface.Validation prereq checks and resolve cached entry fragments via Surface.FS to avoid duplicate parsing. | EntryTrace performance metrics show reuse; regression tests updated; validation errors surfaced consistently. | +| ENTRYTRACE-SURFACE-02 | DONE (2025-11-02) | EntryTrace Guild | SURFACE-SECRETS-02 | Replace direct env/secret access with Surface.Secrets provider when tracing runtime configs. | Shared provider used; failure modes covered; documentation refreshed. 
| ## Status Review — 2025-10-19 diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/EntryTraceDocument.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/EntryTraceDocument.cs new file mode 100644 index 00000000..f2e05a7c --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/EntryTraceDocument.cs @@ -0,0 +1,23 @@ +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Scanner.Storage.Catalog; + +[BsonIgnoreExtraElements] +public sealed class EntryTraceDocument +{ + [BsonId] + public string ScanId { get; set; } = string.Empty; + + [BsonElement("image_digest")] + public string ImageDigest { get; set; } = string.Empty; + + [BsonElement("generated_at")] + public DateTime GeneratedAtUtc { get; set; } + = DateTime.UtcNow; + + [BsonElement("graph_json")] + public string GraphJson { get; set; } = string.Empty; + + [BsonElement("ndjson")] + public List Ndjson { get; set; } = new(); +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs index bec39fc0..211cfb1a 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs @@ -5,9 +5,10 @@ using Amazon.S3; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using MongoDB.Driver; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using StellaOps.Scanner.EntryTrace; using StellaOps.Scanner.Storage.Migrations; using StellaOps.Scanner.Storage.Mongo; using StellaOps.Scanner.Storage.ObjectStore; @@ -64,6 +65,8 @@ public static class ServiceCollectionExtensions 
services.TryAddSingleton(); services.TryAddSingleton(); services.TryAddSingleton(); + services.TryAddSingleton(); + services.AddSingleton(); services.AddHttpClient(RustFsArtifactObjectStore.HttpClientName) .ConfigureHttpClient((sp, client) => diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Mongo/MongoCollectionProvider.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Mongo/MongoCollectionProvider.cs index 30ff7c2f..3f2d7e9f 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Mongo/MongoCollectionProvider.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Mongo/MongoCollectionProvider.cs @@ -22,6 +22,7 @@ public sealed class MongoCollectionProvider public IMongoCollection Jobs => GetCollection(ScannerStorageDefaults.Collections.Jobs); public IMongoCollection LifecycleRules => GetCollection(ScannerStorageDefaults.Collections.LifecycleRules); public IMongoCollection RuntimeEvents => GetCollection(ScannerStorageDefaults.Collections.RuntimeEvents); + public IMongoCollection EntryTrace => GetCollection(ScannerStorageDefaults.Collections.EntryTrace); private IMongoCollection GetCollection(string name) { diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/EntryTraceRepository.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/EntryTraceRepository.cs new file mode 100644 index 00000000..b6bc8c7f --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/EntryTraceRepository.cs @@ -0,0 +1,33 @@ +using MongoDB.Driver; +using StellaOps.Scanner.Storage.Catalog; +using StellaOps.Scanner.Storage.Mongo; + +namespace StellaOps.Scanner.Storage.Repositories; + +public sealed class EntryTraceRepository +{ + private readonly MongoCollectionProvider _collections; + + public EntryTraceRepository(MongoCollectionProvider collections) + { + _collections = collections ?? 
throw new ArgumentNullException(nameof(collections)); + } + + public async Task<EntryTraceDocument?> GetAsync(string scanId, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(scanId); + return await _collections.EntryTrace + .Find(x => x.ScanId == scanId) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + } + + public async Task UpsertAsync(EntryTraceDocument document, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(document); + var options = new ReplaceOptions { IsUpsert = true }; + await _collections.EntryTrace + .ReplaceOneAsync(x => x.ScanId == document.ScanId, document, options, cancellationToken) + .ConfigureAwait(false); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/ScannerStorageDefaults.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/ScannerStorageDefaults.cs index 706b816b..20af262a 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/ScannerStorageDefaults.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/ScannerStorageDefaults.cs @@ -22,6 +22,7 @@ public static class ScannerStorageDefaults public const string Jobs = "jobs"; public const string LifecycleRules = "lifecycle_rules"; public const string RuntimeEvents = "runtime.events"; + public const string EntryTrace = "entrytrace"; public const string Migrations = "schema_migrations"; } diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Services/EntryTraceResultStore.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Services/EntryTraceResultStore.cs new file mode 100644 index 00000000..518bf1c1 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Services/EntryTraceResultStore.cs @@ -0,0 +1,54 @@ +using System.Collections.Immutable; +using StellaOps.Scanner.EntryTrace; +using StellaOps.Scanner.EntryTrace.Serialization; +using StellaOps.Scanner.Storage.Catalog; +using StellaOps.Scanner.Storage.Repositories; + +namespace StellaOps.Scanner.Storage.Services; + 
+public sealed class EntryTraceResultStore : IEntryTraceResultStore +{ + private readonly EntryTraceRepository _repository; + + public EntryTraceResultStore(EntryTraceRepository repository) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + } + + public async Task StoreAsync(EntryTraceResult result, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(result); + + var document = new EntryTraceDocument + { + ScanId = result.ScanId, + ImageDigest = result.ImageDigest, + GeneratedAtUtc = result.GeneratedAtUtc.UtcDateTime, + GraphJson = EntryTraceGraphSerializer.Serialize(result.Graph), + Ndjson = result.Ndjson.ToList() + }; + + await _repository.UpsertAsync(document, cancellationToken).ConfigureAwait(false); + } + + public async Task<EntryTraceResult?> GetAsync(string scanId, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(scanId); + + var document = await _repository.GetAsync(scanId, cancellationToken).ConfigureAwait(false); + if (document is null) + { + return null; + } + + var graph = EntryTraceGraphSerializer.Deserialize(document.GraphJson); + var ndjson = document.Ndjson?.ToImmutableArray() ?? 
ImmutableArray.Empty; + var generatedAt = DateTime.SpecifyKind(document.GeneratedAtUtc, DateTimeKind.Utc); + return new EntryTraceResult( + document.ScanId, + document.ImageDigest, + new DateTimeOffset(generatedAt), + graph, + ndjson); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj index 9fdda755..b1c48561 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj @@ -6,13 +6,16 @@ enable true - - - - - - - - - - + + + + + + + + + + + + + diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/AssemblyInfo.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/AssemblyInfo.cs new file mode 100644 index 00000000..72730cf5 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scanner.Surface.Env.Tests")] diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/ISurfaceEnvironment.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/ISurfaceEnvironment.cs new file mode 100644 index 00000000..8d2cbc46 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/ISurfaceEnvironment.cs @@ -0,0 +1,19 @@ +using System.Collections.Generic; + +namespace StellaOps.Scanner.Surface.Env; + +/// +/// Provides resolved surface environment settings for a component. +/// +public interface ISurfaceEnvironment +{ + /// + /// Gets the resolved settings for the current component. + /// + SurfaceEnvironmentSettings Settings { get; } + + /// + /// Gets the raw environment variables and configuration values that were used while building the settings. 
+ /// + IReadOnlyDictionary RawVariables { get; } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/ServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/ServiceCollectionExtensions.cs new file mode 100644 index 00000000..ee1ff715 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/ServiceCollectionExtensions.cs @@ -0,0 +1,19 @@ +using Microsoft.Extensions.DependencyInjection; + +namespace StellaOps.Scanner.Surface.Env; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddSurfaceEnvironment( + this IServiceCollection services, + Action? configure = null) + { + if (services is null) + { + throw new ArgumentNullException(nameof(services)); + } + + services.AddSingleton(sp => SurfaceEnvironmentFactory.Create(sp, configure)); + return services; + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/StellaOps.Scanner.Surface.Env.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/StellaOps.Scanner.Surface.Env.csproj new file mode 100644 index 00000000..04df348e --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/StellaOps.Scanner.Surface.Env.csproj @@ -0,0 +1,20 @@ + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironment.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironment.cs new file mode 100644 index 00000000..953ef10f --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironment.cs @@ -0,0 +1,41 @@ +using System.Collections.Generic; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; + +namespace StellaOps.Scanner.Surface.Env; + +internal sealed class SurfaceEnvironment : ISurfaceEnvironment +{ + public SurfaceEnvironment(SurfaceEnvironmentSettings settings, 
IReadOnlyDictionary raw) + { + Settings = settings ?? throw new ArgumentNullException(nameof(settings)); + RawVariables = raw ?? throw new ArgumentNullException(nameof(raw)); + } + + public SurfaceEnvironmentSettings Settings { get; } + + public IReadOnlyDictionary RawVariables { get; } +} + +internal static class SurfaceEnvironmentFactory +{ + public static ISurfaceEnvironment Create(IServiceProvider services, Action? configure = null) + { + var options = new SurfaceEnvironmentOptions(); + configure?.Invoke(options); + + if (options.Prefixes.Count == 0) + { + options.AddPrefix("SCANNER"); + } + + var configuration = services.GetRequiredService(); + var logger = services.GetRequiredService>(); + + var builder = new SurfaceEnvironmentBuilder(services, configuration, logger, options); + var settings = builder.Build(); + var raw = builder.GetRawVariables(); + return new SurfaceEnvironment(settings, raw); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentBuilder.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentBuilder.cs new file mode 100644 index 00000000..e83b7ea9 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentBuilder.cs @@ -0,0 +1,295 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Security.Cryptography.X509Certificates; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Logging; + +namespace StellaOps.Scanner.Surface.Env; + +/// +/// Resolves instances from configuration sources. 
+/// +public sealed class SurfaceEnvironmentBuilder +{ + private readonly IServiceProvider _services; + private readonly IConfiguration _configuration; + private readonly ILogger _logger; + private readonly SurfaceEnvironmentOptions _options; + private readonly Dictionary _raw = new(StringComparer.OrdinalIgnoreCase); + + public SurfaceEnvironmentBuilder( + IServiceProvider services, + IConfiguration configuration, + ILogger logger, + SurfaceEnvironmentOptions options) + { + _services = services ?? throw new ArgumentNullException(nameof(services)); + _configuration = configuration ?? throw new ArgumentNullException(nameof(configuration)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + + if (_options.Prefixes.Count == 0) + { + _options.AddPrefix("SCANNER"); + } + } + + public SurfaceEnvironmentSettings Build() + { + var endpoint = ResolveUri("SURFACE_FS_ENDPOINT", required: _options.RequireSurfaceEndpoint); + var bucket = ResolveString("SURFACE_FS_BUCKET", "surface-cache", required: endpoint is not null); + var region = ResolveOptionalString("SURFACE_FS_REGION"); + var cacheRoot = ResolveDirectory("SURFACE_CACHE_ROOT", new DirectoryInfo(Path.Combine(Path.GetTempPath(), "stellaops", "surface"))); + var cacheQuota = ResolveInt("SURFACE_CACHE_QUOTA_MB", 4096, min: 64, max: 262144); + var prefetch = ResolveBool("SURFACE_PREFETCH_ENABLED", defaultValue: false); + var featureFlags = ResolveFeatureFlags(); + var secrets = ResolveSecretsConfiguration(); + var tls = ResolveTlsConfiguration(); + var tenant = ResolveTenant() ?? "default"; + + var settings = new SurfaceEnvironmentSettings( + endpoint ?? 
new Uri("https://surface.invalid"), + bucket, + region, + cacheRoot, + cacheQuota, + prefetch, + featureFlags, + secrets, + tenant, + tls); + + return settings with { CreatedAtUtc = DateTimeOffset.UtcNow }; + } + + public IReadOnlyDictionary<string, string> GetRawVariables() + => new Dictionary<string, string>(_raw, StringComparer.OrdinalIgnoreCase); + + private Uri? ResolveUri(string suffix, bool required) + { + var value = ResolveString(suffix, required: required); + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + if (!Uri.TryCreate(value, UriKind.Absolute, out var uri)) + { + throw new SurfaceEnvironmentException($"Value '{value}' for {suffix} is not a valid absolute URI.", suffix); + } + + if (!string.Equals(uri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) + { + _logger.LogWarning("Surface environment endpoint {Endpoint} is not HTTPS.", uri); + } + + return uri; + } + + private string ResolveString(string suffix, string? defaultValue = null, bool required = false) + { + var value = ResolveOptionalString(suffix); + if (!string.IsNullOrWhiteSpace(value)) + { + return value!; + } + + if (required && defaultValue is null) + { + throw new SurfaceEnvironmentException($"Required surface environment variable {FormatNames(suffix)} was not provided.", suffix); + } + + return defaultValue ?? string.Empty; + } + + private string? ResolveOptionalString(string suffix) + { + foreach (var name in EnumerateNames(suffix)) + { + var value = Environment.GetEnvironmentVariable(name); + if (!string.IsNullOrWhiteSpace(value)) + { + _raw[name] = value!; + return value; + } + } + + var configKey = BuildConfigurationKey(suffix); + var configured = _configuration[configKey]; + if (!string.IsNullOrWhiteSpace(configured)) + { + _raw[configKey] = configured!; + return configured; + } + + return null; + } + + private DirectoryInfo ResolveDirectory(string suffix, DirectoryInfo fallback) + { + var path = ResolveOptionalString(suffix) ?? 
fallback.FullName; + var directory = new DirectoryInfo(path); + if (!directory.Exists) + { + directory.Create(); + } + + return directory; + } + + private int ResolveInt(string suffix, int defaultValue, int min, int max) + { + var value = ResolveOptionalString(suffix); + if (string.IsNullOrWhiteSpace(value)) + { + return defaultValue; + } + + if (!int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) + { + throw new SurfaceEnvironmentException($"Value '{value}' for {suffix} is not a valid integer.", suffix); + } + + if (parsed < min || parsed > max) + { + throw new SurfaceEnvironmentException($"Value '{parsed}' for {suffix} must be between {min} and {max}.", suffix); + } + + return parsed; + } + + private bool ResolveBool(string suffix, bool defaultValue) + { + var value = ResolveOptionalString(suffix); + if (string.IsNullOrWhiteSpace(value)) + { + return defaultValue; + } + + if (!bool.TryParse(value, out var parsed)) + { + throw new SurfaceEnvironmentException($"Value '{value}' for {suffix} is not a valid boolean.", suffix); + } + + return parsed; + } + + private IReadOnlyCollection ResolveFeatureFlags() + { + var rawFlags = ResolveOptionalString("SURFACE_FEATURES"); + if (string.IsNullOrWhiteSpace(rawFlags)) + { + return Array.Empty(); + } + + var flags = rawFlags.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Select(flag => flag.ToLowerInvariant()) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + foreach (var flag in flags) + { + if (_options.KnownFeatureFlags.Count > 0 && !_options.KnownFeatureFlags.Contains(flag)) + { + _logger.LogWarning("Unknown surface feature flag '{Flag}' detected for component {Component}.", flag, _options.ComponentName); + } + else + { + _logger.LogDebug("Surface environment feature flag detected: {Flag}.", flag); + } + } + + return flags; + } + + private SurfaceSecretsConfiguration ResolveSecretsConfiguration() + { + var provider = 
ResolveString("SURFACE_SECRETS_PROVIDER", "kubernetes"); + var root = ResolveOptionalString("SURFACE_SECRETS_ROOT"); + var ns = ResolveOptionalString("SURFACE_SECRETS_NAMESPACE"); + var fallback = ResolveOptionalString("SURFACE_SECRETS_FALLBACK_PROVIDER"); + var allowInline = ResolveBool("SURFACE_SECRETS_ALLOW_INLINE", defaultValue: false); + var tenant = ResolveOptionalString("SURFACE_SECRETS_TENANT") ?? ResolveTenant() ?? "default"; + + return new SurfaceSecretsConfiguration(provider, tenant, root, ns, fallback, allowInline); + } + + private SurfaceTlsConfiguration ResolveTlsConfiguration() + { + var certPath = ResolveOptionalString("SURFACE_TLS_CERT_PATH"); + var keyPath = ResolveOptionalString("SURFACE_TLS_KEY_PATH"); + + X509Certificate2Collection? certificates = null; + if (!string.IsNullOrWhiteSpace(certPath)) + { + try + { + if (!File.Exists(certPath)) + { + throw new FileNotFoundException("TLS certificate path not found.", certPath); + } + + var certificate = X509CertificateLoader.LoadCertificateFromFile(certPath); + certificates = new X509Certificate2Collection { certificate }; + } + catch (Exception ex) + { + throw new SurfaceEnvironmentException($"Failed to load TLS certificate from '{certPath}': {ex.Message}", "SURFACE_TLS_CERT_PATH", ex); + } + } + + return new SurfaceTlsConfiguration(certPath, keyPath, certificates); + } + + private string? 
ResolveTenant() + { + var tenant = ResolveOptionalString("SURFACE_TENANT"); + if (!string.IsNullOrWhiteSpace(tenant)) + { + return tenant; + } + + if (_options.TenantResolver is not null) + { + try + { + tenant = _options.TenantResolver(_services); + if (!string.IsNullOrWhiteSpace(tenant)) + { + return tenant; + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Tenant resolver for component {Component} threw an exception.", _options.ComponentName); + } + } + + return null; + } + + private IEnumerable EnumerateNames(string suffix) + { + foreach (var prefix in _options.Prefixes) + { + yield return $"{prefix}_{suffix}"; + } + + yield return suffix; + } + + private string BuildConfigurationKey(string suffix) + { + var withoutPrefix = suffix.StartsWith("SURFACE_", StringComparison.OrdinalIgnoreCase) + ? suffix[8..] + : suffix; + + return $"Surface:{withoutPrefix.Replace('_', ':')}"; + } + + private string FormatNames(string suffix) + => string.Join(", ", EnumerateNames(suffix)); +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentException.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentException.cs new file mode 100644 index 00000000..86a09728 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentException.cs @@ -0,0 +1,20 @@ +using System; + +namespace StellaOps.Scanner.Surface.Env; + +public sealed class SurfaceEnvironmentException : Exception +{ + public SurfaceEnvironmentException(string message, string variable) + : base(message) + { + Variable = variable; + } + + public SurfaceEnvironmentException(string message, string variable, Exception innerException) + : base(message, innerException) + { + Variable = variable; + } + + public string Variable { get; } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentOptions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentOptions.cs new file mode 
100644 index 00000000..f8c1a4e3 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentOptions.cs @@ -0,0 +1,53 @@ +using System.Collections.Generic; + +namespace StellaOps.Scanner.Surface.Env; + +/// +/// Options controlling how the surface environment is resolved. +/// +public sealed class SurfaceEnvironmentOptions +{ + private readonly List _prefixes = new(); + + /// + /// Gets or sets the logical component name (e.g. "Scanner.Worker", "Zastava.Observer"). + /// + public string ComponentName { get; set; } = "Scanner.Worker"; + + /// + /// Gets the ordered list of environment variable prefixes that will be probed when resolving configuration values. + /// The prefixes are evaluated in order; the first match wins. + /// + public IReadOnlyList Prefixes => _prefixes; + + /// + /// Adds a prefix to the ordered prefix list. + /// + public void AddPrefix(string prefix) + { + if (string.IsNullOrWhiteSpace(prefix)) + { + throw new ArgumentException("Prefix cannot be null or whitespace.", nameof(prefix)); + } + + if (!_prefixes.Contains(prefix, StringComparer.OrdinalIgnoreCase)) + { + _prefixes.Add(prefix); + } + } + + /// + /// When set to true, a missing Surface FS endpoint raises an exception. + /// + public bool RequireSurfaceEndpoint { get; set; } = true; + + /// + /// Optional delegate used to resolve the tenant when not explicitly provided via environment variables. + /// + public Func? TenantResolver { get; set; } + + /// + /// Gets or sets the set of recognised feature flags. Unknown flags produce validation warnings. 
+ /// + public ISet KnownFeatureFlags { get; } = new HashSet(StringComparer.OrdinalIgnoreCase); +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentSettings.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentSettings.cs new file mode 100644 index 00000000..8401e0e6 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceEnvironmentSettings.cs @@ -0,0 +1,25 @@ +using System.Collections.Generic; +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.Scanner.Surface.Env; + +/// +/// Snapshot of the resolved surface environment configuration for a component. +/// +public sealed record SurfaceEnvironmentSettings( + Uri SurfaceFsEndpoint, + string SurfaceFsBucket, + string? SurfaceFsRegion, + DirectoryInfo CacheRoot, + int CacheQuotaMegabytes, + bool PrefetchEnabled, + IReadOnlyCollection FeatureFlags, + SurfaceSecretsConfiguration Secrets, + string Tenant, + SurfaceTlsConfiguration Tls) +{ + /// + /// Gets the timestamp (UTC) when the configuration snapshot was created. + /// + public DateTimeOffset CreatedAtUtc { get; init; } = DateTimeOffset.UtcNow; +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceSecretsConfiguration.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceSecretsConfiguration.cs new file mode 100644 index 00000000..ecffe9f2 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceSecretsConfiguration.cs @@ -0,0 +1,15 @@ +namespace StellaOps.Scanner.Surface.Env; + +/// +/// Represents secret provider configuration resolved for the current component. +/// +public sealed record SurfaceSecretsConfiguration( + string Provider, + string Tenant, + string? Root, + string? Namespace, + string? 
FallbackProvider, + bool AllowInline) +{ + public bool HasFallback => !string.IsNullOrWhiteSpace(FallbackProvider); +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceTlsConfiguration.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceTlsConfiguration.cs new file mode 100644 index 00000000..37c6071e --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/SurfaceTlsConfiguration.cs @@ -0,0 +1,14 @@ +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.Scanner.Surface.Env; + +/// +/// TLS configuration associated with the surface endpoints. +/// +public sealed record SurfaceTlsConfiguration( + string? CertificatePath, + string? PrivateKeyPath, + X509Certificate2Collection? ClientCertificates) +{ + public bool HasClientCertificates => ClientCertificates is { Count: > 0 }; +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/TASKS.md index 3443275f..a8421c5b 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/TASKS.md +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env/TASKS.md @@ -2,8 +2,8 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| SURFACE-ENV-01 | TODO | Scanner Guild, Zastava Guild | ARCH-SURFACE-EPIC | Draft `docs/modules/scanner/design/surface-env.md` enumerating environment variables, defaults, and air-gap behaviour. | Spec merged; env matrix reviewed by Ops + Security. | -| SURFACE-ENV-02 | TODO | Scanner Guild | SURFACE-ENV-01 | Implement strongly-typed env accessors in `StellaOps.Scanner.Surface.Env` with validation and deterministic logging. | Library published; unit tests cover parsing, fallbacks, and error paths. 
| +| SURFACE-ENV-01 | DOING (2025-11-01) | Scanner Guild, Zastava Guild | ARCH-SURFACE-EPIC | Draft `docs/modules/scanner/design/surface-env.md` enumerating environment variables, defaults, and air-gap behaviour. | Spec merged; env matrix reviewed by Ops + Security. | +| SURFACE-ENV-02 | DOING (2025-11-02) | Scanner Guild | SURFACE-ENV-01 | Implement strongly-typed env accessors in `StellaOps.Scanner.Surface.Env` with validation and deterministic logging. | Library published; unit tests cover parsing, fallbacks, and error paths. | | SURFACE-ENV-03 | TODO | Scanner Guild | SURFACE-ENV-02 | Adopt env helper across Scanner Worker/WebService/BuildX plug-ins. | Services use helper; manifests updated; smoke tests green. | | SURFACE-ENV-04 | TODO | Zastava Guild | SURFACE-ENV-02 | Wire env helper into Zastava Observer/Webhook containers. | Zastava builds reference env helper; admission tests validated. | | SURFACE-ENV-05 | TODO | Ops Guild | SURFACE-ENV-03..04 | Update Helm/Compose/offline kit templates with new env knobs and documentation. | Templates merged; docs include configuration table; air-gap scripts updated. | diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/FileSurfaceCache.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/FileSurfaceCache.cs new file mode 100644 index 00000000..e3ed1fb2 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/FileSurfaceCache.cs @@ -0,0 +1,151 @@ +using System.Collections.Concurrent; +using System.Security.Cryptography; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; + +namespace StellaOps.Scanner.Surface.FS; + +public sealed class FileSurfaceCache : ISurfaceCache +{ + private readonly string _root; + private readonly ILogger _logger; + private readonly ConcurrentDictionary _locks = new(StringComparer.Ordinal); + + public FileSurfaceCache( + IOptions options, + ILogger logger) + { + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + var root = options?.Value?.ResolveRoot(); + if (string.IsNullOrWhiteSpace(root)) + { + throw new ArgumentException("Surface cache root directory must be provided.", nameof(options)); + } + + _root = root!; + } + + public async Task GetOrCreateAsync( + SurfaceCacheKey key, + Func> factory, + Func> serializer, + Func, T> deserializer, + CancellationToken cancellationToken = default) + { + if (key is null) + { + throw new ArgumentNullException(nameof(key)); + } + + cancellationToken.ThrowIfCancellationRequested(); + + var path = ResolvePath(key); + if (TryRead(path, deserializer, out var value)) + { + _logger.LogTrace("Surface cache hit for {Key}.", key); + return value!; + } + + var gate = _locks.GetOrAdd(path, _ => new SemaphoreSlim(1, 1)); + await gate.WaitAsync(cancellationToken).ConfigureAwait(false); + + try + { + if (TryRead(path, deserializer, out value)) + { + _logger.LogTrace("Surface cache race recovered for {Key}.", key); + return value!; + } + + value = await factory(cancellationToken).ConfigureAwait(false); + var payload = serializer(value); + Directory.CreateDirectory(Path.GetDirectoryName(path)!); + await File.WriteAllBytesAsync(path, payload.ToArray(), cancellationToken).ConfigureAwait(false); + return value; + } + finally + { + gate.Release(); + } + } + + public Task TryGetAsync( + SurfaceCacheKey key, + Func, T> deserializer, + CancellationToken cancellationToken = default) + { + if (key is null) + { + throw new ArgumentNullException(nameof(key)); + } + + cancellationToken.ThrowIfCancellationRequested(); + + var path = ResolvePath(key); + return Task.FromResult(TryRead(path, deserializer, out var value) ? 
value : default); + } + + public async Task SetAsync( + SurfaceCacheKey key, + ReadOnlyMemory payload, + CancellationToken cancellationToken = default) + { + if (key is null) + { + throw new ArgumentNullException(nameof(key)); + } + + var path = ResolvePath(key); + Directory.CreateDirectory(Path.GetDirectoryName(path)!); + await File.WriteAllBytesAsync(path, payload.ToArray(), cancellationToken).ConfigureAwait(false); + } + + private string ResolvePath(SurfaceCacheKey key) + { + var hash = ComputeHash(key.ContentKey); + var tenant = Sanitize(key.Tenant); + var ns = Sanitize(key.Namespace); + return Path.Combine(_root, ns, tenant, hash[..2], hash[2..4], $"{hash}.bin"); + } + + private static string ComputeHash(string input) + { + using var sha = SHA256.Create(); + var bytes = sha.ComputeHash(System.Text.Encoding.UTF8.GetBytes(input)); + return Convert.ToHexString(bytes).ToLowerInvariant(); + } + + private static string Sanitize(string value) + => string.IsNullOrWhiteSpace(value) + ? "default" + : value.Replace('/', '_').Replace('\\', '_'); + + private static bool TryRead(string path, Func, T> deserializer, out T? value) + { + value = default; + if (!File.Exists(path)) + { + return false; + } + + try + { + var bytes = File.ReadAllBytes(path); + value = deserializer(bytes); + return true; + } + catch + { + try + { + File.Delete(path); + } + catch + { + // ignore + } + + return false; + } + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/ISurfaceCache.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/ISurfaceCache.cs new file mode 100644 index 00000000..a17fa3f6 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/ISurfaceCache.cs @@ -0,0 +1,24 @@ +namespace StellaOps.Scanner.Surface.FS; + +/// +/// Provides content-addressable storage for surface artefacts. 
+/// +public interface ISurfaceCache +{ + Task GetOrCreateAsync( + SurfaceCacheKey key, + Func> factory, + Func> serializer, + Func, T> deserializer, + CancellationToken cancellationToken = default); + + Task TryGetAsync( + SurfaceCacheKey key, + Func, T> deserializer, + CancellationToken cancellationToken = default); + + Task SetAsync( + SurfaceCacheKey key, + ReadOnlyMemory payload, + CancellationToken cancellationToken = default); +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/ServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/ServiceCollectionExtensions.cs new file mode 100644 index 00000000..0d5c4f60 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/ServiceCollectionExtensions.cs @@ -0,0 +1,59 @@ +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; + +namespace StellaOps.Scanner.Surface.FS; + +public static class ServiceCollectionExtensions +{ + private const string ConfigurationSection = "Surface:Cache"; + + public static IServiceCollection AddSurfaceFileCache( + this IServiceCollection services, + Action? configure = null) + { + if (services is null) + { + throw new ArgumentNullException(nameof(services)); + } + + services.AddOptions() + .BindConfiguration(ConfigurationSection); + + if (configure is not null) + { + services.Configure(configure); + } + + services.TryAddSingleton(); + services.TryAddEnumerable(ServiceDescriptor.Singleton, SurfaceCacheOptionsValidator>()); + return services; + } + + private sealed class SurfaceCacheOptionsValidator : IValidateOptions + { + public ValidateOptionsResult Validate(string? 
name, SurfaceCacheOptions options) + { + if (options is null) + { + return ValidateOptionsResult.Fail("Options cannot be null."); + } + + try + { + var root = options.ResolveRoot(); + if (string.IsNullOrWhiteSpace(root)) + { + return ValidateOptionsResult.Fail("Root directory cannot be empty."); + } + } + catch (Exception ex) + { + return ValidateOptionsResult.Fail($"Failed to resolve surface cache root: {ex.Message}"); + } + + return ValidateOptionsResult.Success; + } + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/StellaOps.Scanner.Surface.FS.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/StellaOps.Scanner.Surface.FS.csproj new file mode 100644 index 00000000..b1f01c7f --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/StellaOps.Scanner.Surface.FS.csproj @@ -0,0 +1,26 @@ + + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + + + + + + diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/SurfaceCacheJsonSerializer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/SurfaceCacheJsonSerializer.cs new file mode 100644 index 00000000..86d119f5 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/SurfaceCacheJsonSerializer.cs @@ -0,0 +1,31 @@ +using System; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Scanner.Surface.FS; + +internal static class SurfaceCacheJsonSerializer +{ + private static readonly JsonSerializerOptions Options = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + WriteIndented = false, + Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping + }; + + static SurfaceCacheJsonSerializer() + { + Options.Converters.Add(new JsonStringEnumConverter(JsonNamingPolicy.CamelCase)); + } + + public static ReadOnlyMemory Serialize(T value) + { + return JsonSerializer.SerializeToUtf8Bytes(value, 
Options); + } + + public static T Deserialize(ReadOnlyMemory payload) + { + return JsonSerializer.Deserialize(payload.Span, Options)!; + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/SurfaceCacheKey.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/SurfaceCacheKey.cs new file mode 100644 index 00000000..684ab362 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/SurfaceCacheKey.cs @@ -0,0 +1,10 @@ +namespace StellaOps.Scanner.Surface.FS; + +/// +/// Identifies a cached artefact within the surface file store. +/// +public sealed record SurfaceCacheKey(string Namespace, string Tenant, string ContentKey) +{ + public override string ToString() + => $"{Namespace}/{Tenant}/{ContentKey}"; +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/SurfaceCacheOptions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/SurfaceCacheOptions.cs new file mode 100644 index 00000000..5b2f8b97 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/SurfaceCacheOptions.cs @@ -0,0 +1,22 @@ +namespace StellaOps.Scanner.Surface.FS; + +/// +/// Configures the on-disk storage used by the surface cache. +/// +public sealed class SurfaceCacheOptions +{ + /// + /// Root directory where cached payloads are stored. Defaults to a deterministic path under the temporary directory. + /// + public string? 
RootDirectory { get; set; } + + internal string ResolveRoot() + { + if (!string.IsNullOrWhiteSpace(RootDirectory)) + { + return RootDirectory!; + } + + return Path.Combine(Path.GetTempPath(), "stellaops", "surface-cache"); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/TASKS.md index 6a3ff18d..6ac5b163 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/TASKS.md +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS/TASKS.md @@ -2,8 +2,8 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| SURFACE-FS-01 | TODO | Scanner Guild, Zastava Guild | ARCH-SURFACE-EPIC | Author `docs/modules/scanner/design/surface-fs.md` defining cache layout, pointer schema, tenancy, and offline handling. | Spec merged; reviewers from Scanner/Zastava sign off; component map cross-link drafted. | -| SURFACE-FS-02 | TODO | Scanner Guild | SURFACE-FS-01 | Implement `StellaOps.Scanner.Surface.FS` core abstractions (writer, reader, manifest models) with deterministic serialization + unit tests. | Library compiles; tests pass; XML docs cover public types. | +| SURFACE-FS-01 | DOING (2025-11-02) | Scanner Guild, Zastava Guild | ARCH-SURFACE-EPIC | Author `docs/modules/scanner/design/surface-fs.md` defining cache layout, pointer schema, tenancy, and offline handling. | Spec merged; reviewers from Scanner/Zastava sign off; component map cross-link drafted. | +| SURFACE-FS-02 | DOING (2025-11-02) | Scanner Guild | SURFACE-FS-01 | Implement `StellaOps.Scanner.Surface.FS` core abstractions (writer, reader, manifest models) with deterministic serialization + unit tests. | Library compiles; tests pass; XML docs cover public types. | | SURFACE-FS-03 | TODO | Scanner Guild | SURFACE-FS-02 | Integrate Surface.FS writer into Scanner Worker analyzer pipeline to persist layer + entry-trace fragments. 
| Worker produces cache entries in integration tests; observability counters emitted. | | SURFACE-FS-04 | TODO | Zastava Guild | SURFACE-FS-02 | Integrate Surface.FS reader into Zastava Observer runtime drift loop. | Observer validates runtime artefacts via cache; regression tests updated. | | SURFACE-FS-05 | TODO | Scanner Guild, Scheduler Guild | SURFACE-FS-03 | Expose Surface.FS pointers via Scanner WebService reports and coordinate rescan planning with Scheduler. | API contracts updated; Scheduler consumes pointers; docs refreshed. | diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/AssemblyInfo.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/AssemblyInfo.cs new file mode 100644 index 00000000..ff6f5399 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scanner.Surface.Secrets.Tests")] diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/ISurfaceSecretProvider.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/ISurfaceSecretProvider.cs new file mode 100644 index 00000000..f6ba45f6 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/ISurfaceSecretProvider.cs @@ -0,0 +1,10 @@ +using System.Threading.Tasks; + +namespace StellaOps.Scanner.Surface.Secrets; + +public interface ISurfaceSecretProvider +{ + ValueTask GetAsync( + SurfaceSecretRequest request, + CancellationToken cancellationToken = default); +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/CompositeSurfaceSecretProvider.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/CompositeSurfaceSecretProvider.cs new file mode 100644 index 00000000..5ce7cca6 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/CompositeSurfaceSecretProvider.cs @@ -0,0 +1,39 @@ +using System; +using 
System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; + +namespace StellaOps.Scanner.Surface.Secrets.Providers; + +internal sealed class CompositeSurfaceSecretProvider : ISurfaceSecretProvider +{ + private readonly IReadOnlyList _providers; + + public CompositeSurfaceSecretProvider(IEnumerable providers) + { + _providers = providers?.ToArray() ?? throw new ArgumentNullException(nameof(providers)); + if (_providers.Count == 0) + { + throw new ArgumentException("At least one provider must be supplied.", nameof(providers)); + } + } + + public async ValueTask GetAsync( + SurfaceSecretRequest request, + CancellationToken cancellationToken = default) + { + foreach (var provider in _providers) + { + try + { + return await provider.GetAsync(request, cancellationToken).ConfigureAwait(false); + } + catch (SurfaceSecretNotFoundException) + { + // try next provider + } + } + + throw new SurfaceSecretNotFoundException(request); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/FileSurfaceSecretProvider.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/FileSurfaceSecretProvider.cs new file mode 100644 index 00000000..af874088 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/FileSurfaceSecretProvider.cs @@ -0,0 +1,65 @@ + +using System; +using System.Collections.Generic; +using System.IO; +using System.Text.Json; + +namespace StellaOps.Scanner.Surface.Secrets.Providers; + +internal sealed class FileSurfaceSecretProvider : ISurfaceSecretProvider +{ + private readonly string _root; + + public FileSurfaceSecretProvider(string root) + { + if (string.IsNullOrWhiteSpace(root)) + { + throw new ArgumentException("File secret provider root cannot be null or whitespace.", nameof(root)); + } + + _root = root; + } + + public async ValueTask GetAsync( + SurfaceSecretRequest request, + CancellationToken cancellationToken = default) + { + if (request is null) + { + throw 
new ArgumentNullException(nameof(request)); + } + + var path = ResolvePath(request); + if (!File.Exists(path)) + { + throw new SurfaceSecretNotFoundException(request); + } + + await using var stream = File.OpenRead(path); + var descriptor = await JsonSerializer.DeserializeAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); + if (descriptor is null) + { + throw new SurfaceSecretNotFoundException(request); + } + + if (string.IsNullOrWhiteSpace(descriptor.Payload)) + { + return SurfaceSecretHandle.Empty; + } + + var bytes = Convert.FromBase64String(descriptor.Payload); + return SurfaceSecretHandle.FromBytes(bytes, descriptor.Metadata); + } + + private string ResolvePath(SurfaceSecretRequest request) + { + var name = request.Name ?? "default"; + return Path.Combine(_root, request.Tenant, request.Component, request.SecretType, name + ".json"); + } + + private sealed class FileSecretDescriptor + { + public string? Payload { get; init; } + public Dictionary? Metadata { get; init; } + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/InMemorySurfaceSecretProvider.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/InMemorySurfaceSecretProvider.cs new file mode 100644 index 00000000..b78a6318 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/InMemorySurfaceSecretProvider.cs @@ -0,0 +1,38 @@ +using System.Collections.Concurrent; + +namespace StellaOps.Scanner.Surface.Secrets.Providers; + +public sealed class InMemorySurfaceSecretProvider : ISurfaceSecretProvider +{ + private readonly ConcurrentDictionary _secrets = new(StringComparer.OrdinalIgnoreCase); + + public void Add(SurfaceSecretRequest request, SurfaceSecretHandle handle) + { + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + if (handle is null) + { + throw new ArgumentNullException(nameof(handle)); + } + + _secrets[request.CacheKey] = handle; + } + + public 
ValueTask GetAsync(SurfaceSecretRequest request, CancellationToken cancellationToken = default) + { + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + if (_secrets.TryGetValue(request.CacheKey, out var handle)) + { + return ValueTask.FromResult(handle); + } + + throw new SurfaceSecretNotFoundException(request); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/InlineSurfaceSecretProvider.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/InlineSurfaceSecretProvider.cs new file mode 100644 index 00000000..8afcb6b1 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/InlineSurfaceSecretProvider.cs @@ -0,0 +1,48 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using StellaOps.Scanner.Surface.Env; + +namespace StellaOps.Scanner.Surface.Secrets.Providers; + +internal sealed class InlineSurfaceSecretProvider : ISurfaceSecretProvider +{ + private readonly SurfaceSecretsConfiguration _configuration; + + public InlineSurfaceSecretProvider(SurfaceSecretsConfiguration configuration) + { + _configuration = configuration ?? 
throw new ArgumentNullException(nameof(configuration)); + } + + public ValueTask GetAsync( + SurfaceSecretRequest request, + CancellationToken cancellationToken = default) + { + if (!_configuration.AllowInline) + { + throw new SurfaceSecretNotFoundException(request); + } + + var envKey = BuildEnvironmentKey(request); + var value = Environment.GetEnvironmentVariable(envKey); + if (string.IsNullOrWhiteSpace(value)) + { + throw new SurfaceSecretNotFoundException(request); + } + + var bytes = Convert.FromBase64String(value); + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["source"] = "inline-env", + ["key"] = envKey + }; + + return ValueTask.FromResult(SurfaceSecretHandle.FromBytes(bytes, metadata)); + } + + private static string BuildEnvironmentKey(SurfaceSecretRequest request) + { + var name = string.IsNullOrWhiteSpace(request.Name) ? "DEFAULT" : request.Name.ToUpperInvariant(); + return $"SURFACE_SECRET_{request.Tenant.ToUpperInvariant()}_{request.Component.ToUpperInvariant()}_{request.SecretType.ToUpperInvariant()}_{name}"; + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/KubernetesSurfaceSecretProvider.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/KubernetesSurfaceSecretProvider.cs new file mode 100644 index 00000000..521a611b --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/Providers/KubernetesSurfaceSecretProvider.cs @@ -0,0 +1,51 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Surface.Env; + +namespace StellaOps.Scanner.Surface.Secrets.Providers; + +internal sealed class KubernetesSurfaceSecretProvider : ISurfaceSecretProvider +{ + private readonly SurfaceSecretsConfiguration _configuration; + private readonly ILogger _logger; + + public KubernetesSurfaceSecretProvider(SurfaceSecretsConfiguration configuration, ILogger logger) + 
{ + _configuration = configuration ?? throw new ArgumentNullException(nameof(configuration)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + + if (string.IsNullOrWhiteSpace(configuration.Root)) + { + throw new ArgumentException("Kubernetes secret provider requires a root directory where secrets are mounted.", nameof(configuration)); + } + } + + public async ValueTask GetAsync( + SurfaceSecretRequest request, + CancellationToken cancellationToken = default) + { + var directory = Path.Combine(_configuration.Root!, request.Tenant, request.Component, request.SecretType); + if (!Directory.Exists(directory)) + { + _logger.LogDebug("Kubernetes secret directory {Directory} not found.", directory); + throw new SurfaceSecretNotFoundException(request); + } + + var name = request.Name ?? "default"; + var payloadPath = Path.Combine(directory, name); + if (!File.Exists(payloadPath)) + { + throw new SurfaceSecretNotFoundException(request); + } + + var bytes = await File.ReadAllBytesAsync(payloadPath, cancellationToken).ConfigureAwait(false); + return SurfaceSecretHandle.FromBytes(bytes, new Dictionary + { + ["source"] = "kubernetes", + ["path"] = payloadPath + }); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/ServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/ServiceCollectionExtensions.cs new file mode 100644 index 00000000..37af26b0 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/ServiceCollectionExtensions.cs @@ -0,0 +1,66 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Surface.Env; +using StellaOps.Scanner.Surface.Secrets.Providers; + +namespace StellaOps.Scanner.Surface.Secrets; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddSurfaceSecrets( + 
this IServiceCollection services, + Action? configure = null) + { + if (services is null) + { + throw new ArgumentNullException(nameof(services)); + } + + services.AddOptions(); + if (configure is not null) + { + services.Configure(configure); + } + + services.TryAddSingleton(sp => + { + var env = sp.GetRequiredService(); + var options = sp.GetRequiredService>().Value; + var logger = sp.GetRequiredService().CreateLogger("SurfaceSecrets"); + return CreateProvider(env.Settings.Secrets, logger); + }); + + return services; + } + + private static ISurfaceSecretProvider CreateProvider(SurfaceSecretsConfiguration configuration, ILogger logger) + { + var providers = new List(); + + switch (configuration.Provider.ToLowerInvariant()) + { + case "kubernetes": + providers.Add(new KubernetesSurfaceSecretProvider(configuration, logger)); + break; + case "file": + providers.Add(new FileSurfaceSecretProvider(configuration.Root ?? throw new ArgumentException("Secrets root is required for file provider."))); + break; + case "inline": + providers.Add(new InlineSurfaceSecretProvider(configuration)); + break; + default: + logger.LogWarning("Unknown surface secret provider '{Provider}'. Falling back to inline provider.", configuration.Provider); + providers.Add(new InlineSurfaceSecretProvider(configuration)); + break; + } + + if (!string.IsNullOrWhiteSpace(configuration.FallbackProvider)) + { + providers.Add(new InlineSurfaceSecretProvider(configuration with { Provider = configuration.FallbackProvider })); + } + + return providers.Count == 1 ? 
providers[0] : new CompositeSurfaceSecretProvider(providers); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/StellaOps.Scanner.Surface.Secrets.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/StellaOps.Scanner.Surface.Secrets.csproj new file mode 100644 index 00000000..5932b1a3 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/StellaOps.Scanner.Surface.Secrets.csproj @@ -0,0 +1,22 @@ + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + + + diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretHandle.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretHandle.cs new file mode 100644 index 00000000..bd97b86c --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretHandle.cs @@ -0,0 +1,89 @@ +using System; +using System.Buffers; +using System.Collections.Generic; +using System.Security.Cryptography; +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.Scanner.Surface.Secrets; + +public sealed class SurfaceSecretHandle : IDisposable +{ + private readonly byte[]? _buffer; + private readonly int _length; + private readonly X509Certificate2Collection? _certificates; + private bool _disposed; + + private SurfaceSecretHandle(byte[]? buffer, int length, X509Certificate2Collection? certificates, IReadOnlyDictionary metadata) + { + _buffer = buffer; + _length = length; + _certificates = certificates; + Metadata = metadata; + } + + public IReadOnlyDictionary Metadata { get; } + + public ReadOnlyMemory AsBytes() + { + ThrowIfDisposed(); + return _buffer is null ? ReadOnlyMemory.Empty : new ReadOnlyMemory(_buffer, 0, _length); + } + + public X509Certificate2Collection? 
AsCertificateCollection() + { + ThrowIfDisposed(); + return _certificates; + } + + public void Dispose() + { + if (_disposed) + { + return; + } + + if (_buffer is not null) + { + CryptographicOperations.ZeroMemory(_buffer.AsSpan(0, _length)); + ArrayPool.Shared.Return(_buffer); + } + + if (_certificates is not null) + { + foreach (var certificate in _certificates) + { + certificate.Dispose(); + } + } + + _disposed = true; + } + + private void ThrowIfDisposed() + { + if (_disposed) + { + throw new ObjectDisposedException(nameof(SurfaceSecretHandle)); + } + } + + public static SurfaceSecretHandle FromBytes(ReadOnlySpan bytes, IDictionary? metadata = null) + { + var buffer = ArrayPool.Shared.Rent(bytes.Length); + bytes.CopyTo(buffer); + var readOnlyMetadata = metadata is null + ? new Dictionary(StringComparer.OrdinalIgnoreCase) + : new Dictionary(metadata, StringComparer.OrdinalIgnoreCase); + return new SurfaceSecretHandle(buffer, bytes.Length, null, readOnlyMetadata); + } + + public static SurfaceSecretHandle Empty { get; } = new SurfaceSecretHandle(null, 0, null, new Dictionary()); + + public static SurfaceSecretHandle FromCertificates(X509Certificate2Collection certificates, IDictionary? metadata = null) + { + var readOnlyMetadata = metadata is null + ? 
new Dictionary(StringComparer.OrdinalIgnoreCase) + : new Dictionary(metadata, StringComparer.OrdinalIgnoreCase); + return new SurfaceSecretHandle(null, 0, certificates, readOnlyMetadata); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretNotFoundException.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretNotFoundException.cs new file mode 100644 index 00000000..b96f9528 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretNotFoundException.cs @@ -0,0 +1,12 @@ +namespace StellaOps.Scanner.Surface.Secrets; + +public sealed class SurfaceSecretNotFoundException : Exception +{ + public SurfaceSecretNotFoundException(SurfaceSecretRequest request) + : base($"Surface secret not found for tenant '{request.Tenant}', component '{request.Component}', type '{request.SecretType}'.") + { + Request = request; + } + + public SurfaceSecretRequest Request { get; } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretRequest.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretRequest.cs new file mode 100644 index 00000000..8a2b3455 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretRequest.cs @@ -0,0 +1,10 @@ +namespace StellaOps.Scanner.Surface.Secrets; + +public sealed record SurfaceSecretRequest( + string Tenant, + string Component, + string SecretType, + string? Name = null) +{ + public string CacheKey => string.Join(':', Tenant, Component, SecretType, Name ?? 
"default"); +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretsOptions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretsOptions.cs new file mode 100644 index 00000000..64ce02ad --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/SurfaceSecretsOptions.cs @@ -0,0 +1,17 @@ +namespace StellaOps.Scanner.Surface.Secrets; + +/// +/// Configuration options for the surface secrets subsystem. +/// +public sealed class SurfaceSecretsOptions +{ + /// + /// Gets or sets the logical component name requesting secrets. + /// + public string ComponentName { get; set; } = "Scanner.Worker"; + + /// + /// Gets or sets the set of secret types that should be eagerly validated at startup. + /// + public ISet RequiredSecretTypes { get; } = new HashSet(StringComparer.OrdinalIgnoreCase); +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/TASKS.md index bf4ace26..30921dbf 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/TASKS.md +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets/TASKS.md @@ -2,8 +2,8 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| SURFACE-SECRETS-01 | TODO | Scanner Guild, Security Guild | ARCH-SURFACE-EPIC | Produce `docs/modules/scanner/design/surface-secrets.md` defining secret reference schema, storage backends, scopes, and rotation. | Spec approved by Security + Authority guilds; threat model ticket logged. | -| SURFACE-SECRETS-02 | TODO | Scanner Guild | SURFACE-SECRETS-01 | Implement `StellaOps.Scanner.Surface.Secrets` core provider interfaces, secret models, and in-memory test backend. | Library builds; tests pass; XML docs cover public API. 
| +| SURFACE-SECRETS-01 | DOING (2025-11-02) | Scanner Guild, Security Guild | ARCH-SURFACE-EPIC | Produce `docs/modules/scanner/design/surface-secrets.md` defining secret reference schema, storage backends, scopes, and rotation. | Spec approved by Security + Authority guilds; threat model ticket logged. | +| SURFACE-SECRETS-02 | DOING (2025-11-02) | Scanner Guild | SURFACE-SECRETS-01 | Implement `StellaOps.Scanner.Surface.Secrets` core provider interfaces, secret models, and in-memory test backend. | Library builds; tests pass; XML docs cover public API. | | SURFACE-SECRETS-03 | TODO | Scanner Guild | SURFACE-SECRETS-02 | Add Kubernetes/File/Offline backends with deterministic caching and audit hooks. | Backends integrated; integration tests simulate rotation + offline bundles. | | SURFACE-SECRETS-04 | TODO | Scanner Guild | SURFACE-SECRETS-02 | Integrate Surface.Secrets into Scanner Worker/WebService/BuildX for registry + CAS creds. | Scanner components consume library; legacy secret code removed; smoke tests updated. | | SURFACE-SECRETS-05 | TODO | Zastava Guild | SURFACE-SECRETS-02 | Invoke Surface.Secrets from Zastava Observer/Webhook for CAS & attestation secrets. | Zastava uses shared provider; admission + observer tests cover secret errors. | diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ISurfaceValidationReporter.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ISurfaceValidationReporter.cs new file mode 100644 index 00000000..b4dfbfc4 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ISurfaceValidationReporter.cs @@ -0,0 +1,9 @@ +namespace StellaOps.Scanner.Surface.Validation; + +/// +/// Reports validation outcomes for observability purposes. 
+/// +public interface ISurfaceValidationReporter +{ + void Report(SurfaceValidationContext context, SurfaceValidationResult result); +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ISurfaceValidator.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ISurfaceValidator.cs new file mode 100644 index 00000000..4a311076 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ISurfaceValidator.cs @@ -0,0 +1,14 @@ +using System.Threading.Tasks; + +namespace StellaOps.Scanner.Surface.Validation; + +/// +/// Contract implemented by components that validate surface prerequisites. +/// +public interface ISurfaceValidator +{ + ValueTask ValidateAsync( + SurfaceValidationContext context, + CancellationToken cancellationToken = default); +} + diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ISurfaceValidatorRunner.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ISurfaceValidatorRunner.cs new file mode 100644 index 00000000..14dd6b4b --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ISurfaceValidatorRunner.cs @@ -0,0 +1,16 @@ +namespace StellaOps.Scanner.Surface.Validation; + +/// +/// Executes registered surface validators and aggregates their results. 
+/// +public interface ISurfaceValidatorRunner +{ + ValueTask RunAllAsync( + SurfaceValidationContext context, + CancellationToken cancellationToken = default); + + ValueTask EnsureAsync( + SurfaceValidationContext context, + CancellationToken cancellationToken = default); +} + diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/LoggingSurfaceValidationReporter.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/LoggingSurfaceValidationReporter.cs new file mode 100644 index 00000000..e04def5b --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/LoggingSurfaceValidationReporter.cs @@ -0,0 +1,49 @@ +using Microsoft.Extensions.Logging; + +namespace StellaOps.Scanner.Surface.Validation; + +internal sealed class LoggingSurfaceValidationReporter : ISurfaceValidationReporter +{ + private readonly ILogger _logger; + + public LoggingSurfaceValidationReporter(ILogger logger) + { + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public void Report(SurfaceValidationContext context, SurfaceValidationResult result) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + if (result is null) + { + throw new ArgumentNullException(nameof(result)); + } + + if (result.IsSuccess) + { + _logger.LogInformation("Surface validation succeeded for component {Component}.", context.ComponentName); + return; + } + + foreach (var issue in result.Issues) + { + var logLevel = issue.Severity switch + { + SurfaceValidationSeverity.Info => LogLevel.Information, + SurfaceValidationSeverity.Warning => LogLevel.Warning, + _ => LogLevel.Error + }; + + _logger.Log(logLevel, + "Surface validation issue for component {Component}: {Code} - {Message}. 
Hint: {Hint}", + context.ComponentName, + issue.Code, + issue.Message, + issue.Hint); + } + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ServiceCollectionExtensions.cs new file mode 100644 index 00000000..7fa21474 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/ServiceCollectionExtensions.cs @@ -0,0 +1,44 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Surface.Validation.Validators; + +namespace StellaOps.Scanner.Surface.Validation; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddSurfaceValidation( + this IServiceCollection services, + Action? configure = null) + { + if (services is null) + { + throw new ArgumentNullException(nameof(services)); + } + + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + services.TryAddSingleton, SurfaceValidationOptionsConfigurator>(); + + if (configure is not null) + { + var builder = new SurfaceValidationBuilder(services); + configure(builder); + } + + return services; + } + + private sealed class SurfaceValidationOptionsConfigurator : IConfigureOptions + { + public void Configure(SurfaceValidationOptions options) + { + options ??= new SurfaceValidationOptions(); + options.ThrowOnFailure = true; + options.ContinueOnError = false; + } + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/StellaOps.Scanner.Surface.Validation.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/StellaOps.Scanner.Surface.Validation.csproj new file mode 100644 index 00000000..03ec5246 --- /dev/null 
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/StellaOps.Scanner.Surface.Validation.csproj @@ -0,0 +1,23 @@ + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + + + + diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationBuilder.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationBuilder.cs new file mode 100644 index 00000000..262dde9e --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationBuilder.cs @@ -0,0 +1,33 @@ +using System; +using Microsoft.Extensions.DependencyInjection; + +namespace StellaOps.Scanner.Surface.Validation; + +public sealed class SurfaceValidationBuilder +{ + private readonly IServiceCollection _services; + + internal SurfaceValidationBuilder(IServiceCollection services) + { + _services = services; + } + + public SurfaceValidationBuilder AddValidator() + where TValidator : class, ISurfaceValidator + { + _services.AddSingleton(); + return this; + } + + public SurfaceValidationBuilder AddValidator(Func factory) + { + if (factory is null) + { + throw new ArgumentNullException(nameof(factory)); + } + + _services.AddSingleton(provider => factory(provider)); + return this; + } + +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationContext.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationContext.cs new file mode 100644 index 00000000..2472046b --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationContext.cs @@ -0,0 +1,43 @@ +using System; +using System.Collections.Generic; +using StellaOps.Scanner.Surface.Env; + +namespace StellaOps.Scanner.Surface.Validation; + +/// +/// Context supplied to validation checks to describe the surface configuration. 
+/// +public sealed record SurfaceValidationContext( + IServiceProvider Services, + string ComponentName, + SurfaceEnvironmentSettings Environment, + IReadOnlyDictionary Properties) +{ + public static SurfaceValidationContext Create( + IServiceProvider services, + string componentName, + SurfaceEnvironmentSettings environment, + IReadOnlyDictionary? properties = null) + { + if (services is null) + { + throw new ArgumentNullException(nameof(services)); + } + + if (string.IsNullOrWhiteSpace(componentName)) + { + throw new ArgumentException("Component name cannot be null or whitespace.", nameof(componentName)); + } + + if (environment is null) + { + throw new ArgumentNullException(nameof(environment)); + } + + return new SurfaceValidationContext( + services, + componentName, + environment, + properties ?? new Dictionary(StringComparer.OrdinalIgnoreCase)); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationException.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationException.cs new file mode 100644 index 00000000..38474e65 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationException.cs @@ -0,0 +1,14 @@ +using System.Collections.Immutable; + +namespace StellaOps.Scanner.Surface.Validation; + +public sealed class SurfaceValidationException : Exception +{ + public SurfaceValidationException(string message, IEnumerable issues) + : base(message) + { + Issues = issues.ToImmutableArray(); + } + + public ImmutableArray Issues { get; } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationIssue.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationIssue.cs new file mode 100644 index 00000000..a9f69012 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationIssue.cs @@ -0,0 +1,25 @@ +using System.Diagnostics.CodeAnalysis; + +namespace 
StellaOps.Scanner.Surface.Validation; + +/// +/// Represents a single validation finding produced by a surface validator. +/// +public sealed record SurfaceValidationIssue( + string Code, + string Message, + SurfaceValidationSeverity Severity, + string? Hint = null) +{ + public static SurfaceValidationIssue Info(string code, string message, string? hint = null) + => new(code, message, SurfaceValidationSeverity.Info, hint); + + public static SurfaceValidationIssue Warning(string code, string message, string? hint = null) + => new(code, message, SurfaceValidationSeverity.Warning, hint); + + public static SurfaceValidationIssue Error(string code, string message, string? hint = null) + => new(code, message, SurfaceValidationSeverity.Error, hint); + + [MemberNotNullWhen(true, nameof(Hint))] + public bool HasHint => !string.IsNullOrWhiteSpace(Hint); +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationIssueCodes.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationIssueCodes.cs new file mode 100644 index 00000000..2669f05f --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationIssueCodes.cs @@ -0,0 +1,15 @@ +namespace StellaOps.Scanner.Surface.Validation; + +public static class SurfaceValidationIssueCodes +{ + public const string SurfaceEndpointMissing = "SURFACE_ENV_MISSING_ENDPOINT"; + public const string SurfaceEndpointInvalid = "SURFACE_ENV_ENDPOINT_INVALID"; + public const string CacheDirectoryUnwritable = "SURFACE_ENV_CACHE_DIR_UNWRITABLE"; + public const string CacheQuotaInvalid = "SURFACE_ENV_CACHE_QUOTA_INVALID"; + public const string SecretsProviderUnknown = "SURFACE_SECRET_PROVIDER_UNKNOWN"; + public const string SecretsConfigurationMissing = "SURFACE_SECRET_CONFIGURATION_MISSING"; + public const string TenantMissing = "SURFACE_ENV_TENANT_MISSING"; + public const string BucketMissing = "SURFACE_FS_BUCKET_MISSING"; + public const string 
FeatureUnknown = "SURFACE_FEATURE_UNKNOWN"; + public const string ValidatorException = "SURFACE_VALIDATOR_EXCEPTION"; +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationOptions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationOptions.cs new file mode 100644 index 00000000..b46498cc --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationOptions.cs @@ -0,0 +1,18 @@ +namespace StellaOps.Scanner.Surface.Validation; + +/// +/// Controls behaviour of the surface validation runner. +/// +public sealed class SurfaceValidationOptions +{ + /// + /// Gets or sets a value indicating whether the runner should continue invoking validators after an error is recorded. + /// + public bool ContinueOnError { get; set; } + + /// + /// Gets or sets a value indicating whether the runner should throw a when validation fails. + /// Defaults to true to align with fail-fast expectations. + /// + public bool ThrowOnFailure { get; set; } = true; +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationResult.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationResult.cs new file mode 100644 index 00000000..991c9338 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationResult.cs @@ -0,0 +1,29 @@ +using System.Collections.Immutable; + +namespace StellaOps.Scanner.Surface.Validation; + +/// +/// Aggregate outcome emitted after running all registered validators. 
+/// +public sealed record SurfaceValidationResult +{ + private SurfaceValidationResult(bool isSuccess, ImmutableArray issues) + { + IsSuccess = isSuccess; + Issues = issues; + } + + public bool IsSuccess { get; } + + public ImmutableArray Issues { get; } + + public static SurfaceValidationResult Success() + => new(true, ImmutableArray.Empty); + + public static SurfaceValidationResult FromIssues(IEnumerable issues) + { + var immutable = issues.ToImmutableArray(); + var success = immutable.All(issue => issue.Severity != SurfaceValidationSeverity.Error); + return new SurfaceValidationResult(success, immutable); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationSeverity.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationSeverity.cs new file mode 100644 index 00000000..387737c5 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidationSeverity.cs @@ -0,0 +1,11 @@ +namespace StellaOps.Scanner.Surface.Validation; + +/// +/// Severity classification for surface validation issues. 
+/// +public enum SurfaceValidationSeverity +{ + Info = 0, + Warning = 1, + Error = 2, +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidatorRunner.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidatorRunner.cs new file mode 100644 index 00000000..a0fda554 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/SurfaceValidatorRunner.cs @@ -0,0 +1,98 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; + +namespace StellaOps.Scanner.Surface.Validation; + +internal sealed class SurfaceValidatorRunner : ISurfaceValidatorRunner +{ + private readonly IReadOnlyList _validators; + private readonly ILogger _logger; + private readonly ISurfaceValidationReporter _reporter; + private readonly SurfaceValidationOptions _options; + + public SurfaceValidatorRunner( + IEnumerable validators, + ILogger logger, + ISurfaceValidationReporter reporter, + IOptions options) + { + _validators = validators?.ToArray() ?? Array.Empty(); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _reporter = reporter ?? throw new ArgumentNullException(nameof(reporter)); + _options = options?.Value ?? 
new SurfaceValidationOptions(); + } + + public async ValueTask RunAllAsync( + SurfaceValidationContext context, + CancellationToken cancellationToken = default) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + if (_validators.Count == 0) + { + var success = SurfaceValidationResult.Success(); + _reporter.Report(context, success); + return success; + } + + var issues = new List(); + foreach (var validator in _validators) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + var result = await validator.ValidateAsync(context, cancellationToken).ConfigureAwait(false); + if (!result.IsSuccess) + { + issues.AddRange(result.Issues); + + if (!_options.ContinueOnError && result.Issues.Any(issue => issue.Severity == SurfaceValidationSeverity.Error)) + { + break; + } + } + } + catch (Exception ex) + { + _logger.LogError(ex, "Surface validator {Validator} threw an exception.", validator.GetType().FullName); + issues.Add(SurfaceValidationIssue.Error( + SurfaceValidationIssueCodes.ValidatorException, + $"Validator '{validator.GetType().FullName}' threw an exception: {ex.Message}", + "Inspect logs for stack trace.")); + + if (!_options.ContinueOnError) + { + break; + } + } + } + + var resultAggregate = issues.Count == 0 + ? 
SurfaceValidationResult.Success() + : SurfaceValidationResult.FromIssues(issues); + + _reporter.Report(context, resultAggregate); + return resultAggregate; + } + + public async ValueTask EnsureAsync( + SurfaceValidationContext context, + CancellationToken cancellationToken = default) + { + var result = await RunAllAsync(context, cancellationToken).ConfigureAwait(false); + if (!result.IsSuccess && _options.ThrowOnFailure) + { + throw new SurfaceValidationException( + $"Surface validation failed for component '{context.ComponentName}'.", + result.Issues); + } + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/TASKS.md index 902dbe0a..7a485a6f 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/TASKS.md +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/TASKS.md @@ -2,7 +2,7 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| SURFACE-VAL-01 | TODO | Scanner Guild, Security Guild | SURFACE-FS-01, SURFACE-ENV-01 | Define validation framework (design doc `surface-validation.md`) covering SOLID extension points and queryable checks for env/cache/secrets. | Spec merged; architecture sign-off from Scanner + Security; checklist of baseline validators established. | +| SURFACE-VAL-01 | DOING (2025-11-01) | Scanner Guild, Security Guild | SURFACE-FS-01, SURFACE-ENV-01 | Define validation framework (design doc `surface-validation.md`) covering SOLID extension points and queryable checks for env/cache/secrets. | Spec merged; architecture sign-off from Scanner + Security; checklist of baseline validators established. | | SURFACE-VAL-02 | TODO | Scanner Guild | SURFACE-VAL-01, SURFACE-ENV-02, SURFACE-FS-02 | Implement base validation library (interfaces, check registry, default validators for env/cached manifests, secret refs) with unit tests. 
| Library published; validation registry supports DI; tests cover success/failure; XML docs added. | | SURFACE-VAL-03 | TODO | Scanner Guild, Analyzer Guild | SURFACE-VAL-02 | Integrate validation pipeline into Scanner analyzers (Lang, EntryTrace, etc.) to ensure consistent checks before processing. | Analyzers call validation hooks; integration tests updated; performance baseline measured. | | SURFACE-VAL-04 | TODO | Scanner Guild, Zastava Guild | SURFACE-VAL-02 | Expose validation helpers to Zastava and other runtime consumers (Observer/Webhook) for preflight checks. | Zastava uses shared validators; admission tests include validation failure scenarios. | diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/Validators/SurfaceCacheValidator.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/Validators/SurfaceCacheValidator.cs new file mode 100644 index 00000000..989c5a73 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/Validators/SurfaceCacheValidator.cs @@ -0,0 +1,56 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading.Tasks; +using StellaOps.Scanner.Surface.Validation; + +namespace StellaOps.Scanner.Surface.Validation.Validators; + +internal sealed class SurfaceCacheValidator : ISurfaceValidator +{ + public ValueTask ValidateAsync( + SurfaceValidationContext context, + CancellationToken cancellationToken = default) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + var issues = new List(); + var directory = context.Environment.CacheRoot; + try + { + if (!directory.Exists) + { + directory.Create(); + } + + var testFile = Path.Combine(directory.FullName, ".validation"); + using (File.Open(testFile, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None)) + { + } + + File.Delete(testFile); + } + catch (Exception ex) + { + issues.Add(SurfaceValidationIssue.Error( + 
SurfaceValidationIssueCodes.CacheDirectoryUnwritable, + $"Surface cache directory '{directory.FullName}' is not writable: {ex.Message}", + "Ensure the cache directory exists and is writable by the process user.")); + } + + if (context.Environment.CacheQuotaMegabytes <= 0) + { + issues.Add(SurfaceValidationIssue.Error( + SurfaceValidationIssueCodes.CacheQuotaInvalid, + "Surface cache quota must be greater than zero.", + "Set SCANNER_SURFACE_CACHE_QUOTA_MB to a positive value.")); + } + + return ValueTask.FromResult(issues.Count == 0 + ? SurfaceValidationResult.Success() + : SurfaceValidationResult.FromIssues(issues)); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/Validators/SurfaceEndpointValidator.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/Validators/SurfaceEndpointValidator.cs new file mode 100644 index 00000000..49be9bde --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/Validators/SurfaceEndpointValidator.cs @@ -0,0 +1,35 @@ +using StellaOps.Scanner.Surface.Env; + +namespace StellaOps.Scanner.Surface.Validation.Validators; + +internal sealed class SurfaceEndpointValidator : ISurfaceValidator +{ + public ValueTask ValidateAsync(SurfaceValidationContext context, CancellationToken cancellationToken = default) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + var issues = new List(); + if (context.Environment.SurfaceFsEndpoint is null || string.Equals(context.Environment.SurfaceFsEndpoint.Host, "surface.invalid", StringComparison.Ordinal)) + { + issues.Add(SurfaceValidationIssue.Error( + SurfaceValidationIssueCodes.SurfaceEndpointMissing, + "Surface FS endpoint is missing or invalid.", + "Set SCANNER_SURFACE_FS_ENDPOINT to the RustFS/S3 endpoint.")); + } + + if (string.IsNullOrWhiteSpace(context.Environment.SurfaceFsBucket)) + { + issues.Add(SurfaceValidationIssue.Error( + SurfaceValidationIssueCodes.BucketMissing, + "Surface FS bucket 
must be provided.", + "Set SCANNER_SURFACE_FS_BUCKET")); + } + + return ValueTask.FromResult(issues.Count == 0 + ? SurfaceValidationResult.Success() + : SurfaceValidationResult.FromIssues(issues)); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/Validators/SurfaceSecretsValidator.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/Validators/SurfaceSecretsValidator.cs new file mode 100644 index 00000000..bb7f0ba3 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/Validators/SurfaceSecretsValidator.cs @@ -0,0 +1,68 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using StellaOps.Scanner.Surface.Env; +using StellaOps.Scanner.Surface.Validation; + +namespace StellaOps.Scanner.Surface.Validation.Validators; + +internal sealed class SurfaceSecretsValidator : ISurfaceValidator +{ + private static readonly HashSet KnownProviders = new(StringComparer.OrdinalIgnoreCase) + { + "kubernetes", + "file", + "inline" + }; + + public ValueTask ValidateAsync( + SurfaceValidationContext context, + CancellationToken cancellationToken = default) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + var issues = new List(); + var secrets = context.Environment.Secrets; + + if (!KnownProviders.Contains(secrets.Provider)) + { + issues.Add(SurfaceValidationIssue.Error( + SurfaceValidationIssueCodes.SecretsProviderUnknown, + $"Surface secrets provider '{secrets.Provider}' is not recognised.", + "Set SCANNER_SURFACE_SECRETS_PROVIDER to 'kubernetes', 'file', or another supported provider.")); + } + + if (string.Equals(secrets.Provider, "kubernetes", StringComparison.OrdinalIgnoreCase) && + string.IsNullOrWhiteSpace(secrets.Namespace)) + { + issues.Add(SurfaceValidationIssue.Error( + SurfaceValidationIssueCodes.SecretsConfigurationMissing, + "Kubernetes secrets provider requires a namespace.", + "Set SCANNER_SURFACE_SECRETS_NAMESPACE to the target 
namespace.")); + } + + if (string.Equals(secrets.Provider, "file", StringComparison.OrdinalIgnoreCase) && + string.IsNullOrWhiteSpace(secrets.Root)) + { + issues.Add(SurfaceValidationIssue.Error( + SurfaceValidationIssueCodes.SecretsConfigurationMissing, + "File secrets provider requires a root directory.", + "Set SCANNER_SURFACE_SECRETS_ROOT to a directory path.")); + } + + if (string.IsNullOrWhiteSpace(secrets.Tenant)) + { + issues.Add(SurfaceValidationIssue.Error( + SurfaceValidationIssueCodes.TenantMissing, + "Surface secrets tenant cannot be empty.", + "Set SCANNER_SURFACE_SECRETS_TENANT or ensure the tenant resolver provides a value.")); + } + + return ValueTask.FromResult(issues.Count == 0 + ? SurfaceValidationResult.Success() + : SurfaceValidationResult.FromIssues(issues)); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/fallback/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/fallback/expected.json new file mode 100644 index 00000000..ec126219 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/fallback/expected.json @@ -0,0 +1,25 @@ +[ + { + "analyzerId": "rust", + "componentKey": "bin::sha256:10f3c03766e4403be40add0467a2b2d07fd7006e4b8515ab88740ffa327ea775", + "purl": null, + "name": "opaque_bin", + "version": null, + "type": "bin", + "usedByEntrypoint": true, + "metadata": { + "binary.path": "usr/local/bin/opaque_bin", + "binary.sha256": "10f3c03766e4403be40add0467a2b2d07fd7006e4b8515ab88740ffa327ea775", + "provenance": "binary" + }, + "evidence": [ + { + "kind": "file", + "source": "binary", + "locator": "usr/local/bin/opaque_bin", + "value": null, + "sha256": "10f3c03766e4403be40add0467a2b2d07fd7006e4b8515ab88740ffa327ea775" + } + ] + } +] diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/heuristics/competitor-baseline.json 
b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/heuristics/competitor-baseline.json new file mode 100644 index 00000000..75d85ad9 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/heuristics/competitor-baseline.json @@ -0,0 +1,8 @@ +{ + "detectedCrates": [ + { + "name": "serde", + "note": "Binary symbol scan matched only serde" + } + ] +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/heuristics/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/heuristics/expected.json new file mode 100644 index 00000000..98fafef5 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/heuristics/expected.json @@ -0,0 +1,68 @@ +[ + { + "analyzerId": "rust", + "componentKey": "rust::heuristic::reqwest::usr/local/bin/heuristic_app", + "name": "reqwest", + "type": "cargo", + "usedByEntrypoint": true, + "metadata": { + "binary.paths": "usr/local/bin/heuristic_app", + "binary.sha256": "4caf60c501a594b5d4b8d909b3e91fccc4447692b9e144f322a333255909310b", + "crate": "reqwest", + "provenance": "heuristic" + }, + "evidence": [ + { + "kind": "derived", + "source": "rust.heuristic", + "locator": "usr/local/bin/heuristic_app", + "value": "reqwest", + "sha256": "4caf60c501a594b5d4b8d909b3e91fccc4447692b9e144f322a333255909310b" + } + ] + }, + { + "analyzerId": "rust", + "componentKey": "rust::heuristic::serde::usr/local/bin/heuristic_app", + "name": "serde", + "type": "cargo", + "usedByEntrypoint": true, + "metadata": { + "binary.paths": "usr/local/bin/heuristic_app", + "binary.sha256": "4caf60c501a594b5d4b8d909b3e91fccc4447692b9e144f322a333255909310b", + "crate": "serde", + "provenance": "heuristic" + }, + "evidence": [ + { + "kind": "derived", + "source": "rust.heuristic", + "locator": "usr/local/bin/heuristic_app", + "value": "serde", + "sha256": 
"4caf60c501a594b5d4b8d909b3e91fccc4447692b9e144f322a333255909310b" + } + ] + }, + { + "analyzerId": "rust", + "componentKey": "rust::heuristic::tokio::usr/local/bin/heuristic_app", + "name": "tokio", + "type": "cargo", + "usedByEntrypoint": true, + "metadata": { + "binary.paths": "usr/local/bin/heuristic_app", + "binary.sha256": "4caf60c501a594b5d4b8d909b3e91fccc4447692b9e144f322a333255909310b", + "crate": "tokio", + "provenance": "heuristic" + }, + "evidence": [ + { + "kind": "derived", + "source": "rust.heuristic", + "locator": "usr/local/bin/heuristic_app", + "value": "tokio", + "sha256": "4caf60c501a594b5d4b8d909b3e91fccc4447692b9e144f322a333255909310b" + } + ] + } +] diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Rust/RustHeuristicCoverageComparisonTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Rust/RustHeuristicCoverageComparisonTests.cs new file mode 100644 index 00000000..eb439864 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Rust/RustHeuristicCoverageComparisonTests.cs @@ -0,0 +1,77 @@ +using System.Linq; +using System.Text.Json; +using StellaOps.Scanner.Analyzers.Lang.Rust; +using StellaOps.Scanner.Analyzers.Lang.Tests.Harness; +using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +namespace StellaOps.Scanner.Analyzers.Lang.Tests.Rust; + +public sealed class RustHeuristicCoverageComparisonTests +{ + [Fact] + public async Task HeuristicCoverageExceedsCompetitorBaselineAsync() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = TestPaths.ResolveFixture("lang", "rust", "heuristics"); + var baselinePath = Path.Combine(fixturePath, "competitor-baseline.json"); + + var analyzers = new ILanguageAnalyzer[] + { + new RustLanguageAnalyzer() + }; + + var output = await LanguageAnalyzerTestHarness.RunToJsonAsync( + fixturePath, + analyzers, + cancellationToken); + + using var ours = JsonDocument.Parse(output); + var heuristicNames = new 
HashSet(StringComparer.OrdinalIgnoreCase); + foreach (var element in ours.RootElement.EnumerateArray()) + { + if (!element.TryGetProperty("metadata", out var metadata) || metadata.ValueKind != JsonValueKind.Object) + { + continue; + } + + var provenance = metadata.EnumerateObject() + .FirstOrDefault(p => string.Equals(p.Name, "provenance", StringComparison.OrdinalIgnoreCase)); + + if (provenance.Value.ValueKind == JsonValueKind.String && + string.Equals(provenance.Value.GetString(), "heuristic", StringComparison.OrdinalIgnoreCase)) + { + if (element.TryGetProperty("name", out var nameProperty) && nameProperty.ValueKind == JsonValueKind.String) + { + var value = nameProperty.GetString(); + if (!string.IsNullOrWhiteSpace(value)) + { + heuristicNames.Add(value); + } + } + } + } + + using var competitor = JsonDocument.Parse(await File.ReadAllTextAsync(baselinePath, cancellationToken)); + var competitorNames = new HashSet(StringComparer.OrdinalIgnoreCase); + if (competitor.RootElement.TryGetProperty("detectedCrates", out var detectedCrates) && detectedCrates.ValueKind == JsonValueKind.Array) + { + foreach (var entry in detectedCrates.EnumerateArray()) + { + if (entry.ValueKind == JsonValueKind.Object && entry.TryGetProperty("name", out var nameProperty) && nameProperty.ValueKind == JsonValueKind.String) + { + var name = nameProperty.GetString(); + if (!string.IsNullOrWhiteSpace(name)) + { + competitorNames.Add(name); + } + } + } + } + + Assert.NotEmpty(competitorNames); + Assert.True(heuristicNames.IsSupersetOf(competitorNames)); + + var improvement = (double)heuristicNames.Count / competitorNames.Count; + Assert.True(improvement >= 1.15, $"Expected at least 15% improvement; got {improvement:P2} ({heuristicNames.Count} vs {competitorNames.Count})."); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Rust/RustLanguageAnalyzerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Rust/RustLanguageAnalyzerTests.cs index 
63d97680..a34aa122 100644 --- a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Rust/RustLanguageAnalyzerTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Rust/RustLanguageAnalyzerTests.cs @@ -1,7 +1,8 @@ using System; using System.IO; using System.Linq; -using StellaOps.Scanner.Analyzers.Lang.Rust; +using System.Text.Json.Nodes; +using StellaOps.Scanner.Analyzers.Lang.Rust; using StellaOps.Scanner.Analyzers.Lang.Tests.Harness; using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; @@ -35,25 +36,86 @@ public sealed class RustLanguageAnalyzerTests } [Fact] - public async Task AnalyzerIsThreadSafeUnderConcurrencyAsync() - { - var cancellationToken = TestContext.Current.CancellationToken; - var fixturePath = TestPaths.ResolveFixture("lang", "rust", "simple"); - - var analyzers = new ILanguageAnalyzer[] - { - new RustLanguageAnalyzer() - }; - - var workers = Math.Max(Environment.ProcessorCount, 4); - var tasks = Enumerable.Range(0, workers) - .Select(_ => LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken)); - - var results = await Task.WhenAll(tasks); - var baseline = results[0]; - foreach (var result in results) - { - Assert.Equal(baseline, result); - } - } -} + public async Task AnalyzerIsThreadSafeUnderConcurrencyAsync() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = TestPaths.ResolveFixture("lang", "rust", "simple"); + + var analyzers = new ILanguageAnalyzer[] + { + new RustLanguageAnalyzer() + }; + + var workers = Math.Max(Environment.ProcessorCount, 4); + var tasks = Enumerable.Range(0, workers) + .Select(_ => LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken)); + + var results = await Task.WhenAll(tasks); + var baseline = results[0]; + foreach (var result in results) + { + Assert.Equal(baseline, result); + } + } + + [Fact] + public async Task HeuristicFixtureProducesExpectedOutputAsync() + { + var 
cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = TestPaths.ResolveFixture("lang", "rust", "heuristics"); + var goldenPath = Path.Combine(fixturePath, "expected.json"); + var usageHints = new LanguageUsageHints(new[] + { + Path.Combine(fixturePath, "usr/local/bin/heuristic_app") + }); + + var analyzers = new ILanguageAnalyzer[] + { + new RustLanguageAnalyzer() + }; + + await LanguageAnalyzerTestHarness.AssertDeterministicAsync( + fixturePath, + goldenPath, + analyzers, + cancellationToken, + usageHints); + } + + [Fact] + public async Task FallbackFixtureProducesExpectedOutputAsync() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = TestPaths.ResolveFixture("lang", "rust", "fallback"); + var goldenPath = Path.Combine(fixturePath, "expected.json"); + var usageHints = new LanguageUsageHints(new[] + { + Path.Combine(fixturePath, "usr/local/bin/opaque_bin") + }); + + var analyzers = new ILanguageAnalyzer[] + { + new RustLanguageAnalyzer() + }; + + var actualJson = await LanguageAnalyzerTestHarness.RunToJsonAsync( + fixturePath, + analyzers, + cancellationToken, + usageHints); + + var repeat = await LanguageAnalyzerTestHarness.RunToJsonAsync( + fixturePath, + analyzers, + cancellationToken, + usageHints); + Assert.Equal(actualJson, repeat); + + var expectedJson = await File.ReadAllTextAsync(goldenPath, cancellationToken); + var actualNode = JsonNode.Parse(actualJson); + var expectedNode = JsonNode.Parse(expectedJson); + Assert.True( + JsonNode.DeepEquals(expectedNode, actualNode), + "Fallback fixture output does not match expected snapshot."); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/EntryTraceAnalyzerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/EntryTraceAnalyzerTests.cs index 62aa7fdd..6de30274 100644 --- a/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/EntryTraceAnalyzerTests.cs +++ 
b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/EntryTraceAnalyzerTests.cs @@ -1,52 +1,64 @@ -using System.Collections.Immutable; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using StellaOps.Scanner.EntryTrace.Diagnostics; -using Xunit; - -namespace StellaOps.Scanner.EntryTrace.Tests; - -public sealed class EntryTraceAnalyzerTests -{ - private static EntryTraceAnalyzer CreateAnalyzer() - { - var options = Options.Create(new EntryTraceAnalyzerOptions - { - MaxDepth = 32, - FollowRunParts = true - }); - return new EntryTraceAnalyzer(options, new EntryTraceMetrics(), NullLogger.Instance); - } - - [Fact] - public async Task ResolveAsync_FollowsShellIncludeAndPythonModule() - { - var fs = new TestRootFileSystem(); - fs.AddFile("/entrypoint.sh", """ - #!/bin/sh - source /opt/setup.sh - exec python -m app.main --flag - """); - fs.AddFile("/opt/setup.sh", """ - #!/bin/sh - run-parts /opt/setup.d - """); - fs.AddDirectory("/opt/setup.d"); - fs.AddFile("/opt/setup.d/001-node.sh", """ - #!/bin/sh - exec node /app/server.js - """); - fs.AddFile("/opt/setup.d/010-java.sh", """ - #!/bin/sh - java -jar /app/app.jar - """); - fs.AddFile("/usr/bin/python", "#!/usr/bin/env python3\n", executable: true); - fs.AddFile("/usr/bin/node", "#!/usr/bin/env node\n", executable: true); - fs.AddFile("/usr/bin/java", "", executable: true); - fs.AddFile("/app/server.js", "console.log('hello');", executable: true); - fs.AddFile("/app/app.jar", string.Empty, executable: true); - - var analyzer = CreateAnalyzer(); +using System.Collections.Immutable; +using System.IO; +using System.IO.Compression; +using System.Text; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.EntryTrace.Diagnostics; +using Xunit; +using Xunit.Abstractions; +using Xunit.Sdk; + +namespace StellaOps.Scanner.EntryTrace.Tests; + +public sealed class EntryTraceAnalyzerTests +{ + private readonly ITestOutputHelper 
_output; + + public EntryTraceAnalyzerTests(ITestOutputHelper output) + { + _output = output; + } + + private static EntryTraceAnalyzer CreateAnalyzer() + { + var options = Options.Create(new EntryTraceAnalyzerOptions + { + MaxDepth = 32, + FollowRunParts = true + }); + return new EntryTraceAnalyzer(options, new EntryTraceMetrics(), NullLogger.Instance); + } + + [Fact] + public async Task ResolveAsync_FollowsShellIncludeAndPythonModule() + { + var fs = new TestRootFileSystem(); + fs.AddFile("/entrypoint.sh", """ + #!/bin/sh + source /opt/setup.sh + exec python -m app.main --flag + """); + fs.AddFile("/opt/setup.sh", """ + #!/bin/sh + run-parts /opt/setup.d + """); + fs.AddDirectory("/opt/setup.d"); + fs.AddFile("/opt/setup.d/001-node.sh", """ + #!/bin/sh + exec node /app/server.js + """); + fs.AddFile("/opt/setup.d/010-java.sh", """ + #!/bin/sh + java -jar /app/app.jar + """); + fs.AddFile("/usr/bin/python", "#!/usr/bin/env python3\n", executable: true); + fs.AddFile("/usr/bin/node", "#!/usr/bin/env node\n", executable: true); + fs.AddFile("/usr/bin/java", "", executable: true); + fs.AddFile("/app/server.js", "console.log('hello');", executable: true); + fs.AddFile("/app/app.jar", string.Empty, executable: true); + + var analyzer = CreateAnalyzer(); var context = new EntryTraceContext( fs, ImmutableDictionary.Empty, @@ -56,36 +68,40 @@ public sealed class EntryTraceAnalyzerTests "sha256:image", "scan-entrytrace-1", NullLogger.Instance); - - var spec = EntrypointSpecification.FromExecForm(new[] { "/entrypoint.sh" }, Array.Empty()); - var result = await analyzer.ResolveAsync(spec, context); - - Assert.Equal(EntryTraceOutcome.Resolved, result.Outcome); - Assert.Empty(result.Diagnostics); - - var nodeNames = result.Nodes.Select(n => (n.Kind, n.DisplayName)).ToArray(); - Assert.Contains((EntryTraceNodeKind.Command, "/entrypoint.sh"), nodeNames); - Assert.Contains((EntryTraceNodeKind.Include, "/opt/setup.sh"), nodeNames); - Assert.Contains(nodeNames, tuple => tuple.Kind 
== EntryTraceNodeKind.Command && tuple.DisplayName == "python"); - Assert.Contains(nodeNames, tuple => tuple.Kind == EntryTraceNodeKind.Command && tuple.DisplayName == "node"); - Assert.Contains(nodeNames, tuple => tuple.Kind == EntryTraceNodeKind.Command && tuple.DisplayName == "java"); - Assert.Contains(nodeNames, tuple => tuple.Kind == EntryTraceNodeKind.RunPartsDirectory && tuple.DisplayName == "/opt/setup.d"); - - Assert.Contains(result.Edges, edge => edge.Relationship == "python-module" && edge.Metadata is { } metadata && metadata.TryGetValue("module", out var module) && module == "app.main"); - } - - [Fact] - public async Task ResolveAsync_RecordsDiagnosticsForMissingInclude() - { - var fs = new TestRootFileSystem(); - fs.AddFile("/entrypoint.sh", """ - #!/bin/sh - source /missing/setup.sh - exec /bin/true - """); - fs.AddFile("/bin/true", string.Empty, executable: true); - - var analyzer = CreateAnalyzer(); + + var spec = EntrypointSpecification.FromExecForm(new[] { "/entrypoint.sh" }, Array.Empty()); + var result = await analyzer.ResolveAsync(spec, context); + + if (result.Outcome != EntryTraceOutcome.Resolved) + { + var details = string.Join(", ", result.Diagnostics.Select(d => d.Severity + ":" + d.Reason)); + throw new XunitException("Unexpected outcome: " + result.Outcome + "; diagnostics=" + details); + } + Assert.Empty(result.Diagnostics); + + var nodeNames = result.Nodes.Select(n => (n.Kind, n.DisplayName)).ToArray(); + Assert.Contains((EntryTraceNodeKind.Command, "/entrypoint.sh"), nodeNames); + Assert.Contains((EntryTraceNodeKind.Include, "/opt/setup.sh"), nodeNames); + Assert.Contains(nodeNames, tuple => tuple.Kind == EntryTraceNodeKind.Command && tuple.DisplayName == "python"); + Assert.Contains(nodeNames, tuple => tuple.Kind == EntryTraceNodeKind.Command && tuple.DisplayName == "node"); + Assert.Contains(nodeNames, tuple => tuple.Kind == EntryTraceNodeKind.Command && tuple.DisplayName == "java"); + Assert.Contains(nodeNames, tuple => tuple.Kind 
== EntryTraceNodeKind.RunPartsDirectory && tuple.DisplayName == "/opt/setup.d"); + + Assert.Contains(result.Edges, edge => edge.Relationship == "python-module" && edge.Metadata is { } metadata && metadata.TryGetValue("module", out var module) && module == "app.main"); + } + + [Fact] + public async Task ResolveAsync_RecordsDiagnosticsForMissingInclude() + { + var fs = new TestRootFileSystem(); + fs.AddFile("/entrypoint.sh", """ + #!/bin/sh + source /missing/setup.sh + exec /bin/true + """); + fs.AddFile("/bin/true", string.Empty, executable: true); + + var analyzer = CreateAnalyzer(); var context = new EntryTraceContext( fs, ImmutableDictionary.Empty, @@ -95,27 +111,27 @@ public sealed class EntryTraceAnalyzerTests "sha256:image", "scan-entrytrace-2", NullLogger.Instance); - - var spec = EntrypointSpecification.FromExecForm(new[] { "/entrypoint.sh" }, Array.Empty()); - var result = await analyzer.ResolveAsync(spec, context); - - Assert.Equal(EntryTraceOutcome.PartiallyResolved, result.Outcome); - Assert.Single(result.Diagnostics); - Assert.Equal(EntryTraceUnknownReason.MissingFile, result.Diagnostics[0].Reason); - } - - [Fact] - public async Task ResolveAsync_IsDeterministic() - { - var fs = new TestRootFileSystem(); - fs.AddFile("/entrypoint.sh", """ - #!/bin/sh - exec node /app/index.js - """); - fs.AddFile("/usr/bin/node", string.Empty, executable: true); - fs.AddFile("/app/index.js", "console.log('deterministic');", executable: true); - - var analyzer = CreateAnalyzer(); + + var spec = EntrypointSpecification.FromExecForm(new[] { "/entrypoint.sh" }, Array.Empty()); + var result = await analyzer.ResolveAsync(spec, context); + + Assert.Equal(EntryTraceOutcome.PartiallyResolved, result.Outcome); + Assert.Single(result.Diagnostics); + Assert.Equal(EntryTraceUnknownReason.MissingFile, result.Diagnostics[0].Reason); + } + + [Fact] + public async Task ResolveAsync_IsDeterministic() + { + var fs = new TestRootFileSystem(); + fs.AddFile("/entrypoint.sh", """ + #!/bin/sh 
+ exec node /app/index.js + """); + fs.AddFile("/usr/bin/node", string.Empty, executable: true); + fs.AddFile("/app/index.js", "console.log('deterministic');", executable: true); + + var analyzer = CreateAnalyzer(); var context = new EntryTraceContext( fs, ImmutableDictionary.Empty, @@ -125,13 +141,13 @@ public sealed class EntryTraceAnalyzerTests "sha256:image", "scan-entrytrace-3", NullLogger.Instance); - - var spec = EntrypointSpecification.FromExecForm(new[] { "/entrypoint.sh" }, Array.Empty()); - var first = await analyzer.ResolveAsync(spec, context); - var second = await analyzer.ResolveAsync(spec, context); - - Assert.Equal(first.Outcome, second.Outcome); - Assert.Equal(first.Diagnostics, second.Diagnostics); + + var spec = EntrypointSpecification.FromExecForm(new[] { "/entrypoint.sh" }, Array.Empty()); + var first = await analyzer.ResolveAsync(spec, context); + var second = await analyzer.ResolveAsync(spec, context); + + Assert.Equal(first.Outcome, second.Outcome); + Assert.Equal(first.Diagnostics, second.Diagnostics); Assert.Equal(first.Nodes.Select(n => (n.Kind, n.DisplayName)).ToArray(), second.Nodes.Select(n => (n.Kind, n.DisplayName)).ToArray()); Assert.Equal(first.Edges.Select(e => (e.FromNodeId, e.ToNodeId, e.Relationship)).ToArray(), second.Edges.Select(e => (e.FromNodeId, e.ToNodeId, e.Relationship)).ToArray()); @@ -144,6 +160,7 @@ public sealed class EntryTraceAnalyzerTests fs.AddFile("/windows/system32/cmd.exe", string.Empty, executable: true); fs.AddFile("/scripts/start.bat", "@echo off\r\necho start\r\n", executable: true); + var analyzer = CreateAnalyzer(); var context = new EntryTraceContext( fs, @@ -158,8 +175,230 @@ public sealed class EntryTraceAnalyzerTests var spec = EntrypointSpecification.FromExecForm(new[] { "cmd.exe", "/c", "/scripts/start.bat" }, Array.Empty()); var result = await analyzer.ResolveAsync(spec, context); - Assert.Equal(EntryTraceOutcome.PartiallyResolved, result.Outcome); + Assert.Equal(EntryTraceOutcome.Resolved, 
result.Outcome); Assert.Contains(result.Nodes, node => node.Kind == EntryTraceNodeKind.Script && node.DisplayName == "/scripts/start.bat"); Assert.Contains(result.Diagnostics, diagnostic => diagnostic.Reason == EntryTraceUnknownReason.UnsupportedSyntax); } + + [Fact] + public async Task ResolveAsync_ClassifiesGoBinaryWithPlan() + { + var fs = new TestRootFileSystem(); + var goBinary = CreateGoBinary(); + fs.AddBinaryFile("/usr/local/bin/goapp", goBinary, executable: true); + + var analyzer = CreateAnalyzer(); + var context = new EntryTraceContext( + fs, + ImmutableDictionary.Empty, + ImmutableArray.Create("/usr/local/bin"), + "/", + "root", + "sha256:go-image", + "scan-go", + NullLogger.Instance); + + var spec = EntrypointSpecification.FromExecForm(new[] { "/usr/local/bin/goapp", "--serve" }, null); + var result = await analyzer.ResolveAsync(spec, context); + + Assert.Equal(EntryTraceOutcome.Resolved, result.Outcome); + + var terminal = Assert.Single(result.Terminals); + Assert.Equal("/usr/local/bin/goapp", terminal.Path); + Assert.Equal(EntryTraceTerminalType.Native, terminal.Type); + Assert.Equal("go", terminal.Runtime); + Assert.True(terminal.Confidence >= 70d); + Assert.False(terminal.Arguments.IsDefault); + Assert.Equal(2, terminal.Arguments.Length); + Assert.Equal("--serve", terminal.Arguments[1]); + Assert.Contains("runtime", terminal.Evidence.Keys); + + var plan = Assert.Single(result.Plans); + Assert.Equal(terminal.Path, plan.TerminalPath); + Assert.Equal("go", plan.Runtime); + Assert.Equal(terminal.Confidence, plan.Confidence); + Assert.Equal(terminal.Arguments, plan.Command); + Assert.Equal("/", plan.WorkingDirectory); + } + + [Fact] + public async Task ResolveAsync_ExtractsJarManifestEvidence() + { + var fs = new TestRootFileSystem(); + var jarBytes = CreateJarWithManifest("com.example.Main"); + fs.AddBinaryFile("/app/example.jar", jarBytes, executable: true); + fs.AddFile("/usr/bin/java", string.Empty, executable: true); + 
Assert.True(fs.TryResolveExecutable("/app/example.jar", Array.Empty(), out _)); + + var analyzer = CreateAnalyzer(); + var context = new EntryTraceContext( + fs, + ImmutableDictionary.Empty, + ImmutableArray.Create("/usr/bin"), + "/", + "root", + "sha256:java-image", + "scan-jar", + NullLogger.Instance); + + var spec = EntrypointSpecification.FromExecForm(new[] { "java", "-jar", "/app/example.jar" }, null); + var result = await analyzer.ResolveAsync(spec, context); + + if (result.Terminals.Length == 0) + { + var nodeSummary = string.Join(", ", result.Nodes.Select(n => $"{n.Kind}:{n.DisplayName}")); + var diagSummary = string.Join(", ", result.Diagnostics.Select(d => d.Reason.ToString())); + throw new XunitException($"Terminals empty; nodes={nodeSummary}; diags={diagSummary}"); + } + + var terminal = Assert.Single(result.Terminals); + Assert.Equal(EntryTraceTerminalType.Managed, terminal.Type); + Assert.Equal("java", terminal.Runtime); + Assert.True(terminal.Evidence.ContainsKey("jar.manifest")); + Assert.Equal("com.example.Main", terminal.Evidence["jar.main-class"]); + Assert.Contains("-jar", terminal.Arguments); + + var plan = Assert.Single(result.Plans); + Assert.Equal(terminal.Evidence, plan.Evidence); + Assert.Equal("java", plan.Runtime); + Assert.Equal(terminal.Confidence, plan.Confidence); + } + + [Fact] + public async Task ResolveAsync_UsesHistoryCandidateWhenEntrypointMissing() + { + var fs = new TestRootFileSystem(); + fs.AddBinaryFile("/usr/bin/node", CreateGoBinary(), executable: true); + + var config = new OciImageConfig + { + History = ImmutableArray.Create(new OciHistoryEntry("/bin/sh -c #(nop) CMD [\"/usr/bin/node\",\"/app/server.js\"]", false)) + }; + + var options = new EntryTraceAnalyzerOptions(); + var imageContext = EntryTraceImageContextFactory.Create( + config, + fs, + options, + "sha256:image-history", + "scan-history", + NullLogger.Instance); + + var analyzer = CreateAnalyzer(); + var result = await 
analyzer.ResolveAsync(imageContext.Entrypoint, imageContext.Context); + + Assert.Equal(EntryTraceOutcome.Resolved, result.Outcome); + Assert.Contains(result.Diagnostics, diagnostic => diagnostic.Reason == EntryTraceUnknownReason.InferredEntrypointFromHistory); + var terminal = Assert.Single(result.Terminals); + Assert.Equal("/usr/bin/node", terminal.Path); + Assert.Contains("/usr/bin/node", terminal.Arguments[0]); + } + + [Fact] + public async Task ResolveAsync_DiscoversSupervisorCommand() + { + var fs = new TestRootFileSystem(); + fs.AddBinaryFile("/usr/bin/gunicorn", CreateGoBinary(), executable: true); + fs.AddDirectory("/etc/supervisor"); + fs.AddFile("/etc/supervisor/app.conf", """ + [program:web] + command=gunicorn app:app + """); + + var config = new OciImageConfig(); + var options = new EntryTraceAnalyzerOptions(); + var imageContext = EntryTraceImageContextFactory.Create( + config, + fs, + options, + "sha256:image-supervisor", + "scan-supervisor", + NullLogger.Instance); + + var analyzer = CreateAnalyzer(); + var result = await analyzer.ResolveAsync(imageContext.Entrypoint, imageContext.Context); + + Assert.Equal(EntryTraceOutcome.Resolved, result.Outcome); + Assert.Contains(result.Diagnostics, diagnostic => diagnostic.Reason == EntryTraceUnknownReason.InferredEntrypointFromSupervisor); + var terminal = Assert.Single(result.Terminals); + Assert.Equal("/usr/bin/gunicorn", terminal.Path); + Assert.Contains("gunicorn", terminal.Arguments[0]); + } + + [Fact] + public async Task ResolveAsync_DiscoversServiceRunScript() + { + var fs = new TestRootFileSystem(); + fs.AddDirectory("/etc"); + fs.AddDirectory("/etc/services.d"); + fs.AddDirectory("/etc/services.d/web"); + fs.AddBinaryFile("/usr/bin/python", CreateGoBinary(), executable: true); + fs.AddFile("/etc/services.d/web/run", """ + #!/bin/sh + /usr/bin/python -m app.main + """); + + var config = new OciImageConfig(); + var options = new EntryTraceAnalyzerOptions(); + var imageContext = 
EntryTraceImageContextFactory.Create( + config, + fs, + options, + "sha256:image-service", + "scan-service", + NullLogger.Instance); + + var candidates = imageContext.Context.Candidates; + var candidateSummary = string.Join(", ", candidates.Select(c => $"{c.Source}:{string.Join(' ', c.Command)}")); + Assert.True( + candidates.Length > 0, + $"Candidates discovered: {candidateSummary}"); + var inferred = Assert.Single(candidates); + Assert.Equal("service-directory", inferred.Source); + Assert.Equal("/etc/services.d/web/run", inferred.Command[0]); + Assert.NotNull(inferred.Evidence); + Assert.True(inferred.Evidence!.Metadata?.ContainsKey("service_dir") ?? false); + Assert.Equal("/etc/services.d/web", inferred.Evidence!.Metadata!["service_dir"]); + + var analyzer = CreateAnalyzer(); + var result = await analyzer.ResolveAsync(imageContext.Entrypoint, imageContext.Context); + + var nonInfoDiagnostics = result.Diagnostics.Where(d => d.Severity != EntryTraceDiagnosticSeverity.Info).ToArray(); + Assert.True(nonInfoDiagnostics.Length == 0, string.Join(", ", nonInfoDiagnostics.Select(d => d.Severity + ":" + d.Reason))); + Assert.Equal(EntryTraceOutcome.Resolved, result.Outcome); + Assert.Contains(result.Diagnostics, diagnostic => diagnostic.Reason == EntryTraceUnknownReason.InferredEntrypointFromServices); + var terminal = Assert.Single(result.Terminals); + Assert.Equal("/usr/bin/python", terminal.Path); + Assert.Contains("/usr/bin/python", terminal.Arguments[0]); + Assert.Contains(result.Nodes, node => node.Kind == EntryTraceNodeKind.Script && node.DisplayName == "/etc/services.d/web/run"); + Assert.Equal(EntryTraceTerminalType.Native, terminal.Type); + } + + private static byte[] CreateGoBinary() + { + var buffer = new byte[256]; + buffer[0] = 0x7F; + buffer[1] = (byte)'E'; + buffer[2] = (byte)'L'; + buffer[3] = (byte)'F'; + + var signature = Encoding.ASCII.GetBytes("Go build ID"); + signature.CopyTo(buffer, 32); + return buffer; + } + + private static byte[] 
CreateJarWithManifest(string mainClass) + { + using var stream = new MemoryStream(); + using (var archive = new ZipArchive(stream, ZipArchiveMode.Create, leaveOpen: true)) + { + var manifest = archive.CreateEntry("META-INF/MANIFEST.MF"); + using var writer = new StreamWriter(manifest.Open(), Encoding.UTF8); + writer.WriteLine("Manifest-Version: 1.0"); + writer.WriteLine($"Main-Class: {mainClass}"); + writer.Flush(); + } + + return stream.ToArray(); + } } diff --git a/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/EntryTraceNdjsonWriterTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/EntryTraceNdjsonWriterTests.cs new file mode 100644 index 00000000..71838f4f --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/EntryTraceNdjsonWriterTests.cs @@ -0,0 +1,152 @@ +using System; +using System.Buffers; +using System.Collections.Immutable; +using System.Globalization; +using System.Linq; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using StellaOps.Scanner.EntryTrace; +using Xunit; + +namespace StellaOps.Scanner.EntryTrace.Tests; + +public sealed class EntryTraceNdjsonWriterTests +{ + [Fact] + public void Serialize_ProducesDeterministicNdjsonLines() + { + var (graph, metadata) = CreateSampleGraph(); + + var lines = EntryTraceNdjsonWriter.Serialize(graph, metadata); + + Assert.Equal(6, lines.Length); + + var entryJson = Parse(lines[0]); + Assert.Equal("entrytrace.entry", entryJson.GetProperty("type").GetString()); + Assert.Equal(metadata.ScanId, entryJson.GetProperty("scan_id").GetString()); + Assert.Equal("resolved", entryJson.GetProperty("outcome").GetString()); + Assert.Equal(1, entryJson.GetProperty("nodes").GetInt32()); + Assert.Equal(1, entryJson.GetProperty("edges").GetInt32()); + Assert.Equal(1, entryJson.GetProperty("targets").GetInt32()); + Assert.Equal(1, entryJson.GetProperty("warnings").GetInt32()); + + var nodeJson = Parse(lines[1]); + Assert.Equal("entrytrace.node", 
nodeJson.GetProperty("type").GetString()); + Assert.Equal("gosu", nodeJson.GetProperty("display_name").GetString()); + Assert.Equal("command", nodeJson.GetProperty("kind").GetString()); + Assert.Equal("user-switch", nodeJson.GetProperty("metadata").GetProperty("wrapper.category").GetString()); + + var edgeJson = Parse(lines[2]); + Assert.Equal("entrytrace.edge", edgeJson.GetProperty("type").GetString()); + Assert.Equal("wraps", edgeJson.GetProperty("relationship").GetString()); + + var targetJson = Parse(lines[3]); + Assert.Equal("entrytrace.target", targetJson.GetProperty("type").GetString()); + Assert.Equal("python", targetJson.GetProperty("runtime").GetString()); + Assert.Equal("scanner", targetJson.GetProperty("user").GetString()); + Assert.Equal(87.5, targetJson.GetProperty("confidence").GetDouble()); + Assert.Equal("medium", targetJson.GetProperty("confidence_level").GetString()); + + var warningJson = Parse(lines[4]); + Assert.Equal("entrytrace.warning", warningJson.GetProperty("type").GetString()); + Assert.Equal("dynamicevaluation", warningJson.GetProperty("reason").GetString()); + + var capabilityJson = Parse(lines[5]); + Assert.Equal("entrytrace.capability", capabilityJson.GetProperty("type").GetString()); + Assert.Equal("user-switch", capabilityJson.GetProperty("category").GetString()); + Assert.Equal("gosu", capabilityJson.GetProperty("name").GetString()); + } + + [Fact] + public void Serialize_ProducesStableSha256Hash() + { + var (graph, metadata) = CreateSampleGraph(); + + var lines = EntryTraceNdjsonWriter.Serialize(graph, metadata); + + var buffer = new ArrayBufferWriter(128); + foreach (var line in lines) + { + var bytes = Encoding.UTF8.GetBytes(line); + buffer.Write(bytes); + } + + var hash = SHA256.HashData(buffer.WrittenSpan); + var actual = Convert.ToHexString(hash).ToLowerInvariant(); + + const string ExpectedHash = "37444d7f68ceafd3e974c10ce5e78c973874d4a0abe73660f8ad204151b93c62"; + Assert.Equal(ExpectedHash, actual); + } + + private static 
(EntryTraceGraph Graph, EntryTraceNdjsonMetadata Metadata) CreateSampleGraph() + { + var nodeMetadata = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + nodeMetadata["wrapper.category"] = "user-switch"; + nodeMetadata["wrapper.name"] = "gosu"; + + var node = new EntryTraceNode( + 1, + EntryTraceNodeKind.Command, + "gosu", + ImmutableArray.Create("gosu", "scanner", "python", "/app/main.py"), + EntryTraceInterpreterKind.None, + new EntryTraceEvidence("/usr/bin/gosu", "sha256:layer-a", "path", ImmutableDictionary.Empty), + new EntryTraceSpan("/scripts/entrypoint.sh", 1, 0, 1, 10), + nodeMetadata.ToImmutable()); + + var edge = new EntryTraceEdge(1, 2, "wraps", null); + + var plan = new EntryTracePlan( + ImmutableArray.Create("/app/main.py", "--serve"), + ImmutableDictionary.Empty, + "/app", + "scanner", + "/app/main.py", + EntryTraceTerminalType.Native, + "python", + 87.5, + ImmutableDictionary.Empty); + + var terminal = new EntryTraceTerminal( + "/app/main.py", + EntryTraceTerminalType.Native, + "python", + 87.5, + ImmutableDictionary.Empty, + "scanner", + "/app", + ImmutableArray.Create("/app/main.py", "--serve")); + + var diagnostic = new EntryTraceDiagnostic( + EntryTraceDiagnosticSeverity.Warning, + EntryTraceUnknownReason.DynamicEvaluation, + "Command 'eval' prevents static resolution.", + new EntryTraceSpan("/scripts/entrypoint.sh", 5, 0, 5, 30), + "/scripts/entrypoint.sh"); + + var graph = new EntryTraceGraph( + EntryTraceOutcome.Resolved, + ImmutableArray.Create(node), + ImmutableArray.Create(edge), + ImmutableArray.Create(diagnostic), + ImmutableArray.Create(plan), + ImmutableArray.Create(terminal)); + + var metadata = new EntryTraceNdjsonMetadata( + "scan-entrytrace-1", + "sha256:image", + DateTimeOffset.Parse("2025-01-01T00:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal | DateTimeStyles.AssumeUniversal), + Source: "worker"); + + return (graph, metadata); + } + + private static JsonElement Parse(string ndjsonLine) + { + 
Assert.EndsWith("\n", ndjsonLine, StringComparison.Ordinal); + var json = ndjsonLine.TrimEnd('\n'); + using var document = JsonDocument.Parse(json); + return document.RootElement.Clone(); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/LayeredRootFileSystemTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/LayeredRootFileSystemTests.cs index 257712b6..f512ff62 100644 --- a/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/LayeredRootFileSystemTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/LayeredRootFileSystemTests.cs @@ -1,8 +1,9 @@ -using System; -using System.Formats.Tar; -using System.IO; -using System.Text; -using Xunit; +using System; +using System.Formats.Tar; +using System.IO; +using System.Text; +using StellaOps.Scanner.EntryTrace.FileSystem; +using Xunit; namespace StellaOps.Scanner.EntryTrace.Tests; @@ -17,15 +18,21 @@ public sealed class LayeredRootFileSystemTests : IDisposable } [Fact] - public void FromDirectories_HandlesWhiteoutsAndResolution() - { - var layer1 = CreateLayerDirectory("layer1"); - var layer2 = CreateLayerDirectory("layer2"); - - var usrBin1 = Path.Combine(layer1, "usr", "bin"); - Directory.CreateDirectory(usrBin1); - var entrypointPath = Path.Combine(usrBin1, "entrypoint.sh"); - File.WriteAllText(entrypointPath, "#!/bin/sh\necho layer1\n"); + public void FromDirectories_HandlesWhiteoutsAndResolution() + { + var layer1 = CreateLayerDirectory("layer1"); + var layer2 = CreateLayerDirectory("layer2"); + + var usrBin1 = Path.Combine(layer1, "usr", "bin"); + Directory.CreateDirectory(usrBin1); + var entrypointPath = Path.Combine(usrBin1, "entrypoint.sh"); + File.WriteAllText(entrypointPath, "#!/bin/sh\necho layer1\n"); +#if NET8_0_OR_GREATER + File.SetUnixFileMode(entrypointPath, + UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute | + UnixFileMode.GroupRead | UnixFileMode.GroupExecute | + UnixFileMode.OtherRead | UnixFileMode.OtherExecute); +#endif 
var optDirectory1 = Path.Combine(layer1, "opt"); Directory.CreateDirectory(optDirectory1); @@ -52,9 +59,28 @@ public sealed class LayeredRootFileSystemTests : IDisposable Assert.False(fs.TryReadAllText("/opt/setup.sh", out _, out _)); var optEntries = fs.EnumerateDirectory("/opt"); - Assert.DoesNotContain(optEntries, entry => entry.Path.EndsWith("setup.sh", StringComparison.Ordinal)); - } - + Assert.DoesNotContain(optEntries, entry => entry.Path.EndsWith("setup.sh", StringComparison.Ordinal)); + } + + [Fact] + public void TryReadBytes_ReturnsLimitedPreview() + { + var layer = CreateLayerDirectory("layer-bytes"); + var usrBin = Path.Combine(layer, "usr", "bin"); + Directory.CreateDirectory(usrBin); + File.WriteAllText(Path.Combine(usrBin, "tool"), "abcdefg"); + + var fs = LayeredRootFileSystem.FromDirectories(new[] + { + new LayeredRootFileSystem.LayerDirectory("sha256:bytes", layer) + }); + + Assert.True(fs.TryReadBytes("/usr/bin/tool", 4, out var descriptor, out var preview)); + Assert.Equal("/usr/bin/tool", descriptor.Path); + Assert.Equal(4, preview.Length); + Assert.Equal("abcd", Encoding.UTF8.GetString(preview.Span)); + } + [Fact] public void FromArchives_ResolvesSymlinkAndWhiteout() { diff --git a/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/Runtime/EntryTraceRuntimeReconcilerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/Runtime/EntryTraceRuntimeReconcilerTests.cs new file mode 100644 index 00000000..2067f680 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/Runtime/EntryTraceRuntimeReconcilerTests.cs @@ -0,0 +1,121 @@ +using System.Collections.Generic; +using System.Collections.Immutable; +using StellaOps.Scanner.EntryTrace; +using StellaOps.Scanner.EntryTrace.Runtime; +using Xunit; + +namespace StellaOps.Scanner.EntryTrace.Tests.Runtime; + +public sealed class EntryTraceRuntimeReconcilerTests +{ + private static readonly ImmutableDictionary EmptyDictionary = ImmutableDictionary.Empty; + private static 
readonly ImmutableArray EmptyArray = ImmutableArray.Empty; + + [Fact] + public void Reconcile_MatchesRuntimeTerminal() + { + var reconciler = new EntryTraceRuntimeReconciler(); + var graph = CreateGraph("/usr/local/bin/app"); + + var procGraph = ProcGraphBuilder.Build(new FakeProvider(new[] + { + CreateProcess(1, 0, "/sbin/tini", "tini", 100), + CreateProcess(5, 1, "/usr/local/bin/app", "app", 200), + })); + + var reconciled = reconciler.Reconcile(graph, procGraph); + + Assert.Equal(95d, reconciled.Plans[0].Confidence); + Assert.Contains(reconciled.Diagnostics, d => d.Reason == EntryTraceUnknownReason.RuntimeMatch); + } + + [Fact] + public void Reconcile_FlagsMismatch_WhenDifferentExecutable() + { + var reconciler = new EntryTraceRuntimeReconciler(); + var graph = CreateGraph("/usr/local/bin/app"); + + var procGraph = ProcGraphBuilder.Build(new FakeProvider(new[] + { + CreateProcess(1, 0, "/sbin/init", "init", 100), + CreateProcess(2, 1, "/usr/bin/other", "other", 200), + })); + + var reconciled = reconciler.Reconcile(graph, procGraph); + + Assert.Equal(60d, reconciled.Plans[0].Confidence); + Assert.Contains(reconciled.Diagnostics, d => d.Reason == EntryTraceUnknownReason.RuntimeMismatch); + } + + [Fact] + public void Reconcile_AddsDiagnostic_WhenSnapshotAbsent() + { + var reconciler = new EntryTraceRuntimeReconciler(); + var graph = CreateGraph("/usr/local/bin/app"); + + var reconciled = reconciler.Reconcile(graph, procGraph: null); + + Assert.Contains(reconciled.Diagnostics, d => d.Reason == EntryTraceUnknownReason.RuntimeSnapshotUnavailable); + } + + private static EntryTraceGraph CreateGraph(string terminalPath) + { + var plan = new EntryTracePlan( + ImmutableArray.Create(terminalPath), + EmptyDictionary, + "/", + "root", + terminalPath, + EntryTraceTerminalType.Native, + Runtime: null, + Confidence: 50d, + EmptyDictionary); + + var terminal = new EntryTraceTerminal( + terminalPath, + EntryTraceTerminalType.Native, + Runtime: null, + Confidence: 50d, + 
EmptyDictionary, + "root", + "/", + ImmutableArray.Empty); + + return new EntryTraceGraph( + EntryTraceOutcome.Resolved, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Create(plan), + ImmutableArray.Create(terminal)); + } + + private static ProcProcess CreateProcess(int pid, int parentPid, string executable, string commandName, ulong startTime) + { + return new ProcProcess( + pid, + parentPid, + executable, + ImmutableArray.Create(executable), + commandName, + startTime); + } + + private sealed class FakeProvider : IProcSnapshotProvider + { + private readonly Dictionary _processes; + + public FakeProvider(IEnumerable processes) + { + _processes = new Dictionary(); + foreach (var process in processes) + { + _processes[process.Pid] = process; + } + } + + public IEnumerable EnumerateProcessIds() => _processes.Keys; + + public bool TryReadProcess(int pid, out ProcProcess process) => _processes.TryGetValue(pid, out process); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/Runtime/ProcFileSystemSnapshotTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/Runtime/ProcFileSystemSnapshotTests.cs new file mode 100644 index 00000000..39499a5d --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/Runtime/ProcFileSystemSnapshotTests.cs @@ -0,0 +1,87 @@ +using System; +using System.IO; +using System.Text; +using StellaOps.Scanner.EntryTrace.Runtime; +using Xunit; + +namespace StellaOps.Scanner.EntryTrace.Tests.Runtime; + +public sealed class ProcFileSystemSnapshotTests : IDisposable +{ + private readonly TempDirectory _tempDirectory = new(); + + [Fact] + public void EnumerateProcessIds_ReturnsNumericDirectories() + { + CreateProcessDirectory(101); + CreateProcessDirectory(5); + Directory.CreateDirectory(Path.Combine(_tempDirectory.Path, "not-a-pid")); + + var snapshot = new ProcFileSystemSnapshot(_tempDirectory.Path); + var pids = snapshot.EnumerateProcessIds(); + + 
Assert.Contains(5, pids); + Assert.Contains(101, pids); + Assert.DoesNotContain(-1, pids); + } + + [Fact] + public void TryReadProcess_ParsesStatAndCmdline() + { + var pid = 321; + var procPath = CreateProcessDirectory(pid); + File.WriteAllText( + Path.Combine(procPath, "stat"), + $"{pid} (bash) S 1 {pid} {pid} 0 -1 4194560 0 0 0 0 0 0 0 20 0 1 0 600 0 0 0 0 0 0 0 0 0 0 0 0"); + File.WriteAllBytes( + Path.Combine(procPath, "cmdline"), + Encoding.UTF8.GetBytes("/bin/bash\0-c\0run.sh\0")); + + var snapshot = new ProcFileSystemSnapshot(_tempDirectory.Path); + Assert.True(snapshot.TryReadProcess(pid, out var process)); + Assert.Equal(1, process.ParentPid); + Assert.Equal("/bin/bash", process.CommandLine[0]); + Assert.Equal("bash", process.CommandName); + Assert.Equal((ulong)600, process.StartTimeTicks); + } + + private string CreateProcessDirectory(int pid) + { + var procPath = Path.Combine(_tempDirectory.Path, pid.ToString()); + Directory.CreateDirectory(procPath); + File.WriteAllText(Path.Combine(procPath, "stat"), $"{pid} (init) S 0 0 0 0 -1 4194560 0 0 0 0 0 0 0 20 0 1 0 100 0 0 0 0 0 0 0 0 0 0 0 0"); + File.WriteAllBytes(Path.Combine(procPath, "cmdline"), Encoding.UTF8.GetBytes($"/proc/{pid}/exe\0")); + return procPath; + } + + public void Dispose() + { + _tempDirectory.Dispose(); + } + + private sealed class TempDirectory : IDisposable + { + public TempDirectory() + { + Path = System.IO.Path.Combine(System.IO.Path.GetTempPath(), $"entrytrace-proc-{Guid.NewGuid():n}"); + Directory.CreateDirectory(Path); + } + + public string Path { get; } + + public void Dispose() + { + try + { + if (Directory.Exists(Path)) + { + Directory.Delete(Path, recursive: true); + } + } + catch + { + // ignore cleanup errors for temp directory + } + } + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/Runtime/ProcGraphBuilderTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/Runtime/ProcGraphBuilderTests.cs new file mode 100644 index 
00000000..04ccfcb1 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/Runtime/ProcGraphBuilderTests.cs @@ -0,0 +1,57 @@ +using System.Collections.Generic; +using System.Collections.Immutable; +using StellaOps.Scanner.EntryTrace.Runtime; +using Xunit; + +namespace StellaOps.Scanner.EntryTrace.Tests.Runtime; + +public sealed class ProcGraphBuilderTests +{ + [Fact] + public void Build_ReturnsGraph_WithOrderedChildren() + { + var processes = new[] + { + CreateProcess(1, 0, "/sbin/init", "init", startTime: 100), + CreateProcess(20, 1, "/usr/bin/httpd", "httpd", startTime: 400), + CreateProcess(10, 1, "/usr/local/bin/app", "app", startTime: 300) + }; + + var graph = ProcGraphBuilder.Build(new FakeProcSnapshotProvider(processes)); + + Assert.NotNull(graph); + Assert.Equal(1, graph!.RootPid); + Assert.Equal(3, graph.Processes.Count); + Assert.True(graph.Children.TryGetValue(1, out var children)); + Assert.Equal(new[] { 10, 20 }, children); + } + + private static ProcProcess CreateProcess(int pid, int parentPid, string executable, string commandName, ulong startTime) + { + return new ProcProcess( + pid, + parentPid, + executable, + ImmutableArray.Create(executable), + commandName, + startTime); + } + + private sealed class FakeProcSnapshotProvider : IProcSnapshotProvider + { + private readonly Dictionary _processes; + + public FakeProcSnapshotProvider(IEnumerable processes) + { + _processes = new Dictionary(); + foreach (var process in processes) + { + _processes[process.Pid] = process; + } + } + + public IEnumerable EnumerateProcessIds() => _processes.Keys; + + public bool TryReadProcess(int pid, out ProcProcess process) => _processes.TryGetValue(pid, out process); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/TestRootFileSystem.cs b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/TestRootFileSystem.cs index 93a29e8d..9110f91c 100644 --- 
a/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/TestRootFileSystem.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/TestRootFileSystem.cs @@ -1,7 +1,8 @@ using System.Collections.Generic; using System.Collections.Immutable; -using System.IO; -using StellaOps.Scanner.EntryTrace; +using System.IO; +using System.Text; +using StellaOps.Scanner.EntryTrace.FileSystem; namespace StellaOps.Scanner.EntryTrace.Tests; @@ -15,22 +16,35 @@ internal sealed class TestRootFileSystem : IRootFileSystem _directories.Add("/"); } - public void AddFile(string path, string content, bool executable = true, string? layer = "sha256:layer-a") - { - var normalized = Normalize(path); - var directory = Path.GetDirectoryName(normalized); - if (!string.IsNullOrEmpty(directory)) - { - _directories.Add(directory!); - } - - _entries[normalized] = new FileEntry(normalized, content, executable, layer, IsDirectory: false); - } + public void AddFile(string path, string content, bool executable = true, string? layer = "sha256:layer-a") + { + var normalized = Normalize(path); + var directory = Path.GetDirectoryName(normalized); + if (!string.IsNullOrEmpty(directory)) + { + EnsureDirectoryChain(directory!); + } + + var bytes = Encoding.UTF8.GetBytes(content); + _entries[normalized] = FileEntry.Create(normalized, bytes, content, executable, layer, isDirectory: false); + } + + public void AddBinaryFile(string path, byte[] content, bool executable = true, string? 
layer = "sha256:layer-a") + { + var normalized = Normalize(path); + var directory = Path.GetDirectoryName(normalized); + if (!string.IsNullOrEmpty(directory)) + { + EnsureDirectoryChain(directory!); + } + + _entries[normalized] = FileEntry.Create(normalized, content, text: null, executable, layer, isDirectory: false); + } public void AddDirectory(string path) { - var normalized = Normalize(path); - _directories.Add(normalized); + var normalized = Normalize(path); + EnsureDirectoryChain(normalized); } public bool TryResolveExecutable(string name, IReadOnlyList searchPaths, out RootFileDescriptor descriptor) @@ -62,37 +76,66 @@ internal sealed class TestRootFileSystem : IRootFileSystem return false; } - public bool TryReadAllText(string path, out RootFileDescriptor descriptor, out string content) - { - var normalized = Normalize(path); - if (_entries.TryGetValue(normalized, out var file)) - { - descriptor = file.ToDescriptor(); - content = file.Content; - return true; - } + public bool TryReadAllText(string path, out RootFileDescriptor descriptor, out string content) + { + var normalized = Normalize(path); + if (_entries.TryGetValue(normalized, out var file) && file.TryReadText(out content)) + { + descriptor = file.ToDescriptor(); + return true; + } + + descriptor = null!; + content = string.Empty; + return false; + } + + public bool TryReadBytes(string path, int maxBytes, out RootFileDescriptor descriptor, out ReadOnlyMemory content) + { + var normalized = Normalize(path); + if (_entries.TryGetValue(normalized, out var file) && file.TryReadBytes(maxBytes, out content)) + { + descriptor = file.ToDescriptor(); + return true; + } + + descriptor = null!; + content = default; + return false; + } - descriptor = null!; - content = string.Empty; - return false; - } - - public ImmutableArray EnumerateDirectory(string path) - { - var normalized = Normalize(path); - var builder = ImmutableArray.CreateBuilder(); - - foreach (var file in _entries.Values) - { - var directory = 
Normalize(Path.GetDirectoryName(file.Path) ?? "/"); - if (string.Equals(directory, normalized, StringComparison.Ordinal)) - { - builder.Add(file.ToDescriptor()); - } - } - - return builder.ToImmutable(); - } + public ImmutableArray EnumerateDirectory(string path) + { + var normalized = Normalize(path); + var entries = new List(); + + foreach (var directory in _directories) + { + if (string.Equals(directory, normalized, StringComparison.Ordinal) || + string.Equals(directory, "/", StringComparison.Ordinal)) + { + continue; + } + + var parent = Normalize(Path.GetDirectoryName(directory) ?? "/"); + if (string.Equals(parent, normalized, StringComparison.Ordinal)) + { + entries.Add(new RootFileDescriptor(directory, null, false, true, null)); + } + } + + foreach (var file in _entries.Values) + { + var directory = Normalize(Path.GetDirectoryName(file.Path) ?? "/"); + if (string.Equals(directory, normalized, StringComparison.Ordinal)) + { + entries.Add(file.ToDescriptor()); + } + } + + entries.Sort(static (left, right) => string.CompareOrdinal(left.Path, right.Path)); + return entries.ToImmutableArray(); + } public bool DirectoryExists(string path) { @@ -100,7 +143,7 @@ internal sealed class TestRootFileSystem : IRootFileSystem return _directories.Contains(normalized); } - private static string Combine(string prefix, string name) + private static string Combine(string prefix, string name) { var normalizedPrefix = Normalize(prefix); if (normalizedPrefix == "/") @@ -111,7 +154,7 @@ internal sealed class TestRootFileSystem : IRootFileSystem return Normalize($"{normalizedPrefix}/{name}"); } - private static string Normalize(string path) + private static string Normalize(string path) { if (string.IsNullOrWhiteSpace(path)) { @@ -144,37 +187,117 @@ internal sealed class TestRootFileSystem : IRootFileSystem parts.Add(part); } - return "/" + string.Join('/', parts); - } + return "/" + string.Join('/', parts); + } + + private void EnsureDirectoryChain(string path) + { + var 
normalized = Normalize(path); + if (string.Equals(normalized, "/", StringComparison.Ordinal)) + { + _directories.Add(normalized); + return; + } + + var segments = normalized.Split('/', StringSplitOptions.RemoveEmptyEntries); + var current = "/"; + foreach (var segment in segments) + { + current = current == "/" ? $"/{segment}" : $"{current}/{segment}"; + _directories.Add(current); + } + } - private sealed record FileEntry(string Path, string Content, bool IsExecutable, string? Layer, bool IsDirectory) - { - public RootFileDescriptor ToDescriptor() - { - var shebang = ExtractShebang(Content); - return new RootFileDescriptor(Path, Layer, IsExecutable, IsDirectory, shebang); - } - } - - private static string? ExtractShebang(string content) - { - if (string.IsNullOrEmpty(content)) - { - return null; - } - - using var reader = new StringReader(content); - var firstLine = reader.ReadLine(); - if (firstLine is null) - { - return null; - } - - if (!firstLine.StartsWith("#!", StringComparison.Ordinal)) - { - return null; - } - - return firstLine[2..].Trim(); - } + private sealed class FileEntry + { + private readonly byte[] _content; + private readonly string? _text; + + private FileEntry(string path, byte[] content, string? text, bool isExecutable, string? layer, bool isDirectory) + { + Path = path; + _content = content; + _text = text; + IsExecutable = isExecutable; + Layer = layer; + IsDirectory = isDirectory; + } + + public string Path { get; } + + public bool IsExecutable { get; } + + public string? Layer { get; } + + public bool IsDirectory { get; } + + public static FileEntry Create(string path, byte[] content, string? text, bool isExecutable, string? 
layer, bool isDirectory) + => new(path, content, text, isExecutable, layer, isDirectory); + + public RootFileDescriptor ToDescriptor() + { + var shebang = ExtractShebang(_text, _content); + return new RootFileDescriptor(Path, Layer, IsExecutable, IsDirectory, shebang); + } + + public bool TryReadText(out string content) + { + if (IsDirectory || _text is null) + { + content = string.Empty; + return false; + } + + content = _text; + return true; + } + + public bool TryReadBytes(int maxBytes, out ReadOnlyMemory content) + { + if (IsDirectory) + { + content = default; + return false; + } + + var length = Math.Min(maxBytes, _content.Length); + content = new ReadOnlyMemory(_content, 0, length); + return true; + } + } + + private static string? ExtractShebang(string? textContent, byte[] binaryContent) + { + if (!string.IsNullOrEmpty(textContent)) + { + using var reader = new StringReader(textContent); + var firstLine = reader.ReadLine(); + if (firstLine is not null && firstLine.StartsWith("#!", StringComparison.Ordinal)) + { + return firstLine[2..].Trim(); + } + } + + if (binaryContent.Length >= 2 && binaryContent[0] == '#' && binaryContent[1] == '!') + { + var end = Array.IndexOf(binaryContent, (byte)'\n'); + if (end < 0) + { + end = binaryContent.Length; + } + + var shebangLength = Math.Max(0, end - 2); + while (shebangLength > 0 && (binaryContent[2 + shebangLength - 1] == '\r' || binaryContent[2 + shebangLength - 1] == 0)) + { + shebangLength--; + } + + if (shebangLength > 0) + { + return Encoding.UTF8.GetString(binaryContent, 2, shebangLength).Trim(); + } + } + + return null; + } } diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/EntryTraceResultStoreTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/EntryTraceResultStoreTests.cs new file mode 100644 index 00000000..ecb5d56e --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/EntryTraceResultStoreTests.cs @@ -0,0 +1,139 @@ +using System.Collections.Immutable; +using 
Microsoft.Extensions.Options; +using StellaOps.Scanner.EntryTrace; +using StellaOps.Scanner.EntryTrace.Serialization; +using StellaOps.Scanner.Storage; +using StellaOps.Scanner.Storage.Mongo; +using StellaOps.Scanner.Storage.Repositories; +using StellaOps.Scanner.Storage.Services; +using Xunit; +using MongoDB.Driver; + +namespace StellaOps.Scanner.Storage.Tests; + +public sealed class EntryTraceResultStoreTests : IClassFixture +{ + private readonly ScannerMongoFixture _fixture; + + public EntryTraceResultStoreTests(ScannerMongoFixture fixture) + { + _fixture = fixture; + } + + [Fact] + public async Task StoreAsync_ThrowsWhenResultNull() + { + var store = CreateStore(); + await Assert.ThrowsAsync(async () => + { + EntryTraceResult? result = null; + await store.StoreAsync(result!, CancellationToken.None); + }); + } + + [Fact] + public async Task GetAsync_ReturnsNullWhenMissing() + { + await ClearCollectionAsync(); + var store = CreateStore(); + + var result = await store.GetAsync("scan-missing", CancellationToken.None); + + Assert.Null(result); + } + + [Fact] + public async Task StoreAsync_RoundTripsResult() + { + await ClearCollectionAsync(); + var store = CreateStore(); + + var scanId = $"scan-{Guid.NewGuid():n}"; + var generatedAt = new DateTimeOffset(2025, 11, 2, 10, 30, 0, TimeSpan.Zero); + + var node = new EntryTraceNode( + 1, + EntryTraceNodeKind.Command, + "python", + ImmutableArray.Create("python", "/app/main.py"), + EntryTraceInterpreterKind.None, + new EntryTraceEvidence("/usr/bin/python", "sha256:layer-a", "path", ImmutableDictionary.Empty), + new EntryTraceSpan("/app/start.sh", 1, 0, 1, 14), + ImmutableDictionary.Empty); + + var plan = new EntryTracePlan( + ImmutableArray.Create("/app/main.py"), + ImmutableDictionary.Empty, + "/workspace", + "scanner", + "/app/main.py", + EntryTraceTerminalType.Native, + "python", + 0.95, + ImmutableDictionary.Empty); + + var terminal = new EntryTraceTerminal( + "/app/main.py", + EntryTraceTerminalType.Native, + 
"python", + 0.95, + ImmutableDictionary.Empty, + "scanner", + "/workspace", + ImmutableArray.Create("/app/main.py")); + + var graph = new EntryTraceGraph( + EntryTraceOutcome.Resolved, + ImmutableArray.Create(node), + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Create(plan), + ImmutableArray.Create(terminal)); + + var ndjson = EntryTraceNdjsonWriter.Serialize( + graph, + new EntryTraceNdjsonMetadata(scanId, "sha256:image", generatedAt, Source: "storage.tests")); + + var result = new EntryTraceResult(scanId, "sha256:image", generatedAt, graph, ndjson); + + await store.StoreAsync(result, CancellationToken.None); + + var stored = await store.GetAsync(scanId, CancellationToken.None); + + Assert.NotNull(stored); + Assert.Equal(result.ScanId, stored!.ScanId); + Assert.Equal(result.ImageDigest, stored.ImageDigest); + Assert.Equal(result.GeneratedAtUtc, stored.GeneratedAtUtc); + Assert.Equal(result.Graph, stored.Graph); + Assert.Equal(result.Ndjson, stored.Ndjson); + } + + private async Task ClearCollectionAsync() + { + var provider = CreateProvider(); + await provider.EntryTrace.DeleteManyAsync(_ => true); + } + + private EntryTraceResultStore CreateStore() + { + var provider = CreateProvider(); + var repository = new EntryTraceRepository(provider); + return new EntryTraceResultStore(repository); + } + + private MongoCollectionProvider CreateProvider() + { + var options = Options.Create(new ScannerStorageOptions + { + Mongo = new MongoOptions + { + ConnectionString = _fixture.Runner.ConnectionString, + DatabaseName = _fixture.Database.DatabaseNamespace.DatabaseName, + UseMajorityReadConcern = false, + UseMajorityWriteConcern = false + } + }); + + return new MongoCollectionProvider(_fixture.Database, options); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/RustFsArtifactObjectStoreTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/RustFsArtifactObjectStoreTests.cs index 6b583b10..29677dd8 100644 --- 
a/src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/RustFsArtifactObjectStoreTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/RustFsArtifactObjectStoreTests.cs @@ -146,7 +146,7 @@ public sealed class RustFsArtifactObjectStoreTests } // Materialize content to ensure downstream callers can inspect it. - _ = await request.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); + _ = await request.Content.ReadAsByteArrayAsync(cancellationToken); } CapturedRequests.Add(new CapturedRequest(request.Method, request.RequestUri!, headerSnapshot)); diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Surface.Env.Tests/StellaOps.Scanner.Surface.Env.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Env.Tests/StellaOps.Scanner.Surface.Env.Tests.csproj new file mode 100644 index 00000000..f89a78c5 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Env.Tests/StellaOps.Scanner.Surface.Env.Tests.csproj @@ -0,0 +1,17 @@ + + + net10.0 + preview + enable + enable + true + false + Exe + + + + + + + + diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Surface.Env.Tests/SurfaceEnvironmentBuilderTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Env.Tests/SurfaceEnvironmentBuilderTests.cs new file mode 100644 index 00000000..c8f95a8f --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Env.Tests/SurfaceEnvironmentBuilderTests.cs @@ -0,0 +1,80 @@ +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Surface.Env; + +namespace StellaOps.Scanner.Surface.Env.Tests; + +public sealed class SurfaceEnvironmentBuilderTests +{ + [Fact] + public void Build_UsesDefaults_WhenVariablesMissing() + { + var services = CreateServices(); + var environment = SurfaceEnvironmentFactory.Create(services, options => + { + options.RequireSurfaceEndpoint = false; + }); + + Assert.Equal("surface-cache", environment.Settings.SurfaceFsBucket); + 
Assert.Equal(4096, environment.Settings.CacheQuotaMegabytes); + Assert.False(environment.Settings.PrefetchEnabled); + Assert.NotNull(environment.Settings.CacheRoot); + Assert.True(environment.Settings.CacheRoot.Exists); + } + + [Fact] + public void Build_ReadsEnvironmentVariables_WithPrefixes() + { + Environment.SetEnvironmentVariable("SCANNER_SURFACE_FS_BUCKET", "custom-bucket"); + Environment.SetEnvironmentVariable("SCANNER_SURFACE_CACHE_QUOTA_MB", "512"); + Environment.SetEnvironmentVariable("SCANNER_SURFACE_FS_ENDPOINT", "https://surface.example.test"); + + try + { + var services = CreateServices(); + var environment = SurfaceEnvironmentFactory.Create(services); + + Assert.Equal("custom-bucket", environment.Settings.SurfaceFsBucket); + Assert.Equal(512, environment.Settings.CacheQuotaMegabytes); + } + finally + { + Environment.SetEnvironmentVariable("SCANNER_SURFACE_FS_BUCKET", null); + Environment.SetEnvironmentVariable("SCANNER_SURFACE_CACHE_QUOTA_MB", null); + Environment.SetEnvironmentVariable("SCANNER_SURFACE_FS_ENDPOINT", null); + } + } + + [Fact] + public void Build_Throws_WhenIntegerOutOfRange() + { + Environment.SetEnvironmentVariable("SCANNER_SURFACE_CACHE_QUOTA_MB", "1"); + Environment.SetEnvironmentVariable("SCANNER_SURFACE_FS_ENDPOINT", "https://surface.example.test"); + + try + { + var services = CreateServices(); + var exception = Assert.Throws(() => SurfaceEnvironmentFactory.Create(services)); + Assert.Equal("SURFACE_CACHE_QUOTA_MB", exception.Variable); + } + finally + { + Environment.SetEnvironmentVariable("SCANNER_SURFACE_CACHE_QUOTA_MB", null); + Environment.SetEnvironmentVariable("SCANNER_SURFACE_FS_ENDPOINT", null); + } + } + + private static IServiceProvider CreateServices(Action? 
configure = null) + { + var services = new ServiceCollection(); + var configuration = new ConfigurationBuilder().Build(); + + services.AddSingleton(configuration); + services.AddLogging(builder => builder.ClearProviders()); + + configure?.Invoke(services); + + return services.BuildServiceProvider(); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Surface.Env.Tests/SurfaceEnvironmentFeatureFlagTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Env.Tests/SurfaceEnvironmentFeatureFlagTests.cs new file mode 100644 index 00000000..59cac03e --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Env.Tests/SurfaceEnvironmentFeatureFlagTests.cs @@ -0,0 +1,43 @@ +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Surface.Env; + +namespace StellaOps.Scanner.Surface.Env.Tests; + +public sealed class SurfaceEnvironmentFeatureFlagTests +{ + [Fact] + public void Build_ReturnsFlags_LowerCased() + { + Environment.SetEnvironmentVariable("SCANNER_SURFACE_FEATURES", "Validation,PreWarm , unknown"); + Environment.SetEnvironmentVariable("SCANNER_SURFACE_FS_ENDPOINT", "https://surface.example.test"); + try + { + var services = CreateServices(); + var environment = SurfaceEnvironmentFactory.Create(services, options => + { + options.KnownFeatureFlags.Add("validation"); + options.KnownFeatureFlags.Add("prewarm"); + options.RequireSurfaceEndpoint = true; + }); + + Assert.Contains("validation", environment.Settings.FeatureFlags); + Assert.Contains("prewarm", environment.Settings.FeatureFlags); + Assert.Contains("unknown", environment.Settings.FeatureFlags); + } + finally + { + Environment.SetEnvironmentVariable("SCANNER_SURFACE_FEATURES", null); + Environment.SetEnvironmentVariable("SCANNER_SURFACE_FS_ENDPOINT", null); + } + } + + private static IServiceProvider CreateServices() + { + var services = new ServiceCollection(); + services.AddSingleton(new 
ConfigurationBuilder().Build()); + services.AddLogging(builder => builder.ClearProviders()); + return services.BuildServiceProvider(); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Surface.FS.Tests/FileSurfaceCacheTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Surface.FS.Tests/FileSurfaceCacheTests.cs new file mode 100644 index 00000000..75d46dba --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Surface.FS.Tests/FileSurfaceCacheTests.cs @@ -0,0 +1,50 @@ +using System; +using System.IO; +using System.Threading.Tasks; +using System.Text.Json; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Scanner.Surface.FS; + + +namespace StellaOps.Scanner.Surface.FS.Tests; + +public sealed class FileSurfaceCacheTests +{ + [Fact] + public async Task GetOrCreateAsync_PersistsValue() + { + var root = Directory.CreateTempSubdirectory(); + try + { + var options = Microsoft.Extensions.Options.Options.Create(new SurfaceCacheOptions { RootDirectory = root.FullName }); + var cache = new FileSurfaceCache(options, NullLogger.Instance); + var key = new SurfaceCacheKey("entrytrace", "tenant", "digest"); + + var result = await cache.GetOrCreateAsync( + key, + _ => Task.FromResult(42), + Serialize, + Deserialize); + + Assert.Equal(42, result); + + var cached = await cache.GetOrCreateAsync( + key, + _ => Task.FromResult(99), + Serialize, + Deserialize); + + Assert.Equal(42, cached); + } + finally + { + root.Delete(true); + } + + static ReadOnlyMemory Serialize(int value) + => JsonSerializer.SerializeToUtf8Bytes(value); + + static int Deserialize(ReadOnlyMemory payload) + => JsonSerializer.Deserialize(payload.Span)!; + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Surface.FS.Tests/StellaOps.Scanner.Surface.FS.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Surface.FS.Tests/StellaOps.Scanner.Surface.FS.Tests.csproj new file mode 100644 index 00000000..4d622f49 --- /dev/null +++ 
b/src/Scanner/__Tests/StellaOps.Scanner.Surface.FS.Tests/StellaOps.Scanner.Surface.FS.Tests.csproj @@ -0,0 +1,17 @@ + + + net10.0 + preview + enable + enable + true + false + Exe + + + + + + + + diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/FileSurfaceSecretProviderTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/FileSurfaceSecretProviderTests.cs new file mode 100644 index 00000000..94b31db3 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/FileSurfaceSecretProviderTests.cs @@ -0,0 +1,47 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using System.IO; +using System.Text.Json; +using StellaOps.Scanner.Surface.Secrets; +using StellaOps.Scanner.Surface.Secrets.Providers; + +namespace StellaOps.Scanner.Surface.Secrets.Tests; + +public sealed class FileSurfaceSecretProviderTests +{ + [Fact] + public async Task GetAsync_ReturnsSecret_FromJson() + { + var rootDirectory = Directory.CreateTempSubdirectory(); + var root = rootDirectory.FullName; + var request = new SurfaceSecretRequest("tenant", "component", "registry"); + var path = Path.Combine(root, request.Tenant, request.Component, request.SecretType); + Directory.CreateDirectory(path); + var payloadPath = Path.Combine(path, "default.json"); + await File.WriteAllTextAsync(payloadPath, JsonSerializer.Serialize(new + { + Payload = Convert.ToBase64String(new byte[] { 10, 20, 30 }), + Metadata = new Dictionary { ["username"] = "demo" } + })); + + try + { + var provider = new FileSurfaceSecretProvider(root); + var handle = await provider.GetAsync(request); + try + { + Assert.Equal(new byte[] { 10, 20, 30 }, handle.AsBytes().ToArray()); + Assert.Equal("demo", handle.Metadata["username"]); + } + finally + { + handle.Dispose(); + } + } + finally + { + rootDirectory.Delete(true); + } + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/InlineSurfaceSecretProviderTests.cs 
b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/InlineSurfaceSecretProviderTests.cs new file mode 100644 index 00000000..d56611b5 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/InlineSurfaceSecretProviderTests.cs @@ -0,0 +1,38 @@ +using System; +using System.Threading.Tasks; +using StellaOps.Scanner.Surface.Env; +using StellaOps.Scanner.Surface.Secrets; +using StellaOps.Scanner.Surface.Secrets.Providers; + +namespace StellaOps.Scanner.Surface.Secrets.Tests; + +public sealed class InlineSurfaceSecretProviderTests +{ + [Fact] + public async Task GetAsync_ReturnsSecret_WhenInlineAllowed() + { + var configuration = new SurfaceSecretsConfiguration("inline", "tenant", null, null, null, AllowInline: true); + var provider = new InlineSurfaceSecretProvider(configuration); + var request = new SurfaceSecretRequest("tenant", "component", "registry"); + var key = "SURFACE_SECRET_TENANT_COMPONENT_REGISTRY_DEFAULT"; + try + { + Environment.SetEnvironmentVariable(key, Convert.ToBase64String(new byte[] { 1, 2, 3 })); + var handle = await provider.GetAsync(request); + Assert.Equal(new byte[] { 1, 2, 3 }, handle.AsBytes().ToArray()); + } + finally + { + Environment.SetEnvironmentVariable(key, null); + } + } + + [Fact] + public async Task GetAsync_Throws_WhenInlineDisallowed() + { + var configuration = new SurfaceSecretsConfiguration("inline", "tenant", null, null, null, AllowInline: false); + var provider = new InlineSurfaceSecretProvider(configuration); + var request = new SurfaceSecretRequest("tenant", "component", "registry"); + await Assert.ThrowsAsync(async () => await provider.GetAsync(request)); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/StellaOps.Scanner.Surface.Secrets.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/StellaOps.Scanner.Surface.Secrets.Tests.csproj new file mode 100644 index 00000000..dccf42de --- /dev/null +++ 
b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/StellaOps.Scanner.Surface.Secrets.Tests.csproj @@ -0,0 +1,18 @@ + + + net10.0 + preview + enable + enable + true + false + Exe + + + + + + + + + diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/SurfaceSecretsServiceCollectionExtensionsTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/SurfaceSecretsServiceCollectionExtensionsTests.cs new file mode 100644 index 00000000..149f56a8 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Secrets.Tests/SurfaceSecretsServiceCollectionExtensionsTests.cs @@ -0,0 +1,52 @@ +using System; +using System.Collections.Generic; +using System.IO; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Surface.Env; +using StellaOps.Scanner.Surface.Secrets; + +namespace StellaOps.Scanner.Surface.Secrets.Tests +{ + public sealed class SurfaceSecretsServiceCollectionExtensionsTests + { + [Fact] + public void AddSurfaceSecrets_RegistersProvider() + { + var services = new ServiceCollection(); + services.AddSingleton(_ => new TestSurfaceEnvironment()); + services.AddLogging(builder => builder.ClearProviders()); + services.AddSurfaceSecrets(); + + using var provider = services.BuildServiceProvider(); + var secretProvider = provider.GetRequiredService(); + Assert.NotNull(secretProvider); + } + + private sealed class TestSurfaceEnvironment : ISurfaceEnvironment + { + public SurfaceEnvironmentSettings Settings { get; } + public IReadOnlyDictionary RawVariables { get; } + + public TestSurfaceEnvironment() + { + Settings = new SurfaceEnvironmentSettings( + new Uri("https://surface.example"), + "surface", + null, + new DirectoryInfo(Path.GetTempPath()), + 1024, + false, + Array.Empty(), + new SurfaceSecretsConfiguration("file", "tenant", Root: Path.GetTempPath(), Namespace: null, FallbackProvider: null, AllowInline: true), + "tenant", + new SurfaceTlsConfiguration(null, null, 
null)) + { + CreatedAtUtc = DateTimeOffset.UtcNow + }; + + RawVariables = new Dictionary(); + } + } + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Surface.Validation.Tests/StellaOps.Scanner.Surface.Validation.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Validation.Tests/StellaOps.Scanner.Surface.Validation.Tests.csproj new file mode 100644 index 00000000..47ba6b00 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Validation.Tests/StellaOps.Scanner.Surface.Validation.Tests.csproj @@ -0,0 +1,18 @@ + + + net10.0 + preview + enable + enable + true + false + Exe + + + + + + + + + diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Surface.Validation.Tests/SurfaceValidatorRunnerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Validation.Tests/SurfaceValidatorRunnerTests.cs new file mode 100644 index 00000000..989b3398 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Surface.Validation.Tests/SurfaceValidatorRunnerTests.cs @@ -0,0 +1,87 @@ +using System; +using System.IO; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Surface.Env; +using StellaOps.Scanner.Surface.Validation; + +namespace StellaOps.Scanner.Surface.Validation.Tests; + +public sealed class SurfaceValidatorRunnerTests +{ + [Fact] + public async Task EnsureAsync_Throws_WhenValidationFails() + { + var services = CreateServices(services => + { + services.Configure(options => + { + options.ThrowOnFailure = true; + options.ContinueOnError = false; + }); + }); + + var runner = services.GetRequiredService(); + var environment = new SurfaceEnvironmentSettings( + new Uri("https://surface.invalid"), + string.Empty, + null, + new DirectoryInfo(Path.Combine(Path.GetTempPath(), "stellaops-tests", Guid.NewGuid().ToString())), + 0, + false, + Array.Empty(), + new 
SurfaceSecretsConfiguration("kubernetes", "", null, null, null, false), + string.Empty, + new SurfaceTlsConfiguration(null, null, null)); + + var context = SurfaceValidationContext.Create(services, "TestComponent", environment); + + await Assert.ThrowsAsync(() => runner.EnsureAsync(context)); + } + + [Fact] + public async Task RunAllAsync_ReturnsSuccess_ForValidConfiguration() + { + var directory = new DirectoryInfo(Path.Combine(Path.GetTempPath(), "stellaops-tests", Guid.NewGuid().ToString())) + { + Attributes = FileAttributes.Normal + }; + + var environment = new SurfaceEnvironmentSettings( + new Uri("https://surface.example.com"), + "surface-cache", + null, + directory, + 1024, + false, + Array.Empty(), + new SurfaceSecretsConfiguration("kubernetes", "tenant-a", null, "stellaops", null, false), + "tenant-a", + new SurfaceTlsConfiguration(null, null, null)); + + var services = CreateServices(); + var runner = services.GetRequiredService(); + var context = SurfaceValidationContext.Create(services, "TestComponent", environment); + + var result = await runner.RunAllAsync(context); + + Assert.True(result.IsSuccess); + } + + private static ServiceProvider CreateServices(Action? 
configure = null) + { + var services = new ServiceCollection(); + services.AddSingleton>(_ => NullLogger.Instance); + services.AddSingleton>(_ => NullLogger.Instance); + services.AddOptions(); + services.AddSurfaceValidation(); + + configure?.Invoke(services); + + return services.BuildServiceProvider(); + } +} + diff --git a/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/ScansEndpointsTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/ScansEndpointsTests.cs index afc31e73..c5677af0 100644 --- a/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/ScansEndpointsTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/ScansEndpointsTests.cs @@ -1,18 +1,24 @@ using System; using System.Collections.Generic; +using System.Collections.Immutable; using System.IO; using System.Net; using System.Net.Http.Json; using System.Linq; using System.Text.Json; using System.Threading.Tasks; +using System.Threading; using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc.Testing; -using Microsoft.AspNetCore.TestHost; -using Microsoft.Extensions.DependencyInjection; -using StellaOps.Scanner.WebService.Contracts; -using StellaOps.Scanner.WebService.Domain; -using StellaOps.Scanner.WebService.Services; +using Microsoft.AspNetCore.Mvc.Testing; +using Microsoft.AspNetCore.TestHost; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Scanner.EntryTrace; +using StellaOps.Scanner.EntryTrace.Serialization; +using StellaOps.Scanner.Storage.Catalog; +using StellaOps.Scanner.Storage.Repositories; +using StellaOps.Scanner.WebService.Contracts; +using StellaOps.Scanner.WebService.Domain; +using StellaOps.Scanner.WebService.Services; namespace StellaOps.Scanner.WebService.Tests; @@ -126,14 +132,74 @@ public sealed class ScansEndpointsTests Assert.Equal(HttpStatusCode.Accepted, response.StatusCode); Assert.NotNull(coordinator); - Assert.True(coordinator.TokenMatched); - Assert.True(coordinator.LastToken.CanBeCanceled); - } - - private 
sealed class RecordingCoordinator : IScanCoordinator - { - private readonly IHttpContextAccessor accessor; - private readonly InMemoryScanCoordinator inner; + Assert.True(coordinator.TokenMatched); + Assert.True(coordinator.LastToken.CanBeCanceled); + } + + [Fact] + public async Task EntryTraceEndpointReturnsStoredResult() + { + using var factory = new ScannerApplicationFactory(); + var scanId = $"scan-entrytrace-{Guid.NewGuid():n}"; + var graph = new EntryTraceGraph( + EntryTraceOutcome.Resolved, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Create(new EntryTracePlan( + ImmutableArray.Create("/bin/bash", "-lc", "./start.sh"), + ImmutableDictionary.Empty, + "/workspace", + "root", + "/bin/bash", + EntryTraceTerminalType.Script, + "bash", + 0.9, + ImmutableDictionary.Empty)), + ImmutableArray.Create(new EntryTraceTerminal( + "/bin/bash", + EntryTraceTerminalType.Script, + "bash", + 0.9, + ImmutableDictionary.Empty, + "root", + "/workspace", + ImmutableArray.Empty))); + + var ndjson = new List { "{\"kind\":\"entry\"}" }; + + using (var scope = factory.Services.CreateScope()) + { + var repository = scope.ServiceProvider.GetRequiredService(); + await repository.UpsertAsync(new EntryTraceDocument + { + ScanId = scanId, + ImageDigest = "sha256:entrytrace", + GeneratedAtUtc = DateTime.UtcNow, + GraphJson = EntryTraceGraphSerializer.Serialize(graph), + Ndjson = ndjson + }, CancellationToken.None).ConfigureAwait(false); + } + + using var client = factory.CreateClient(); + var response = await client.GetAsync($"/api/v1/scans/{scanId}/entrytrace"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + var payload = await response.Content.ReadFromJsonAsync(); + Assert.NotNull(payload); + Assert.Equal(scanId, payload!.ScanId); + Assert.Equal("sha256:entrytrace", payload.ImageDigest); + Assert.Equal(graph.Outcome, payload.Graph.Outcome); + Assert.Single(payload.Graph.Plans); + Assert.Equal("/bin/bash", 
payload.Graph.Plans[0].TerminalPath); + Assert.Single(payload.Graph.Terminals); + Assert.Equal(ndjson, payload.Ndjson); + } + + private sealed class RecordingCoordinator : IScanCoordinator + { + private readonly IHttpContextAccessor accessor; + private readonly InMemoryScanCoordinator inner; public RecordingCoordinator(IHttpContextAccessor accessor, TimeProvider timeProvider, IScanProgressPublisher publisher) { @@ -358,15 +424,111 @@ public sealed class ScansEndpointsTests Assert.Equal(new[] { "alpha", "Beta", "zeta" }, names); } } + + [Fact] + public async Task GetEntryTraceReturnsStoredResult() + { + var scanId = $"scan-{Guid.NewGuid():n}"; + var generatedAt = new DateTimeOffset(2025, 11, 1, 12, 0, 0, TimeSpan.Zero); + var plan = new EntryTracePlan( + ImmutableArray.Create("/usr/local/bin/app"), + ImmutableDictionary.Empty, + "/workspace", + "appuser", + "/usr/local/bin/app", + EntryTraceTerminalType.Native, + "go", + 90d, + ImmutableDictionary.Empty); + var terminal = new EntryTraceTerminal( + "/usr/local/bin/app", + EntryTraceTerminalType.Native, + "go", + 90d, + ImmutableDictionary.Empty, + "appuser", + "/workspace", + ImmutableArray.Empty); + var graph = new EntryTraceGraph( + EntryTraceOutcome.Resolved, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Create(plan), + ImmutableArray.Create(terminal)); + var ndjson = EntryTraceNdjsonWriter.Serialize( + graph, + new EntryTraceNdjsonMetadata(scanId, "sha256:test", generatedAt)); + var storedResult = new EntryTraceResult(scanId, "sha256:test", generatedAt, graph, ndjson); + + using var factory = new ScannerApplicationFactory( + configuration: null, + services => + { + services.AddSingleton(new StubEntryTraceResultStore(storedResult)); + }); + + using var client = factory.CreateClient(); + var response = await client.GetAsync($"/api/v1/scans/{scanId}/entrytrace"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + var payload = await 
response.Content.ReadFromJsonAsync(); + Assert.NotNull(payload); + Assert.Equal(storedResult.ScanId, payload!.ScanId); + Assert.Equal(storedResult.ImageDigest, payload.ImageDigest); + Assert.Equal(storedResult.GeneratedAtUtc, payload.GeneratedAt); + Assert.Equal(storedResult.Graph.Plans.Length, payload.Graph.Plans.Length); + Assert.Equal(storedResult.Ndjson, payload.Ndjson); + } + + [Fact] + public async Task GetEntryTraceReturnsNotFoundWhenMissing() + { + using var factory = new ScannerApplicationFactory( + configuration: null, + services => + { + services.AddSingleton(new StubEntryTraceResultStore(null)); + }); + + using var client = factory.CreateClient(); + var response = await client.GetAsync("/api/v1/scans/scan-missing/entrytrace"); + Assert.Equal(HttpStatusCode.NotFound, response.StatusCode); + } - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); - - private sealed record ProgressEnvelope( - string ScanId, - int Sequence, + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); + + private sealed record ProgressEnvelope( + string ScanId, + int Sequence, string State, string? Message, DateTimeOffset Timestamp, - string CorrelationId, - Dictionary Data); -} + string CorrelationId, + Dictionary Data); + + private sealed class StubEntryTraceResultStore : IEntryTraceResultStore + { + private readonly EntryTraceResult? _result; + + public StubEntryTraceResultStore(EntryTraceResult? 
result) + { + _result = result; + } + + public Task GetAsync(string scanId, CancellationToken cancellationToken) + { + if (_result is not null && string.Equals(_result.ScanId, scanId, StringComparison.Ordinal)) + { + return Task.FromResult(_result); + } + + return Task.FromResult(null); + } + + public Task StoreAsync(EntryTraceResult result, CancellationToken cancellationToken) + { + return Task.CompletedTask; + } + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/EntryTraceExecutionServiceTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/EntryTraceExecutionServiceTests.cs index 841311a6..d8cc47e8 100644 --- a/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/EntryTraceExecutionServiceTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/EntryTraceExecutionServiceTests.cs @@ -1,179 +1,492 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.IO; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scanner.Core.Contracts; -using StellaOps.Scanner.EntryTrace; -using StellaOps.Scanner.Worker.Options; -using StellaOps.Scanner.Worker.Processing; -using Xunit; - -namespace StellaOps.Scanner.Worker.Tests; - -public sealed class EntryTraceExecutionServiceTests : IDisposable -{ - private readonly string _tempRoot; - - public EntryTraceExecutionServiceTests() - { - _tempRoot = Path.Combine(Path.GetTempPath(), $"entrytrace-service-{Guid.NewGuid():n}"); - Directory.CreateDirectory(_tempRoot); - } - - [Fact] - public async Task ExecuteAsync_Skips_When_ConfigMetadataMissing() - { - var analyzer = new CapturingEntryTraceAnalyzer(); - var service = CreateService(analyzer); - - var context = CreateContext(new Dictionary()); - - await service.ExecuteAsync(context, CancellationToken.None); - - Assert.False(analyzer.Invoked); - Assert.False(context.Analysis.TryGet(ScanAnalysisKeys.EntryTraceGraph, 
out _)); - } - - [Fact] - public async Task ExecuteAsync_BuildsContext_AndStoresGraph() - { - var configPath = Path.Combine(_tempRoot, "config.json"); - File.WriteAllText(configPath, """ - { - "config": { - "Env": ["PATH=/bin:/usr/bin"], - "Entrypoint": ["/entrypoint.sh"], - "WorkingDir": "/workspace", - "User": "scanner" - } - } - """); - - var layerDirectory = Path.Combine(_tempRoot, "layer-1"); - Directory.CreateDirectory(layerDirectory); - File.WriteAllText(Path.Combine(layerDirectory, "entrypoint.sh"), "#!/bin/sh\necho hello\n"); - - var metadata = new Dictionary - { - [ScanMetadataKeys.ImageConfigPath] = configPath, - [ScanMetadataKeys.LayerDirectories] = layerDirectory, - ["image.digest"] = "sha256:test-digest" - }; - - var analyzer = new CapturingEntryTraceAnalyzer(); - var service = CreateService(analyzer); - - var context = CreateContext(metadata); - - await service.ExecuteAsync(context, CancellationToken.None); - - Assert.True(analyzer.Invoked); - Assert.NotNull(analyzer.LastEntrypoint); - Assert.Equal("/entrypoint.sh", analyzer.LastEntrypoint!.Entrypoint[0]); - Assert.NotNull(analyzer.LastContext); - Assert.Equal("scanner", analyzer.LastContext!.User); - Assert.Equal("/workspace", analyzer.LastContext.WorkingDirectory); - Assert.Contains("/bin", analyzer.LastContext.Path); - - Assert.True(context.Analysis.TryGet(ScanAnalysisKeys.EntryTraceGraph, out EntryTraceGraph stored)); - Assert.Same(analyzer.Graph, stored); - } - - private EntryTraceExecutionService CreateService(IEntryTraceAnalyzer analyzer) - { - var workerOptions = new ScannerWorkerOptions(); - var entryTraceOptions = new EntryTraceAnalyzerOptions(); - - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Trace)); - return new EntryTraceExecutionService( - analyzer, - Options.Create(entryTraceOptions), - Options.Create(workerOptions), - loggerFactory.CreateLogger(), - loggerFactory); - } - - private static ScanJobContext CreateContext(IReadOnlyDictionary 
metadata) - { - var lease = new TestLease(metadata); - return new ScanJobContext(lease, TimeProvider.System, DateTimeOffset.UtcNow, CancellationToken.None); - } - - public void Dispose() - { - try - { - if (Directory.Exists(_tempRoot)) - { - Directory.Delete(_tempRoot, recursive: true); - } - } - catch - { - // ignore cleanup failures - } - } - - private sealed class CapturingEntryTraceAnalyzer : IEntryTraceAnalyzer - { - public bool Invoked { get; private set; } - - public EntrypointSpecification? LastEntrypoint { get; private set; } - - public EntryTraceContext? LastContext { get; private set; } - - public EntryTraceGraph Graph { get; } = new( - EntryTraceOutcome.Resolved, - ImmutableArray.Empty, - ImmutableArray.Empty, - ImmutableArray.Empty); - - public ValueTask ResolveAsync(EntrypointSpecification entrypoint, EntryTraceContext context, CancellationToken cancellationToken = default) - { - Invoked = true; - LastEntrypoint = entrypoint; - LastContext = context; - return ValueTask.FromResult(Graph); - } - } - - private sealed class TestLease : IScanJobLease - { - private readonly IReadOnlyDictionary _metadata; - - public TestLease(IReadOnlyDictionary metadata) - { - _metadata = metadata; - EnqueuedAtUtc = DateTimeOffset.UtcNow; - LeasedAtUtc = EnqueuedAtUtc; - } - - public string JobId { get; } = $"job-{Guid.NewGuid():n}"; - - public string ScanId { get; } = $"scan-{Guid.NewGuid():n}"; - - public int Attempt => 1; - - public DateTimeOffset EnqueuedAtUtc { get; } - - public DateTimeOffset LeasedAtUtc { get; } - - public TimeSpan LeaseDuration => TimeSpan.FromMinutes(5); - - public IReadOnlyDictionary Metadata => _metadata; - - public ValueTask RenewAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask; - - public ValueTask CompleteAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask; - - public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask; - - public ValueTask 
PoisonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask; - - public ValueTask DisposeAsync() => ValueTask.CompletedTask; - } -} + +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.EntryTrace; +using StellaOps.Scanner.EntryTrace.Runtime; +using StellaOps.Scanner.Surface.Env; +using StellaOps.Scanner.Surface.FS; +using StellaOps.Scanner.Surface.Secrets; +using StellaOps.Scanner.Surface.Validation; +using StellaOps.Scanner.Worker.Options; +using StellaOps.Scanner.Worker.Processing; +using Xunit; + +namespace StellaOps.Scanner.Worker.Tests; + +public sealed class EntryTraceExecutionServiceTests : IDisposable +{ + private readonly string _tempRoot; + + public EntryTraceExecutionServiceTests() + { + _tempRoot = Path.Combine(Path.GetTempPath(), $"entrytrace-service-{Guid.NewGuid():n}"); + Directory.CreateDirectory(_tempRoot); + } + + [Fact] + public async Task ExecuteAsync_Skips_When_ConfigMetadataMissing() + { + var analyzer = new CapturingEntryTraceAnalyzer(); + var store = new CapturingEntryTraceResultStore(); + var service = CreateService(analyzer, store); + + var context = CreateContext(new Dictionary()); + + await service.ExecuteAsync(context, CancellationToken.None); + + Assert.False(analyzer.Invoked); + Assert.False(context.Analysis.TryGet(ScanAnalysisKeys.EntryTraceGraph, out _)); + Assert.False(store.Stored); + } + + [Fact] + public async Task ExecuteAsync_BuildsContext_AndStoresGraph() + { + var metadata = CreateMetadata("PATH=/bin:/usr/bin"); + var analyzer = new CapturingEntryTraceAnalyzer(); + var store = new CapturingEntryTraceResultStore(); + var service = CreateService(analyzer, store); 
+ + var context = CreateContext(metadata); + + await service.ExecuteAsync(context, CancellationToken.None); + + Assert.True(analyzer.Invoked); + Assert.NotNull(analyzer.LastEntrypoint); + Assert.Equal("/entrypoint.sh", analyzer.LastEntrypoint!.Entrypoint[0]); + Assert.NotNull(analyzer.LastContext); + Assert.Equal("scanner", analyzer.LastContext!.User); + Assert.Equal("/workspace", analyzer.LastContext.WorkingDirectory); + Assert.Contains("/bin", analyzer.LastContext.Path); + + Assert.True(context.Analysis.TryGet(ScanAnalysisKeys.EntryTraceGraph, out EntryTraceGraph stored)); + Assert.Equal(analyzer.Graph.Outcome, stored.Outcome); + Assert.Contains(stored.Diagnostics, diagnostic => diagnostic.Reason == EntryTraceUnknownReason.RuntimeSnapshotUnavailable); + Assert.True(context.Analysis.TryGet(ScanAnalysisKeys.EntryTraceNdjson, out ImmutableArray ndjsonPayload)); + Assert.False(ndjsonPayload.IsDefaultOrEmpty); + Assert.True(store.Stored); + Assert.NotNull(store.LastResult); + Assert.Equal(context.ScanId, store.LastResult!.ScanId); + Assert.Equal("sha256:test-digest", store.LastResult.ImageDigest); + Assert.Equal(stored, store.LastResult.Graph); + Assert.Equal(ndjsonPayload, store.LastResult.Ndjson); + } + + + [Fact] + public async Task ExecuteAsync_UsesCachedGraphWhenAvailable() + { + var metadata = CreateMetadata("PATH=/bin:/usr/bin"); + var analyzer = new CapturingEntryTraceAnalyzer(); + var store = new CapturingEntryTraceResultStore(); + var cache = new InMemorySurfaceCache(); + var service = CreateService(analyzer, store, surfaceCache: cache); + + await service.ExecuteAsync(CreateContext(metadata), CancellationToken.None); + Assert.True(analyzer.Invoked); + + analyzer.Reset(); + store.Reset(); + + await service.ExecuteAsync(CreateContext(metadata), CancellationToken.None); + + Assert.False(analyzer.Invoked); + Assert.True(store.Stored); + } + + [Fact] + public async Task ExecuteAsync_ReplacesSecretReferencesUsingSurfaceSecrets() + { + var metadata = 
CreateMetadata("API_KEY=secret://inline/api-key"); + var analyzer = new CapturingEntryTraceAnalyzer(); + var store = new CapturingEntryTraceResultStore(); + var secrets = new StubSurfaceSecretProvider(new Dictionary<(string Type, string Name), byte[]> + { + {("inline", "api-key"), Encoding.UTF8.GetBytes("resolved-value")} + }); + var service = CreateService(analyzer, store, surfaceSecrets: secrets); + + await service.ExecuteAsync(CreateContext(metadata), CancellationToken.None); + + Assert.True(analyzer.Invoked); + Assert.Equal("resolved-value", analyzer.LastContext!.Environment["API_KEY"]); + } + + [Fact] + public async Task ExecuteAsync_FallsBackToBase64ForBinarySecrets() + { + var metadata = CreateMetadata("BLOB=secret://inline/blob"); + var analyzer = new CapturingEntryTraceAnalyzer(); + var store = new CapturingEntryTraceResultStore(); + var payload = new byte[] { 0x00, 0xFF, 0x10 }; + var secrets = new StubSurfaceSecretProvider(new Dictionary<(string Type, string Name), byte[]> + { + {("inline", "blob"), payload} + }); + var service = CreateService(analyzer, store, surfaceSecrets: secrets); + + await service.ExecuteAsync(CreateContext(metadata), CancellationToken.None); + + Assert.True(analyzer.Invoked); + Assert.Equal(Convert.ToBase64String(payload), analyzer.LastContext!.Environment["BLOB"]); + } + + [Fact] + public async Task ExecuteAsync_SkipsWhenSurfaceValidationFails() + { + var metadata = CreateMetadata("PATH=/bin:/usr/bin"); + var analyzer = new CapturingEntryTraceAnalyzer(); + var store = new CapturingEntryTraceResultStore(); + var issues = new[] + { + SurfaceValidationIssue.Error("cache", "unwritable") + }; + var validator = new StaticSurfaceValidatorRunner(SurfaceValidationResult.FromIssues(issues)); + var service = CreateService(analyzer, store, surfaceValidator: validator); + + await service.ExecuteAsync(CreateContext(metadata), CancellationToken.None); + + Assert.False(analyzer.Invoked); + Assert.False(store.Stored); + } + + private 
EntryTraceExecutionService CreateService( + IEntryTraceAnalyzer analyzer, + IEntryTraceResultStore store, + ISurfaceCache? surfaceCache = null, + ISurfaceValidatorRunner? surfaceValidator = null, + ISurfaceSecretProvider? surfaceSecrets = null, + ISurfaceEnvironment? surfaceEnvironment = null) + { + var workerOptions = new ScannerWorkerOptions(); + var entryTraceOptions = new EntryTraceAnalyzerOptions(); + + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Trace)); + surfaceEnvironment ??= new StubSurfaceEnvironment(); + surfaceCache ??= new InMemorySurfaceCache(); + surfaceValidator ??= new NoopSurfaceValidatorRunner(); + surfaceSecrets ??= new StubSurfaceSecretProvider(); + var serviceProvider = new ServiceCollection() + .AddSingleton(surfaceEnvironment) + .BuildServiceProvider(); + + return new EntryTraceExecutionService( + analyzer, + Microsoft.Extensions.Options.Options.Create(entryTraceOptions), + Microsoft.Extensions.Options.Options.Create(workerOptions), + loggerFactory.CreateLogger(), + loggerFactory, + new EntryTraceRuntimeReconciler(), + store, + surfaceValidator, + surfaceEnvironment, + surfaceCache, + surfaceSecrets, + serviceProvider); + } + + private static ScanJobContext CreateContext(IReadOnlyDictionary metadata) + { + var lease = new TestLease(metadata); + return new ScanJobContext(lease, TimeProvider.System, DateTimeOffset.UtcNow, CancellationToken.None); + } + + private Dictionary CreateMetadata(params string[] environmentEntries) + { + var configPath = Path.Combine(_tempRoot, $"config-{Guid.NewGuid():n}.json"); + var env = environmentEntries.Length == 0 + ? 
new[] { "PATH=/bin:/usr/bin" } + : environmentEntries; + var envJson = string.Join(",", env.Select(value => $"\"{value}\"")); + File.WriteAllText(configPath, + $$""" + { + "config": { + "Env": [{{envJson}}], + "Entrypoint": ["/entrypoint.sh"], + "WorkingDir": "/workspace", + "User": "scanner" + } + } + """); + + var rootDirectory = Path.Combine(_tempRoot, $"root-{Guid.NewGuid():n}"); + Directory.CreateDirectory(rootDirectory); + File.WriteAllText(Path.Combine(rootDirectory, "entrypoint.sh"), "#!/bin/sh\necho hello\n"); + + return new Dictionary + { + [ScanMetadataKeys.ImageConfigPath] = configPath, + [ScanMetadataKeys.RootFilesystemPath] = rootDirectory, + [ScanMetadataKeys.LayerDirectories] = rootDirectory, + ["image.digest"] = "sha256:test-digest" + }; + } + + public void Dispose() + { + try + { + if (Directory.Exists(_tempRoot)) + { + Directory.Delete(_tempRoot, recursive: true); + } + } + catch + { + // ignore cleanup failures + } + } + + private sealed class CapturingEntryTraceAnalyzer : IEntryTraceAnalyzer + { + public bool Invoked { get; private set; } + + public EntrypointSpecification? LastEntrypoint { get; private set; } + + public EntryTraceContext? LastContext { get; private set; } + + public EntryTraceGraph Graph { get; } = new( + EntryTraceOutcome.Resolved, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty); + + public ValueTask ResolveAsync(EntrypointSpecification entrypoint, EntryTraceContext context, CancellationToken cancellationToken = default) + { + Invoked = true; + LastEntrypoint = entrypoint; + LastContext = context; + return ValueTask.FromResult(Graph); + } + + public void Reset() + { + Invoked = false; + LastEntrypoint = null; + LastContext = null; + } + } + + private sealed class CapturingEntryTraceResultStore : IEntryTraceResultStore + { + public bool Stored { get; private set; } + public EntryTraceResult? 
LastResult { get; private set; } + + public Task GetAsync(string scanId, CancellationToken cancellationToken) + { + return Task.FromResult(null); + } + + public Task StoreAsync(EntryTraceResult result, CancellationToken cancellationToken) + { + Stored = true; + LastResult = result; + return Task.CompletedTask; + } + + public void Reset() + { + Stored = false; + LastResult = null; + } + } + + private sealed class TestLease : IScanJobLease + { + private readonly IReadOnlyDictionary _metadata; + + public TestLease(IReadOnlyDictionary metadata) + { + _metadata = metadata; + EnqueuedAtUtc = DateTimeOffset.UtcNow; + LeasedAtUtc = EnqueuedAtUtc; + } + + public string JobId { get; } = $"job-{Guid.NewGuid():n}"; + + public string ScanId { get; } = $"scan-{Guid.NewGuid():n}"; + + public int Attempt => 1; + + public DateTimeOffset EnqueuedAtUtc { get; } + + public DateTimeOffset LeasedAtUtc { get; } + + public TimeSpan LeaseDuration => TimeSpan.FromMinutes(5); + + public IReadOnlyDictionary Metadata => _metadata; + + public ValueTask RenewAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask CompleteAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask DisposeAsync() => ValueTask.CompletedTask; + } + + private sealed class StubSurfaceEnvironment : ISurfaceEnvironment + { + public StubSurfaceEnvironment() + { + var cacheRoot = new DirectoryInfo(Path.Combine(Path.GetTempPath(), "surface-cache-tests")); + Settings = new SurfaceEnvironmentSettings( + new Uri("https://surface.example"), + "surface-cache", + null, + cacheRoot, + 1024, + false, + Array.Empty(), + new SurfaceSecretsConfiguration("inline", "tenant", null, null, null, AllowInline: true), + "tenant", + new 
SurfaceTlsConfiguration(null, null, null)); + RawVariables = new Dictionary(); + } + + public SurfaceEnvironmentSettings Settings { get; } + + public IReadOnlyDictionary RawVariables { get; } + } + + private sealed class NoopSurfaceValidatorRunner : ISurfaceValidatorRunner + { + public ValueTask RunAllAsync(SurfaceValidationContext context, CancellationToken cancellationToken = default) + { + return ValueTask.FromResult(SurfaceValidationResult.Success()); + } + + public ValueTask EnsureAsync(SurfaceValidationContext context, CancellationToken cancellationToken = default) + { + return ValueTask.CompletedTask; + } + } + + + private sealed class StaticSurfaceValidatorRunner : ISurfaceValidatorRunner + { + private readonly SurfaceValidationResult _result; + + public StaticSurfaceValidatorRunner(SurfaceValidationResult result) + { + _result = result; + } + + public ValueTask RunAllAsync(SurfaceValidationContext context, CancellationToken cancellationToken = default) + { + return ValueTask.FromResult(_result); + } + + public ValueTask EnsureAsync(SurfaceValidationContext context, CancellationToken cancellationToken = default) + { + return ValueTask.CompletedTask; + } + } + + private sealed class InMemorySurfaceCache : ISurfaceCache + { + private readonly Dictionary _store = new(); + private readonly object _gate = new(); + + public async Task GetOrCreateAsync( + SurfaceCacheKey key, + Func> factory, + Func> serializer, + Func, T> deserializer, + CancellationToken cancellationToken = default) + { + if (TryRead(key, deserializer, out var existing)) + { + return existing; + } + + var created = await factory(cancellationToken).ConfigureAwait(false); + var payload = serializer(created).ToArray(); + lock (_gate) + { + _store[key.ToString()] = payload; + } + + return created; + } + + public Task TryGetAsync( + SurfaceCacheKey key, + Func, T> deserializer, + CancellationToken cancellationToken = default) + { + return Task.FromResult(TryRead(key, deserializer, out var value) ? 
value : default); + } + + public Task SetAsync( + SurfaceCacheKey key, + ReadOnlyMemory payload, + CancellationToken cancellationToken = default) + { + lock (_gate) + { + _store[key.ToString()] = payload.ToArray(); + } + + return Task.CompletedTask; + } + + private bool TryRead(SurfaceCacheKey key, Func, T> deserializer, out T value) + { + lock (_gate) + { + if (_store.TryGetValue(key.ToString(), out var bytes)) + { + value = deserializer(new ReadOnlyMemory(bytes)); + return true; + } + } + + value = default!; + return false; + } + } + + private sealed class StubSurfaceSecretProvider : ISurfaceSecretProvider + { + private readonly Dictionary<(string Type, string Name), byte[]> _secrets; + private readonly bool _throwOnMissing; + + public StubSurfaceSecretProvider(Dictionary<(string Type, string Name), byte[]>? secrets = null, bool throwOnMissing = false) + { + _secrets = secrets ?? new Dictionary<(string Type, string Name), byte[]>(); + _throwOnMissing = throwOnMissing; + } + + public ValueTask GetAsync(SurfaceSecretRequest request, CancellationToken cancellationToken = default) + { + var key = (request.SecretType, request.Name ?? 
string.Empty); + if (_secrets.TryGetValue(key, out var payload)) + { + return ValueTask.FromResult(SurfaceSecretHandle.FromBytes(payload)); + } + + if (_throwOnMissing) + { + throw new SurfaceSecretNotFoundException(request); + } + + return ValueTask.FromResult(SurfaceSecretHandle.Empty); + } + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/RedisWorkerSmokeTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/RedisWorkerSmokeTests.cs index 7cc5750d..88decefe 100644 --- a/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/RedisWorkerSmokeTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/RedisWorkerSmokeTests.cs @@ -153,7 +153,7 @@ public sealed class RedisWorkerSmokeTests public async Task TryAcquireAsync(CancellationToken cancellationToken) { var request = new QueueLeaseRequest(_consumerName, 1, _queueOptions.DefaultLeaseDuration); - var leases = await _queue.LeaseAsync(request, cancellationToken).ConfigureAwait(false); + var leases = await _queue.LeaseAsync(request, cancellationToken); if (leases.Count == 0) { return null; @@ -221,23 +221,23 @@ public sealed class RedisWorkerSmokeTests public async ValueTask RenewAsync(CancellationToken cancellationToken) { - await _lease.RenewAsync(_options.DefaultLeaseDuration, cancellationToken).ConfigureAwait(false); + await _lease.RenewAsync(_options.DefaultLeaseDuration, cancellationToken); } public async ValueTask CompleteAsync(CancellationToken cancellationToken) { - await _lease.AcknowledgeAsync(cancellationToken).ConfigureAwait(false); + await _lease.AcknowledgeAsync(cancellationToken); _deps.JobCompleted.TrySetResult(); } public async ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) { - await _lease.ReleaseAsync(QueueReleaseDisposition.Retry, cancellationToken).ConfigureAwait(false); + await _lease.ReleaseAsync(QueueReleaseDisposition.Retry, cancellationToken); } public async ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) 
{ - await _lease.DeadLetterAsync(reason, cancellationToken).ConfigureAwait(false); + await _lease.DeadLetterAsync(reason, cancellationToken); } public ValueTask DisposeAsync() => ValueTask.CompletedTask; diff --git a/src/StellaOps.sln b/src/StellaOps.sln index aecd01c9..9d6a713d 100644 --- a/src/StellaOps.sln +++ b/src/StellaOps.sln @@ -315,6 +315,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.WebServi EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Tests", "StellaOps.Attestor\StellaOps.Attestor.Tests\StellaOps.Attestor.Tests.csproj", "{B8B15A8D-F647-41AE-A55F-A283A47E97C4}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Verify", "StellaOps.Attestor\StellaOps.Attestor.Verify\StellaOps.Attestor.Verify.csproj", "{99EC90D8-0D5E-41E4-A895-585A7680916C}" +EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Zastava", "StellaOps.Zastava", "{F1F029E6-2E4B-4A42-8D8F-AB325EE3B608}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Core", "StellaOps.Zastava.Core\StellaOps.Zastava.Core.csproj", "{CBE6E3D8-230C-4513-B98F-99D82B83B9F7}" @@ -359,14 +361,14 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "PolicyEngine", "PolicyEngin EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Bench.PolicyEngine", "StellaOps.Bench\PolicyEngine\StellaOps.Bench.PolicyEngine\StellaOps.Bench.PolicyEngine.csproj", "{D8B22C17-28E9-4059-97C5-4AC4600A2BD5}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc", "StellaOps.Aoc\StellaOps.Aoc.csproj", "{6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc.AspNetCore", "StellaOps.Aoc.AspNetCore\StellaOps.Aoc.AspNetCore.csproj", "{D3D47993-27D3-4C90-9C8E-14652807DAF5}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc.Tests", 
"StellaOps.Aoc.Tests\StellaOps.Aoc.Tests.csproj", "{4D167781-1AC0-46CF-A32E-1B6E048940B2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc.AspNetCore.Tests", "StellaOps.Aoc.AspNetCore.Tests\StellaOps.Aoc.AspNetCore.Tests.csproj", "{5F9B7682-71E2-4989-9BC9-014A2C26AF50}" -EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc", "StellaOps.Aoc\StellaOps.Aoc.csproj", "{6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc.AspNetCore", "StellaOps.Aoc.AspNetCore\StellaOps.Aoc.AspNetCore.csproj", "{D3D47993-27D3-4C90-9C8E-14652807DAF5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc.Tests", "StellaOps.Aoc.Tests\StellaOps.Aoc.Tests.csproj", "{4D167781-1AC0-46CF-A32E-1B6E048940B2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc.AspNetCore.Tests", "StellaOps.Aoc.AspNetCore.Tests\StellaOps.Aoc.AspNetCore.Tests.csproj", "{5F9B7682-71E2-4989-9BC9-014A2C26AF50}" +EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.RawModels", "StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj", "{C3AEAEE7-038E-45FF-892B-DB18EE29F790}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.RawModels.Tests", "StellaOps.Concelier.RawModels.Tests\StellaOps.Concelier.RawModels.Tests.csproj", "{7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}" @@ -379,6 +381,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Gateway", EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Gateway.Tests", "StellaOps.Policy.Gateway.Tests\StellaOps.Policy.Gateway.Tests.csproj", "{67650687-2E32-40BB-9849-C4ABBA65A7CF}" EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Libraries", "__Libraries", "{41F15E67-7190-CF23-3BC4-77E87134CADD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = 
"StellaOps.IssuerDirectory.Client", "__Libraries\StellaOps.IssuerDirectory.Client\StellaOps.IssuerDirectory.Client.csproj", "{CADD452F-3F55-4FD8-BB01-5A5EE5AF99EE}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -2213,6 +2219,18 @@ Global {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x64.Build.0 = Release|Any CPU {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x86.ActiveCfg = Release|Any CPU {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x86.Build.0 = Release|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Debug|x64.ActiveCfg = Debug|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Debug|x64.Build.0 = Debug|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Debug|x86.ActiveCfg = Debug|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Debug|x86.Build.0 = Debug|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Release|Any CPU.Build.0 = Release|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Release|x64.ActiveCfg = Release|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Release|x64.Build.0 = Release|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Release|x86.ActiveCfg = Release|Any CPU + {99EC90D8-0D5E-41E4-A895-585A7680916C}.Release|x86.Build.0 = Release|Any CPU {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|Any CPU.Build.0 = Debug|Any CPU {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|x64.ActiveCfg = Debug|Any CPU @@ -2450,45 +2468,45 @@ Global {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|Any CPU.ActiveCfg = Release|Any CPU {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|Any CPU.Build.0 = Release|Any CPU {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x64.ActiveCfg = Release|Any CPU - 
{6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x64.Build.0 = Release|Any CPU - {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x86.ActiveCfg = Release|Any CPU - {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x86.Build.0 = Release|Any CPU - {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Debug|x64.ActiveCfg = Debug|Any CPU - {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Debug|x64.Build.0 = Debug|Any CPU - {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Debug|x86.ActiveCfg = Debug|Any CPU - {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Debug|x86.Build.0 = Debug|Any CPU - {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Release|Any CPU.Build.0 = Release|Any CPU - {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Release|x64.ActiveCfg = Release|Any CPU - {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Release|x64.Build.0 = Release|Any CPU - {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Release|x86.ActiveCfg = Release|Any CPU - {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Release|x86.Build.0 = Release|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|Any CPU.Build.0 = Debug|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x64.ActiveCfg = Debug|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x64.Build.0 = Debug|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x86.ActiveCfg = Debug|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x86.Build.0 = Debug|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|Any CPU.ActiveCfg = Release|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|Any CPU.Build.0 = Release|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x64.ActiveCfg = Release|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x64.Build.0 = Release|Any CPU - 
{4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x86.ActiveCfg = Release|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x86.Build.0 = Release|Any CPU - {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Debug|Any CPU.Build.0 = Debug|Any CPU - {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Debug|x64.ActiveCfg = Debug|Any CPU - {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Debug|x64.Build.0 = Debug|Any CPU - {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Debug|x86.ActiveCfg = Debug|Any CPU - {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Debug|x86.Build.0 = Debug|Any CPU - {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Release|Any CPU.ActiveCfg = Release|Any CPU - {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Release|Any CPU.Build.0 = Release|Any CPU - {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Release|x64.ActiveCfg = Release|Any CPU - {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Release|x64.Build.0 = Release|Any CPU - {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Release|x86.ActiveCfg = Release|Any CPU - {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Release|x86.Build.0 = Release|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x64.Build.0 = Release|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x86.ActiveCfg = Release|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x86.Build.0 = Release|Any CPU + {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Debug|x64.ActiveCfg = Debug|Any CPU + {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Debug|x64.Build.0 = Debug|Any CPU + {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Debug|x86.ActiveCfg = Debug|Any CPU + {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Debug|x86.Build.0 = Debug|Any CPU + {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Release|Any CPU.Build.0 = Release|Any CPU + 
{D3D47993-27D3-4C90-9C8E-14652807DAF5}.Release|x64.ActiveCfg = Release|Any CPU + {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Release|x64.Build.0 = Release|Any CPU + {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Release|x86.ActiveCfg = Release|Any CPU + {D3D47993-27D3-4C90-9C8E-14652807DAF5}.Release|x86.Build.0 = Release|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x64.ActiveCfg = Debug|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x64.Build.0 = Debug|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x86.ActiveCfg = Debug|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x86.Build.0 = Debug|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|Any CPU.Build.0 = Release|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x64.ActiveCfg = Release|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x64.Build.0 = Release|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x86.ActiveCfg = Release|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x86.Build.0 = Release|Any CPU + {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Debug|x64.ActiveCfg = Debug|Any CPU + {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Debug|x64.Build.0 = Debug|Any CPU + {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Debug|x86.ActiveCfg = Debug|Any CPU + {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Debug|x86.Build.0 = Debug|Any CPU + {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Release|Any CPU.Build.0 = Release|Any CPU + {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Release|x64.ActiveCfg = Release|Any CPU + 
{5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Release|x64.Build.0 = Release|Any CPU + {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Release|x86.ActiveCfg = Release|Any CPU + {5F9B7682-71E2-4989-9BC9-014A2C26AF50}.Release|x86.Build.0 = Release|Any CPU {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|Any CPU.Build.0 = Debug|Any CPU {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|x64.ActiveCfg = Debug|Any CPU @@ -2561,6 +2579,18 @@ Global {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|x64.Build.0 = Release|Any CPU {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|x86.ActiveCfg = Release|Any CPU {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|x86.Build.0 = Release|Any CPU + {CADD452F-3F55-4FD8-BB01-5A5EE5AF99EE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CADD452F-3F55-4FD8-BB01-5A5EE5AF99EE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CADD452F-3F55-4FD8-BB01-5A5EE5AF99EE}.Debug|x64.ActiveCfg = Debug|Any CPU + {CADD452F-3F55-4FD8-BB01-5A5EE5AF99EE}.Debug|x64.Build.0 = Debug|Any CPU + {CADD452F-3F55-4FD8-BB01-5A5EE5AF99EE}.Debug|x86.ActiveCfg = Debug|Any CPU + {CADD452F-3F55-4FD8-BB01-5A5EE5AF99EE}.Debug|x86.Build.0 = Debug|Any CPU + {CADD452F-3F55-4FD8-BB01-5A5EE5AF99EE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CADD452F-3F55-4FD8-BB01-5A5EE5AF99EE}.Release|Any CPU.Build.0 = Release|Any CPU + {CADD452F-3F55-4FD8-BB01-5A5EE5AF99EE}.Release|x64.ActiveCfg = Release|Any CPU + {CADD452F-3F55-4FD8-BB01-5A5EE5AF99EE}.Release|x64.Build.0 = Release|Any CPU + {CADD452F-3F55-4FD8-BB01-5A5EE5AF99EE}.Release|x86.ActiveCfg = Release|Any CPU + {CADD452F-3F55-4FD8-BB01-5A5EE5AF99EE}.Release|x86.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -2665,10 +2695,12 @@ Global {49EF86AC-1CC2-4A24-8637-C5151E23DF9D} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6} {C22333B3-D132-4960-A490-6BEF1EB1C917} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6} 
{B8B15A8D-F647-41AE-A55F-A283A47E97C4} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6} + {99EC90D8-0D5E-41E4-A895-585A7680916C} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6} {F1F029E6-2E4B-4A42-8D8F-AB325EE3B608} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} {CBE6E3D8-230C-4513-B98F-99D82B83B9F7} = {F1F029E6-2E4B-4A42-8D8F-AB325EE3B608} {821C7F88-B775-4D3C-8D89-850B6C34E818} = {F1F029E6-2E4B-4A42-8D8F-AB325EE3B608} {CBDF819E-923F-A07F-78D9-D599DD28197E} = {1553F566-661E-A2F5-811B-F74BF45C44CC} {D8B22C17-28E9-4059-97C5-4AC4600A2BD5} = {CBDF819E-923F-A07F-78D9-D599DD28197E} + {CADD452F-3F55-4FD8-BB01-5A5EE5AF99EE} = {41F15E67-7190-CF23-3BC4-77E87134CADD} EndGlobalSection EndGlobal diff --git a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TestManifests.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TestManifests.cs index 1cbf4cba..a20f7a56 100644 --- a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TestManifests.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TestManifests.cs @@ -26,8 +26,8 @@ spec: required: false default: false approvals: - - id: security-review - grants: ["Packs.Approve"] + - id: security-review + grants: ["packs.approve"] steps: - id: plan-step name: Plan @@ -112,8 +112,8 @@ metadata: version: 1.0.0 spec: secrets: - - name: apiKey - scope: Packs.Run + - name: apiKey + scope: packs.run description: API authentication token steps: - id: use-secret diff --git a/src/TaskRunner/StellaOps.TaskRunner/TASKS.md b/src/TaskRunner/StellaOps.TaskRunner/TASKS.md index f6822ba3..1dda96c4 100644 --- a/src/TaskRunner/StellaOps.TaskRunner/TASKS.md +++ b/src/TaskRunner/StellaOps.TaskRunner/TASKS.md @@ -3,7 +3,7 @@ ## Sprint 41 – Foundations | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| TASKRUN-41-001 | TODO | Task Runner Guild | ORCH-SVC-41-101, AUTH-PACKS-41-001 | Bootstrap service, define migrations for 
`pack_runs`, `pack_run_logs`, `pack_artifacts`, implement run API (create/get/log stream), local executor, approvals pause, artifact capture, and provenance manifest generation. | Service builds/tests; migrations scripted; run API functional with sample pack; logs/artefacts stored; manifest signed; compliance checklist recorded. | +| TASKRUN-41-001 | DOING (2025-11-01) | Task Runner Guild | ORCH-SVC-41-101, AUTH-PACKS-41-001 | Bootstrap service, define migrations for `pack_runs`, `pack_run_logs`, `pack_artifacts`, implement run API (create/get/log stream), local executor, approvals pause, artifact capture, and provenance manifest generation. | Service builds/tests; migrations scripted; run API functional with sample pack; logs/artefacts stored; manifest signed; compliance checklist recorded. | ## Sprint 42 – Advanced Execution | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | diff --git a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Program.cs b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Program.cs index 3917ef1b..705e480d 100644 --- a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Program.cs +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Program.cs @@ -1,41 +1,43 @@ -var builder = WebApplication.CreateBuilder(args); - -// Add services to the container. -// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi -builder.Services.AddOpenApi(); - -var app = builder.Build(); - -// Configure the HTTP request pipeline. 
-if (app.Environment.IsDevelopment()) -{ - app.MapOpenApi(); -} - -app.UseHttpsRedirection(); - -var summaries = new[] -{ - "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" -}; - -app.MapGet("/weatherforecast", () => -{ - var forecast = Enumerable.Range(1, 5).Select(index => - new WeatherForecast - ( - DateOnly.FromDateTime(DateTime.Now.AddDays(index)), - Random.Shared.Next(-20, 55), - summaries[Random.Shared.Next(summaries.Length)] - )) - .ToArray(); - return forecast; -}) -.WithName("GetWeatherForecast"); - -app.Run(); - -record WeatherForecast(DateOnly Date, int TemperatureC, string? Summary) -{ - public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); -} +using Microsoft.AspNetCore.Authorization; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.ServerIntegration; + +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddStellaOpsResourceServerAuthentication( + builder.Configuration, + configure: options => + { + options.RequiredScopes.Clear(); + }); + +builder.Services.AddAuthorization(options => +{ + options.AddObservabilityResourcePolicies(); + options.DefaultPolicy = new AuthorizationPolicyBuilder() + .RequireAuthenticatedUser() + .AddRequirements(new StellaOpsScopeRequirement(new[] { StellaOpsScopes.TimelineRead })) + .Build(); + options.FallbackPolicy = options.DefaultPolicy; +}); + +builder.Services.AddOpenApi(); + +var app = builder.Build(); + +if (app.Environment.IsDevelopment()) +{ + app.MapOpenApi(); +} + +app.UseHttpsRedirection(); +app.UseAuthentication(); +app.UseAuthorization(); + +app.MapGet("/timeline/events", () => Results.Ok(Array.Empty())) + .RequireAuthorization(StellaOpsResourceServerPolicies.TimelineRead); + +app.MapPost("/timeline/events", () => Results.Accepted("/timeline/events", new { status = "indexed" })) + .RequireAuthorization(StellaOpsResourceServerPolicies.TimelineWrite); + +app.Run(); diff --git 
a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.csproj b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.csproj index dc2cc62a..f7996544 100644 --- a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.csproj +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.csproj @@ -1,41 +1,20 @@ - - - - - - - - - net10.0 - enable - enable - preview - true - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + net10.0 + enable + enable + preview + true + + + + + + + + + + + diff --git a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.json b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.json index 4d566948..b08bcbcb 100644 --- a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.json +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.json @@ -1,9 +1,20 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - }, - "AllowedHosts": "*" -} +{ + Logging: { + LogLevel: { + Default: Information, + Microsoft.AspNetCore: Warning + } + }, + Authority: { + ResourceServer: { + Authority: https://authority.localtest.me, + Audiences: [ + api://timeline-indexer + ], + RequiredTenants: [ + tenant-default + ] + } + }, + AllowedHosts: * +} diff --git a/src/Tools/LanguageAnalyzerSmoke/LanguageAnalyzerSmoke.csproj b/src/Tools/LanguageAnalyzerSmoke/LanguageAnalyzerSmoke.csproj index a7490b33..904491aa 100644 --- a/src/Tools/LanguageAnalyzerSmoke/LanguageAnalyzerSmoke.csproj +++ b/src/Tools/LanguageAnalyzerSmoke/LanguageAnalyzerSmoke.csproj @@ -11,8 +11,8 @@ - - - - - + 
+ + + + diff --git a/src/Tools/LanguageAnalyzerSmoke/Program.cs b/src/Tools/LanguageAnalyzerSmoke/Program.cs index 83f1227f..ff72ff9b 100644 --- a/src/Tools/LanguageAnalyzerSmoke/Program.cs +++ b/src/Tools/LanguageAnalyzerSmoke/Program.cs @@ -11,22 +11,75 @@ using StellaOps.Scanner.Analyzers.Lang; using StellaOps.Scanner.Analyzers.Lang.Plugin; using StellaOps.Scanner.Core.Security; -internal sealed record SmokeScenario(string Name, string[] UsageHintRelatives) -{ - public IReadOnlyList ResolveUsageHints(string scenarioRoot) - => UsageHintRelatives.Select(relative => Path.GetFullPath(Path.Combine(scenarioRoot, relative))).ToArray(); -} - -internal sealed class SmokeOptions -{ - public string RepoRoot { get; set; } = Directory.GetCurrentDirectory(); - public string PluginDirectoryName { get; set; } = "StellaOps.Scanner.Analyzers.Lang.Python"; - public string FixtureRelativePath { get; set; } = Path.Combine("src", "StellaOps.Scanner.Analyzers.Lang.Python.Tests", "Fixtures", "lang", "python"); - - public static SmokeOptions Parse(string[] args) - { - var options = new SmokeOptions(); - +internal sealed record SmokeScenario(string Name, string[] UsageHintRelatives) +{ + public IReadOnlyList ResolveUsageHints(string scenarioRoot) + => UsageHintRelatives.Select(relative => Path.GetFullPath(Path.Combine(scenarioRoot, relative))).ToArray(); +} + +internal sealed record AnalyzerProfile( + string DisplayName, + string AnalyzerId, + string PluginDirectory, + string FixtureRelativePath, + string ExpectedPluginId, + string ExpectedEntryPointType, + IReadOnlyList RequiredCapabilities, + SmokeScenario[] Scenarios); + +internal static class AnalyzerProfileCatalog +{ + private static readonly SmokeScenario[] PythonScenarios = + { + new("simple-venv", new[] { Path.Combine("bin", "simple-tool") }), + new("pip-cache", new[] { Path.Combine("lib", "python3.11", "site-packages", "cache_pkg-1.2.3.data", "scripts", "cache-tool") }), + new("layered-editable", new[] { Path.Combine("layer1", 
"usr", "bin", "layered-cli") }), + }; + + private static readonly SmokeScenario[] RustScenarios = + { + new("simple", new[] { Path.Combine("usr", "local", "bin", "my_app") }), + new("heuristics", new[] { Path.Combine("usr", "local", "bin", "heuristic_app") }), + new("fallback", new[] { Path.Combine("usr", "local", "bin", "opaque_bin") }), + }; + + public static readonly IReadOnlyDictionary Profiles = + new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["python"] = new AnalyzerProfile( + DisplayName: "Python", + AnalyzerId: "python", + PluginDirectory: "StellaOps.Scanner.Analyzers.Lang.Python", + FixtureRelativePath: Path.Combine("src", "Scanner", "__Tests", "StellaOps.Scanner.Analyzers.Lang.Python.Tests", "Fixtures", "lang", "python"), + ExpectedPluginId: "stellaops.analyzer.lang.python", + ExpectedEntryPointType: "StellaOps.Scanner.Analyzers.Lang.Python.PythonAnalyzerPlugin", + RequiredCapabilities: new[] { "python" }, + Scenarios: PythonScenarios), + ["rust"] = new AnalyzerProfile( + DisplayName: "Rust", + AnalyzerId: "rust", + PluginDirectory: "StellaOps.Scanner.Analyzers.Lang.Rust", + FixtureRelativePath: Path.Combine("src", "Scanner", "__Tests", "StellaOps.Scanner.Analyzers.Lang.Tests", "Fixtures", "lang", "rust"), + ExpectedPluginId: "stellaops.analyzer.lang.rust", + ExpectedEntryPointType: "StellaOps.Scanner.Analyzers.Lang.Rust.RustAnalyzerPlugin", + RequiredCapabilities: new[] { "rust", "cargo" }, + Scenarios: RustScenarios), + }; +} + +internal sealed class SmokeOptions +{ + public string RepoRoot { get; set; } = Directory.GetCurrentDirectory(); + public string AnalyzerId { get; set; } = "python"; + public string PluginDirectoryName { get; set; } = "StellaOps.Scanner.Analyzers.Lang.Python"; + public string FixtureRelativePath { get; set; } = Path.Combine("src", "Scanner", "__Tests", "StellaOps.Scanner.Analyzers.Lang.Python.Tests", "Fixtures", "lang", "python"); + public bool PluginDirectoryExplicit { get; private set; } + public bool 
FixturePathExplicit { get; private set; } + + public static SmokeOptions Parse(string[] args) + { + var options = new SmokeOptions(); + for (var index = 0; index < args.Length; index++) { var current = args[index]; @@ -36,30 +89,52 @@ internal sealed class SmokeOptions case "-r": options.RepoRoot = RequireValue(args, ref index, current); break; - case "--plugin-directory": - case "-p": - options.PluginDirectoryName = RequireValue(args, ref index, current); - break; - case "--fixture-path": - case "-f": - options.FixtureRelativePath = RequireValue(args, ref index, current); - break; - case "--help": - case "-h": - PrintUsage(); - Environment.Exit(0); + case "--plugin-directory": + case "-p": + options.PluginDirectoryName = RequireValue(args, ref index, current); + options.PluginDirectoryExplicit = true; + break; + case "--fixture-path": + case "-f": + options.FixtureRelativePath = RequireValue(args, ref index, current); + options.FixturePathExplicit = true; + break; + case "--analyzer": + case "-a": + options.AnalyzerId = RequireValue(args, ref index, current); + break; + case "--help": + case "-h": + PrintUsage(); + Environment.Exit(0); break; default: throw new ArgumentException($"Unknown argument '{current}'. 
Use --help for usage."); } - } - - options.RepoRoot = Path.GetFullPath(options.RepoRoot); - return options; - } - - private static string RequireValue(string[] args, ref int index, string switchName) - { + } + + options.RepoRoot = Path.GetFullPath(options.RepoRoot); + + if (!AnalyzerProfileCatalog.Profiles.TryGetValue(options.AnalyzerId, out var profile)) + { + throw new ArgumentException($"Unsupported analyzer '{options.AnalyzerId}'."); + } + + if (!options.PluginDirectoryExplicit) + { + options.PluginDirectoryName = profile.PluginDirectory; + } + + if (!options.FixturePathExplicit) + { + options.FixtureRelativePath = profile.FixtureRelativePath; + } + + return options; + } + + private static string RequireValue(string[] args, ref int index, string switchName) + { if (index + 1 >= args.Length) { throw new ArgumentException($"Missing value for '{switchName}'."); @@ -76,16 +151,17 @@ internal sealed class SmokeOptions } private static void PrintUsage() - { - Console.WriteLine("Language Analyzer Smoke Harness"); - Console.WriteLine("Usage: dotnet run --project src/Tools/LanguageAnalyzerSmoke -- [options]"); - Console.WriteLine(); - Console.WriteLine("Options:"); - Console.WriteLine(" -r, --repo-root Repository root (defaults to current working directory)"); - Console.WriteLine(" -p, --plugin-directory Analyzer plug-in directory under plugins/scanner/analyzers/lang (defaults to StellaOps.Scanner.Analyzers.Lang.Python)"); - Console.WriteLine(" -f, --fixture-path Relative path to fixtures root (defaults to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python)"); - Console.WriteLine(" -h, --help Show usage information"); - } + { + Console.WriteLine("Language Analyzer Smoke Harness"); + Console.WriteLine("Usage: dotnet run --project src/Tools/LanguageAnalyzerSmoke -- [options]"); + Console.WriteLine(); + Console.WriteLine("Options:"); + Console.WriteLine(" -a, --analyzer Analyzer to exercise (python, rust). 
Defaults to python."); + Console.WriteLine(" -r, --repo-root Repository root (defaults to current working directory)"); + Console.WriteLine(" -p, --plugin-directory Analyzer plug-in directory under plugins/scanner/analyzers/lang (defaults to StellaOps.Scanner.Analyzers.Lang.Python)"); + Console.WriteLine(" -f, --fixture-path Relative path to fixtures root (defaults to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python)"); + Console.WriteLine(" -h, --help Show usage information"); + } } internal sealed record PluginManifest @@ -137,28 +213,33 @@ file static class Program }; public static async Task Main(string[] args) - { - try - { - var options = SmokeOptions.Parse(args); - await RunAsync(options).ConfigureAwait(false); - Console.WriteLine("✅ Python analyzer smoke checks passed"); - return 0; - } - catch (Exception ex) - { - Console.Error.WriteLine($"❌ {ex.Message}"); + { + try + { + var options = SmokeOptions.Parse(args); + var profile = await RunAsync(options).ConfigureAwait(false); + Console.WriteLine($"✅ {profile.DisplayName} analyzer smoke checks passed"); + return 0; + } + catch (Exception ex) + { + Console.Error.WriteLine($"❌ {ex.Message}"); return 1; - } - } - - private static async Task RunAsync(SmokeOptions options) - { - ValidateOptions(options); - - var pluginRoot = Path.Combine(options.RepoRoot, "plugins", "scanner", "analyzers", "lang", options.PluginDirectoryName); - var manifestPath = Path.Combine(pluginRoot, "manifest.json"); - if (!File.Exists(manifestPath)) + } + } + + private static async Task RunAsync(SmokeOptions options) + { + if (!AnalyzerProfileCatalog.Profiles.TryGetValue(options.AnalyzerId, out var profile)) + { + throw new ArgumentException($"Analyzer '{options.AnalyzerId}' is not supported."); + } + + ValidateOptions(options); + + var pluginRoot = Path.Combine(options.RepoRoot, "plugins", "scanner", "analyzers", "lang", options.PluginDirectoryName); + var manifestPath = Path.Combine(pluginRoot, 
"manifest.json"); + if (!File.Exists(manifestPath)) { throw new FileNotFoundException($"Plug-in manifest not found at '{manifestPath}'.", manifestPath); } @@ -166,16 +247,16 @@ file static class Program using var manifestStream = File.OpenRead(manifestPath); var manifest = JsonSerializer.Deserialize(manifestStream, new JsonSerializerOptions { - PropertyNameCaseInsensitive = true, - ReadCommentHandling = JsonCommentHandling.Skip - }) ?? throw new InvalidOperationException($"Unable to parse manifest '{manifestPath}'."); - - ValidateManifest(manifest, options.PluginDirectoryName); - - var pluginAssemblyPath = Path.Combine(pluginRoot, manifest.EntryPoint.Assembly); - if (!File.Exists(pluginAssemblyPath)) - { - throw new FileNotFoundException($"Plug-in assembly '{manifest.EntryPoint.Assembly}' not found under '{pluginRoot}'.", pluginAssemblyPath); + PropertyNameCaseInsensitive = true, + ReadCommentHandling = JsonCommentHandling.Skip + }) ?? throw new InvalidOperationException($"Unable to parse manifest '{manifestPath}'."); + + ValidateManifest(manifest, profile, options.PluginDirectoryName); + + var pluginAssemblyPath = Path.Combine(pluginRoot, manifest.EntryPoint.Assembly); + if (!File.Exists(pluginAssemblyPath)) + { + throw new FileNotFoundException($"Plug-in assembly '{manifest.EntryPoint.Assembly}' not found under '{pluginRoot}'.", pluginAssemblyPath); } var sha256 = ComputeSha256(pluginAssemblyPath); @@ -191,30 +272,32 @@ file static class Program } var analyzerSet = catalog.CreateAnalyzers(serviceProvider); - if (analyzerSet.Count == 0) - { - throw new InvalidOperationException("Language analyzer plug-ins reported no analyzers."); - } - - var analyzerIds = analyzerSet.Select(analyzer => analyzer.Id).ToArray(); - Console.WriteLine($"→ Loaded analyzers: {string.Join(", ", analyzerIds)}"); - - if (!analyzerIds.Contains("python", StringComparer.OrdinalIgnoreCase)) - { - throw new InvalidOperationException("Python analyzer was not created by the plug-in."); - } - - var 
fixtureRoot = Path.GetFullPath(Path.Combine(options.RepoRoot, options.FixtureRelativePath)); - if (!Directory.Exists(fixtureRoot)) - { - throw new DirectoryNotFoundException($"Fixture directory '{fixtureRoot}' does not exist."); - } - - foreach (var scenario in PythonScenarios) - { - await RunScenarioAsync(scenario, fixtureRoot, catalog, serviceProvider).ConfigureAwait(false); - } - } + if (analyzerSet.Count == 0) + { + throw new InvalidOperationException("Language analyzer plug-ins reported no analyzers."); + } + + var analyzerIds = analyzerSet.Select(analyzer => analyzer.Id).ToArray(); + Console.WriteLine($"→ Loaded analyzers: {string.Join(", ", analyzerIds)}"); + + if (!analyzerIds.Contains(profile.AnalyzerId, StringComparer.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"{profile.DisplayName} analyzer was not created by the plug-in."); + } + + var fixtureRoot = Path.GetFullPath(Path.Combine(options.RepoRoot, options.FixtureRelativePath)); + if (!Directory.Exists(fixtureRoot)) + { + throw new DirectoryNotFoundException($"Fixture directory '{fixtureRoot}' does not exist."); + } + + foreach (var scenario in profile.Scenarios) + { + await RunScenarioAsync(scenario, fixtureRoot, catalog, serviceProvider).ConfigureAwait(false); + } + + return profile; + } private static ServiceProvider BuildServiceProvider() { @@ -300,12 +383,12 @@ file static class Program } } - private static void ValidateManifest(PluginManifest manifest, string expectedDirectory) - { - if (!string.Equals(manifest.SchemaVersion, "1.0", StringComparison.Ordinal)) - { - throw new InvalidOperationException($"Unexpected manifest schema version '{manifest.SchemaVersion}'."); - } + private static void ValidateManifest(PluginManifest manifest, AnalyzerProfile profile, string pluginDirectoryName) + { + if (!string.Equals(manifest.SchemaVersion, "1.0", StringComparison.Ordinal)) + { + throw new InvalidOperationException($"Unexpected manifest schema version '{manifest.SchemaVersion}'."); + } 
if (!manifest.RequiresRestart) { @@ -313,25 +396,28 @@ file static class Program } if (!string.Equals(manifest.EntryPoint.Type, "dotnet", StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException($"Unsupported entry point type '{manifest.EntryPoint.Type}'."); - } - - if (!manifest.Capabilities.Contains("python", StringComparer.OrdinalIgnoreCase)) - { - throw new InvalidOperationException("Manifest capabilities do not include 'python'."); - } - - if (!string.Equals(manifest.EntryPoint.TypeName, "StellaOps.Scanner.Analyzers.Lang.Python.PythonAnalyzerPlugin", StringComparison.Ordinal)) - { - throw new InvalidOperationException($"Unexpected entry point type name '{manifest.EntryPoint.TypeName}'."); - } - - if (!string.Equals(manifest.Id, "stellaops.analyzer.lang.python", StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException($"Manifest id '{manifest.Id}' does not match expected plug-in id for directory '{expectedDirectory}'."); - } - } + { + throw new InvalidOperationException($"Unsupported entry point type '{manifest.EntryPoint.Type}'."); + } + + foreach (var capability in profile.RequiredCapabilities) + { + if (!manifest.Capabilities.Contains(capability, StringComparer.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"Manifest capabilities do not include required capability '{capability}'."); + } + } + + if (!string.Equals(manifest.EntryPoint.TypeName, profile.ExpectedEntryPointType, StringComparison.Ordinal)) + { + throw new InvalidOperationException($"Unexpected entry point type name '{manifest.EntryPoint.TypeName}'."); + } + + if (!string.Equals(manifest.Id, profile.ExpectedPluginId, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"Manifest id '{manifest.Id}' does not match expected plug-in id for directory '{pluginDirectoryName}'."); + } + } private static string ComputeSha256(string path) { diff --git a/src/__Libraries/StellaOps.Configuration/AuthorityAdvisoryAiOptions.cs 
b/src/__Libraries/StellaOps.Configuration/AuthorityAdvisoryAiOptions.cs new file mode 100644 index 00000000..c0f0bde5 --- /dev/null +++ b/src/__Libraries/StellaOps.Configuration/AuthorityAdvisoryAiOptions.cs @@ -0,0 +1,163 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Configuration; + +/// +/// Advisory AI configuration (feature flags, remote inference policies). +/// +public sealed class AuthorityAdvisoryAiOptions +{ + public AdvisoryAiRemoteInferenceOptions RemoteInference { get; } = new(); + + internal void Normalize() + { + RemoteInference.Normalize(); + } + + internal void Validate() + { + RemoteInference.Validate(); + } +} + +public sealed class AdvisoryAiRemoteInferenceOptions +{ + private readonly List allowedProfiles = new(); + + /// + /// Controls whether remote inference endpoints (cloud or third-party) are permitted. + /// Disabled by default for sovereign/offline installs. + /// + public bool Enabled { get; set; } + + /// + /// Requires tenants to explicitly opt-in before remote inference may be invoked on their behalf. + /// + public bool RequireTenantConsent { get; set; } = true; + + /// + /// Remote inference profiles permitted when is true (e.g. cloud-openai, vendor-xyz). 
+ /// + public IList AllowedProfiles => allowedProfiles; + + internal void Normalize() + { + if (allowedProfiles.Count == 0) + { + return; + } + + var unique = new HashSet(StringComparer.OrdinalIgnoreCase); + for (var index = allowedProfiles.Count - 1; index >= 0; index--) + { + var entry = allowedProfiles[index]; + if (string.IsNullOrWhiteSpace(entry)) + { + allowedProfiles.RemoveAt(index); + continue; + } + + var normalized = entry.Trim(); + var canonical = normalized.ToLowerInvariant(); + if (!unique.Add(canonical)) + { + allowedProfiles.RemoveAt(index); + continue; + } + + allowedProfiles[index] = canonical; + } + } + + internal void Validate() + { + if (Enabled) + { + Normalize(); + + if (allowedProfiles.Count == 0) + { + throw new InvalidOperationException("Authority configuration requires at least one advisory AI remote inference profile when remote inference is enabled."); + } + } + else + { + // Ensure no stale profiles linger to avoid confusing downstream consumers. + Normalize(); + } + } +} + +public sealed class AuthorityTenantAdvisoryAiOptions +{ + public AdvisoryAiTenantRemoteInferenceOptions RemoteInference { get; } = new(); + + internal void Normalize(AuthorityAdvisoryAiOptions? _) => RemoteInference.Normalize(); + + internal void Validate(AuthorityAdvisoryAiOptions? globalOptions) => RemoteInference.Validate(globalOptions); +} + +public sealed class AdvisoryAiTenantRemoteInferenceOptions +{ + private const int MaxConsentVersionLength = 128; + private const int MaxConsentedByLength = 256; + + public bool ConsentGranted { get; set; } + + public string? ConsentVersion { get; set; } + + public DateTimeOffset? ConsentedAt { get; set; } + + public string? ConsentedBy { get; set; } + + internal void Normalize() + { + ConsentVersion = string.IsNullOrWhiteSpace(ConsentVersion) ? null : ConsentVersion.Trim(); + ConsentedBy = string.IsNullOrWhiteSpace(ConsentedBy) ? 
null : ConsentedBy.Trim(); + + if (ConsentedAt.HasValue) + { + ConsentedAt = ConsentedAt.Value.ToUniversalTime(); + } + } + + internal void Validate(AuthorityAdvisoryAiOptions? globalOptions) + { + Normalize(); + + var remoteOptions = globalOptions?.RemoteInference; + if (!ConsentGranted) + { + return; + } + + if (remoteOptions is null || !remoteOptions.Enabled) + { + throw new InvalidOperationException("Tenant remote inference consent cannot be granted when remote inference is disabled."); + } + + if (ConsentVersion is { Length: > MaxConsentVersionLength }) + { + throw new InvalidOperationException($"Tenant remote inference consentVersion must be {MaxConsentVersionLength} characters or fewer."); + } + + if (ConsentedBy is { Length: > MaxConsentedByLength }) + { + throw new InvalidOperationException($"Tenant remote inference consentedBy must be {MaxConsentedByLength} characters or fewer."); + } + + if (remoteOptions.RequireTenantConsent) + { + if (string.IsNullOrWhiteSpace(ConsentVersion)) + { + throw new InvalidOperationException("Tenant remote inference consent requires consentVersion when consentGranted is true."); + } + + if (!ConsentedAt.HasValue) + { + throw new InvalidOperationException("Tenant remote inference consent requires consentedAt when consentGranted is true."); + } + } + } +} diff --git a/src/__Libraries/StellaOps.Configuration/AuthorityApiLifecycleOptions.cs b/src/__Libraries/StellaOps.Configuration/AuthorityApiLifecycleOptions.cs new file mode 100644 index 00000000..f597fcea --- /dev/null +++ b/src/__Libraries/StellaOps.Configuration/AuthorityApiLifecycleOptions.cs @@ -0,0 +1,77 @@ +using System; + +namespace StellaOps.Configuration; + +/// +/// API lifecycle controls for the Authority service. +/// +public sealed class AuthorityApiLifecycleOptions +{ + /// + /// Settings for the legacy OAuth endpoint shim (/oauth/* → canonical). 
+ /// + public AuthorityLegacyAuthEndpointOptions LegacyAuth { get; } = new(); + + internal void Validate() + { + LegacyAuth.Validate(); + } +} + +/// +/// Configuration for legacy OAuth endpoint shims and deprecation signalling. +/// +public sealed class AuthorityLegacyAuthEndpointOptions +{ + private static readonly DateTimeOffset DefaultDeprecationDate = new(2025, 11, 1, 0, 0, 0, TimeSpan.Zero); + private static readonly DateTimeOffset DefaultSunsetDate = new(2026, 5, 1, 0, 0, 0, TimeSpan.Zero); + + /// + /// Enables the legacy endpoint shim that routes /oauth/* to the canonical endpoints. + /// + public bool Enabled { get; set; } = true; + + /// + /// Date when clients should consider the legacy endpoints deprecated. + /// + public DateTimeOffset DeprecationDate { get; set; } = DefaultDeprecationDate; + + /// + /// Date when legacy endpoints will be removed. + /// + public DateTimeOffset SunsetDate { get; set; } = DefaultSunsetDate; + + /// + /// Optional documentation URL included in the Sunset link header. + /// + public string? 
DocumentationUrl { get; set; } = "https://docs.stella-ops.org/authority/legacy-auth"; + + internal void Validate() + { + if (!Enabled) + { + return; + } + + var normalizedDeprecation = DeprecationDate.ToUniversalTime(); + var normalizedSunset = SunsetDate.ToUniversalTime(); + + if (normalizedSunset <= normalizedDeprecation) + { + throw new InvalidOperationException("Legacy auth sunset date must be after the deprecation date."); + } + + DeprecationDate = normalizedDeprecation; + SunsetDate = normalizedSunset; + + if (!string.IsNullOrWhiteSpace(DocumentationUrl)) + { + if (!Uri.TryCreate(DocumentationUrl, UriKind.Absolute, out var uri) || + (uri.Scheme != Uri.UriSchemeHttps && uri.Scheme != Uri.UriSchemeHttp)) + { + throw new InvalidOperationException("Legacy auth documentation URL must be an absolute HTTP or HTTPS URL."); + } + } + } +} + diff --git a/src/__Libraries/StellaOps.Configuration/AuthorityNotificationsOptions.cs b/src/__Libraries/StellaOps.Configuration/AuthorityNotificationsOptions.cs new file mode 100644 index 00000000..9ff747a1 --- /dev/null +++ b/src/__Libraries/StellaOps.Configuration/AuthorityNotificationsOptions.cs @@ -0,0 +1,246 @@ +using System; +using System.Collections.Generic; +using StellaOps.Cryptography; + +namespace StellaOps.Configuration; + +/// +/// Notification-related configuration surfaced by the Authority host. +/// +public sealed class AuthorityNotificationsOptions +{ + /// + /// DSSE ack token configuration. + /// + public AuthorityAckTokenOptions AckTokens { get; } = new(); + + /// + /// Webhook allowlist configuration for callback targets. + /// + public AuthorityWebhookAllowlistOptions Webhooks { get; } = new(); + + /// + /// Escalation guardrail configuration. + /// + public AuthorityEscalationOptions Escalation { get; } = new(); + + internal void Validate() + { + AckTokens.Validate(); + Webhooks.Validate(); + Escalation.Validate(); + } +} + +/// +/// Options governing signed ack token issuance. 
+/// +public sealed class AuthorityAckTokenOptions +{ + private readonly IList additionalKeys = + new List(); + + /// + /// Determines whether ack tokens are enabled. + /// + public bool Enabled { get; set; } = true; + + /// + /// DSSE payload type used for issued ack tokens. + /// + public string PayloadType { get; set; } = "application/vnd.stellaops.notify-ack-token+json"; + + /// + /// Default lifetime applied to tokens when a caller omits a value. + /// + public TimeSpan DefaultLifetime { get; set; } = TimeSpan.FromMinutes(15); + + /// + /// Maximum lifetime permitted for ack tokens. + /// + public TimeSpan MaxLifetime { get; set; } = TimeSpan.FromMinutes(30); + + /// + /// Signing algorithm identifier (defaults to ES256). + /// + public string Algorithm { get; set; } = SignatureAlgorithms.Es256; + + /// + /// Signing key source used to load ack token keys. + /// + public string KeySource { get; set; } = "file"; + + /// + /// Active signing key identifier (kid) for ack tokens. + /// + public string ActiveKeyId { get; set; } = string.Empty; + + /// + /// Path or handle to the active key material. + /// + public string KeyPath { get; set; } = string.Empty; + + /// + /// Optional crypto provider hint. + /// + public string? Provider { get; set; } + + /// + /// Optional JWKS cache lifetime override for ack keys. + /// + public TimeSpan JwksCacheLifetime { get; set; } = TimeSpan.FromMinutes(5); + + /// + /// Additional (retired) keys retained for verification. + /// + public IList AdditionalKeys => additionalKeys; + + /// + /// Metadata value emitted in JWKS use field (defaults to notify-ack). 
+ /// + public string KeyUse { get; set; } = "notify-ack"; + + internal void Validate() + { + if (!Enabled) + { + return; + } + + if (string.IsNullOrWhiteSpace(PayloadType)) + { + throw new InvalidOperationException("notifications.ackTokens.payloadType must be specified when ack tokens are enabled."); + } + + if (DefaultLifetime <= TimeSpan.Zero) + { + throw new InvalidOperationException("notifications.ackTokens.defaultLifetime must be greater than zero."); + } + + if (MaxLifetime <= TimeSpan.Zero || MaxLifetime < DefaultLifetime) + { + throw new InvalidOperationException("notifications.ackTokens.maxLifetime must be greater than zero and greater than or equal to defaultLifetime."); + } + + if (string.IsNullOrWhiteSpace(ActiveKeyId)) + { + throw new InvalidOperationException("notifications.ackTokens.activeKeyId must be provided when ack tokens are enabled."); + } + + if (string.IsNullOrWhiteSpace(KeyPath)) + { + throw new InvalidOperationException("notifications.ackTokens.keyPath must be provided when ack tokens are enabled."); + } + + if (string.IsNullOrWhiteSpace(KeySource)) + { + KeySource = "file"; + } + + if (string.IsNullOrWhiteSpace(Algorithm)) + { + Algorithm = SignatureAlgorithms.Es256; + } + + if (string.IsNullOrWhiteSpace(KeyUse)) + { + KeyUse = "notify-ack"; + } + + foreach (var additional in AdditionalKeys) + { + additional.Validate(KeySource); + } + + if (JwksCacheLifetime <= TimeSpan.Zero || JwksCacheLifetime > TimeSpan.FromHours(1)) + { + throw new InvalidOperationException("notifications.ackTokens.jwksCacheLifetime must be between 00:00:01 and 01:00:00."); + } + } +} + +/// +/// Options controlling webhook allowlists for ack callbacks. +/// +public sealed class AuthorityWebhookAllowlistOptions +{ + private readonly IList allowedHosts = new List(); + private readonly IList allowedSchemes = new List { "https" }; + + /// + /// Determines whether allowlist enforcement is enabled. 
+ /// + public bool Enabled { get; set; } = true; + + /// + /// Hostnames or wildcard suffixes permitted for webhook callbacks (e.g. hooks.slack.com, *.pagerduty.com). + /// + public IList AllowedHosts => allowedHosts; + + /// + /// Allowed URI schemes for webhook callbacks (defaults to https). + /// + public IList AllowedSchemes => allowedSchemes; + + internal void Validate() + { + if (!Enabled) + { + return; + } + + if (allowedHosts.Count == 0) + { + throw new InvalidOperationException("notifications.webhooks.allowedHosts must include at least one host when enabled."); + } + + NormalizeList(allowedHosts); + NormalizeList(allowedSchemes); + + if (allowedSchemes.Count == 0) + { + allowedSchemes.Add("https"); + } + } + + private static void NormalizeList(IList values) + { + for (var i = values.Count - 1; i >= 0; i--) + { + var current = values[i]; + if (string.IsNullOrWhiteSpace(current)) + { + values.RemoveAt(i); + continue; + } + + values[i] = current.Trim(); + } + } +} + +/// +/// Options controlling escalation enforcement for acknowledgement flows. +/// +public sealed class AuthorityEscalationOptions +{ + /// + /// Scope required to mint or execute escalation-bearing ack tokens. + /// + public string Scope { get; set; } = "notify.escalate"; + + /// + /// When true, escalation requires the caller to also possess notify.admin. 
+ /// + public bool RequireAdminScope { get; set; } = true; + + internal void Validate() + { + if (string.IsNullOrWhiteSpace(Scope)) + { + throw new InvalidOperationException("notifications.escalation.scope must be specified."); + } + + Scope = Scope.Trim().ToLowerInvariant(); + } +} diff --git a/src/__Libraries/StellaOps.Configuration/StellaOpsAuthorityOptions.cs b/src/__Libraries/StellaOps.Configuration/StellaOpsAuthorityOptions.cs index 4a8f4d6c..9c8b49d5 100644 --- a/src/__Libraries/StellaOps.Configuration/StellaOpsAuthorityOptions.cs +++ b/src/__Libraries/StellaOps.Configuration/StellaOpsAuthorityOptions.cs @@ -1,63 +1,63 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text.RegularExpressions; -using System.Threading.RateLimiting; -using StellaOps.Authority.Plugins.Abstractions; -using StellaOps.Cryptography; - -namespace StellaOps.Configuration; - -/// -/// Strongly typed configuration for the StellaOps Authority service. -/// -public sealed class StellaOpsAuthorityOptions -{ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text.RegularExpressions; +using System.Threading.RateLimiting; +using StellaOps.Authority.Plugins.Abstractions; +using StellaOps.Cryptography; + +namespace StellaOps.Configuration; + +/// +/// Strongly typed configuration for the StellaOps Authority service. +/// +public sealed class StellaOpsAuthorityOptions +{ private readonly List pluginDirectories = new(); private readonly List bypassNetworks = new(); private readonly List tenants = new(); - - /// - /// Schema version for downstream consumers to coordinate breaking changes. - /// - public int SchemaVersion { get; set; } = 1; - - /// - /// Absolute issuer URI advertised to clients (e.g. https://authority.stella-ops.local). - /// - public Uri? Issuer { get; set; } - - /// - /// Lifetime for OAuth access tokens issued by Authority. 
- /// + + /// + /// Schema version for downstream consumers to coordinate breaking changes. + /// + public int SchemaVersion { get; set; } = 1; + + /// + /// Absolute issuer URI advertised to clients (e.g. https://authority.stella-ops.local). + /// + public Uri? Issuer { get; set; } + + /// + /// Lifetime for OAuth access tokens issued by Authority. + /// public TimeSpan AccessTokenLifetime { get; set; } = TimeSpan.FromMinutes(2); - - /// - /// Lifetime for OAuth refresh tokens issued by Authority. - /// - public TimeSpan RefreshTokenLifetime { get; set; } = TimeSpan.FromDays(30); - - /// - /// Lifetime for OpenID Connect identity tokens. - /// - public TimeSpan IdentityTokenLifetime { get; set; } = TimeSpan.FromMinutes(5); - - /// - /// Lifetime for OAuth authorization codes. - /// - public TimeSpan AuthorizationCodeLifetime { get; set; } = TimeSpan.FromMinutes(5); - - /// - /// Lifetime for OAuth device codes (device authorization flow). - /// - public TimeSpan DeviceCodeLifetime { get; set; } = TimeSpan.FromMinutes(15); - - /// - /// Directories scanned for Authority plugins (absolute or relative to application base path). - /// - public IList PluginDirectories => pluginDirectories; - + + /// + /// Lifetime for OAuth refresh tokens issued by Authority. + /// + public TimeSpan RefreshTokenLifetime { get; set; } = TimeSpan.FromDays(30); + + /// + /// Lifetime for OpenID Connect identity tokens. + /// + public TimeSpan IdentityTokenLifetime { get; set; } = TimeSpan.FromMinutes(5); + + /// + /// Lifetime for OAuth authorization codes. + /// + public TimeSpan AuthorizationCodeLifetime { get; set; } = TimeSpan.FromMinutes(5); + + /// + /// Lifetime for OAuth device codes (device authorization flow). + /// + public TimeSpan DeviceCodeLifetime { get; set; } = TimeSpan.FromMinutes(15); + + /// + /// Directories scanned for Authority plugins (absolute or relative to application base path). 
+ /// + public IList PluginDirectories => pluginDirectories; + /// /// CIDR blocks permitted to bypass certain authentication policies (e.g. on-host cron). /// @@ -67,73 +67,92 @@ public sealed class StellaOpsAuthorityOptions /// Declared tenants for the deployment. /// public IList Tenants => tenants; - - /// - /// Configuration describing the Authority MongoDB storage. - /// - public AuthorityStorageOptions Storage { get; } = new(); - - /// - /// Bootstrap settings for initial administrative provisioning. - /// + + /// + /// Configuration describing the Authority MongoDB storage. + /// + public AuthorityStorageOptions Storage { get; } = new(); + + /// + /// Bootstrap settings for initial administrative provisioning. + /// public AuthorityBootstrapOptions Bootstrap { get; } = new(); - - /// - /// Configuration describing available Authority plugins and their manifests. - /// - public AuthorityPluginSettings Plugins { get; } = new(); - + + /// + /// Configuration describing available Authority plugins and their manifests. + /// + public AuthorityPluginSettings Plugins { get; } = new(); + /// /// Security-related configuration for the Authority host. /// public AuthoritySecurityOptions Security { get; } = new(); + /// + /// Advisory AI configuration (remote inference policies, consent defaults). + /// + public AuthorityAdvisoryAiOptions AdvisoryAi { get; } = new(); + + /// + /// Notification system configuration (webhook allowlists, ack token policies). + /// + public AuthorityNotificationsOptions Notifications { get; } = new(); + /// /// Exception governance configuration (routing templates, MFA requirements). /// public AuthorityExceptionsOptions Exceptions { get; } = new(); + /// + /// API lifecycle configuration (deprecations, migration messaging). + /// + public AuthorityApiLifecycleOptions ApiLifecycle { get; } = new(); + /// /// Signing options for Authority-generated artefacts (revocation bundles, JWKS). 
/// public AuthoritySigningOptions Signing { get; } = new(); - - /// - /// Validates configured values and normalises collections. - /// - /// Thrown when configuration is invalid. - public void Validate() - { - if (SchemaVersion <= 0) - { - throw new InvalidOperationException("Authority configuration requires a positive schemaVersion."); - } - - if (Issuer is null) - { - throw new InvalidOperationException("Authority configuration requires an issuer URL."); - } - - if (!Issuer.IsAbsoluteUri) - { - throw new InvalidOperationException("Authority issuer must be an absolute URI."); - } - - if (string.Equals(Issuer.Scheme, Uri.UriSchemeHttp, StringComparison.OrdinalIgnoreCase) && !Issuer.IsLoopback) - { - throw new InvalidOperationException("Authority issuer must use HTTPS unless running on a loopback interface."); - } - - ValidateLifetime(AccessTokenLifetime, nameof(AccessTokenLifetime), TimeSpan.FromHours(24)); - ValidateLifetime(RefreshTokenLifetime, nameof(RefreshTokenLifetime), TimeSpan.FromDays(365)); - ValidateLifetime(IdentityTokenLifetime, nameof(IdentityTokenLifetime), TimeSpan.FromHours(24)); - ValidateLifetime(AuthorizationCodeLifetime, nameof(AuthorizationCodeLifetime), TimeSpan.FromHours(1)); - ValidateLifetime(DeviceCodeLifetime, nameof(DeviceCodeLifetime), TimeSpan.FromHours(24)); - - NormaliseList(pluginDirectories); - NormaliseList(bypassNetworks); - + + /// + /// Validates configured values and normalises collections. + /// + /// Thrown when configuration is invalid. 
+ public void Validate() + { + if (SchemaVersion <= 0) + { + throw new InvalidOperationException("Authority configuration requires a positive schemaVersion."); + } + + if (Issuer is null) + { + throw new InvalidOperationException("Authority configuration requires an issuer URL."); + } + + if (!Issuer.IsAbsoluteUri) + { + throw new InvalidOperationException("Authority issuer must be an absolute URI."); + } + + if (string.Equals(Issuer.Scheme, Uri.UriSchemeHttp, StringComparison.OrdinalIgnoreCase) && !Issuer.IsLoopback) + { + throw new InvalidOperationException("Authority issuer must use HTTPS unless running on a loopback interface."); + } + + ValidateLifetime(AccessTokenLifetime, nameof(AccessTokenLifetime), TimeSpan.FromHours(24)); + ValidateLifetime(RefreshTokenLifetime, nameof(RefreshTokenLifetime), TimeSpan.FromDays(365)); + ValidateLifetime(IdentityTokenLifetime, nameof(IdentityTokenLifetime), TimeSpan.FromHours(24)); + ValidateLifetime(AuthorizationCodeLifetime, nameof(AuthorizationCodeLifetime), TimeSpan.FromHours(1)); + ValidateLifetime(DeviceCodeLifetime, nameof(DeviceCodeLifetime), TimeSpan.FromHours(24)); + + NormaliseList(pluginDirectories); + NormaliseList(bypassNetworks); + Security.Validate(); + AdvisoryAi.Normalize(); + AdvisoryAi.Validate(); + Notifications.Validate(); + ApiLifecycle.Validate(); Signing.Validate(); Plugins.NormalizeAndValidate(); Storage.Validate(); @@ -145,8 +164,8 @@ public sealed class StellaOpsAuthorityOptions var identifiers = new HashSet(StringComparer.Ordinal); foreach (var tenant in tenants) { - tenant.Normalize(); - tenant.Validate(); + tenant.Normalize(AdvisoryAi); + tenant.Validate(AdvisoryAi); if (!identifiers.Add(tenant.Id)) { throw new InvalidOperationException($"Authority configuration contains duplicate tenant identifier '{tenant.Id}'."); @@ -154,254 +173,254 @@ public sealed class StellaOpsAuthorityOptions } } } - - private static void ValidateLifetime(TimeSpan value, string propertyName, TimeSpan maximum) - { - if 
(value <= TimeSpan.Zero) - { - throw new InvalidOperationException($"Authority configuration requires {propertyName} to be greater than zero."); - } - - if (value > maximum) - { - throw new InvalidOperationException($"Authority configuration requires {propertyName} to be less than or equal to {maximum}."); - } - } - - private static void NormaliseList(IList values) - { - if (values.Count == 0) - { - return; - } - - var unique = new HashSet(StringComparer.OrdinalIgnoreCase); - - for (var index = values.Count - 1; index >= 0; index--) - { - var entry = values[index]; - - if (string.IsNullOrWhiteSpace(entry)) - { - values.RemoveAt(index); - continue; - } - - var trimmed = entry.Trim(); - if (!unique.Add(trimmed)) - { - values.RemoveAt(index); - continue; - } - - values[index] = trimmed; - } - } -} - -public sealed class AuthoritySecurityOptions -{ - /// - /// Rate limiting configuration applied to Authority endpoints. - /// - public AuthorityRateLimitingOptions RateLimiting { get; } = new(); - - /// - /// Default password hashing parameters advertised to Authority plug-ins. - /// - public PasswordHashOptions PasswordHashing { get; } = new(); - - /// - /// Sender-constraint configuration (DPoP, mTLS). 
- /// - public AuthoritySenderConstraintOptions SenderConstraints { get; } = new(); - - internal void Validate() - { - RateLimiting.Validate(); - PasswordHashing.Validate(); - SenderConstraints.Validate(); - } -} - -public sealed class AuthorityRateLimitingOptions -{ - public AuthorityRateLimitingOptions() - { - Token = new AuthorityEndpointRateLimitOptions - { - PermitLimit = 30, - Window = TimeSpan.FromMinutes(1), - QueueLimit = 0 - }; - - Authorize = new AuthorityEndpointRateLimitOptions - { - PermitLimit = 60, - Window = TimeSpan.FromMinutes(1), - QueueLimit = 10 - }; - - Internal = new AuthorityEndpointRateLimitOptions - { - Enabled = false, - PermitLimit = 5, - Window = TimeSpan.FromMinutes(1), - QueueLimit = 0 - }; - } - - /// - /// Rate limiting configuration applied to the /token endpoint. - /// - public AuthorityEndpointRateLimitOptions Token { get; } - - /// - /// Rate limiting configuration applied to the /authorize endpoint. - /// - public AuthorityEndpointRateLimitOptions Authorize { get; } - - /// - /// Rate limiting configuration applied to /internal endpoints. 
- /// - public AuthorityEndpointRateLimitOptions Internal { get; } - - internal void Validate() - { - Token.Validate(nameof(Token)); - Authorize.Validate(nameof(Authorize)); - Internal.Validate(nameof(Internal)); - } -} - -public sealed class AuthoritySenderConstraintOptions -{ - public AuthoritySenderConstraintOptions() - { - Dpop = new AuthorityDpopOptions(); - Mtls = new AuthorityMtlsOptions(); - } - - public AuthorityDpopOptions Dpop { get; } - - public AuthorityMtlsOptions Mtls { get; } - - internal void Validate() - { - Dpop.Validate(); - Mtls.Validate(); - } -} - -public sealed class AuthorityDpopOptions -{ - private readonly HashSet allowedAlgorithms = new(StringComparer.OrdinalIgnoreCase) - { - "ES256", - "ES384" - }; - - public bool Enabled { get; set; } - - public TimeSpan ProofLifetime { get; set; } = TimeSpan.FromMinutes(2); - - public TimeSpan AllowedClockSkew { get; set; } = TimeSpan.FromSeconds(30); - - public TimeSpan ReplayWindow { get; set; } = TimeSpan.FromMinutes(5); - - public ISet AllowedAlgorithms => allowedAlgorithms; - - public IReadOnlySet NormalizedAlgorithms { get; private set; } = new HashSet(StringComparer.Ordinal); - - public AuthorityDpopNonceOptions Nonce { get; } = new(); - - internal void Validate() - { - if (ProofLifetime <= TimeSpan.Zero) - { - throw new InvalidOperationException("Dpop.ProofLifetime must be greater than zero."); - } - - if (AllowedClockSkew < TimeSpan.Zero || AllowedClockSkew > TimeSpan.FromMinutes(5)) - { - throw new InvalidOperationException("Dpop.AllowedClockSkew must be between 0 and 5 minutes."); - } - - if (ReplayWindow < TimeSpan.Zero) - { - throw new InvalidOperationException("Dpop.ReplayWindow must be greater than or equal to zero."); - } - - if (allowedAlgorithms.Count == 0) - { - throw new InvalidOperationException("At least one DPoP algorithm must be configured."); - } - - NormalizedAlgorithms = allowedAlgorithms - .Select(static alg => alg.Trim().ToUpperInvariant()) - .Where(static alg => 
alg.Length > 0) - .ToHashSet(StringComparer.Ordinal); - - if (NormalizedAlgorithms.Count == 0) - { - throw new InvalidOperationException("Allowed DPoP algorithms cannot be empty after normalization."); - } - - Nonce.Validate(); - } -} - -public sealed class AuthorityDpopNonceOptions -{ - private readonly HashSet requiredAudiences = new(StringComparer.OrdinalIgnoreCase) - { - "signer", - "attestor" - }; - - public bool Enabled { get; set; } = true; - - public TimeSpan Ttl { get; set; } = TimeSpan.FromMinutes(10); - - public int MaxIssuancePerMinute { get; set; } = 120; - - public string Store { get; set; } = "memory"; - - public string? RedisConnectionString { get; set; } - - public ISet RequiredAudiences => requiredAudiences; - - public IReadOnlySet NormalizedAudiences { get; private set; } = new HashSet(StringComparer.OrdinalIgnoreCase); - - internal void Validate() - { - if (Ttl <= TimeSpan.Zero) - { - throw new InvalidOperationException("Dpop.Nonce.Ttl must be greater than zero."); - } - - if (MaxIssuancePerMinute < 1) - { - throw new InvalidOperationException("Dpop.Nonce.MaxIssuancePerMinute must be at least 1."); - } - - if (string.IsNullOrWhiteSpace(Store)) - { - throw new InvalidOperationException("Dpop.Nonce.Store must be specified."); - } - - Store = Store.Trim().ToLowerInvariant(); - - if (Store is not ("memory" or "redis")) - { - throw new InvalidOperationException("Dpop.Nonce.Store must be either 'memory' or 'redis'."); - } - - if (Store == "redis" && string.IsNullOrWhiteSpace(RedisConnectionString)) - { - throw new InvalidOperationException("Dpop.Nonce.RedisConnectionString must be provided when using the 'redis' store."); - } - + + private static void ValidateLifetime(TimeSpan value, string propertyName, TimeSpan maximum) + { + if (value <= TimeSpan.Zero) + { + throw new InvalidOperationException($"Authority configuration requires {propertyName} to be greater than zero."); + } + + if (value > maximum) + { + throw new 
InvalidOperationException($"Authority configuration requires {propertyName} to be less than or equal to {maximum}."); + } + } + + private static void NormaliseList(IList values) + { + if (values.Count == 0) + { + return; + } + + var unique = new HashSet(StringComparer.OrdinalIgnoreCase); + + for (var index = values.Count - 1; index >= 0; index--) + { + var entry = values[index]; + + if (string.IsNullOrWhiteSpace(entry)) + { + values.RemoveAt(index); + continue; + } + + var trimmed = entry.Trim(); + if (!unique.Add(trimmed)) + { + values.RemoveAt(index); + continue; + } + + values[index] = trimmed; + } + } +} + +public sealed class AuthoritySecurityOptions +{ + /// + /// Rate limiting configuration applied to Authority endpoints. + /// + public AuthorityRateLimitingOptions RateLimiting { get; } = new(); + + /// + /// Default password hashing parameters advertised to Authority plug-ins. + /// + public PasswordHashOptions PasswordHashing { get; } = new(); + + /// + /// Sender-constraint configuration (DPoP, mTLS). + /// + public AuthoritySenderConstraintOptions SenderConstraints { get; } = new(); + + internal void Validate() + { + RateLimiting.Validate(); + PasswordHashing.Validate(); + SenderConstraints.Validate(); + } +} + +public sealed class AuthorityRateLimitingOptions +{ + public AuthorityRateLimitingOptions() + { + Token = new AuthorityEndpointRateLimitOptions + { + PermitLimit = 30, + Window = TimeSpan.FromMinutes(1), + QueueLimit = 0 + }; + + Authorize = new AuthorityEndpointRateLimitOptions + { + PermitLimit = 60, + Window = TimeSpan.FromMinutes(1), + QueueLimit = 10 + }; + + Internal = new AuthorityEndpointRateLimitOptions + { + Enabled = false, + PermitLimit = 5, + Window = TimeSpan.FromMinutes(1), + QueueLimit = 0 + }; + } + + /// + /// Rate limiting configuration applied to the /token endpoint. + /// + public AuthorityEndpointRateLimitOptions Token { get; } + + /// + /// Rate limiting configuration applied to the /authorize endpoint. 
+ /// + public AuthorityEndpointRateLimitOptions Authorize { get; } + + /// + /// Rate limiting configuration applied to /internal endpoints. + /// + public AuthorityEndpointRateLimitOptions Internal { get; } + + internal void Validate() + { + Token.Validate(nameof(Token)); + Authorize.Validate(nameof(Authorize)); + Internal.Validate(nameof(Internal)); + } +} + +public sealed class AuthoritySenderConstraintOptions +{ + public AuthoritySenderConstraintOptions() + { + Dpop = new AuthorityDpopOptions(); + Mtls = new AuthorityMtlsOptions(); + } + + public AuthorityDpopOptions Dpop { get; } + + public AuthorityMtlsOptions Mtls { get; } + + internal void Validate() + { + Dpop.Validate(); + Mtls.Validate(); + } +} + +public sealed class AuthorityDpopOptions +{ + private readonly HashSet allowedAlgorithms = new(StringComparer.OrdinalIgnoreCase) + { + "ES256", + "ES384" + }; + + public bool Enabled { get; set; } + + public TimeSpan ProofLifetime { get; set; } = TimeSpan.FromMinutes(2); + + public TimeSpan AllowedClockSkew { get; set; } = TimeSpan.FromSeconds(30); + + public TimeSpan ReplayWindow { get; set; } = TimeSpan.FromMinutes(5); + + public ISet AllowedAlgorithms => allowedAlgorithms; + + public IReadOnlySet NormalizedAlgorithms { get; private set; } = new HashSet(StringComparer.Ordinal); + + public AuthorityDpopNonceOptions Nonce { get; } = new(); + + internal void Validate() + { + if (ProofLifetime <= TimeSpan.Zero) + { + throw new InvalidOperationException("Dpop.ProofLifetime must be greater than zero."); + } + + if (AllowedClockSkew < TimeSpan.Zero || AllowedClockSkew > TimeSpan.FromMinutes(5)) + { + throw new InvalidOperationException("Dpop.AllowedClockSkew must be between 0 and 5 minutes."); + } + + if (ReplayWindow < TimeSpan.Zero) + { + throw new InvalidOperationException("Dpop.ReplayWindow must be greater than or equal to zero."); + } + + if (allowedAlgorithms.Count == 0) + { + throw new InvalidOperationException("At least one DPoP algorithm must be 
configured."); + } + + NormalizedAlgorithms = allowedAlgorithms + .Select(static alg => alg.Trim().ToUpperInvariant()) + .Where(static alg => alg.Length > 0) + .ToHashSet(StringComparer.Ordinal); + + if (NormalizedAlgorithms.Count == 0) + { + throw new InvalidOperationException("Allowed DPoP algorithms cannot be empty after normalization."); + } + + Nonce.Validate(); + } +} + +public sealed class AuthorityDpopNonceOptions +{ + private readonly HashSet requiredAudiences = new(StringComparer.OrdinalIgnoreCase) + { + "signer", + "attestor" + }; + + public bool Enabled { get; set; } = true; + + public TimeSpan Ttl { get; set; } = TimeSpan.FromMinutes(10); + + public int MaxIssuancePerMinute { get; set; } = 120; + + public string Store { get; set; } = "memory"; + + public string? RedisConnectionString { get; set; } + + public ISet RequiredAudiences => requiredAudiences; + + public IReadOnlySet NormalizedAudiences { get; private set; } = new HashSet(StringComparer.OrdinalIgnoreCase); + + internal void Validate() + { + if (Ttl <= TimeSpan.Zero) + { + throw new InvalidOperationException("Dpop.Nonce.Ttl must be greater than zero."); + } + + if (MaxIssuancePerMinute < 1) + { + throw new InvalidOperationException("Dpop.Nonce.MaxIssuancePerMinute must be at least 1."); + } + + if (string.IsNullOrWhiteSpace(Store)) + { + throw new InvalidOperationException("Dpop.Nonce.Store must be specified."); + } + + Store = Store.Trim().ToLowerInvariant(); + + if (Store is not ("memory" or "redis")) + { + throw new InvalidOperationException("Dpop.Nonce.Store must be either 'memory' or 'redis'."); + } + + if (Store == "redis" && string.IsNullOrWhiteSpace(RedisConnectionString)) + { + throw new InvalidOperationException("Dpop.Nonce.RedisConnectionString must be provided when using the 'redis' store."); + } + var normalizedAudiences = requiredAudiences .Select(static aud => aud.Trim()) .Where(static aud => aud.Length > 0) @@ -421,174 +440,174 @@ public sealed class AuthorityDpopNonceOptions 
NormalizedAudiences = normalizedAudiences; } } - -public sealed class AuthorityMtlsOptions -{ - private readonly HashSet enforceForAudiences = new(StringComparer.OrdinalIgnoreCase) - { - "signer" - }; - - private readonly HashSet allowedSanTypes = new(StringComparer.OrdinalIgnoreCase) - { - "dns", - "uri" - }; - - public bool Enabled { get; set; } - - public bool RequireChainValidation { get; set; } = true; - - public TimeSpan RotationGrace { get; set; } = TimeSpan.FromMinutes(15); - - public ISet EnforceForAudiences => enforceForAudiences; - - public IReadOnlySet NormalizedAudiences { get; private set; } = new HashSet(StringComparer.OrdinalIgnoreCase); - - public IList AllowedCertificateAuthorities { get; } = new List(); - - public IList AllowedSubjectPatterns { get; } = new List(); - - public ISet AllowedSanTypes => allowedSanTypes; - - public IReadOnlyList NormalizedSubjectPatterns { get; private set; } = Array.Empty(); - - public IReadOnlySet NormalizedSanTypes { get; private set; } = new HashSet(StringComparer.OrdinalIgnoreCase); - - internal void Validate() - { - if (RotationGrace < TimeSpan.Zero) - { - throw new InvalidOperationException("Mtls.RotationGrace must be non-negative."); - } - - NormalizedAudiences = enforceForAudiences - .Select(static aud => aud.Trim()) - .Where(static aud => aud.Length > 0) - .ToHashSet(StringComparer.OrdinalIgnoreCase); - - if (Enabled && NormalizedAudiences.Count == 0) - { - throw new InvalidOperationException("Mtls.EnforceForAudiences must include at least one audience when enabled."); - } - - if (AllowedCertificateAuthorities.Any(static path => string.IsNullOrWhiteSpace(path))) - { - throw new InvalidOperationException("Mtls.AllowedCertificateAuthorities entries must not be empty."); - } - - NormalizedSanTypes = allowedSanTypes - .Select(static value => value.Trim()) - .Where(static value => value.Length > 0) - .Select(static value => value.ToLowerInvariant()) - .ToHashSet(StringComparer.OrdinalIgnoreCase); - - if (Enabled 
&& NormalizedSanTypes.Count == 0) - { - throw new InvalidOperationException("Mtls.AllowedSanTypes must include at least one entry when enabled."); - } - - var compiledPatterns = new List(AllowedSubjectPatterns.Count); - - foreach (var pattern in AllowedSubjectPatterns) - { - if (string.IsNullOrWhiteSpace(pattern)) - { - throw new InvalidOperationException("Mtls.AllowedSubjectPatterns entries must not be empty."); - } - - try - { - compiledPatterns.Add(new Regex(pattern, RegexOptions.CultureInvariant | RegexOptions.IgnoreCase | RegexOptions.Compiled, TimeSpan.FromMilliseconds(100))); - } - catch (RegexParseException ex) - { - throw new InvalidOperationException($"Mtls.AllowedSubjectPatterns entry '{pattern}' is not a valid regular expression.", ex); - } - } - - NormalizedSubjectPatterns = compiledPatterns; - } -} - -public sealed class AuthorityEndpointRateLimitOptions -{ - /// - /// Gets or sets a value indicating whether rate limiting is enabled for the endpoint. - /// - public bool Enabled { get; set; } = true; - - /// - /// Maximum number of requests allowed within the configured window. - /// - public int PermitLimit { get; set; } = 60; - - /// - /// Size of the fixed window applied to the rate limiter. - /// - public TimeSpan Window { get; set; } = TimeSpan.FromMinutes(1); - - /// - /// Maximum number of queued requests awaiting permits. - /// - public int QueueLimit { get; set; } = 0; - - /// - /// Ordering strategy for queued requests. 
- /// - public QueueProcessingOrder QueueProcessingOrder { get; set; } = QueueProcessingOrder.OldestFirst; - - internal void Validate(string name) - { - if (!Enabled) - { - return; - } - - if (PermitLimit <= 0) - { - throw new InvalidOperationException($"Authority rate limiting '{name}' requires permitLimit to be greater than zero."); - } - - if (QueueLimit < 0) - { - throw new InvalidOperationException($"Authority rate limiting '{name}' queueLimit cannot be negative."); - } - - if (Window <= TimeSpan.Zero || Window > TimeSpan.FromHours(1)) - { - throw new InvalidOperationException($"Authority rate limiting '{name}' window must be greater than zero and no more than one hour."); - } - } -} - -public sealed class AuthorityStorageOptions -{ - /// - /// Mongo connection string used by Authority storage. - /// - public string ConnectionString { get; set; } = string.Empty; - - /// - /// Optional explicit database name override. - /// - public string? DatabaseName { get; set; } - - /// - /// Mongo command timeout. 
- /// - public TimeSpan CommandTimeout { get; set; } = TimeSpan.FromSeconds(30); - - internal void Validate() - { - if (string.IsNullOrWhiteSpace(ConnectionString)) - { - throw new InvalidOperationException("Authority storage requires a Mongo connection string."); - } - - if (CommandTimeout <= TimeSpan.Zero) - { - throw new InvalidOperationException("Authority storage command timeout must be greater than zero."); + +public sealed class AuthorityMtlsOptions +{ + private readonly HashSet enforceForAudiences = new(StringComparer.OrdinalIgnoreCase) + { + "signer" + }; + + private readonly HashSet allowedSanTypes = new(StringComparer.OrdinalIgnoreCase) + { + "dns", + "uri" + }; + + public bool Enabled { get; set; } + + public bool RequireChainValidation { get; set; } = true; + + public TimeSpan RotationGrace { get; set; } = TimeSpan.FromMinutes(15); + + public ISet EnforceForAudiences => enforceForAudiences; + + public IReadOnlySet NormalizedAudiences { get; private set; } = new HashSet(StringComparer.OrdinalIgnoreCase); + + public IList AllowedCertificateAuthorities { get; } = new List(); + + public IList AllowedSubjectPatterns { get; } = new List(); + + public ISet AllowedSanTypes => allowedSanTypes; + + public IReadOnlyList NormalizedSubjectPatterns { get; private set; } = Array.Empty(); + + public IReadOnlySet NormalizedSanTypes { get; private set; } = new HashSet(StringComparer.OrdinalIgnoreCase); + + internal void Validate() + { + if (RotationGrace < TimeSpan.Zero) + { + throw new InvalidOperationException("Mtls.RotationGrace must be non-negative."); + } + + NormalizedAudiences = enforceForAudiences + .Select(static aud => aud.Trim()) + .Where(static aud => aud.Length > 0) + .ToHashSet(StringComparer.OrdinalIgnoreCase); + + if (Enabled && NormalizedAudiences.Count == 0) + { + throw new InvalidOperationException("Mtls.EnforceForAudiences must include at least one audience when enabled."); + } + + if (AllowedCertificateAuthorities.Any(static path => 
string.IsNullOrWhiteSpace(path))) + { + throw new InvalidOperationException("Mtls.AllowedCertificateAuthorities entries must not be empty."); + } + + NormalizedSanTypes = allowedSanTypes + .Select(static value => value.Trim()) + .Where(static value => value.Length > 0) + .Select(static value => value.ToLowerInvariant()) + .ToHashSet(StringComparer.OrdinalIgnoreCase); + + if (Enabled && NormalizedSanTypes.Count == 0) + { + throw new InvalidOperationException("Mtls.AllowedSanTypes must include at least one entry when enabled."); + } + + var compiledPatterns = new List(AllowedSubjectPatterns.Count); + + foreach (var pattern in AllowedSubjectPatterns) + { + if (string.IsNullOrWhiteSpace(pattern)) + { + throw new InvalidOperationException("Mtls.AllowedSubjectPatterns entries must not be empty."); + } + + try + { + compiledPatterns.Add(new Regex(pattern, RegexOptions.CultureInvariant | RegexOptions.IgnoreCase | RegexOptions.Compiled, TimeSpan.FromMilliseconds(100))); + } + catch (RegexParseException ex) + { + throw new InvalidOperationException($"Mtls.AllowedSubjectPatterns entry '{pattern}' is not a valid regular expression.", ex); + } + } + + NormalizedSubjectPatterns = compiledPatterns; + } +} + +public sealed class AuthorityEndpointRateLimitOptions +{ + /// + /// Gets or sets a value indicating whether rate limiting is enabled for the endpoint. + /// + public bool Enabled { get; set; } = true; + + /// + /// Maximum number of requests allowed within the configured window. + /// + public int PermitLimit { get; set; } = 60; + + /// + /// Size of the fixed window applied to the rate limiter. + /// + public TimeSpan Window { get; set; } = TimeSpan.FromMinutes(1); + + /// + /// Maximum number of queued requests awaiting permits. + /// + public int QueueLimit { get; set; } = 0; + + /// + /// Ordering strategy for queued requests. 
+ /// + public QueueProcessingOrder QueueProcessingOrder { get; set; } = QueueProcessingOrder.OldestFirst; + + internal void Validate(string name) + { + if (!Enabled) + { + return; + } + + if (PermitLimit <= 0) + { + throw new InvalidOperationException($"Authority rate limiting '{name}' requires permitLimit to be greater than zero."); + } + + if (QueueLimit < 0) + { + throw new InvalidOperationException($"Authority rate limiting '{name}' queueLimit cannot be negative."); + } + + if (Window <= TimeSpan.Zero || Window > TimeSpan.FromHours(1)) + { + throw new InvalidOperationException($"Authority rate limiting '{name}' window must be greater than zero and no more than one hour."); + } + } +} + +public sealed class AuthorityStorageOptions +{ + /// + /// Mongo connection string used by Authority storage. + /// + public string ConnectionString { get; set; } = string.Empty; + + /// + /// Optional explicit database name override. + /// + public string? DatabaseName { get; set; } + + /// + /// Mongo command timeout. + /// + public TimeSpan CommandTimeout { get; set; } = TimeSpan.FromSeconds(30); + + internal void Validate() + { + if (string.IsNullOrWhiteSpace(ConnectionString)) + { + throw new InvalidOperationException("Authority storage requires a Mongo connection string."); + } + + if (CommandTimeout <= TimeSpan.Zero) + { + throw new InvalidOperationException("Authority storage command timeout must be greater than zero."); } } } @@ -702,35 +721,35 @@ public sealed class AuthorityBootstrapOptions /// /// Enables or disables bootstrap administrative APIs. /// - public bool Enabled { get; set; } = false; - - /// - /// API key required when invoking bootstrap endpoints. - /// - public string? ApiKey { get; set; } = string.Empty; - - /// - /// Default identity provider used when none is specified in bootstrap requests. - /// - public string? 
DefaultIdentityProvider { get; set; } = "standard"; - - internal void Validate() - { - if (!Enabled) - { - return; - } - - if (string.IsNullOrWhiteSpace(ApiKey)) - { - throw new InvalidOperationException("Authority bootstrap configuration requires an API key when enabled."); - } - - if (string.IsNullOrWhiteSpace(DefaultIdentityProvider)) - { - throw new InvalidOperationException("Authority bootstrap configuration requires a default identity provider name when enabled."); - } - } + public bool Enabled { get; set; } = false; + + /// + /// API key required when invoking bootstrap endpoints. + /// + public string? ApiKey { get; set; } = string.Empty; + + /// + /// Default identity provider used when none is specified in bootstrap requests. + /// + public string? DefaultIdentityProvider { get; set; } = "standard"; + + internal void Validate() + { + if (!Enabled) + { + return; + } + + if (string.IsNullOrWhiteSpace(ApiKey)) + { + throw new InvalidOperationException("Authority bootstrap configuration requires an API key when enabled."); + } + + if (string.IsNullOrWhiteSpace(DefaultIdentityProvider)) + { + throw new InvalidOperationException("Authority bootstrap configuration requires a default identity provider name when enabled."); + } + } } public sealed class AuthorityTenantOptions @@ -746,7 +765,9 @@ public sealed class AuthorityTenantOptions public IList DefaultRoles { get; } = new List(); public IList Projects { get; } = new List(); - internal void Normalize() + public AuthorityTenantAdvisoryAiOptions AdvisoryAi { get; } = new(); + + internal void Normalize(AuthorityAdvisoryAiOptions? advisoryAiOptions) { Id = (Id ?? string.Empty).Trim(); DisplayName = (DisplayName ?? string.Empty).Trim(); @@ -787,9 +808,11 @@ public sealed class AuthorityTenantOptions Projects[index] = normalized; } } + + AdvisoryAi.Normalize(advisoryAiOptions); } - internal void Validate() + internal void Validate(AuthorityAdvisoryAiOptions? 
advisoryAiOptions) { if (string.IsNullOrWhiteSpace(Id)) { @@ -816,190 +839,193 @@ public sealed class AuthorityTenantOptions } } } + + AdvisoryAi.Validate(advisoryAiOptions); } private static readonly Regex TenantSlugRegex = new("^[a-z0-9-]+$", RegexOptions.Compiled | RegexOptions.CultureInvariant); private static readonly Regex ProjectSlugRegex = new("^[a-z0-9-]+$", RegexOptions.Compiled | RegexOptions.CultureInvariant); } + public sealed class AuthorityPluginSettings { - private static readonly StringComparer OrdinalIgnoreCase = StringComparer.OrdinalIgnoreCase; - - /// - /// Directory containing per-plugin configuration manifests (relative paths resolved against application base path). - /// - public string ConfigurationDirectory { get; set; } = "../etc/authority.plugins"; - - /// - /// Declarative descriptors for Authority plugins (keyed by logical plugin name). - /// - public IDictionary Descriptors { get; } = new Dictionary(OrdinalIgnoreCase); - - internal void NormalizeAndValidate() - { - if (Descriptors.Count == 0) - { - return; - } - - foreach (var (name, descriptor) in Descriptors.ToArray()) - { - if (descriptor is null) - { - throw new InvalidOperationException($"Authority plugin descriptor '{name}' is null."); - } - - descriptor.Normalize(name); - descriptor.Validate(name); - } - } -} - -public sealed class AuthorityPluginDescriptorOptions -{ - private static readonly StringComparer OrdinalIgnoreCase = StringComparer.OrdinalIgnoreCase; - - private readonly List capabilities = new(); - private readonly Dictionary metadata = new(OrdinalIgnoreCase); - private static readonly HashSet AllowedCapabilities = new( - new[] - { - AuthorityPluginCapabilities.Password, - AuthorityPluginCapabilities.Mfa, - AuthorityPluginCapabilities.ClientProvisioning, - AuthorityPluginCapabilities.Bootstrap - }, - OrdinalIgnoreCase); - - /// - /// Logical type identifier for the plugin (e.g. standard, ldap). - /// - public string? 
Type { get; set; } - - /// - /// Name of the plugin assembly (without file extension). - /// - public string? AssemblyName { get; set; } - - /// - /// Optional explicit assembly path override; relative paths resolve against plugin directories. - /// - public string? AssemblyPath { get; set; } - - /// - /// Indicates whether the plugin should be enabled. - /// - public bool Enabled { get; set; } = true; - - /// - /// Plugin capability hints surfaced to the Authority host. - /// - public IList Capabilities => capabilities; - - /// - /// Optional metadata (string key/value) passed to plugin implementations. - /// - public IDictionary Metadata => metadata; - - /// - /// Relative path to the plugin-specific configuration file (defaults to <pluginName>.yaml). - /// - public string? ConfigFile { get; set; } - - internal void Normalize(string pluginName) - { - if (string.IsNullOrWhiteSpace(ConfigFile)) - { - ConfigFile = $"{pluginName}.yaml"; - } - else - { - ConfigFile = ConfigFile.Trim(); - } - - Type = string.IsNullOrWhiteSpace(Type) ? 
pluginName : Type.Trim(); - - if (!string.IsNullOrWhiteSpace(AssemblyName)) - { - AssemblyName = AssemblyName.Trim(); - } - - if (!string.IsNullOrWhiteSpace(AssemblyPath)) - { - AssemblyPath = AssemblyPath.Trim(); - } - - if (capabilities.Count > 0) - { - var seen = new HashSet(OrdinalIgnoreCase); - var unique = new List(capabilities.Count); - - foreach (var entry in capabilities) - { - if (string.IsNullOrWhiteSpace(entry)) - { - continue; - } - - var canonical = entry.Trim().ToLowerInvariant(); - if (seen.Add(canonical)) - { - unique.Add(canonical); - } - } - - unique.Sort(StringComparer.Ordinal); - - capabilities.Clear(); - capabilities.AddRange(unique); - } - } - - internal void Validate(string pluginName) - { - if (string.IsNullOrWhiteSpace(AssemblyName) && string.IsNullOrWhiteSpace(AssemblyPath)) - { - throw new InvalidOperationException($"Authority plugin '{pluginName}' must define either assemblyName or assemblyPath."); - } - - if (string.IsNullOrWhiteSpace(ConfigFile)) - { - throw new InvalidOperationException($"Authority plugin '{pluginName}' must define a configFile."); - } - - if (Path.GetFileName(ConfigFile) != ConfigFile && Path.IsPathRooted(ConfigFile) && !File.Exists(ConfigFile)) - { - throw new InvalidOperationException($"Authority plugin '{pluginName}' specifies configFile '{ConfigFile}' which does not exist."); - } - - foreach (var capability in capabilities) - { - if (!AllowedCapabilities.Contains(capability)) - { - throw new InvalidOperationException($"Authority plugin '{pluginName}' declares unknown capability '{capability}'. Allowed values: password, mfa, clientProvisioning, bootstrap."); - } - } - } - - internal AuthorityPluginManifest ToManifest(string name, string configPath) - { - var capabilitiesSnapshot = capabilities.Count == 0 - ? Array.Empty() - : capabilities.ToArray(); - - var metadataSnapshot = metadata.Count == 0 - ? 
new Dictionary(OrdinalIgnoreCase) - : new Dictionary(metadata, OrdinalIgnoreCase); - - return new AuthorityPluginManifest( - name, - Type ?? name, - Enabled, - AssemblyName, - AssemblyPath, - capabilitiesSnapshot, - metadataSnapshot, - configPath); - } -} + private static readonly StringComparer OrdinalIgnoreCase = StringComparer.OrdinalIgnoreCase; + + /// + /// Directory containing per-plugin configuration manifests (relative paths resolved against application base path). + /// + public string ConfigurationDirectory { get; set; } = "../etc/authority.plugins"; + + /// + /// Declarative descriptors for Authority plugins (keyed by logical plugin name). + /// + public IDictionary Descriptors { get; } = new Dictionary(OrdinalIgnoreCase); + + internal void NormalizeAndValidate() + { + if (Descriptors.Count == 0) + { + return; + } + + foreach (var (name, descriptor) in Descriptors.ToArray()) + { + if (descriptor is null) + { + throw new InvalidOperationException($"Authority plugin descriptor '{name}' is null."); + } + + descriptor.Normalize(name); + descriptor.Validate(name); + } + } +} + +public sealed class AuthorityPluginDescriptorOptions +{ + private static readonly StringComparer OrdinalIgnoreCase = StringComparer.OrdinalIgnoreCase; + + private readonly List capabilities = new(); + private readonly Dictionary metadata = new(OrdinalIgnoreCase); + private static readonly HashSet AllowedCapabilities = new( + new[] + { + AuthorityPluginCapabilities.Password, + AuthorityPluginCapabilities.Mfa, + AuthorityPluginCapabilities.ClientProvisioning, + AuthorityPluginCapabilities.Bootstrap + }, + OrdinalIgnoreCase); + + /// + /// Logical type identifier for the plugin (e.g. standard, ldap). + /// + public string? Type { get; set; } + + /// + /// Name of the plugin assembly (without file extension). + /// + public string? AssemblyName { get; set; } + + /// + /// Optional explicit assembly path override; relative paths resolve against plugin directories. + /// + public string? 
AssemblyPath { get; set; } + + /// + /// Indicates whether the plugin should be enabled. + /// + public bool Enabled { get; set; } = true; + + /// + /// Plugin capability hints surfaced to the Authority host. + /// + public IList Capabilities => capabilities; + + /// + /// Optional metadata (string key/value) passed to plugin implementations. + /// + public IDictionary Metadata => metadata; + + /// + /// Relative path to the plugin-specific configuration file (defaults to <pluginName>.yaml). + /// + public string? ConfigFile { get; set; } + + internal void Normalize(string pluginName) + { + if (string.IsNullOrWhiteSpace(ConfigFile)) + { + ConfigFile = $"{pluginName}.yaml"; + } + else + { + ConfigFile = ConfigFile.Trim(); + } + + Type = string.IsNullOrWhiteSpace(Type) ? pluginName : Type.Trim(); + + if (!string.IsNullOrWhiteSpace(AssemblyName)) + { + AssemblyName = AssemblyName.Trim(); + } + + if (!string.IsNullOrWhiteSpace(AssemblyPath)) + { + AssemblyPath = AssemblyPath.Trim(); + } + + if (capabilities.Count > 0) + { + var seen = new HashSet(OrdinalIgnoreCase); + var unique = new List(capabilities.Count); + + foreach (var entry in capabilities) + { + if (string.IsNullOrWhiteSpace(entry)) + { + continue; + } + + var canonical = entry.Trim().ToLowerInvariant(); + if (seen.Add(canonical)) + { + unique.Add(canonical); + } + } + + unique.Sort(StringComparer.Ordinal); + + capabilities.Clear(); + capabilities.AddRange(unique); + } + } + + internal void Validate(string pluginName) + { + if (string.IsNullOrWhiteSpace(AssemblyName) && string.IsNullOrWhiteSpace(AssemblyPath)) + { + throw new InvalidOperationException($"Authority plugin '{pluginName}' must define either assemblyName or assemblyPath."); + } + + if (string.IsNullOrWhiteSpace(ConfigFile)) + { + throw new InvalidOperationException($"Authority plugin '{pluginName}' must define a configFile."); + } + + if (Path.GetFileName(ConfigFile) != ConfigFile && Path.IsPathRooted(ConfigFile) && !File.Exists(ConfigFile)) + { + 
throw new InvalidOperationException($"Authority plugin '{pluginName}' specifies configFile '{ConfigFile}' which does not exist."); + } + + foreach (var capability in capabilities) + { + if (!AllowedCapabilities.Contains(capability)) + { + throw new InvalidOperationException($"Authority plugin '{pluginName}' declares unknown capability '{capability}'. Allowed values: password, mfa, clientProvisioning, bootstrap."); + } + } + } + + internal AuthorityPluginManifest ToManifest(string name, string configPath) + { + var capabilitiesSnapshot = capabilities.Count == 0 + ? Array.Empty() + : capabilities.ToArray(); + + var metadataSnapshot = metadata.Count == 0 + ? new Dictionary(OrdinalIgnoreCase) + : new Dictionary(metadata, OrdinalIgnoreCase); + + return new AuthorityPluginManifest( + name, + Type ?? name, + Enabled, + AssemblyName, + AssemblyPath, + capabilitiesSnapshot, + metadataSnapshot, + configPath); + } +} diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IIssuerDirectoryClient.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IIssuerDirectoryClient.cs new file mode 100644 index 00000000..a51be46e --- /dev/null +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IIssuerDirectoryClient.cs @@ -0,0 +1,16 @@ +namespace StellaOps.IssuerDirectory.Client; + +public interface IIssuerDirectoryClient +{ + ValueTask> GetIssuerKeysAsync( + string tenantId, + string issuerId, + bool includeGlobal, + CancellationToken cancellationToken); + + ValueTask GetIssuerTrustAsync( + string tenantId, + string issuerId, + bool includeGlobal, + CancellationToken cancellationToken); +} diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.cs new file mode 100644 index 00000000..5c623c3c --- /dev/null +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClient.cs @@ -0,0 +1,120 @@ +using System; +using System.Globalization; +using System.Net.Http; 
+using System.Net.Http.Json; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; + +namespace StellaOps.IssuerDirectory.Client; + +internal sealed class IssuerDirectoryClient : IIssuerDirectoryClient +{ + private readonly HttpClient _httpClient; + private readonly IMemoryCache _cache; + private readonly IssuerDirectoryClientOptions _options; + private readonly ILogger _logger; + + public IssuerDirectoryClient( + HttpClient httpClient, + IMemoryCache cache, + IOptions options, + ILogger logger) + { + _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + _cache = cache ?? throw new ArgumentNullException(nameof(cache)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + ArgumentNullException.ThrowIfNull(options); + + _options = options.Value; + _options.Validate(); + } + + public async ValueTask> GetIssuerKeysAsync( + string tenantId, + string issuerId, + bool includeGlobal, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + + var cacheKey = CacheKey("keys", tenantId, issuerId, includeGlobal.ToString(CultureInfo.InvariantCulture)); + if (_cache.TryGetValue(cacheKey, out IReadOnlyList? 
cached) && cached is not null) + { + return cached; + } + + var requestUri = $"issuer-directory/issuers/{Uri.EscapeDataString(issuerId)}/keys?includeGlobal={includeGlobal.ToString().ToLowerInvariant()}"; + using var request = new HttpRequestMessage(HttpMethod.Get, requestUri); + request.Headers.TryAddWithoutValidation(_options.TenantHeader, tenantId); + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + _logger.LogWarning( + "Issuer Directory key lookup failed for {IssuerId} (tenant={TenantId}) {StatusCode}", + issuerId, + tenantId, + response.StatusCode); + response.EnsureSuccessStatusCode(); + } + + var payload = await response.Content.ReadFromJsonAsync>(cancellationToken: cancellationToken) + .ConfigureAwait(false); + + IReadOnlyList result = payload?.ToArray() ?? Array.Empty(); + _cache.Set(cacheKey, result, _options.Cache.Keys); + return result; + } + + public async ValueTask GetIssuerTrustAsync( + string tenantId, + string issuerId, + bool includeGlobal, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(issuerId); + + var cacheKey = CacheKey("trust", tenantId, issuerId, includeGlobal.ToString(CultureInfo.InvariantCulture)); + if (_cache.TryGetValue(cacheKey, out IssuerTrustResponseModel? 
cached) && cached is not null) + { + return cached; + } + + var requestUri = $"issuer-directory/issuers/{Uri.EscapeDataString(issuerId)}/trust?includeGlobal={includeGlobal.ToString().ToLowerInvariant()}"; + using var request = new HttpRequestMessage(HttpMethod.Get, requestUri); + request.Headers.TryAddWithoutValidation(_options.TenantHeader, tenantId); + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + _logger.LogWarning( + "Issuer Directory trust lookup failed for {IssuerId} (tenant={TenantId}) {StatusCode}", + issuerId, + tenantId, + response.StatusCode); + response.EnsureSuccessStatusCode(); + } + + var payload = await response.Content.ReadFromJsonAsync(cancellationToken: cancellationToken) + .ConfigureAwait(false) ?? new IssuerTrustResponseModel(null, null, 0m); + + _cache.Set(cacheKey, payload, _options.Cache.Trust); + return payload; + } + + private static string CacheKey(string prefix, params string[] parts) + { + if (parts is null || parts.Length == 0) + { + return prefix; + } + + var segments = new string[1 + parts.Length]; + segments[0] = prefix; + Array.Copy(parts, 0, segments, 1, parts.Length); + return string.Join('|', segments); + } +} diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClientOptions.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClientOptions.cs new file mode 100644 index 00000000..6c05be4c --- /dev/null +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryClientOptions.cs @@ -0,0 +1,44 @@ +namespace StellaOps.IssuerDirectory.Client; + +public sealed class IssuerDirectoryClientOptions +{ + public const string SectionName = "IssuerDirectory:Client"; + + public Uri? 
BaseAddress { get; set; } + + public TimeSpan HttpTimeout { get; set; } = TimeSpan.FromSeconds(10); + + public string TenantHeader { get; set; } = "X-StellaOps-Tenant"; + + public IssuerDirectoryCacheOptions Cache { get; set; } = new(); + + internal void Validate() + { + if (BaseAddress is null) + { + throw new InvalidOperationException("IssuerDirectory client base address must be configured."); + } + + if (!BaseAddress.IsAbsoluteUri) + { + throw new InvalidOperationException("IssuerDirectory client base address must be absolute."); + } + + if (HttpTimeout <= TimeSpan.Zero) + { + throw new InvalidOperationException("IssuerDirectory client timeout must be positive."); + } + + if (string.IsNullOrWhiteSpace(TenantHeader)) + { + throw new InvalidOperationException("IssuerDirectory tenant header must be configured."); + } + } +} + +public sealed class IssuerDirectoryCacheOptions +{ + public TimeSpan Keys { get; set; } = TimeSpan.FromMinutes(5); + + public TimeSpan Trust { get; set; } = TimeSpan.FromMinutes(5); +} diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryModels.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryModels.cs new file mode 100644 index 00000000..815de152 --- /dev/null +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/IssuerDirectoryModels.cs @@ -0,0 +1,30 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.IssuerDirectory.Client; + +public sealed record IssuerKeyModel( + [property: JsonPropertyName("id")] string Id, + [property: JsonPropertyName("issuerId")] string IssuerId, + [property: JsonPropertyName("tenantId")] string TenantId, + [property: JsonPropertyName("type")] string Type, + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("materialFormat")] string MaterialFormat, + [property: JsonPropertyName("materialValue")] string MaterialValue, + [property: JsonPropertyName("fingerprint")] string Fingerprint, + [property: 
JsonPropertyName("expiresAtUtc")] DateTimeOffset? ExpiresAtUtc, + [property: JsonPropertyName("retiredAtUtc")] DateTimeOffset? RetiredAtUtc, + [property: JsonPropertyName("revokedAtUtc")] DateTimeOffset? RevokedAtUtc, + [property: JsonPropertyName("replacesKeyId")] string? ReplacesKeyId); + +public sealed record IssuerTrustOverrideModel( + [property: JsonPropertyName("weight")] decimal Weight, + [property: JsonPropertyName("reason")] string? Reason, + [property: JsonPropertyName("updatedAtUtc")] DateTimeOffset UpdatedAtUtc, + [property: JsonPropertyName("updatedBy")] string UpdatedBy, + [property: JsonPropertyName("createdAtUtc")] DateTimeOffset CreatedAtUtc, + [property: JsonPropertyName("createdBy")] string CreatedBy); + +public sealed record IssuerTrustResponseModel( + [property: JsonPropertyName("tenantOverride")] IssuerTrustOverrideModel? TenantOverride, + [property: JsonPropertyName("globalOverride")] IssuerTrustOverrideModel? GlobalOverride, + [property: JsonPropertyName("effectiveWeight")] decimal EffectiveWeight); diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/ServiceCollectionExtensions.cs b/src/__Libraries/StellaOps.IssuerDirectory.Client/ServiceCollectionExtensions.cs new file mode 100644 index 00000000..08851ac7 --- /dev/null +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/ServiceCollectionExtensions.cs @@ -0,0 +1,57 @@ +using System; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Options; + +namespace StellaOps.IssuerDirectory.Client; + +public static class IssuerDirectoryClientServiceCollectionExtensions +{ + public static IServiceCollection AddIssuerDirectoryClient( + this IServiceCollection services, + IConfiguration configuration, + Action? 
configure = null) + { + ArgumentNullException.ThrowIfNull(configuration); + + return services.AddIssuerDirectoryClient(configuration.GetSection(IssuerDirectoryClientOptions.SectionName), configure); + } + + public static IServiceCollection AddIssuerDirectoryClient( + this IServiceCollection services, + IConfigurationSection configurationSection, + Action? configure = null) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configurationSection); + + services.AddMemoryCache(); + services.AddOptions() + .Bind(configurationSection) + .PostConfigure(options => configure?.Invoke(options)) + .Validate(options => + { + try + { + options.Validate(); + return true; + } + catch + { + return false; + } + }) + .ValidateOnStart(); + + services.AddHttpClient((provider, client) => + { + var opts = provider.GetRequiredService>().Value; + opts.Validate(); + client.BaseAddress = opts.BaseAddress; + client.Timeout = opts.HttpTimeout; + }); + + return services; + } +} diff --git a/src/__Libraries/StellaOps.IssuerDirectory.Client/StellaOps.IssuerDirectory.Client.csproj b/src/__Libraries/StellaOps.IssuerDirectory.Client/StellaOps.IssuerDirectory.Client.csproj new file mode 100644 index 00000000..39cd0e72 --- /dev/null +++ b/src/__Libraries/StellaOps.IssuerDirectory.Client/StellaOps.IssuerDirectory.Client.csproj @@ -0,0 +1,14 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + diff --git a/src/__Libraries/__Tests/StellaOps.Configuration.Tests/StellaOpsAuthorityOptionsTests.cs b/src/__Libraries/__Tests/StellaOps.Configuration.Tests/StellaOpsAuthorityOptionsTests.cs index 622a4d47..828cf11a 100644 --- a/src/__Libraries/__Tests/StellaOps.Configuration.Tests/StellaOpsAuthorityOptionsTests.cs +++ b/src/__Libraries/__Tests/StellaOps.Configuration.Tests/StellaOpsAuthorityOptionsTests.cs @@ -1,8 +1,9 @@ -using System; -using System.Collections.Generic; -using Microsoft.Extensions.Configuration; -using StellaOps.Configuration; -using Xunit; +using 
System; +using System.Collections.Generic; +using System.Globalization; +using Microsoft.Extensions.Configuration; +using StellaOps.Configuration; +using Xunit; namespace StellaOps.Configuration.Tests; @@ -19,37 +20,60 @@ public class StellaOpsAuthorityOptionsTests } [Fact] - public void Validate_Normalises_Collections() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.stella-ops.test"), - SchemaVersion = 1 - }; - options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - - options.PluginDirectories.Add(" ./plugins "); - options.PluginDirectories.Add("./plugins"); - options.PluginDirectories.Add("./other"); - - options.BypassNetworks.Add(" 10.0.0.0/24 "); - options.BypassNetworks.Add("10.0.0.0/24"); - options.BypassNetworks.Add("192.168.0.0/16"); - - options.Validate(); - - Assert.Equal(new[] { "./plugins", "./other" }, options.PluginDirectories); - Assert.Equal(new[] { "10.0.0.0/24", "192.168.0.0/16" }, options.BypassNetworks); - } + public void Validate_Normalises_Collections() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.stella-ops.test"), + SchemaVersion = 1 + }; + options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + + options.PluginDirectories.Add(" ./plugins "); + options.PluginDirectories.Add("./plugins"); + options.PluginDirectories.Add("./other"); + + options.BypassNetworks.Add(" 10.0.0.0/24 "); + options.BypassNetworks.Add("10.0.0.0/24"); + options.BypassNetworks.Add("192.168.0.0/16"); + + options.AdvisoryAi.RemoteInference.AllowedProfiles.Add(" cloud-openai "); + options.AdvisoryAi.RemoteInference.AllowedProfiles.Add("CLOUD-OPENAI"); + options.AdvisoryAi.RemoteInference.AllowedProfiles.Add("sovereign-local"); + + options.Validate(); + + 
Assert.Equal(new[] { "./plugins", "./other" }, options.PluginDirectories); + Assert.Equal(new[] { "10.0.0.0/24", "192.168.0.0/16" }, options.BypassNetworks); + Assert.Equal(new[] { "cloud-openai", "sovereign-local" }, options.AdvisoryAi.RemoteInference.AllowedProfiles); + } + + [Fact] + public void Validate_Throws_When_RemoteInferenceEnabledWithoutProfiles() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.stella-ops.test"), + SchemaVersion = 1 + }; + options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.AdvisoryAi.RemoteInference.Enabled = true; + + var exception = Assert.Throws(() => options.Validate()); + + Assert.Contains("remote inference", exception.Message, StringComparison.OrdinalIgnoreCase); + } [Fact] - public void Validate_Normalises_PluginDescriptors() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.stella-ops.test"), + public void Validate_Normalises_PluginDescriptors() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.stella-ops.test"), SchemaVersion = 1 }; options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; @@ -72,8 +96,73 @@ public class StellaOpsAuthorityOptionsTests var normalized = options.Plugins.Descriptors["standard"]; Assert.Equal("standard.yaml", normalized.ConfigFile); Assert.Single(normalized.Capabilities); - Assert.Equal("password", normalized.Capabilities[0]); - } + Assert.Equal("password", normalized.Capabilities[0]); + } + + [Fact] + public void Validate_Allows_TenantRemoteInferenceConsent_WhenConfigured() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.stella-ops.test"), + SchemaVersion = 1 + }; + options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; + options.Signing.ActiveKeyId = "test-key"; + 
options.Signing.KeyPath = "/tmp/test-key.pem"; + options.AdvisoryAi.RemoteInference.Enabled = true; + options.AdvisoryAi.RemoteInference.RequireTenantConsent = true; + options.AdvisoryAi.RemoteInference.AllowedProfiles.Add("cloud-openai"); + + var tenant = new AuthorityTenantOptions + { + Id = "tenant-default", + DisplayName = "Tenant Default" + }; + + tenant.AdvisoryAi.RemoteInference.ConsentGranted = true; + tenant.AdvisoryAi.RemoteInference.ConsentVersion = "2025-10"; + tenant.AdvisoryAi.RemoteInference.ConsentedAt = DateTimeOffset.Parse("2025-10-31T12:34:56Z", CultureInfo.InvariantCulture); + tenant.AdvisoryAi.RemoteInference.ConsentedBy = "legal@example.com"; + + options.Tenants.Add(tenant); + + options.Validate(); + + Assert.Equal("2025-10", tenant.AdvisoryAi.RemoteInference.ConsentVersion); + Assert.Equal(DateTimeOffset.Parse("2025-10-31T12:34:56Z", CultureInfo.InvariantCulture), tenant.AdvisoryAi.RemoteInference.ConsentedAt); + } + + [Fact] + public void Validate_Throws_When_TenantRemoteInferenceConsentMissingVersion() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.stella-ops.test"), + SchemaVersion = 1 + }; + options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.AdvisoryAi.RemoteInference.Enabled = true; + options.AdvisoryAi.RemoteInference.RequireTenantConsent = true; + options.AdvisoryAi.RemoteInference.AllowedProfiles.Add("cloud-openai"); + + var tenant = new AuthorityTenantOptions + { + Id = "tenant-default", + DisplayName = "Tenant Default" + }; + + tenant.AdvisoryAi.RemoteInference.ConsentGranted = true; + tenant.AdvisoryAi.RemoteInference.ConsentedAt = DateTimeOffset.Parse("2025-10-31T12:34:56Z", CultureInfo.InvariantCulture); + + options.Tenants.Add(tenant); + + var exception = Assert.Throws(() => options.Validate()); + + Assert.Contains("consentVersion", exception.Message, 
StringComparison.OrdinalIgnoreCase); + } [Fact] public void Validate_Throws_When_StorageConnectionStringMissing()