From 69c59defdcef132dbb511fecfc75f14878ddf9fe Mon Sep 17 00:00:00 2001 From: master <> Date: Mon, 10 Nov 2025 07:56:15 +0200 Subject: [PATCH] feat: Implement Runtime Facts ingestion service and NDJSON reader - Added RuntimeFactsNdjsonReader for reading NDJSON formatted runtime facts. - Introduced IRuntimeFactsIngestionService interface and its implementation. - Enhanced Program.cs to register new services and endpoints for runtime facts. - Updated CallgraphIngestionService to include CAS URI in stored artifacts. - Created RuntimeFactsValidationException for validation errors during ingestion. - Added tests for RuntimeFactsIngestionService and RuntimeFactsNdjsonReader. - Implemented SignalsSealedModeMonitor for compliance checks in sealed mode. - Updated project dependencies for testing utilities. --- docs/09_API_CLI_REFERENCE.md | 87 +- .../implplan/SPRINT_110_ingestion_evidence.md | 91 +- docs/implplan/SPRINT_130_scanner_surface.md | 14 +- .../implplan/SPRINT_137_scanner_gap_design.md | 163 +- .../SPRINT_138_scanner_ruby_parity.md | 15 +- docs/implplan/SPRINT_140_runtime_signals.md | 44 + docs/implplan/SPRINT_143_signals.md | 9 +- docs/modules/cli/guides/cli-reference.md | 131 + docs/modules/scanner/README.md | 6 +- .../operations/dsse-rekor-operator-guide.md | 171 + etc/signals.yaml.sample | 16 +- .../manifest.json | 21 + .../StellaOps.Cli/Commands/CommandFactory.cs | 97 +- .../StellaOps.Cli/Commands/CommandHandlers.cs | 12973 ++++++++-------- .../Services/BackendOperationsClient.cs | 52 +- .../Services/IBackendOperationsClient.cs | 3 + .../Models/Ruby/RubyPackageArtifactModel.cs | 28 + src/Cli/StellaOps.Cli/StellaOps.Cli.csproj | 7 + src/Cli/StellaOps.Cli/TASKS.md | 6 + src/Cli/StellaOps.Cli/Telemetry/CliMetrics.cs | 35 + .../Commands/CommandFactoryTests.cs | 36 + .../Commands/CommandHandlersTests.cs | 5342 ++++--- .../TASKS.md | 12 + .../CompositeScanAnalyzerDispatcher.cs | 7 +- .../Surface/SurfaceManifestPublisher.cs | 1 + 
.../Surface/SurfaceManifestStageExecutor.cs | 36 + src/Scanner/StellaOps.Scanner.sln | 28 + .../AssemblyInfo.cs | 3 + .../DenoAnalyzerPlugin.cs | 18 + .../DenoLanguageAnalyzer.cs | 114 + .../GlobalUsings.cs | 17 + .../Internal/DenoBuiltinUsage.cs | 6 + .../Internal/DenoBundleInspectionResult.cs | 12 + .../Internal/DenoBundleInspector.cs | 156 + .../Internal/DenoBundleModule.cs | 7 + .../Internal/DenoBundleObservation.cs | 8 + .../Internal/DenoBundleResource.cs | 6 + .../Internal/DenoBundleScanResult.cs | 5 + .../Internal/DenoBundleScanner.cs | 82 + .../Internal/DenoCacheLocation.cs | 39 + .../Internal/DenoCapabilityRecord.cs | 6 + .../Internal/DenoCapabilityType.cs | 12 + .../Internal/DenoCompatibilityAnalysis.cs | 8 + .../Internal/DenoCompileInspector.cs | 59 + .../Internal/DenoConfigDocument.cs | 330 + .../Internal/DenoContainerAdapter.cs | 76 + .../Internal/DenoContainerEmitter.cs | 86 + .../Internal/DenoContainerInput.cs | 8 + .../Internal/DenoContainerSourceKind.cs | 8 + .../Internal/DenoDynamicImportObservation.cs | 7 + .../Internal/DenoImportKind.cs | 15 + .../Internal/DenoImportMapDocument.cs | 152 + .../Internal/DenoLayerMetadata.cs | 43 + .../Internal/DenoLiteralFetchObservation.cs | 7 + .../Internal/DenoLockFile.cs | 208 + .../Internal/DenoModuleEdge.cs | 9 + .../Internal/DenoModuleGraph.cs | 27 + .../Internal/DenoModuleGraphResolver.cs | 709 + .../Internal/DenoModuleKind.cs | 17 + .../Internal/DenoModuleNode.cs | 10 + .../Internal/DenoNpmCompatibilityAdapter.cs | 673 + .../Internal/DenoNpmResolution.cs | 11 + .../Internal/DenoPathUtilities.cs | 81 + .../Internal/DenoVendorDirectory.cs | 47 + .../Internal/DenoVirtualFileSystem.cs | 289 + .../Internal/DenoWorkspace.cs | 59 + .../Internal/DenoWorkspaceNormalizer.cs | 444 + .../Observations/DenoObservationBuilder.cs | 73 + .../DenoObservationBundleSummary.cs | 8 + .../Observations/DenoObservationDocument.cs | 9 + .../Observations/DenoObservationSerializer.cs | 109 + 
...ellaOps.Scanner.Analyzers.Lang.Deno.csproj | 20 + .../manifest.json | 21 + .../Internal/JavaLockFileCollector.cs | 184 + .../JavaLanguageAnalyzer.cs | 449 +- .../Internal/NodeLockData.cs | 532 +- .../Internal/NodeLockEntry.cs | 19 +- .../Internal/NodePackage.cs | 116 +- .../Internal/NodePackageCollector.cs | 143 +- .../Internal/PythonDistributionLoader.cs | 78 +- .../Internal/PythonLockFileCollector.cs | 283 + .../PythonLanguageAnalyzer.cs | 148 +- .../Internal/RubyCapabilities.cs | 8 +- .../Internal/RubyCapabilityDetector.cs | 320 + .../Internal/RubyPackage.cs | 31 +- .../Internal/RubyRuntimeGraphBuilder.cs | 439 + .../Internal/RubyRuntimeUsage.cs | 15 + .../RubyLanguageAnalyzer.cs | 9 +- .../TASKS.md | 6 + .../Core/LanguageAnalyzerContext.cs | 19 +- .../Contracts/AnalyzerObservationPayload.cs | 42 + .../Contracts/ScanAnalysisKeys.cs | 2 + .../Catalog/ArtifactDocument.cs | 2 + .../Bundles/BundleInspectorTests.cs | 76 + .../Containers/ContainerAdapterTests.cs | 34 + .../Containers/ContainerEmitterTests.cs | 38 + .../Deno/DenoWorkspaceNormalizerTests.cs | 121 + .../DenoLanguageAnalyzerObservationTests.cs | 47 + .../ObservationSerializerTests.cs | 28 + ...s.Scanner.Analyzers.Lang.Deno.Tests.csproj | 42 + .../TestFixtures/BundleFixtureBuilder.cs | 74 + .../TestFixtures/DenoWorkspaceTestFixture.cs | 186 + .../Fixtures/java/basic/expected.json | 70 +- .../Java/JavaLanguageAnalyzerTests.cs | 103 + .../lang/node/workspaces/expected.json | 291 +- .../lang/node/workspaces/package-lock.json | 22 +- .../Python/PythonLanguageAnalyzerTests.cs | 131 +- .../Fixtures/lang/ruby/basic/app/main.rb | 27 + .../Fixtures/lang/ruby/basic/expected.json | 44 +- .../Lang/Ruby/RubyLanguageAnalyzerTests.cs | 6 +- .../TestUtilities/JavaClassFileFactory.cs | 2 + .../SurfaceManifestStageExecutorTests.cs | 43 + .../Hosting/SignalsSealedModeMonitor.cs | 92 + .../Models/CallgraphArtifactMetadata.cs | 21 +- .../Models/CallgraphIngestResponse.cs | 9 +- .../Models/ReachabilityFactDocument.cs | 25 + 
.../Models/RuntimeFactsIngestRequest.cs | 47 + .../Models/RuntimeFactsStreamMetadata.cs | 14 + .../Options/SignalsAirGapOptions.cs | 49 + .../Options/SignalsOptions.cs | 22 +- .../Parsing/RuntimeFactsNdjsonReader.cs | 52 + src/Signals/StellaOps.Signals/Program.cs | 281 +- .../Services/CallgraphIngestionService.cs | 17 +- .../Services/IRuntimeFactsIngestionService.cs | 10 + .../Services/RuntimeFactsIngestionService.cs | 255 + .../RuntimeFactsValidationException.cs | 11 + .../FileSystemCallgraphArtifactStore.cs | 70 +- .../Storage/Models/StoredCallgraphArtifact.cs | 11 +- .../RuntimeFactsIngestionServiceTests.cs | 141 + .../RuntimeFactsNdjsonReaderTests.cs | 49 + .../SignalsSealedModeMonitorTests.cs | 100 + ...tellaOps.Signals.Reachability.Tests.csproj | 1 + 132 files changed, 19718 insertions(+), 9334 deletions(-) create mode 100644 docs/modules/scanner/operations/dsse-rekor-operator-guide.md create mode 100644 plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Deno/manifest.json create mode 100644 src/Cli/StellaOps.Cli/Services/Models/Ruby/RubyPackageArtifactModel.cs create mode 100644 src/Cli/StellaOps.Cli/TASKS.md create mode 100644 src/Cli/__Tests/StellaOps.Cli.Tests/Commands/CommandFactoryTests.cs create mode 100644 src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/AssemblyInfo.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/DenoAnalyzerPlugin.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/DenoLanguageAnalyzer.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/GlobalUsings.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoBuiltinUsage.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoBundleInspectionResult.cs create mode 100644 
src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoBundleInspector.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoBundleModule.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoBundleObservation.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoBundleResource.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoBundleScanResult.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoBundleScanner.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoCacheLocation.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoCapabilityRecord.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoCapabilityType.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoCompatibilityAnalysis.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoCompileInspector.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoConfigDocument.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoContainerAdapter.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoContainerEmitter.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoContainerInput.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoContainerSourceKind.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoDynamicImportObservation.cs create mode 100644 
src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoImportKind.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoImportMapDocument.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoLayerMetadata.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoLiteralFetchObservation.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoLockFile.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoModuleEdge.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoModuleGraph.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoModuleGraphResolver.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoModuleKind.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoModuleNode.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoNpmCompatibilityAdapter.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoNpmResolution.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoPathUtilities.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoVendorDirectory.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoVirtualFileSystem.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoWorkspace.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/DenoWorkspaceNormalizer.cs create mode 100644 
src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/Observations/DenoObservationBuilder.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/Observations/DenoObservationBundleSummary.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/Observations/DenoObservationDocument.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/Internal/Observations/DenoObservationSerializer.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/StellaOps.Scanner.Analyzers.Lang.Deno.csproj create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Deno/manifest.json create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaLockFileCollector.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/PythonLockFileCollector.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Ruby/Internal/RubyCapabilityDetector.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Ruby/Internal/RubyRuntimeGraphBuilder.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Ruby/Internal/RubyRuntimeUsage.cs create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md create mode 100644 src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/AnalyzerObservationPayload.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Deno.Tests/Bundles/BundleInspectorTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Deno.Tests/Containers/ContainerAdapterTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Deno.Tests/Containers/ContainerEmitterTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Deno.Tests/Deno/DenoWorkspaceNormalizerTests.cs create mode 100644 
src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Deno.Tests/Observations/DenoLanguageAnalyzerObservationTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Deno.Tests/Observations/ObservationSerializerTests.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Deno.Tests/StellaOps.Scanner.Analyzers.Lang.Deno.Tests.csproj create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Deno.Tests/TestFixtures/BundleFixtureBuilder.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Deno.Tests/TestFixtures/DenoWorkspaceTestFixture.cs create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/ruby/basic/app/main.rb create mode 100644 src/Signals/StellaOps.Signals/Hosting/SignalsSealedModeMonitor.cs create mode 100644 src/Signals/StellaOps.Signals/Models/RuntimeFactsIngestRequest.cs create mode 100644 src/Signals/StellaOps.Signals/Models/RuntimeFactsStreamMetadata.cs create mode 100644 src/Signals/StellaOps.Signals/Options/SignalsAirGapOptions.cs create mode 100644 src/Signals/StellaOps.Signals/Parsing/RuntimeFactsNdjsonReader.cs create mode 100644 src/Signals/StellaOps.Signals/Services/IRuntimeFactsIngestionService.cs create mode 100644 src/Signals/StellaOps.Signals/Services/RuntimeFactsIngestionService.cs create mode 100644 src/Signals/StellaOps.Signals/Services/RuntimeFactsValidationException.cs create mode 100644 tests/reachability/StellaOps.Signals.Reachability.Tests/RuntimeFactsIngestionServiceTests.cs create mode 100644 tests/reachability/StellaOps.Signals.Reachability.Tests/RuntimeFactsNdjsonReaderTests.cs create mode 100644 tests/reachability/StellaOps.Signals.Reachability.Tests/SignalsSealedModeMonitorTests.cs diff --git a/docs/09_API_CLI_REFERENCE.md b/docs/09_API_CLI_REFERENCE.md index 34f15e156..7c32a5f98 100755 --- a/docs/09_API_CLI_REFERENCE.md +++ b/docs/09_API_CLI_REFERENCE.md @@ -669,16 +669,85 @@ See `docs/dev/32_AUTH_CLIENT_GUIDE.md` 
for recommended profiles (online vs. air- | Command | Purpose | Key Flags / Arguments | Notes | |---------|---------|-----------------------|-------| -| `stellaops-cli scanner download` | Fetch and install scanner container | `--channel ` (default `stable`)
`--output `
`--overwrite`
`--no-install` | Saves artefact under `ScannerCacheDirectory`, verifies digest/signature, and executes `docker load` unless `--no-install` is supplied. | -| `stellaops-cli scan run` | Execute scanner container against a directory (auto-upload) | `--target ` (required)
`--runner ` (default from config)
`--entry `
`[scanner-args...]` | Runs the scanner, writes results into `ResultsDirectory`, emits a structured `scan-run-*.json` metadata file, and automatically uploads the artefact when the exit code is `0`. | +| `stellaops-cli scanner download` | Fetch and install scanner container | `--channel ` (default `stable`)
`--output `
`--overwrite`
`--no-install` | Saves artefact under `ScannerCacheDirectory`, verifies digest/signature, and executes `docker load` unless `--no-install` is supplied. | +| `stellaops-cli scan run` | Execute scanner container against a directory (auto-upload) | `--target ` (required)
`--runner ` (default from config)
`--entry `
`[scanner-args...]` | Runs the scanner, writes results into `ResultsDirectory`, emits a structured `scan-run-*.json` metadata file, and automatically uploads the artefact when the exit code is `0`. | | `stellaops-cli scan upload` | Re-upload existing scan artefact | `--file ` | Useful for retries when automatic upload fails or when operating offline. | -| `stellaops-cli db fetch` | Trigger connector jobs | `--source ` (e.g. `redhat`, `osv`)
`--stage ` (default `fetch`)
`--mode ` | Translates to `POST /jobs/source:{source}:{stage}` with `trigger=cli` | -| `stellaops-cli db merge` | Run canonical merge reconcile | — | Calls `POST /jobs/merge:reconcile`; exit code `0` on acceptance, `1` on failures/conflicts | -| `stellaops-cli db export` | Kick JSON / Trivy exports | `--format ` (default `json`)
`--delta`
`--publish-full/--publish-delta`
`--bundle-full/--bundle-delta` | Sets `{ delta = true }` parameter when requested and can override ORAS/bundle toggles per run | -| `stellaops-cli auth ` | Manage cached tokens for StellaOps Authority | `auth login --force` (ignore cache)
`auth status`
`auth whoami` | Uses `StellaOps.Auth.Client`; honours `StellaOps:Authority:*` configuration, stores tokens under `~/.stellaops/tokens` by default, and `whoami` prints subject/scope/expiry | -| `stellaops-cli auth revoke export` | Export the Authority revocation bundle | `--output ` (defaults to CWD) | Writes `revocation-bundle.json`, `.json.jws`, and `.json.sha256`; verifies the digest locally and includes key metadata in the log summary. | -| `stellaops-cli auth revoke verify` | Validate a revocation bundle offline | `--bundle ` `--signature ` `--key `
`--verbose` | Verifies detached JWS signatures, reports the computed SHA-256, and can fall back to cached JWKS when `--key` is omitted. | -| `stellaops-cli offline kit pull` | Download the latest offline kit bundle and manifest | `--bundle-id ` (optional)
`--destination `
`--overwrite`
`--no-resume` | Streams the bundle + manifest from the configured mirror/backend, resumes interrupted downloads, verifies SHA-256, and writes signatures plus a `.metadata.json` manifest alongside the artefacts. | +| `stellaops-cli ruby inspect` | Offline Ruby workspace inspection (Gemfile / lock + runtime signals) | `--root ` (default current directory)
`--format ` (default `table`) | Runs the bundled `RubyLanguageAnalyzer`, renders Package/Version/Group/Source/Lockfile/Runtime columns, or emits JSON `{ packages: [...] }`. Exit codes: `0` success, `64` invalid format, `70` unexpected failure, `71` missing directory. | +| `stellaops-cli ruby resolve` | Fetch Ruby package inventory for a completed scan | `--image ` *or* `--scan-id ` (one required)
`--format ` (default `table`) | Calls `GetRubyPackagesAsync` to download `ruby_packages.json`, groups entries by bundle/platform, and shows runtime entrypoints/usage. Table output mirrors `inspect`; JSON returns `{ scanId, groups: [...] }`. Exit codes: `0` success, `64` invalid args, `70` backend failure. | +| `stellaops-cli db fetch` | Trigger connector jobs | `--source ` (e.g. `redhat`, `osv`)
`--stage ` (default `fetch`)
`--mode ` | Translates to `POST /jobs/source:{source}:{stage}` with `trigger=cli` | +| `stellaops-cli db merge` | Run canonical merge reconcile | — | Calls `POST /jobs/merge:reconcile`; exit code `0` on acceptance, `1` on failures/conflicts | +| `stellaops-cli db export` | Kick JSON / Trivy exports | `--format ` (default `json`)
`--delta`
`--publish-full/--publish-delta`
`--bundle-full/--bundle-delta` | Sets `{ delta = true }` parameter when requested and can override ORAS/bundle toggles per run | +| `stellaops-cli auth ` | Manage cached tokens for StellaOps Authority | `auth login --force` (ignore cache)
`auth status`
`auth whoami` | Uses `StellaOps.Auth.Client`; honours `StellaOps:Authority:*` configuration, stores tokens under `~/.stellaops/tokens` by default, and `whoami` prints subject/scope/expiry | +| `stellaops-cli auth revoke export` | Export the Authority revocation bundle | `--output ` (defaults to CWD) | Writes `revocation-bundle.json`, `.json.jws`, and `.json.sha256`; verifies the digest locally and includes key metadata in the log summary. | +| `stellaops-cli auth revoke verify` | Validate a revocation bundle offline | `--bundle ` `--signature ` `--key `
`--verbose` | Verifies detached JWS signatures, reports the computed SHA-256, and can fall back to cached JWKS when `--key` is omitted. | +| `stellaops-cli offline kit pull` | Download the latest offline kit bundle and manifest | `--bundle-id ` (optional)
`--destination `
`--overwrite`
`--no-resume` | Streams the bundle + manifest from the configured mirror/backend, resumes interrupted downloads, verifies SHA-256, and writes signatures plus a `.metadata.json` manifest alongside the artefacts. | + +### Ruby dependency verbs (`stellaops-cli ruby …`) + +`ruby inspect` runs the same deterministic `RubyLanguageAnalyzer` bundled with Scanner.Worker against the local working tree—no backend calls—so operators can sanity-check Gemfile / Gemfile.lock pairs before shipping. `ruby resolve` downloads the `ruby_packages.json` artifact that Scanner creates for each scan (via `GetRubyPackagesAsync`) and reshapes it for operators who need to reason about groups/platforms/runtime usage after the fact. + +**`ruby inspect` flags** + +| Flag | Default | Description | +| ---- | ------- | ----------- | +| `--root ` | current working directory | Directory containing `Gemfile`, `Gemfile.lock`, and runtime sources. Missing paths set exit code **71**. | +| `--format ` | `table` | `table` renders Package/Version/Groups/Platform/Source/Lockfile/Runtime columns; `json` emits `{ "packages": [...] }` with the analyzer metadata. | +| `--verbose` / `-v` | `false` | Surfaces analyzer trace logging while keeping deterministic output. | + +Successful runs exit `0`; invalid formats raise **64**, unexpected failures return **70**. Table output marks runtime usage with `[green]Entrypoint[/]` and includes every runtime entrypoint path when available. JSON mode mirrors analyzer metadata: + +```json +{ + "packages": [ + { + "name": "rack", + "version": "3.1.0", + "source": "https://rubygems.org/", + "lockfile": "Gemfile.lock", + "groups": ["default"], + "platform": "-", + "runtimeEntrypoints": ["app.rb"], + "runtimeFiles": ["app.rb"], + "runtimeReasons": ["require-static"], + "usedByEntrypoint": true + } + ] +} +``` + +**`ruby resolve` flags** + +| Flag | Default | Description | +| ---- | ------- | ----------- | +| `--image ` | — | Scanner artifact identifier (image digest/tag). 
Mutually exclusive with `--scan-id`; one is required. | +| `--scan-id ` | — | Explicit scan identifier returned by `scan run`. | +| `--format ` | `table` | `json` writes `{ "scanId": "…", "groups": [{ "group": "default", "platform": "-", "packages": [...] }] }`. | +| `--verbose` / `-v` | `false` | Enables HTTP + resolver logging. | + +Errors caused by missing identifiers return **64**; transient backend errors surface as **70** (with full context in logs). Table output groups packages by Gem/Bundle group + platform and shows runtime entrypoints or `[grey]-[/]` when unused. JSON payloads stay stable for downstream automation: + +```json +{ + "scanId": "scan-ruby", + "groups": [ + { + "group": "default", + "platform": "-", + "packages": [ + { + "name": "rack", + "lockfile": "Gemfile.lock", + "groups": ["default"], + "runtimeUsed": true, + "runtimeEntrypoints": ["app.rb"] + } + ] + } + ] +} +``` + +Both commands honour CLI observability hooks: Spectre tables for human output, `--format json` for automation, metrics reported via `CliMetrics.RecordRubyInspect/Resolve`, and Activity tags (`cli.ruby.inspect`, `cli.ruby.resolve`) for trace correlation. | `stellaops-cli offline kit import` | Upload an offline kit bundle to the backend | `` (argument)
`--manifest `
`--bundle-signature `
`--manifest-signature ` | Validates digests when metadata is present, then posts multipart payloads to `POST /api/offline-kit/import`; logs the submitted import ID/status for air-gapped rollout tracking. | | `stellaops-cli offline kit status` | Display imported offline kit details | `--json` | Shows bundle id/kind, captured/imported timestamps, digests, and component versions; `--json` emits machine-readable output for scripting. | | `stellaops-cli sources ingest --dry-run` | Dry-run guard validation for individual payloads | `--source `
`--input `
`--tenant `
`--format table\|json`
`--output ` | Normalises gzip/base64 payloads, invokes `api/aoc/ingest/dry-run`, and maps guard failures to deterministic `ERR_AOC_00x` exit codes. | diff --git a/docs/implplan/SPRINT_110_ingestion_evidence.md b/docs/implplan/SPRINT_110_ingestion_evidence.md index 4977458b6..8aed8ee73 100644 --- a/docs/implplan/SPRINT_110_ingestion_evidence.md +++ b/docs/implplan/SPRINT_110_ingestion_evidence.md @@ -6,26 +6,89 @@ Active items only. Completed/historic work now resides in docs/implplan/archived | Wave | Guild owners | Shared prerequisites | Status | Notes | | --- | --- | --- | --- | --- | -| 110.A AdvisoryAI | Advisory AI Guild · Docs Guild · SBOM Service Guild | Sprint 100.A – Attestor (closed 2025-11-09 per `docs/implplan/archived/SPRINT_100_identity_signing.md`) | DOING | WebService/Worker orchestration, guardrails, and docs are live; continue console/CLI coverage as endpoints land. | -| 110.B Concelier | Concelier Core & WebService Guilds · Observability Guild · AirGap Guilds (Importer/Policy/Time) | Sprint 100.A – Attestor | DOING | Telemetry wiring started; mirror/air-gap tasks unlocked with AdvisoryAI evidence dependencies met. | -| 110.C Excititor | Excititor WebService/Core Guilds · Observability Guild · Evidence Locker Guild | Sprint 100.A – Attestor | DOING | VEX justification enrichment and provenance metadata are underway; keep Link-Not-Merge blockers tracked. | -| 110.D Mirror | Mirror Creator Guild · Exporter Guild · CLI Guild · AirGap Time Guild | Sprint 100.A – Attestor | TODO | Deterministic bundle assembler remains the gating task before DSSE/OCI work can proceed. 
| +| 110.A AdvisoryAI | Advisory AI Guild · Docs Guild · SBOM Service Guild | Sprint 100.A – Attestor (closed 2025-11-09 per `docs/implplan/archived/SPRINT_100_identity_signing.md`) | DOING | Regression/perf suite (AIAI-31-009) and console doc (DOCS-AIAI-31-004) remain DOING; SBOM (SBOM-AIAI-31-001/003), CLI (CLI-VULN-29-001/CLI-VEX-30-001), Policy (POLICY-ENGINE-31-001), and DevOps (DEVOPS-AIAI-31-001) owners owe delivery ETA updates on 2025-11-10 so the CLI/policy/runbook docs can unblock. | +| 110.B Concelier | Concelier Core & WebService Guilds · Observability Guild · AirGap Guilds (Importer/Policy/Time) | Sprint 100.A – Attestor | DOING | Paragraph chunk API shipped 2025-11-07; structured field/caching (CONCELIER-AIAI-31-002) is still TODO, telemetry (CONCELIER-AIAI-31-003) DOING, and air-gap/console/attestation tracks remain gated on Link-Not-Merge + Cartographer schema. | +| 110.C Excititor | Excititor WebService/Core Guilds · Observability Guild · Evidence Locker Guild | Sprint 100.A – Attestor | DOING | Normalized justification projections (EXCITITOR-AIAI-31-001) are DOING; chunk API, telemetry, docs, attestation, and mirror backlog stay queued behind that work plus Link-Not-Merge / Cartographer prerequisites. | +| 110.D Mirror | Mirror Creator Guild · Exporter Guild · CLI Guild · AirGap Time Guild | Sprint 100.A – Attestor | TODO | Wave remains TODO—MIRROR-CRT-56-001 has not started, so DSSE/TUF, OCI/time-anchor, CLI, and scheduling integrations cannot proceed. | -## Status Snapshot (2025-11-04) +## Status Snapshot (2025-11-09) -- **Advisory AI** – 5 of 11 tasks are DONE (AIAI-31-001, AIAI-31-002, AIAI-31-003, AIAI-31-010, AIAI-31-011); orchestration pipeline (AIAI-31-004) and host wiring (AIAI-31-004A) remain TODO while downstream guardrails, CLI, and observability tracks (AIAI-31-004B/004C and AIAI-31-005 through AIAI-31-009) stay TODO pending cache/guardrail implementation and WebService/Worker hardening. 
- - 2025-11-04: AIAI-31-002 and AIAI-31-003 shipped with deterministic SBOM context client wiring (`AddSbomContext` typed HTTP client) and toolset integration; WebService/Worker now invoke the orchestrator with SBOM-backed simulations and emit initial metrics. - - 2025-11-03: AIAI-31-002 landed the configurable HTTP client + DI defaults; retriever now resolves data via `/v1/sbom/context`, retaining a null fallback until SBOM service ships. - - 2025-11-03: Follow-up: SBOM guild to deliver base URL/API key and run an Advisory AI smoke retrieval once SBOM-AIAI-31-001 endpoints are live. - - 2025-11-08: AIAI-31-009 marked DONE – injection harness + dual golden prompts + plan-cache determinism tests landed; perf memo added to Advisory AI architecture, `dotnet test src/AdvisoryAI/__Tests/StellaOps.AdvisoryAI.Tests/StellaOps.AdvisoryAI.Tests.csproj --no-build` green. - - 2025-11-08: AIAI-31-008 DONE – deterministic plan ordering + conflict prompt goldens refreshed, remote inference packaging verified across Compose/Helm/Offline Kit profiles, and Advisory AI test suite re-run. -- **Concelier** – CONCELIER-CORE-AOC-19-004 is the only in-flight Concelier item; air-gap, console, attestation, and Link-Not-Merge tasks remain TODO, and several connector upgrades still carry overdue October due dates. -- **Excititor** – Excititor WebService, console, policy, and observability tracks are all TODO and hinge on Link-Not-Merge schema delivery plus trust-provenance connectors (SUSE/Ubuntu) progressing in section 110.C. -- **Mirror** – Mirror Creator track (MIRROR-CRT-56-001 through MIRROR-CRT-58-002) has not started; DSSE signing, OCI bundle, and scheduling integrations depend on the deterministic bundle assembler landing first. 
+- **Advisory AI (110.A)** – WebService orchestration (AIAI-31-004), typed SBOM client/tooling (AIAI-31-002/003), guardrail pipeline (AIAI-31-005), and overview/API/architecture docs (DOCS-AIAI-31-001/002/003) are DONE; focus now sits on DOCS-AIAI-31-004 and AIAI-31-009 while CLI/policy/SBOM deliverables unblock the remaining docs. + - 2025-11-09: AIAI-31-009 remains DOING after converting the guardrail harness into JSON fixtures, expanding property/perf coverage, and validating offline cache seeding; remote inference packaging (AIAI-31-008) is still TODO until the policy knob work in AIAI-31-006..007 completes. + - 2025-11-09: DOCS-AIAI-31-004 continues DOING—guardrail/offline sections are drafted, but screenshots plus copy blocks wait on CONSOLE-VULN-29-001, CONSOLE-VEX-30-001, and EXCITITOR-CONSOLE-23-001. + - SBOM-AIAI-31-003 and DOCS-AIAI-31-005/006/008/009 remain BLOCKED pending SBOM-AIAI-31-001, CLI-VULN-29-001, CLI-VEX-30-001, POLICY-ENGINE-31-001, and DEVOPS-AIAI-31-001. +- **Concelier (110.B)** – `/advisories/{advisoryKey}/chunks` shipped on 2025-11-07 with tenant enforcement, chunk tuning knobs, and regression fixtures; structured field/caching work (CONCELIER-AIAI-31-002) is still TODO while telemetry/guardrail instrumentation (CONCELIER-AIAI-31-003) is DOING. + - Air-gap provenance/staleness bundles (`CONCELIER-AIRGAP-56-001` → `CONCELIER-AIRGAP-58-001`), console views/deltas (`CONCELIER-CONSOLE-23-001..003`), and attestation metadata (`CONCELIER-ATTEST-73-001/002`) remain TODO pending Link-Not-Merge plus Cartographer schema delivery. + - Connector provenance refreshes `FEEDCONN-ICSCISA-02-012` and `FEEDCONN-KISA-02-008` are still overdue, leaving evidence parity gaps for those feeds. 
+- **Excititor (110.C)** – Normalized VEX justification projections (EXCITITOR-AIAI-31-001) are DOING as of 2025-11-09; the downstream chunk API (EXCITITOR-AIAI-31-002), telemetry/guardrails (EXCITITOR-AIAI-31-003), docs/OpenAPI alignment (EXCITITOR-AIAI-31-004), and attestation payload work (`EXCITITOR-ATTEST-*`) stay TODO until that projection work plus Link-Not-Merge schema land. + - Mirror/air-gap backlog (`EXCITITOR-AIRGAP-56-001` → `EXCITITOR-AIRGAP-58-001`) and connector provenance parity (`EXCITITOR-CONN-TRUST-01-001`) remain unscheduled, so Advisory AI cannot yet hydrate sealed VEX evidence or cite connector signatures. +- **Mirror (110.D)** – MIRROR-CRT-56-001 (deterministic bundle assembler) has not kicked off, so DSSE/TUF (MIRROR-CRT-56-002), OCI exports (MIRROR-CRT-57-001), time anchors (MIRROR-CRT-57-002), CLI verbs (MIRROR-CRT-58-001), and Export Center automation (MIRROR-CRT-58-002) are all blocked. ## Blockers & Overdue Follow-ups +- Advisory AI customer-facing coverage remains blocked until SBOM-AIAI-31-001 exposes the `/v1/sbom/context` hand-off kit and until CLI-VULN-29-001, CLI-VEX-30-001, POLICY-ENGINE-31-001, and DEVOPS-AIAI-31-001 ship—keeping SBOM-AIAI-31-003 plus DOCS-AIAI-31-005/006/008/009 and the remote inference packaging work (AIAI-31-008) on hold. - `CONCELIER-GRAPH-21-001`, `CONCELIER-GRAPH-21-002`, and `CONCELIER-GRAPH-21-005` remain BLOCKED awaiting `CONCELIER-POLICY-20-002` outputs and Cartographer schema (`CARTO-GRAPH-21-002`), keeping downstream Excititor graph consumers on hold. - `EXCITITOR-GRAPH-21-001`, `EXCITITOR-GRAPH-21-002`, and `EXCITITOR-GRAPH-21-005` stay BLOCKED until the same Cartographer/Link-Not-Merge prerequisites are delivered. - Connector provenance updates `FEEDCONN-ICSCISA-02-012` (due 2025-10-23) and `FEEDCONN-KISA-02-008` (due 2025-10-24) remain past due and need scheduling. 
FeedMerge coordination tasks have been dropped (no AOC policy/governance backing yet), so capacity shifts to schema/guard deliverables. - Mirror evidence work remains blocked until `MIRROR-CRT-56-001` ships; align Export Center (`EXPORT-OBS-51-001`) and AirGap time anchor (`AIRGAP-TIME-57-001`) owners for kickoff. + +## Immediate actions (target: 2025-11-12) + +- **Advisory AI** – Land AIAI-31-009 test harness updates plus remote inference packaging (AIAI-31-008) once POLICY-ENGINE-31-001 and DEVOPS-AIAI-31-001 expose the required knobs; SBOM guild to deliver SBOM-AIAI-31-001 so SBOM-AIAI-31-003 and the CLI/policy/runbook docs can unblock. +- **Concelier** – Finish CONCELIER-AIAI-31-002 structured fields/caching and wire CONCELIER-AIAI-31-003 telemetry before starting air-gap or console endpoints; hold daily sync with Cartographer owners on CONCELIER-LNM-21-201/202 + CARTO-GRAPH-21-002. +- **Excititor** – Wrap EXCITITOR-AIAI-31-001 justification projections, then immediately stage EXCITITOR-AIAI-31-002/003 plus EXCITITOR-ATTEST-01-003 to keep Advisory AI evidence feeds parallel to Concelier. +- **Mirror** – Schedule MIRROR-CRT-56-001 kickoff with Export Center/AirGap Time guilds, confirm `EXPORT-OBS-51-001` + `AIRGAP-TIME-57-001` owners, and pre-stage DSSE/TUF design notes so MIRROR-CRT-56-002 can start as soon as the assembler lands. +- **Downstream prep** – Scanner (Sprint 130) and Policy/Vuln Explorer (Sprint 129) owners should review AIAI-31-009 outputs after 2025-11-10 to ensure schema expectations match; Concelier CONSOLE (23-001..003) and AIRGAP (56/57/58) leads need Link-Not-Merge dates set during the 2025-11-11 checkpoint; Excititor mirror/air-gap teams should stage EXCITITOR-AIRGAP-56/57/58 implementation plans; Mirror CLI/Export Center teams should assemble design notes ahead of MIRROR-CRT-56-002/58-001 once the assembler kickoff happens. 
+ +## Wave detail references (2025-11-09) + +- **110.A AdvisoryAI (docs/implplan/SPRINT_111_advisoryai.md)** + DOCS-AIAI-31-004 remains DOING; DOCS-AIAI-31-005/006/008/009 are BLOCKED on CLI/POLICY/SBOM/DevOps dependencies; SBOM-AIAI-31-003 is still TODO awaiting SBOM-AIAI-31-001; AIAI-31-008 is TODO until guardrail knobs land, and AIAI-31-009 stays DOING with the expanded harness/perf coverage work. +- **110.B Concelier (docs/implplan/SPRINT_112_concelier_i.md)** + CONCELIER-AIAI-31-002 is TODO while CONCELIER-AIAI-31-003 is DOING; all air-gap (`CONCELIER-AIRGAP-56/57/58-*`), attestation (`CONCELIER-ATTEST-73-*`), and console (`CONCELIER-CONSOLE-23-*`) tracks remain TODO pending Link-Not-Merge (`CONCELIER-LNM-21-*`) and Cartographer schema (`CARTO-GRAPH-21-002`) delivery. +- **110.C Excititor (docs/implplan/SPRINT_119_excititor_i.md)** + EXCITITOR-AIAI-31-001 is DOING; EXCITITOR-AIAI-31-002/003/004, EXCITITOR-ATTEST-01-003/-73-001/-73-002, EXCITITOR-AIRGAP-56/57/58-* and EXCITITOR-CONN-TRUST-01-001 are all TODO awaiting the justification projection output plus Link-Not-Merge contracts. +- **110.D Mirror (docs/implplan/SPRINT_125_mirror.md)** + Every MIRROR-CRT-56/57/58 task is still TODO; DSSE/TUF, OCI bundle, time-anchor, CLI, and Export Center automation cannot start until the deterministic bundle assembler (MIRROR-CRT-56-001) is underway with EXPORT-OBS-51-001 and AIRGAP-TIME-57-001 owners confirmed. + +## Downstream dependency rollup (snapshot: 2025-11-09) + +| Wave | Dependent sprint(s) (selected) | Impact if 110.* slips | +| --- | --- | --- | +| 110.A AdvisoryAI | `SPRINT_130_scanner_surface.md`, `SPRINT_129_policy_reasoning.md`, `SPRINT_513_provenance.md`, `SPRINT_514_sovereign_crypto_enablement.md` | Scanner analyzers need AdvisoryAI schemas/feeds, Policy/Vuln Explorer tracks cannot expose advisory reasoning, and provenance/sovereign crypto programs remain paused until evidence contracts land. 
| +| 110.B Concelier | `SPRINT_113_concelier_ii.md`, `SPRINT_114_concelier_iii.md`, `SPRINT_115_concelier_iv.md` | Link-Not-Merge schema + observation APIs gate Concelier graph, telemetry, and orchestrator waves; Console/advisor UIs stay blocked. | +| 110.C Excititor | `SPRINT_120_excititor_ii.md` → `SPRINT_124_excititor_vi.md` | VEX chunk/attestation phases cannot progress until Excititor.I ships justification projections/guardrails, delaying Lens, Policy, and Advisory AI parity for VEX evidence. | +| 110.D Mirror | `SPRINT_125_mirror.md` | Export Center, CLI, and air-gap bundles rely on MIRROR-CRT-56-001; no downstream mirror automation can begin until the deterministic assembler is complete. | + +## Interlocks & owners + +| Interlock | Participants | Needed artifact(s) | Status / notes (2025-11-09) | +| --- | --- | --- | --- | +| Advisory AI customer surfaces | Advisory AI Guild · SBOM Service Guild · CLI Guild · Policy Guild · DevOps Guild | `SBOM-AIAI-31-001`, `SBOM-AIAI-31-003`, `CLI-VULN-29-001`, `CLI-VEX-30-001`, `POLICY-ENGINE-31-001`, `DEVOPS-AIAI-31-001` | SBOM hand-off kit + CLI/Policy knobs still pending; DOCS-AIAI-31-005/006/008/009 stay blocked until these artifacts ship. | +| Link-Not-Merge contract | Concelier Core/WebService Guilds · Cartographer Guild · Platform Events Guild | `CONCELIER-LNM-21-001`→`21-203`, `CARTO-GRAPH-21-002`, `CONCELIER-GRAPH-21-001/002`, `CONCELIER-CONSOLE-23-001..003` | Schema and observation APIs not started; Cartographer schema delivery remains the gate for CONCELIER-AIAI-31-002/003 and all console/air-gap tracks. 
| +| VEX justification + attestation | Excititor WebService/Core Guilds · Observability Guild · Evidence Locker Guild · Cartographer Guild | `EXCITITOR-AIAI-31-001`→`31-004`, `EXCITITOR-ATTEST-01-003`, `EXCITITOR-ATTEST-73-001/002`, `EXCITITOR-AIRGAP-56/57/58-*`, `EXCITITOR-CONN-TRUST-01-001` | Justification enrichment is DOING; every downstream chunk/telemetry/attestation/mirror task remains TODO pending that output plus Link-Not-Merge contracts. | +| Mirror evidence kickoff | Mirror Creator Guild · Exporter Guild · AirGap Time Guild · Security Guild · CLI Guild | `MIRROR-CRT-56-001`→`56-002`, `MIRROR-CRT-57-001/002`, `MIRROR-CRT-58-001/002`, `EXPORT-OBS-51-001`, `EXPORT-OBS-54-001`, `AIRGAP-TIME-57-001`, `CLI-AIRGAP-56-001`, `PROV-OBS-53-001` | No owner meeting yet; assembler (MIRROR-CRT-56-001) is still unscheduled, so DSSE/TUF, OCI, time-anchor, CLI, and Export Center hooks cannot start. | + +### Upcoming checkpoints + +| Date (UTC) | Focus | Agenda / expected exit | +| --- | --- | --- | +| 2025-11-10 | Advisory AI customer surfaces | Confirm SBOM-AIAI-31-001 delivery slot, align CLI-VULN/CLI-VEX scope owners, and capture POLICY-ENGINE-31-001 + DEVOPS-AIAI-31-001 readiness so DOCS-AIAI-31-005/006/008/009 can resume. | +| 2025-11-11 | Link-Not-Merge contract | Cartographer to present CARTO-GRAPH-21-002 schema draft, Concelier to commit dates for CONCELIER-LNM-21-001..003 and CONCELIER-AIAI-31-002/003 telemetry wiring. | +| 2025-11-11 | VEX justification + attestation | Walk EXCITITOR-AIAI-31-001 output, sequence EXCITITOR-AIAI-31-002/003, and lock attestation backlog order (`EXCITITOR-ATTEST-01-003`, `-73-001`, `-73-002`). | +| 2025-11-12 | Mirror evidence kickoff | Assign MIRROR-CRT-56-001 lead, confirm EXPORT-OBS-51-001/AIRGAP-TIME-57-001 owners, and outline DSSE/TUF design reviews for MIRROR-CRT-56-002. 
| + +## Coordination log + +| Date | Notes | +| --- | --- | +| 2025-11-09 | Sprint file refreshed with wave detail references, interlocks, and risk log; waiting on 2025-11-10/11/12 syncs for SBOM/CLI/POLICY/DevOps, Link-Not-Merge, Excititor justification, and Mirror assembler commitments. | + +## Risk log (2025-11-09) + +| Risk | Impact | Mitigation / owner | +| --- | --- | --- | +| SBOM/CLI/Policy/DevOps deliverables slip past 2025-11-12 | Advisory AI CLI/docs remain blocked; downstream Scanner/Policy/Vuln Explorer sprints cannot validate schema feeds | Capture ETAs during 2025-11-10 interlock; SBOM/CLI/Policy/DevOps guild leads to publish commit dates and update sprint rows immediately | +| Link-Not-Merge schema delays (`CONCELIER-LNM-21-*`, `CARTO-GRAPH-21-002`) | Concelier evidence APIs, console views, and Excititor graph consumers cannot progress; Advisory AI loses deterministic Concelier feeds | 2025-11-11 checkpoint to lock schema delivery; Cartographer + Concelier core owners to share migration plan and unblock CONCELIER-AIAI-31-002/003 | +| Excititor justification/attestation backlog stalls | Advisory AI cannot cite VEX evidence, Excititor attestation/air-gap tasks remain TODO, Mirror parity slips | Excititor web/core leads to finish EXCITITOR-AIAI-31-001 and schedule EXCITITOR-AIAI-31-002/003 + ATTEST tasks during 2025-11-11 session | +| Mirror assembler lacks staffing (`MIRROR-CRT-56-001`) | DSSE/TUF, OCI/time-anchor, CLI, Export Center automations cannot even start, blocking Wave 110.D and Sprint 125 entirely | 2025-11-12 kickoff must assign an owner and confirm EXPORT-OBS/AIRGAP-TIME prerequisites; track progress daily until assembler code is in flight | diff --git a/docs/implplan/SPRINT_130_scanner_surface.md b/docs/implplan/SPRINT_130_scanner_surface.md index 0a3f6360c..f1e0a6049 100644 --- a/docs/implplan/SPRINT_130_scanner_surface.md +++ b/docs/implplan/SPRINT_130_scanner_surface.md @@ -8,11 +8,11 @@ Execute the tasks below strictly in order; each 
artifact unblocks the next analy | Order | Task ID | State | Summary | Owner / Source | Depends On | | --- | --- | --- | --- | --- | --- | -| 1 | `SCANNER-ANALYZERS-DENO-26-001` | TODO | Build the deterministic input normalizer + VFS merger for `deno.json(c)`, import maps, lockfiles, vendor trees, `$DENO_DIR`, and OCI layers so analyzers have a canonical file view. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | — | -| 2 | `SCANNER-ANALYZERS-DENO-26-002` | TODO | Implement the module graph resolver covering static/dynamic imports, npm bridge, cache lookups, built-ins, WASM/JSON assertions, and annotate edges with their resolution provenance. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | SCANNER-ANALYZERS-DENO-26-001 | -| 3 | `SCANNER-ANALYZERS-DENO-26-003` | TODO | Ship the npm/node compatibility adapter that maps `npm:` specifiers, evaluates `exports` conditionals, and logs builtin usage for policy overlays. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | SCANNER-ANALYZERS-DENO-26-002 | -| 4 | `SCANNER-ANALYZERS-DENO-26-004` | TODO | Add the permission/capability analyzer covering FS/net/env/process/crypto/FFI/workers plus dynamic-import + literal fetch heuristics with reason codes. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | SCANNER-ANALYZERS-DENO-26-003 | -| 5 | `SCANNER-ANALYZERS-DENO-26-005` | TODO | Build bundle/binary inspectors for eszip and `deno compile` executables to recover graphs, configs, embedded resources, and snapshots. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | SCANNER-ANALYZERS-DENO-26-004 | -| 6 | `SCANNER-ANALYZERS-DENO-26-006` | TODO | Implement the OCI/container adapter that stitches per-layer Deno caches, vendor trees, and compiled binaries back into provenance-aware analyzer inputs. 
| Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | SCANNER-ANALYZERS-DENO-26-005 | -| 7 | `SCANNER-ANALYZERS-DENO-26-007` | TODO | Produce AOC-compliant observation writers (entrypoints, modules, capability edges, workers, warnings, binaries) with deterministic reason codes. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | SCANNER-ANALYZERS-DENO-26-006 | +| 1 | `SCANNER-ANALYZERS-DENO-26-001` | DONE | Build the deterministic input normalizer + VFS merger for `deno.json(c)`, import maps, lockfiles, vendor trees, `$DENO_DIR`, and OCI layers so analyzers have a canonical file view. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | — | +| 2 | `SCANNER-ANALYZERS-DENO-26-002` | DONE | Implement the module graph resolver covering static/dynamic imports, npm bridge, cache lookups, built-ins, WASM/JSON assertions, and annotate edges with their resolution provenance. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | SCANNER-ANALYZERS-DENO-26-001 | +| 3 | `SCANNER-ANALYZERS-DENO-26-003` | DONE | Ship the npm/node compatibility adapter that maps `npm:` specifiers, evaluates `exports` conditionals, and logs builtin usage for policy overlays. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | SCANNER-ANALYZERS-DENO-26-002 | +| 4 | `SCANNER-ANALYZERS-DENO-26-004` | DONE | Add the permission/capability analyzer covering FS/net/env/process/crypto/FFI/workers plus dynamic-import + literal fetch heuristics with reason codes. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | SCANNER-ANALYZERS-DENO-26-003 | +| 5 | `SCANNER-ANALYZERS-DENO-26-005` | DONE | Build bundle/binary inspectors for eszip and `deno compile` executables to recover graphs, configs, embedded resources, and snapshots. 
| Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | SCANNER-ANALYZERS-DENO-26-004 | +| 6 | `SCANNER-ANALYZERS-DENO-26-006` | DONE | Implement the OCI/container adapter that stitches per-layer Deno caches, vendor trees, and compiled binaries back into provenance-aware analyzer inputs. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | SCANNER-ANALYZERS-DENO-26-005 | +| 7 | `SCANNER-ANALYZERS-DENO-26-007` | DOING | Produce AOC-compliant observation writers (entrypoints, modules, capability edges, workers, warnings, binaries) with deterministic reason codes. | Deno Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | SCANNER-ANALYZERS-DENO-26-006 | | 8 | `SCANNER-ANALYZERS-DENO-26-008` | TODO | Finalize fixture + benchmark suite (vendor/npm/FFI/worker/dynamic import/bundle/cache/container cases) validating analyzer determinism and performance. | Deno Analyzer Guild, QA Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno) | SCANNER-ANALYZERS-DENO-26-007 | diff --git a/docs/implplan/SPRINT_137_scanner_gap_design.md b/docs/implplan/SPRINT_137_scanner_gap_design.md index a45742af3..cdda1a954 100644 --- a/docs/implplan/SPRINT_137_scanner_gap_design.md +++ b/docs/implplan/SPRINT_137_scanner_gap_design.md @@ -6,9 +6,160 @@ | Task ID | State | Summary | Owner / Source | Depends On | | --- | --- | --- | --- | --- | -| `SCANNER-ENG-0002` | TODO | Design the Node.js lockfile collector + CLI validator per `docs/benchmarks/scanner/scanning-gaps-stella-misses-from-competitors.md`, capturing Surface + policy requirements before implementation. | Scanner Guild, CLI Guild (docs/modules/scanner) | — | -| `SCANNER-ENG-0003` | TODO | Design Python lockfile + editable-install parity checks with policy predicates and CLI workflow coverage as outlined in the gap analysis. 
| Python Analyzer Guild, CLI Guild (docs/modules/scanner) | — | -| `SCANNER-ENG-0004` | TODO | Design Java lockfile ingestion/validation (Gradle/SBT collectors, CLI verb, policy hooks) to close comparison gaps. | Java Analyzer Guild, CLI Guild (docs/modules/scanner) | — | -| `SCANNER-ENG-0005` | TODO | Enhance Go stripped-binary fallback inference design, including inferred module metadata + policy integration, per the gap analysis. | Go Analyzer Guild (docs/modules/scanner) | — | -| `SCANNER-ENG-0006` | TODO | Expand Rust fingerprint coverage design (enriched fingerprint catalogue + policy controls) per the comparison matrix. | Rust Analyzer Guild (docs/modules/scanner) | — | -| `SCANNER-ENG-0007` | TODO | Design the deterministic secret leak detection pipeline covering rule packaging, Policy Engine integration, and CLI workflow. | Scanner Guild, Policy Guild (docs/modules/scanner) | — | +| `SCANNER-ENG-0002` | DONE (2025-11-09) | Design the Node.js lockfile collector + CLI validator per `docs/benchmarks/scanner/scanning-gaps-stella-misses-from-competitors.md`, capturing Surface + policy requirements before implementation. | Scanner Guild, CLI Guild (docs/modules/scanner) | — | +| `SCANNER-ENG-0003` | DONE (2025-11-09) | Design Python lockfile + editable-install parity checks with policy predicates and CLI workflow coverage as outlined in the gap analysis. | Python Analyzer Guild, CLI Guild (docs/modules/scanner) | — | +| `SCANNER-ENG-0004` | DONE (2025-11-09) | Design Java lockfile ingestion/validation (Gradle/SBT collectors, CLI verb, policy hooks) to close comparison gaps. | Java Analyzer Guild, CLI Guild (docs/modules/scanner) | — | +| `SCANNER-ENG-0005` | DONE (2025-11-09) | Enhance Go stripped-binary fallback inference design, including inferred module metadata + policy integration, per the gap analysis. 
| Go Analyzer Guild (docs/modules/scanner) | — | +| `SCANNER-ENG-0006` | DONE (2025-11-09) | Expand Rust fingerprint coverage design (enriched fingerprint catalogue + policy controls) per the comparison matrix. | Rust Analyzer Guild (docs/modules/scanner) | — | +| `SCANNER-ENG-0007` | DONE (2025-11-09) | Design the deterministic secret leak detection pipeline covering rule packaging, Policy Engine integration, and CLI workflow. | Scanner Guild, Policy Guild (docs/modules/scanner) | — | + +> 2025-11-09: The gap designs below capture analyzer, Surface, CLI, and policy contracts for SCANNER-ENG-0002…0007; tasks were taken DOING → DONE after this review. + +## Implementation progress (2025-11-09) + +- Gradle/Maven lock ingestion is now wired into `JavaLanguageAnalyzer`: `JavaLockFileCollector` sorts lock metadata deterministically, merges it with archive findings (`lockConfiguration`, `lockRepository`, `lockResolved`), and emits declared-only components (with `declaredOnly=true`, `lockSource`, `lockLocator`) whenever jars are missing. CLI/Surface telemetry tags were updated to carry per-language declared/missing counters. +- `stella java lock-validate` shares the `HandleLanguageLockValidateAsync` helper with Node/Python, has table/JSON output parity, and is documented alongside the scanner README + CLI guide (including the new metric `stellaops.cli.java.lock_validate.count`). Tests now cover the Ruby/Node/Java lock workflows end-to-end via `CommandHandlersTests`. + +## Design outcomes + +### SCANNER-ENG-0002 — Node.js lockfile collector + CLI validator + +**Scope & goals** +- Provide deterministic ingestion of `pnpm-lock.yaml`, `package-lock.json`, and `yarn.lock` so declared dependencies are preserved even when `node_modules` is absent. +- Offer a CLI validator that runs without scheduling a scan, reusing the same collector and Surface safety rails. + +**Design decisions** +- Add `NodeLockfileCollector` under `StellaOps.Scanner.Analyzers.Lang.Node`. 
The collector normalises manifests into a shared model (`package name`, `version`, `resolved`, `integrity`, `registry`, `workspace path`) and emits `DeclaredOnly = true` components stored beside installed fragments (`LayerComponentFragment.DeclaredSources`). +- Reuse `LanguageAnalyzerContext` merge rules so installed packages supersede declared-only entries while retaining discrepancies for policy. +- Gate execution through `Surface.Validation` (`scanner.lockfiles.node.*` knobs) that enforce max lockfile size, workspace limits, and registry allowlists; violations fail fast with deterministic error IDs. +- Private registries referenced in lockfiles must use `secret://` handles. `Surface.Secrets` resolves these handles before validation and the resolved metadata (never the secret) is attached to the collector context for auditing. +- EntryTrace usage hints annotate runtime packages; when a package is used at runtime but missing from the lockfile, the merge step tags it with `UsageWithoutDeclaration`. + +**CLI, policy, docs** +- Add `stella node lock-validate [path] --format {auto|pnpm|npm|yarn}` that runs locally, reuses Surface controls, and returns canonical JSON + table summaries. The CLI inherits `--surface-config` so air-gapped configs stay consistent. +- Scanner/WebService gains `--node-lockfiles` / `SCANNER__NODE__LOCKFILES__ENABLED` toggles to control ingestion during full scans. +- Policy Engine receives predicates: `node.lock.declaredMissing`, `node.lock.registryDisallowed`, `node.lock.declarationOnly`. Templates show how to fail on disallowed registries while only warning on declared-only findings that never reach runtime. +- Update `docs/modules/scanner/architecture.md` and policy DSL appendices with the new evidence flags and CLI workflow. + +**Testing, telemetry, rollout** +- Golden fixtures for pnpm v8, npm v9, and yarn berry lockfiles live under `tests/Scanner.Analyzers.Node/__fixtures__/lockfiles`. 
Deterministic snapshots are asserted in both analyzer and CLI tests. +- Add integration coverage in `tests/Scanner.Cli.Node` verifying exit codes and explain output for mismatched packages/registries. +- Emit counters (`scanner.node.lock.declared`, `scanner.node.lock.mismatch`, `scanner.node.lock.registry_blocked`) plus structured logs keyed by lockfile digest. +- Offline Kit ships the parser tables and CLI binary help under `offline/scanner/node-lockfiles/README.md`. + +**Implementation status (2025-11-09)** +- Lockfile declarations now emit `DeclaredOnly` components in `StellaOps.Scanner.Analyzers.Lang.Node` with lock source/locator metadata and deterministic evidence for policy use. +- CLI verb `stella node lock-validate` inspects lockfiles locally, rendering declared-only/missing-lock summaries and emitting `stellaops.cli.node.lock_validate.count` telemetry. +- Node analyzer determinism fixtures updated with declared-only coverage; CLI unit suite exercises the new handler. +- Python analyzer ingests `requirements*.txt`, `Pipfile.lock`, and `poetry.lock`, tagging installed distributions with `lockSource` metadata and creating declared-only components. `stella python lock-validate` mirrors the workflow for offline validation and records `stellaops.cli.python.lock_validate.count`. + +### SCANNER-ENG-0003 — Python lockfile + editable-install parity + +**Scope & goals** +- Parse Python lockfiles (`poetry.lock`, `Pipfile.lock`, hashed `requirements*.txt`) to capture declared graphs pre-install. +- Detect editable installs and local path references so policy can assert parity between lockfiles and runtime contents. + +**Design decisions** +- Introduce `PythonLockfileCollector` in `StellaOps.Scanner.Analyzers.Lang.Python`, capable of reading Poetry, Pipenv, pip-tools, and raw requirements syntax (including environment markers, extras, hashes, VCS refs). 
+- Extend the collector with an `EditableResolver` that inspects lockfile entries (`path =`, `editable = true`, `-e ./pkg`) and consults `Surface.FS` to normalise the referenced directory, capturing `EditablePath`, `SourceDigest`, and `VcsRef` metadata. +- Merge results with installed `*.dist-info` data using `LanguageAnalyzerContext`. Installed evidence overrides declared-only components; editable packages missing from the artifact layer are tagged `EditableMissing`. +- `Surface.Validation` adds knobs `scanner.lockfiles.python.maxBytes`, `scanner.lockfiles.python.allowedIndexes`, and ensures hashes are present when policy mandates repeatable environments. Private index credentials are provided via `Surface.Secrets` and never persisted. + +**CLI, policy, docs** +- New CLI verb `stella python lock-validate` mirrors the Node workflow, validates editable references resolve within the checked-out tree, and emits parity diagnostics. +- Scanner runs accept `--python-lockfiles` to toggle ingestion per tenant. +- Policy predicates: `python.lock.declaredMissing`, `python.lock.editableUnpinned`, `python.lock.indexDisallowed`. Editable packages missing from the filesystem can be set to fail builds or raise waivers. +- Document the workflow in `docs/modules/scanner/architecture.md` and the policy cookbook, including guidance on handling build-system backends. + +**Testing, telemetry, rollout** +- Fixtures covering Poetry 1.6, Pipenv 2024.x, `requirements.txt` with markers, and mixed editable/VCS entries live beside the analyzer tests. +- CLI golden output asserts deterministic ordering and masking of secrets in URLs. +- Metrics: `scanner.python.lock.declared`, `scanner.python.lock.editable`, `scanner.python.lock.failures`. +- Offline Kit bundles include parser definitions and sample policies to keep air-gapped tenants aligned. 
+ +### SCANNER-ENG-0004 — Java/Gradle/SBT lockfile ingestion & validation + +**Scope & goals** +- Capture Gradle, Maven, and SBT dependency locks before artifacts are built, along with repository provenance and configuration scopes. +- Provide CLI validation and policy predicates enforcing repository allowlists and declared/runtime parity. + +**Design decisions** +- Add collectors: `GradleLockfileCollector` (reads `gradle.lockfile` and `gradle/dependency-locks/*.lock`), `MavenLockfileCollector` (parses `pom.xml`/`pom.lock` + dependencyManagement overrides), and `SbtLockfileCollector` (reads Ivy resolution outputs or `dependencies.lock`). +- Each collector emits normalized records keyed by `groupId:artifactId:version` plus config scope (`compileClasspath`, `runtimeClasspath`, etc.), repository URI, checksum, and optional classifier. Records are stored as `DeclaredOnly` fragments associated with their workspace path. +- `Surface.Validation` enforces file-size limits, repository allowlists (`scanner.lockfiles.java.allowedRepos`), and optional checksum requirements. Private Maven credentials flow through `Surface.Secrets`. +- `JavaLanguageAnalyzer` merges declared entries with installed archives. Runtime usage from EntryTrace is attached so policies can prioritize gaps that reach runtime. + +**CLI, policy, docs** +- CLI verb `stella java lock-validate` supports Gradle/Maven/SBT modes, prints mismatched dependencies, and checks repository policy. +- Scanner flags `--java-lockfiles` or env `SCANNER__JAVA__LOCKFILES__ENABLED` gate ingestion. Lockfile artifacts are uploaded to Surface.FS for evidence replay. +- Policy predicates: `java.lock.declaredMissing`, `java.lock.repoDisallowed`, `java.lock.unpinned` (no checksum). Explain traces cite repository + config scope for each discrepancy. +- Docs: update scanner module dossier and policy template library with repository governance examples. 
+ +**Testing, telemetry, rollout** +- Fixtures derived from sample Gradle multi-projects, Maven BOM hierarchies, and SBT builds validate parser coverage and CLI messaging. +- Metrics `scanner.java.lock.declared`, `scanner.java.lock.missing`, `scanner.java.lock.repo_blocked` feed the observability dashboards. +- Offline kits include parser grammars and CLI docs so air-gapped tenants can enforce repo policies without SaaS dependencies. + +### SCANNER-ENG-0005 — Go stripped-binary fallback inference + +**Scope & goals** +- Enrich the stripped-binary fallback so Go modules remain explainable even without embedded `buildinfo`, and give Policy Engine knobs to treat inferred evidence differently. + +**Design decisions** +- Extend `GoBinaryScanner` with an inference pipeline that, when build info is absent, parses ELF/Mach-O symbol tables and DWARF data using the existing `ElfSharp` bindings. Symbols feed into a new `GoSymbolInferenceEngine` that matches against a signed `GoFingerprintCatalog` under `StellaOps.Scanner.Analyzers.Lang.Go.Fingerprints`. +- Inferred results carry `Confidence` (0–1), matched symbol counts, and reasons (`BuildInfoMissing`, `SymbolMatches`, `PkgPathFallback`). Records are emitted as `InferredModule` metadata alongside hashed fallback components. +- Update fragment schemas so DSSE-composed BOMs include both the hashed fallback and the inference summary, enabling deterministic replay. +- `Surface.Validation` exposes `scanner.analyzers.go.fallback.enabled`, `scanner.analyzers.go.fallback.maxSymbolBytes`, ensuring workloads can opt out or constrain processing time. + +**Policy, CLI, docs** +- Policy predicates `go.module.inferenceConfidence` and `go.module.hashOnly` let tenants fail when only hashed provenance exists or warn when inference confidence < threshold. +- CLI flag `--go-fallback-detail` (and corresponding API query) prints hashed vs inferred modules, confidence, and remediation hints (e.g., rebuild with `-buildvcs`). 
+- Documentation updates cover inference details, how confidence feeds lattice weights, and how to author waivers. + +**Testing, telemetry, rollout** +- Add stripped binary fixtures (Linux, macOS) plus intentionally obfuscated samples. Tests assert deterministic inference and hashing. +- Metrics `scanner.go.inference.count`, `scanner.go.inference.confidence_bucket` ensure observability; logs include `imageDigest`, `binaryPath`, `confidence`. +- Offline Kit bundles the fingerprint catalog and inference changelog so air-gapped tenants can audit provenance. + +### SCANNER-ENG-0006 — Rust fingerprint coverage expansion + +**Scope & goals** +- Improve Rust evidence for stripped binaries by expanding fingerprint sources, symbol parsing, and policy controls over heuristic findings. + +**Design decisions** +- Build a new `RustFingerprintCatalog` signed and versioned, fed by Cargo crate metadata, community hash contributions, and curated fingerprints from StellaOps scans. Catalog lives under `StellaOps.Scanner.Analyzers.Lang.Rust.Fingerprints` with deterministic ordering. +- Extend `RustAnalyzerCollector` with symbol parsing (DWARF, ELF build IDs) via `SymbolGraphResolver`. Resolver correlates crate sections, monomorphized symbol prefixes, and `#[panic_handler]` markers to infer crate names and versions. +- Emit inference metadata (`fingerprintId`, `confidence`, `symbolEvidence[]`) alongside hashed fallbacks. Authoritative Cargo.lock data (when present) still wins in merges. +- `Surface.Validation` adds toggles for fingerprint freshness and maximum catalog size per tenant. Offline bundles deliver catalog updates signed via DSSE. + +**Policy, CLI, docs** +- Policy predicates: `rust.fingerprint.confidence`, `rust.fingerprint.catalogAgeDays`. Templates show how to warn when only heuristic data exists, or fail if catalog updates are stale. +- CLI flag `--rust-fingerprint-detail` prints authoritative vs inferred crates, symbol samples, and guidance. 
+- Documentation (scanner module + policy guide) explains how inference is stored, how catalog publishing works, and how to tune policy weights. + +**Testing, telemetry, rollout** +- Add fixtures for stripped Rust binaries across editions (2018–2024) and with/without LTO. Determinism tests compare catalog revisions and inference outputs. +- Metrics `scanner.rust.fingerprint.authoritative`, `scanner.rust.fingerprint.inferred`, `scanner.rust.fingerprint.catalog_version` feed dashboards and alerts. +- Offline kit updates include catalog packages, verification instructions, and waiver templates tied to predicate names. + +### SCANNER-ENG-0007 — Deterministic secret leak detection pipeline + +**Scope & goals** +- Provide first-party secret leak detection that matches competitor capabilities while preserving deterministic, offline-friendly execution and explainability. + +**Design decisions** +- Introduce `StellaOps.Scanner.Analyzers.Secrets`, a restart-time plug-in that consumes rule bundles (`ruleset.tgz`) signed with DSSE and versioned (semantic version + hash). Bundles live under `plugins/scanner/secrets/rules/`. +- Rule bundles contain deterministic regex/entropy definitions, context windows, and masking directives. A rule index is generated at build time to guarantee deterministic ordering. +- Analyzer executes after Surface validation of each file/layer. Files pass through a streaming matcher that outputs `SecretLeakEvidence` (rule id, severity, confidence, file path, byte ranges, masking applied). Findings persist in `ScanAnalysisStore` and align with DSSE exports. +- `Surface.Validation` introduces `scanner.secrets.rules.bundle`, `scanner.secrets.maxFileBytes`, and `scanner.secrets.targetGlobs`. `Surface.Secrets` supplies allowlist tokens (e.g., approved test keys) without exposing plaintext to analyzers. +- Events/attestations: findings optionally published via the existing Redis events, and Export Center bundles include masked evidence plus rule metadata. 
+ +**CLI, policy, docs** +- Add `stella secrets scan [path|image]` plus `--secrets` flag on `stella scan` to run the analyzer inline. CLI output redacts payloads, shows rule IDs, severity, and remediation hints. +- Policy Engine ingests `secret.leak` evidence, including `ruleId`, `confidence`, `masking.applied`, enabling predicates like `secret.leak.highConfidence`, `secret.leak.ruleDisabled`. Templates cover severities, approvals, and ticket automation. +- Documentation updates: scanner module dossier (new analyzer), policy cookbook (rule management), and Offline Kit guide (bundling rule updates). + +**Testing, telemetry, rollout** +- Rule-pack regression tests ensure deterministic matching and masking; analyzer unit tests cover regex + entropy combos, while integration tests run across sample repositories and OCI layers. +- Metrics: `scanner.secrets.ruleset.version`, `scanner.secrets.findings.total`, `scanner.secrets.findings.high_confidence`. Logs include rule ID, masked hash, and file digests for auditing. +- Offline Kit delivers the signed ruleset catalog, upgrade guide, and policy defaults so fully air-gapped tenants can keep pace without internet access. diff --git a/docs/implplan/SPRINT_138_scanner_ruby_parity.md b/docs/implplan/SPRINT_138_scanner_ruby_parity.md index c45372279..688c2cb21 100644 --- a/docs/implplan/SPRINT_138_scanner_ruby_parity.md +++ b/docs/implplan/SPRINT_138_scanner_ruby_parity.md @@ -13,11 +13,16 @@ | `SCANNER-ENG-0012` | TODO | Evaluate Dart analyzer requirements (pubspec parsing, AOT artifacts) and split implementation tasks. | Language Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Dart) | — | | `SCANNER-ENG-0013` | TODO | Plan Swift Package Manager coverage (Package.resolved, xcframeworks, runtime hints) with policy hooks. 
| Swift Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Swift) | — | | `SCANNER-ENG-0014` | TODO | Align Kubernetes/VM target coverage between Scanner and Zastava per comparison findings; publish joint roadmap. | Runtime Guild, Zastava Guild (docs/modules/scanner) | — | -| `SCANNER-ENG-0015` | TODO | Document DSSE/Rekor operator enablement guidance and rollout levers surfaced in the gap analysis. | Export Center Guild, Scanner Guild (docs/modules/scanner) | — | +| `SCANNER-ENG-0015` | DOING (2025-11-09) | Document DSSE/Rekor operator enablement guidance and rollout levers surfaced in the gap analysis. | Export Center Guild, Scanner Guild (docs/modules/scanner) | — | | `SCANNER-ENG-0016` | DOING (2025-11-02) | Implement `RubyLockCollector` + vendor cache ingestion per design §4.1–4.3. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby) | SCANNER-ENG-0009 | -| `SCANNER-ENG-0017` | TODO | Build the runtime require/autoload graph builder with tree-sitter Ruby per design §4.4 and integrate EntryTrace hints. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby) | SCANNER-ENG-0016 | -| `SCANNER-ENG-0018` | TODO | Emit Ruby capability + framework surface signals as defined in design §4.5 with policy predicate hooks. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby) | SCANNER-ENG-0017 | -| `SCANNER-ENG-0019` | TODO | Ship Ruby CLI verbs (`stella ruby inspect|resolve`) and Offline Kit packaging per design §4.6. | Ruby Analyzer Guild, CLI Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby) | SCANNER-ENG-0016..0018 | +| `SCANNER-ENG-0017` | DONE (2025-11-09) | Build the runtime require/autoload graph builder with tree-sitter Ruby per design §4.4 and integrate EntryTrace hints. 
| Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby) | SCANNER-ENG-0016 | +| `SCANNER-ENG-0018` | DONE (2025-11-09) | Emit Ruby capability + framework surface signals as defined in design §4.5 with policy predicate hooks. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby) | SCANNER-ENG-0017 | +| `SCANNER-ENG-0019` | DOING (2025-11-10) | Ship Ruby CLI verbs (`stella ruby inspect|resolve`) and Offline Kit packaging per design §4.6. | Ruby Analyzer Guild, CLI Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby) | SCANNER-ENG-0016..0018 | | `SCANNER-LIC-0001` | DOING (2025-11-02) | Vet tree-sitter Ruby licensing + Offline Kit packaging requirements and document SPDX posture. | Scanner Guild, Legal Guild (docs/modules/scanner) | SCANNER-ENG-0016 | | `SCANNER-POLICY-0001` | TODO | Define Policy Engine predicates for Ruby groups/capabilities and align lattice weights. | Policy Guild, Ruby Analyzer Guild (docs/modules/scanner) | SCANNER-ENG-0018 | -| `SCANNER-CLI-0001` | TODO | Coordinate CLI UX/help text for new Ruby verbs and update CLI docs/golden outputs. | CLI Guild, Ruby Analyzer Guild (src/Cli/StellaOps.Cli) | SCANNER-ENG-0019 | +| `SCANNER-CLI-0001` | DOING (2025-11-09) | Coordinate CLI UX/help text for new Ruby verbs and update CLI docs/golden outputs. | CLI Guild, Ruby Analyzer Guild (src/Cli/StellaOps.Cli) | SCANNER-ENG-0019 | + +### Updates — 2025-11-09 + +- `SCANNER-CLI-0001`: Completed Spectre table wrapping fix for runtime/lockfile columns, expanded Ruby resolve JSON assertions, removed ad-hoc debug artifacts, and drafted CLI docs covering `stellaops-cli ruby inspect|resolve`. Pending: final verification + handoff once docs/tests merge. +- `SCANNER-CLI-0001`: Wired `stellaops-cli ruby inspect|resolve` into `CommandFactory` so the verbs are available via `System.CommandLine` with the expected `--root`, `--image/--scan-id`, and `--format` options; `dotnet test ... --filter Ruby` passes. 
diff --git a/docs/implplan/SPRINT_140_runtime_signals.md b/docs/implplan/SPRINT_140_runtime_signals.md index 2ef4ff8ee..16fa29171 100644 --- a/docs/implplan/SPRINT_140_runtime_signals.md +++ b/docs/implplan/SPRINT_140_runtime_signals.md @@ -13,4 +13,48 @@ This file now only tracks the runtime & signals status snapshot. Active backlog | 140.C Signals | Signals Guild · Authority Guild (for scopes) · Runtime Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner | DOING | API skeleton and callgraph ingestion are active; runtime facts endpoint still depends on the same shared prerequisites. | | 140.D Zastava | Zastava Observer/Webhook Guilds · Security Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner | TODO | Surface.FS integration waits on Scanner surface caches; prep sealed-mode env helpers meanwhile. | +# Status snapshot (2025-11-09) + +- **140.A Graph** – GRAPH-INDEX-28-007/008/009/010 remain TODO while Scanner surface artifacts and SBOM projection schemas are outstanding; no clustering/backfill/fixture work has started. +- **140.B SbomService** – Advisory AI, console, and orchestrator tracks stay TODO; SBOM-SERVICE-21-001..004 are BLOCKED until Concelier Link-Not-Merge (`CONCELIER-GRAPH-21-001`) + Cartographer schema (`CARTO-GRAPH-21-002`) land. +- **140.C Signals** – SIGNALS-24-001 now complete (host, RBAC, sealed-mode readiness, `/signals/facts/{subject}`); SIGNALS-24-002 added callgraph retrieval APIs but still needs CAS promotion; SIGNALS-24-003 accepts JSON + NDJSON runtime uploads, yet NDJSON provenance/context wiring remains TODO. Scoring/cache work (SIGNALS-24-004/005) is still BLOCKED pending runtime feed availability (target 2025-11-09). +- **140.D Zastava** – ZASTAVA-ENV-01/02, ZASTAVA-SECRETS-01/02, and ZASTAVA-SURFACE-01/02 are still TODO because Surface.FS cache outputs from Scanner aren’t published; guilds limited to design/prep. 
+ +# Blockers & coordination + +- **Concelier Link-Not-Merge / Cartographer schemas** – SBOM-SERVICE-21-001..004 cannot start until `CONCELIER-GRAPH-21-001` and `CARTO-GRAPH-21-002` deliver the projection payloads. +- **Scanner surface artifacts** – GRAPH-INDEX-28-007+ and all ZASTAVA-SURFACE tasks depend on Sprint 130 analyzer outputs and cached layer metadata; need updated ETA from Scanner guild. +- **Signals host merge** – SIGNALS-24-003/004/005 remain blocked until SIGNALS-24-001/002 merge and Authority scope work (`AUTH-SIG-26-001`) is validated with Runtime guild. + +# Next actions (target: 2025-11-12) + +| Owner(s) | Action | +| --- | --- | +| Graph Indexer Guild | Hold design sync with Scanner Surface + SBOM Service owners to lock artifact delivery dates; prep clustering job scaffolds so work can start once feeds land. | +| SBOM Service Guild | Finalize projection schema doc with Concelier/Cartographer, then flip SBOM-SERVICE-21-001 to DOING and align SBOM-AIAI-31-001 with Sprint 111 requirements. | +| Signals Guild | Land SIGNALS-24-001/002 PRs, then immediately kick off SIGNALS-24-003; coordinate scoring/cache roadmap with Runtime + Data Science guilds. | +| Zastava Guilds | Draft Surface.Env helper adoption plan and ensure Surface.Secrets references are wired so implementation can begin when Surface.FS caches publish. | + +# Downstream dependency rollup (snapshot: 2025-11-09) + +| Track | Dependent sprint(s) | Impact if delayed | +| --- | --- | --- | +| 140.A Graph | `docs/implplan/SPRINT_141_graph.md` (Graph clustering/backfill) and downstream Graph UI overlays | Graph insights, policy overlays, and runtime clustering views cannot progress without GRAPH-INDEX-28-007+ landing. | +| 140.B SbomService | `docs/implplan/SPRINT_142_sbomservice.md`, Advisory AI (Sprint 111), Policy/Vuln Explorer feeds | SBOM projections/events stay unavailable, blocking Advisory AI remediation heuristics, policy joins, and Vuln Explorer candidate generation. 
| +| 140.C Signals | `docs/implplan/SPRINT_143_signals.md` plus Runtime/Reachability dashboards | Reachability scoring, cache/event layers, and runtime facts outputs cannot start until SIGNALS-24-001/002 merge and Scanner runtime data flows. | +| 140.D Zastava | `docs/implplan/SPRINT_144_zastava.md`, Runtime admission enforcement | Surface-integrated drift/admission hooks remain stalled; sealed-mode env helpers cannot ship without Surface.FS metadata. | + +# Risk log + +| Risk | Impact | Mitigation / owner | +| --- | --- | --- | +| Concelier Link-Not-Merge schema slips | SBOM-SERVICE-21-001..004 + Advisory AI SBOM endpoints stay blocked | Concelier + Cartographer guilds to publish CARTO-GRAPH-21-002 ETA during next coordination call; SBOM guild to prep schema doc meanwhile. | +| Scanner surface artifact delay | GRAPH-INDEX-28-007+ and ZASTAVA-SURFACE-* cannot even start | Scanner guild to deliver analyzer artifact roadmap; Graph/Zastava teams to prepare mocks/tests in advance. | +| Signals host/callgraph merge misses 2025-11-09 | SIGNALS-24-003/004/005 remain blocked, pushing reachability scoring past sprint goals | Signals + Authority guilds to prioritize AUTH-SIG-26-001 review and merge SIGNALS-24-001/002 before 2025-11-10 standup. | + +# Coordination log + +| Date | Notes | +| --- | --- | +| 2025-11-09 | Sprint 140 snapshot refreshed; awaiting Scanner surface artifact ETA, Concelier/CARTO schema delivery, and Signals host merge before any wave can advance to DOING. | # Sprint 140 - Runtime & Signals diff --git a/docs/implplan/SPRINT_143_signals.md b/docs/implplan/SPRINT_143_signals.md index 010f77d3f..b5aab915f 100644 --- a/docs/implplan/SPRINT_143_signals.md +++ b/docs/implplan/SPRINT_143_signals.md @@ -10,11 +10,14 @@ Notes: - 2025-10-29: JSON parsers for Java/Node.js/Python/Go implemented; artifacts stored on filesystem with SHA-256 and callgraphs upserted into Mongo. 
Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -SIGNALS-24-001 | DOING (2025-11-07) | Stand up Signals API skeleton with RBAC, sealed-mode config, DPoP/mTLS enforcement, and `/facts` scaffolding so downstream ingestion work can begin. Dependencies: AUTH-SIG-26-001. | Signals Guild, Authority Guild (src/Signals/StellaOps.Signals) +SIGNALS-24-001 | DONE (2025-11-09) | Stand up Signals API skeleton with RBAC, sealed-mode config, DPoP/mTLS enforcement, and `/facts` scaffolding so downstream ingestion work can begin. Dependencies: AUTH-SIG-26-001. | Signals Guild, Authority Guild (src/Signals/StellaOps.Signals) +> 2025-11-09: Signals host now registers sealed-mode evidence validation, exposes `/readyz`/`/status` indicators, enforces scope policies, and adds `/signals/facts/{subjectKey}` retrieval plus runtime-facts ingestion backing services. SIGNALS-24-002 | DOING (2025-11-07) | Implement callgraph ingestion/normalization (Java/Node/Python/Go) with CAS persistence and retrieval APIs to feed reachability scoring. Dependencies: SIGNALS-24-001. | Signals Guild (src/Signals/StellaOps.Signals) -SIGNALS-24-003 | BLOCKED (2025-10-27) | Implement runtime facts ingestion endpoint and normalizer (process, sockets, container metadata) populating `context_facts` with AOC provenance.
2025-10-27: Depends on `SIGNALS-24-001` for base API host and authentication plumbing. | Signals Guild, Runtime Guild (src/Signals/StellaOps.Signals) +> 2025-11-09: Added `/signals/callgraphs/{id}` retrieval, sealed-mode gating, and CAS-backed artifact metadata responses; remaining work is CAS bucket promotion + signed graph manifests. +SIGNALS-24-003 | DOING (2025-11-09) | Implement runtime facts ingestion endpoint and normalizer (process, sockets, container metadata) populating `context_facts` with AOC provenance.
2025-11-09: Initial JSON ingestion service + persistence landed; NDJSON/gzip + context enrichment remain TODO. | Signals Guild, Runtime Guild (src/Signals/StellaOps.Signals) > 2025-11-07: Waiting on SIGNALS-24-001 / SIGNALS-24-002 DOING work to land before flipping this to DOING. > 2025-11-07: Upstream SIGNALS-24-001 / SIGNALS-24-002 now DOING; this flips to DOING once host + callgraph ingestion merge. > 2025-11-08: Targeting 2025-11-09 merge for SIGNALS-24-001/002; schema + AOC contract drafted so SIGNALS-24-003 can move to DOING immediately after those PRs land (dependencies confirmed, none missing). +> 2025-11-09: Added runtime facts ingestion service + endpoint, aggregated runtime hit storage, and unit tests; next steps are NDJSON/gzip ingestion and provenance metadata wiring. SIGNALS-24-004 | BLOCKED (2025-10-27) | Deliver reachability scoring engine producing states/scores and writing to `reachability_facts`; expose configuration for weights. Dependencies: SIGNALS-24-003.
2025-10-27: Upstream ingestion pipelines (`SIGNALS-24-002/003`) blocked; scoring engine cannot proceed. | Signals Guild, Data Science (src/Signals/StellaOps.Signals) -SIGNALS-24-005 | BLOCKED (2025-10-27) | Implement Redis caches (`reachability_cache:*`), invalidation on new facts, and publish `signals.fact.updated` events. Dependencies: SIGNALS-24-004.
2025-10-27: Awaiting scoring engine and ingestion layers before wiring cache/events. | Signals Guild, Platform Events Guild (src/Signals/StellaOps.Signals) \ No newline at end of file +SIGNALS-24-005 | BLOCKED (2025-10-27) | Implement Redis caches (`reachability_cache:*`), invalidation on new facts, and publish `signals.fact.updated` events. Dependencies: SIGNALS-24-004.
2025-10-27: Awaiting scoring engine and ingestion layers before wiring cache/events. | Signals Guild, Platform Events Guild (src/Signals/StellaOps.Signals) diff --git a/docs/modules/cli/guides/cli-reference.md b/docs/modules/cli/guides/cli-reference.md index 578e769ef..9c8c0bfdb 100644 --- a/docs/modules/cli/guides/cli-reference.md +++ b/docs/modules/cli/guides/cli-reference.md @@ -189,6 +189,137 @@ Replays the AOC guard against stored raw documents. By default it checks all adv } ``` +--- + +## 4 · `stella node lock-validate` + +### 4.1 Synopsis + +```bash +stella node lock-validate \ + [--path ] \ + [--format table|json] \ + [--verbose] +``` + +### 4.2 Description + +Runs the Node analyzer locally against a working directory to compare lockfiles (`package-lock.json`, `pnpm-lock.yaml`, `yarn.lock`) with what is actually present in `node_modules`. The command is read-only and never schedules a scan; it reuses the same deterministic collector that powers Scanner so results match backend evidence. Output highlights two conditions that policy cares about: + +- **Declared Only** – packages present in lockfiles but missing from the filesystem or final image. +- **Missing Lock** – packages discovered at runtime without corresponding lock metadata (no registry provenance, integrity hash, or repository information). + +This helps catch drift before images are built, keeps lockfiles trustworthy, and feeds policy predicates such as `node.lock.declaredMissing`. + +### 4.3 Options + +| Option | Description | +|--------|-------------| +| `--path`, `-p` | Directory containing `package.json` and lockfiles. Defaults to the current working directory. | +| `--format table|json` | `table` (default) renders a Spectre table with status badges; `json` prints the underlying report for CI automation. | +| `--verbose` | Enables detailed logging (shared root option). 
| + +### 4.4 Output & exit codes + +- `table` mode prints a summary row and two sections: `Declared Only` (red) and `Missing Lock` (yellow). Columns show package, version, lock source/locator, and filesystem path so engineers can reconcile quickly. +- `json` mode emits `{ declaredOnly: [], missingLockMetadata: [], totalDeclared, totalInstalled }`, mirroring the analyzer telemetry. + +Exit codes: + +| Code | Meaning | +|------|---------| +| `0` | No inconsistencies detected. | +| `1` | Declared-only or missing-lock packages were found. | +| `71` | The requested directory could not be read (missing path, permissions, etc.). | + +The CLI also records `stellaops.cli.node.lock_validate.count{outcome}` so operators can monitor adoption in telemetry. + +### 4.5 Offline notes + +- Works entirely offline; point `--path` at a workspace checked out from an Offline Kit or build cache. +- Honors the same `Surface.Validation` limits configured for Scanner once those knobs (`scanner.lockfiles.node.*`) are deployed cluster-wide. +- Combine with `stella scan` by running lock validation in CI before images are built to fail fast on inconsistent manifests. + +--- + +## 5 · `stella python lock-validate` + +### 5.1 Synopsis + +```bash +stella python lock-validate \ + [--path ] \ + [--format table|json] \ + [--verbose] +``` + +### 5.2 Description + +Validates Python lockfiles (currently `requirements*.txt`, `Pipfile.lock`, and `poetry.lock`) against what exists in `site-packages`. It uses the same analyzer Scanner runs so declared-only packages, missing locks, and editable installs are detected deterministically and without internet access. This catches drift between lock manifests and baked images before scanners or policy gates fail later. + +### 5.3 Options + +| Option | Description | +|--------|-------------| +| `--path`, `-p` | Directory containing `lib/python*/site-packages` and lockfiles. Defaults to `$PWD`. 
| +| `--format table|json` | `table` (default) prints a human summary; `json` emits the raw report for CI. | +| `--verbose` | Enables detailed logging. | + +### 5.4 Output & exit codes + +Output shape mirrors the Node command: declared-only packages are shown with lock provenance, and runtime packages missing lock metadata are highlighted separately. JSON mode returns the same object schema `{ declaredOnly, missingLockMetadata, totalDeclared, totalInstalled }`. + +Exit codes follow the same contract (`0` success, `1` violations, `71` for unreadable path). Telemetry is published via `stellaops.cli.python.lock_validate.count{outcome}`. + +### 5.5 Offline notes + +- Works entirely offline—lockfiles and `site-packages` must already be present (from a venv snapshot, container rootfs, or Offline Kit). +- Honors upcoming `scanner.lockfiles.python.*` guardrails once Surface.Validation is wired in so CLI + Scanner enforce the same registry/size limits. +- Recommended CI flow: run `stella python lock-validate` before building containers and fail fast when declared-only packages remain. + +## 6 · `stella java lock-validate` + +### 6.1 Synopsis + +```bash +stella java lock-validate \ + [--path ] \ + [--format table|json] \ + [--verbose] +``` + +### 6.2 Description + +Executes the Java language analyzer locally so Gradle `gradle.lockfile`, `gradle/dependency-locks/**/*.lockfile`, and `pom.xml` declarations can be compared with the jars that actually ship in a workspace. The command reuses the new `JavaLockFileCollector` plus the `JavaLanguageAnalyzer` merge logic, so it emits the same `DeclaredOnly` and `Missing Lock` evidence that Scanner and Policy consume. Engineers can see which coordinates exist only in lockfiles (no jar on disk) and which installed jars lack lock metadata (no repository/provenance) before a scan ever runs. 
+ +### 6.3 Options + +| Option | Description | +|--------|-------------| +| `--path`, `-p` | Directory containing jars (e.g., `build/libs`) and lockfiles. Defaults to the current working directory. | +| `--format table|json` | `table` (default) renders the Spectre table; `json` outputs the raw `LockValidationReport`. | +| `--verbose` | Enables detailed logging and surfaces the analyzer paths being inspected. | + +### 6.4 Output & exit codes + +Output mirrors the Node/Python verbs: `Declared Only` rows include the lock source/locator (e.g., `gradle.lockfile`, `gradle/dependency-locks/app.lockfile`) plus configuration/repository hints, while `Missing Lock` rows highlight jars that Scanner would tag with `lockMissing=true`. JSON responses return `{ declaredOnly, missingLockMetadata, totalDeclared, totalInstalled }`. + +Exit codes align with the other lock validators: + +| Code | Meaning | +|------|---------| +| `0` | No inconsistencies detected. | +| `1` | Declared-only or missing-lock jars detected. | +| `71` | Directory could not be read. | + +Telemetry is recorded via `stellaops.cli.java.lock_validate.count{outcome}` so adoption can be monitored alongside the Node/Python verbs. + +### 6.5 Offline notes + +- Works with any workspace (Gradle, Maven, or extracted container layers) – no network access or build tool metadata is required at runtime. +- Honors forthcoming `scanner.lockfiles.java.*` Surface.Validation limits once they are deployed so CLI + Scanner stay in lockstep. +- Recommended CI flow: run `stella java lock-validate` before packaging containers to surface missing locks/declared-only coordinates early. 
+ ### 3.5 Exit codes | Exit code | Meaning | diff --git a/docs/modules/scanner/README.md b/docs/modules/scanner/README.md index c08fb5ae9..81acdc0df 100644 --- a/docs/modules/scanner/README.md +++ b/docs/modules/scanner/README.md @@ -2,7 +2,10 @@ Scanner analyses container images layer-by-layer, producing deterministic SBOM fragments, diffs, and signed reports. -## Latest updates (2025-11-06) +## Latest updates (2025-11-09) +- Node analyzer now ingests npm/yarn/pnpm lockfiles, emitting `DeclaredOnly` components with lock provenance. The CLI companion command `stella node lock-validate` runs the collector offline, surfaces declared-only or missing-lock packages, and emits telemetry via `stellaops.cli.node.lock_validate.count`. +- Python analyzer picks up `requirements*.txt`, `Pipfile.lock`, and `poetry.lock`, tagging installed distributions with lock provenance and generating declared-only components for policy. Use `stella python lock-validate` to run the same checks locally before images are built. +- Java analyzer now parses `gradle.lockfile`, `gradle/dependency-locks/**/*.lockfile`, and `pom.xml` dependencies via the new `JavaLockFileCollector`, merging lock metadata onto jar evidence and emitting declared-only components when jars are absent. The new CLI verb `stella java lock-validate` reuses that collector offline (table/JSON output) and records `stellaops.cli.java.lock_validate.count{outcome}` for observability. - Worker/WebService now resolve cache roots and feature flags via `StellaOps.Scanner.Surface.Env`; misconfiguration warnings are documented in `docs/modules/scanner/design/surface-env.md` and surfaced through startup validation. - Platform events rollout (2025-10-19) continues to publish scanner.report.ready@1 and scanner.scan.completed@1 envelopes with embedded DSSE payloads (see docs/updates/2025-10-19-scanner-policy.md and docs/updates/2025-10-19-platform-events.md). 
Service and consumer tests should round-trip the canonical samples under docs/events/samples/. @@ -33,6 +36,7 @@ Scanner analyses container images layer-by-layer, producing deterministic SBOM f - ./operations/rustfs-migration.md - ./operations/entrypoint.md - ./operations/secret-leak-detection.md +- ./operations/dsse-rekor-operator-guide.md - ./design/macos-analyzer.md - ./design/windows-analyzer.md - ../benchmarks/scanner/deep-dives/macos.md diff --git a/docs/modules/scanner/operations/dsse-rekor-operator-guide.md b/docs/modules/scanner/operations/dsse-rekor-operator-guide.md new file mode 100644 index 000000000..e6b14f409 --- /dev/null +++ b/docs/modules/scanner/operations/dsse-rekor-operator-guide.md @@ -0,0 +1,171 @@ +# DSSE & Rekor Operator Enablement Guide + +> **Audience.** Scanner / Export Center operators, platform SREs, and field engineers bringing DSSE attestations + Rekor proofs into production (online or air-gapped). +> +> **Sources.** Aligns with Sprint 138 (SCANNER-ENG-0015) gap analysis (§DSSE/Rekor operator enablement) and Scanner architecture specs. + +--- + +## 1. Why this matters + +- **Evidence on demand.** Every SBOM, diff, and report can be bound to a DSSE envelope issued by `StellaOps.Signer`, logged to Rekor via `StellaOps.Attestor`, and bundled for export/offline use. +- **Policy leverage.** Policy Engine predicates gate releases until attestations exist *and* their Rekor proofs verify, reducing “unsigned” drift. +- **Regulatory readiness.** Operators need a deterministic playbook to satisfy PCI, FedRAMP, EU CRA, and national sovereignty requirements without phoning home. + +--- + +## 2. Components & responsibilities + +| Component | Role | Key references | +|-----------|------|----------------| +| `StellaOps.Signer` | Issues DSSE envelopes using PoE-scoped keys (Fulcio or BYO KMS/HSM). | `ops/devops/signing/` | +| `StellaOps.Attestor` | Submits DSSE payloads to Rekor v2, caches `{uuid,index,proof}` and mirrors proofs offline. 
| `docs/modules/attestor/architecture.md` | +| Rekor v2 (managed or self-hosted) | Transparency log providing UUIDs + inclusion proofs. | `docs/ops/rekor/README.md` (if self-hosted) | +| `StellaOps.Scanner` (WebService/Worker) | Requests attestations per scan, stores Rekor metadata next to SBOM artefacts. | `docs/modules/scanner/architecture.md` | +| Export Center | Packages DSSE payloads + proofs into Offline Kit bundles and mirrors license notices. | `docs/modules/export-center/architecture.md` | +| Policy Engine + CLI | Enforce “attested only” promotion, expose CLI verification verbs. | `docs/modules/policy/architecture.md`, `docs/09_API_CLI_REFERENCE.md` | + +--- + +## 3. Prerequisites checklist + +1. **Keys & trust roots** + - Fulcio / KMS credentials available to `StellaOps.Signer`. + - Rekor public key pinned (`rekor.pub`) for verification jobs and CLI tooling. +2. **Service wiring** + - `scanner.attestation.signerEndpoint` → internal Signer base URL. + - `scanner.attestation.attestorEndpoint` → Attestor base URL. + - `attestor.rekor.api` & `attestor.rekor.pubkey` set for the target log. +3. **Storage** + - Mongo collections `attestations` & `rekorProofs` sized for retention (7–30 days recommended). + - Object store tier with at-rest encryption for DSSE payloads. +4. **Observability** + - Metrics: `attestor_rekor_success_total`, `attestor_rekor_retry_total`, `rekor_inclusion_latency`. + - Logs shipped to your SIEM for compliance (Signer request/response IDs, Rekor UUIDs). +5. **Offline readiness** + - Export Center profile with `attestations.bundle=true`. + - Rekor log snapshots mirrored (ORAS bundle or rsync of `/var/log/rekor`) for disconnected verification. + +--- + +## 4. 
Enablement workflow + +### 4.1 Configure Signer & Attestor + +```yaml +signer: + schemaVersion: 2 + keyProvider: kms-fleet + attestorEndpoint: https://attestor.internal + defaultPredicate: https://stella-ops.org/attestations/sbom/1 + +attestor: + schemaVersion: 1 + rekor: + api: https://rekor.internal + publicKeyPath: /etc/rekor/rekor.pub + offlineMirrorPath: /var/lib/rekor/snapshots + retry: + maxAttempts: 5 + backoffSeconds: 15 +``` + +### 4.2 Turn on Scanner enforcement + +```yaml +scanner: + schemaVersion: 2 + attestation: + requireDsse: true # fail scans when Signer/Attestor errors occur + signerEndpoint: https://signer.internal + attestorEndpoint: https://attestor.internal + uploadArtifacts: true # store DSSE + proof next to SBOM artefacts +``` + +Set `requireDsse=false` during observation, then flip to `true` once Rekor health SLOs are green. + +### 4.3 Policy templates + +Add Policy Engine predicates (Rego snippet): + +```rego +package stella.policies.attestation + +deny[msg] { + not input.attestations.rekor_verified + msg := sprintf("missing Rekor proof for %s", [input.scan_id]) +} + +warn[msg] { + input.attestations.rekor_age_hours > 24 + msg := sprintf("Rekor proof older than 24h for %s", [input.scan_id]) +} +``` + +Tie Scheduler or CI promotion gates to the `deny` result. + +### 4.4 CLI and verification + +- `stellaops-cli runtime policy test --image --json` already surfaces `attestation.uuid` and `rekorVerified` fields. +- To validate bundles offline: `stellaops-cli attest verify --bundle path/to/export.tar --rekor-key rekor.pub`. + +Document these flows for AppSec teams so they can self-serve proofs during audits. + +### 4.5 Export Center profile + +```yaml +exportProfiles: + secure-default: + includeSboms: true + includeAttestations: true + includeRekorProofs: true + policy: + requireAttestations: true + allowUnsigned: false +``` + +--- + +## 5. 
Rollout levers & phases + +| Phase | Toggle | Goal | +|-------|--------|------| +| **Observe** | `scanner.attestation.requireDsse=false`, policies in `warn` mode. | Validate plumbing without blocking builds; capture metrics. | +| **Enforce** | Flip `requireDsse=true`, policy `deny` for missing proofs, Rekor SLO alerts live. | Block unsigned artefacts; auto-retry attestor failures. | +| **Escalate** | Export Center profile `includeAttestations=true`, CLI docs distributed, Notify alerts wired. | Broad communication + audit evidence ready. | + +Roll forward per environment; keep the previous phase’s toggles for hot rollback. + +--- + +## 6. Offline / air-gap guidance + +1. **Mirror Rekor**: take log snapshots daily (`rekor-cli log export`) and add to the Offline Kit. +2. **Bundle proofs**: Export Center must include `*.rekor.json` and `rekor-chain.pem` alongside DSSE envelopes. +3. **CLI verification offline**: + ```bash + stellaops-cli attest verify --bundle offline-kit.tar \ + --rekor-root hashsum.txt --rekor-tree treehead.json --rekor-key rekor.pub + ``` +4. **Fallback**: When Rekor connectivity is unavailable, Attestor queues submissions locally and emits `attestationPending=true`; policy can allow waivers for a limited TTL via `policy.attestations.deferHours`. + +--- + +## 7. Troubleshooting + +| Symptom | Checks | Resolution | +|---------|--------|------------| +| `attestationPending` flag stays true | `attestor_rekor_retry_total`, Attestor logs, Rekor `/healthz`. | Verify Rekor endpoint & certs; rotate API tokens; replay queued DSSE payloads via `attestor replay`. | +| Policy denies despite DSSE | Confirm Rekor proof bundle stored under `/artifacts//rekor/`. | Re-run `stellaops-cli attest verify`, ensure Policy Engine has the new schema (`attestations.rekor_verified`). | +| CLI verification fails offline | Ensure Rekor snapshot + `rekor.pub` shipped together; check timestamp gap. 
| Regenerate snapshot, or import Rekor entries into the isolated log before verifying. | + +--- + +## References + +- Gap analysis: `docs/benchmarks/scanner/scanning-gaps-stella-misses-from-competitors.md#dsse-rekor-operator-enablement-trivy-grype-snyk` +- Scanner architecture (§Signer → Attestor → Rekor): `docs/modules/scanner/architecture.md` +- Export Center profiles: `docs/modules/export-center/architecture.md` +- Policy Engine predicates: `docs/modules/policy/architecture.md` +- CLI reference: `docs/09_API_CLI_REFERENCE.md` + diff --git a/etc/signals.yaml.sample b/etc/signals.yaml.sample index 0fff64b5b..093ed8c34 100644 --- a/etc/signals.yaml.sample +++ b/etc/signals.yaml.sample @@ -20,10 +20,16 @@ Signals: BypassNetworks: - "127.0.0.1/32" - "::1/128" - Mongo: - ConnectionString: "mongodb://localhost:27017/signals" - Database: "signals" + Mongo: + ConnectionString: "mongodb://localhost:27017/signals" + Database: "signals" CallgraphsCollection: "callgraphs" ReachabilityFactsCollection: "reachability_facts" - Storage: - RootPath: "../data/signals-artifacts" + Storage: + RootPath: "../data/signals-artifacts" + AirGap: + SealedMode: + EnforcementEnabled: false + EvidencePath: "../ops/devops/sealed-mode-ci/artifacts/sealed-mode-ci/latest/signals-sealed-ci.json" + MaxEvidenceAge: "06:00:00" + CacheLifetime: "00:01:00" diff --git a/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Deno/manifest.json b/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Deno/manifest.json new file mode 100644 index 000000000..c5061ebdb --- /dev/null +++ b/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Deno/manifest.json @@ -0,0 +1,21 @@ +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzer.lang.deno", + "displayName": "StellaOps Deno Analyzer", + "version": "0.1.0", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.Lang.Deno.dll", + "typeName": 
"StellaOps.Scanner.Analyzers.Lang.Deno.DenoAnalyzerPlugin" + }, + "capabilities": [ + "language-analyzer", + "deno" + ], + "metadata": { + "org.stellaops.analyzer.language": "deno", + "org.stellaops.analyzer.kind": "language", + "org.stellaops.restart.required": "true" + } +} diff --git a/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs b/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs index 4d19d1654..82f385a56 100644 --- a/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs +++ b/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs @@ -28,14 +28,15 @@ internal static class CommandFactory { TreatUnmatchedTokensAsErrors = true }; - root.Add(verboseOption); - - root.Add(BuildScannerCommand(services, verboseOption, cancellationToken)); - root.Add(BuildScanCommand(services, options, verboseOption, cancellationToken)); - root.Add(BuildDatabaseCommand(services, verboseOption, cancellationToken)); - root.Add(BuildSourcesCommand(services, verboseOption, cancellationToken)); - root.Add(BuildAocCommand(services, verboseOption, cancellationToken)); - root.Add(BuildAuthCommand(services, options, verboseOption, cancellationToken)); + root.Add(verboseOption); + + root.Add(BuildScannerCommand(services, verboseOption, cancellationToken)); + root.Add(BuildScanCommand(services, options, verboseOption, cancellationToken)); + root.Add(BuildRubyCommand(services, verboseOption, cancellationToken)); + root.Add(BuildDatabaseCommand(services, verboseOption, cancellationToken)); + root.Add(BuildSourcesCommand(services, verboseOption, cancellationToken)); + root.Add(BuildAocCommand(services, verboseOption, cancellationToken)); + root.Add(BuildAuthCommand(services, options, verboseOption, cancellationToken)); root.Add(BuildPolicyCommand(services, options, verboseOption, cancellationToken)); root.Add(BuildTaskRunnerCommand(services, verboseOption, cancellationToken)); root.Add(BuildFindingsCommand(services, verboseOption, cancellationToken)); @@ -177,14 +178,82 @@ internal static class 
CommandFactory scan.Add(entryTrace); scan.Add(run); - scan.Add(upload); - return scan; - } - + scan.Add(upload); + return scan; + } + + private static Command BuildRubyCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) + { + var ruby = new Command("ruby", "Work with Ruby analyzer outputs."); + + var inspect = new Command("inspect", "Inspect a local Ruby workspace."); + var inspectRootOption = new Option("--root") + { + Description = "Path to the Ruby workspace (defaults to current directory)." + }; + var inspectFormatOption = new Option("--format") + { + Description = "Output format (table or json)." + }; + + inspect.Add(inspectRootOption); + inspect.Add(inspectFormatOption); + inspect.SetAction((parseResult, _) => + { + var root = parseResult.GetValue(inspectRootOption); + var format = parseResult.GetValue(inspectFormatOption) ?? "table"; + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleRubyInspectAsync( + services, + root, + format, + verbose, + cancellationToken); + }); + + var resolve = new Command("resolve", "Fetch Ruby packages for a completed scan."); + var resolveImageOption = new Option("--image") + { + Description = "Image reference (digest or tag) used by the scan." + }; + var resolveScanIdOption = new Option("--scan-id") + { + Description = "Explicit scan identifier." + }; + var resolveFormatOption = new Option("--format") + { + Description = "Output format (table or json)." + }; + + resolve.Add(resolveImageOption); + resolve.Add(resolveScanIdOption); + resolve.Add(resolveFormatOption); + resolve.SetAction((parseResult, _) => + { + var image = parseResult.GetValue(resolveImageOption); + var scanId = parseResult.GetValue(resolveScanIdOption); + var format = parseResult.GetValue(resolveFormatOption) ?? 
"table"; + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleRubyResolveAsync( + services, + image, + scanId, + format, + verbose, + cancellationToken); + }); + + ruby.Add(inspect); + ruby.Add(resolve); + return ruby; + } + private static Command BuildKmsCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) { - var kms = new Command("kms", "Manage file-backed signing keys."); - + var kms = new Command("kms", "Manage file-backed signing keys."); + var export = new Command("export", "Export key material to a portable bundle."); var exportRootOption = new Option("--root") { diff --git a/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs b/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs index abb1d2243..4cb6a798e 100644 --- a/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs +++ b/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs @@ -1,254 +1,260 @@ -using System; -using System.Buffers; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.Diagnostics; -using System.Globalization; -using System.IO; -using System.IO.Compression; -using System.Linq; -using System.Net; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Security.Cryptography; -using System.Text.Json; -using System.Text.Json.Nodes; -using System.Text.Json.Serialization; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.DependencyInjection; +using System; +using System.Buffers; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Diagnostics; +using System.Globalization; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Security.Cryptography; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; +using System.Text; +using System.Threading; +using 
System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using Spectre.Console; using Spectre.Console.Rendering; -using StellaOps.Auth.Client; -using StellaOps.Cli.Configuration; -using StellaOps.Cli.Prompts; +using StellaOps.Auth.Client; +using StellaOps.Cli.Configuration; +using StellaOps.Cli.Prompts; using StellaOps.Cli.Services; using StellaOps.Cli.Services.Models; using StellaOps.Cli.Services.Models.AdvisoryAi; +using StellaOps.Cli.Services.Models.Ruby; using StellaOps.Cli.Telemetry; using StellaOps.Cryptography; using StellaOps.Cryptography.DependencyInjection; -using StellaOps.Cryptography.Kms; - -namespace StellaOps.Cli.Commands; - -internal static class CommandHandlers -{ - private const string KmsPassphraseEnvironmentVariable = "STELLAOPS_KMS_PASSPHRASE"; - private static readonly JsonSerializerOptions KmsJsonOptions = new(JsonSerializerDefaults.Web) - { - WriteIndented = true - }; - - public static async Task HandleScannerDownloadAsync( - IServiceProvider services, - string channel, - string? output, - bool overwrite, - bool install, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-download"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.scanner.download", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "scanner download"); - activity?.SetTag("stellaops.cli.channel", channel); - using var duration = CliMetrics.MeasureCommandDuration("scanner download"); - - try - { - var result = await client.DownloadScannerAsync(channel, output ?? 
string.Empty, overwrite, verbose, cancellationToken).ConfigureAwait(false); - - if (result.FromCache) - { - logger.LogInformation("Using cached scanner at {Path}.", result.Path); - } - else - { - logger.LogInformation("Scanner downloaded to {Path} ({Size} bytes).", result.Path, result.SizeBytes); - } - - CliMetrics.RecordScannerDownload(channel, result.FromCache); - - if (install) - { - var installer = scope.ServiceProvider.GetRequiredService(); - await installer.InstallAsync(result.Path, verbose, cancellationToken).ConfigureAwait(false); - CliMetrics.RecordScannerInstall(channel); - } - - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to download scanner bundle."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleTaskRunnerSimulateAsync( - IServiceProvider services, - string manifestPath, - string? inputsPath, - string? format, - string? outputPath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("task-runner-simulate"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.taskrunner.simulate", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "task-runner simulate"); - using var duration = CliMetrics.MeasureCommandDuration("task-runner simulate"); - - try - { - if (string.IsNullOrWhiteSpace(manifestPath)) - { - throw new ArgumentException("Manifest path must be provided.", nameof(manifestPath)); - } - - var manifestFullPath = Path.GetFullPath(manifestPath); - if (!File.Exists(manifestFullPath)) - { - throw new FileNotFoundException("Manifest file not found.", manifestFullPath); - } - - activity?.SetTag("stellaops.cli.manifest_path", manifestFullPath); - var manifest = await File.ReadAllTextAsync(manifestFullPath, cancellationToken).ConfigureAwait(false); - if (string.IsNullOrWhiteSpace(manifest)) - { - throw new InvalidOperationException("Manifest file was empty."); - } - - JsonObject? inputsObject = null; - if (!string.IsNullOrWhiteSpace(inputsPath)) - { - var inputsFullPath = Path.GetFullPath(inputsPath!); - if (!File.Exists(inputsFullPath)) - { - throw new FileNotFoundException("Inputs file not found.", inputsFullPath); - } - - await using var stream = File.OpenRead(inputsFullPath); - var parsed = await JsonNode.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); - if (parsed is JsonObject obj) - { - inputsObject = obj; - } - else - { - throw new InvalidOperationException("Simulation inputs must be a JSON object."); - } - - activity?.SetTag("stellaops.cli.inputs_path", inputsFullPath); - } - - var request = new TaskRunnerSimulationRequest(manifest, inputsObject); - var result = await client.SimulateTaskRunnerAsync(request, cancellationToken).ConfigureAwait(false); - - activity?.SetTag("stellaops.cli.plan_hash", result.PlanHash); - activity?.SetTag("stellaops.cli.pending_approvals", result.HasPendingApprovals); - activity?.SetTag("stellaops.cli.step_count", result.Steps.Count); - - 
var outputFormat = DetermineTaskRunnerSimulationFormat(format, outputPath); - var payload = BuildTaskRunnerSimulationPayload(result); - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - await WriteSimulationOutputAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Simulation payload written to {Path}.", Path.GetFullPath(outputPath!)); - } - - if (outputFormat == TaskRunnerSimulationOutputFormat.Json) - { - Console.WriteLine(JsonSerializer.Serialize(payload, SimulationJsonOptions)); - } - else - { - RenderTaskRunnerSimulationResult(result); - } - - var outcome = result.HasPendingApprovals ? "pending-approvals" : "ok"; - CliMetrics.RecordTaskRunnerSimulation(outcome); - Environment.ExitCode = 0; - } - catch (FileNotFoundException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordTaskRunnerSimulation("error"); - Environment.ExitCode = 66; - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordTaskRunnerSimulation("error"); - Environment.ExitCode = 64; - } - catch (InvalidOperationException ex) - { - logger.LogError(ex, "Task Runner simulation failed."); - CliMetrics.RecordTaskRunnerSimulation("error"); - Environment.ExitCode = 1; - } - catch (Exception ex) - { - logger.LogError(ex, "Task Runner simulation failed."); - CliMetrics.RecordTaskRunnerSimulation("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - private static void RenderEntryTrace(EntryTraceResponseModel result, bool includeNdjson) - { +using StellaOps.Cryptography.Kms; +using StellaOps.Scanner.Analyzers.Lang; +using StellaOps.Scanner.Analyzers.Lang.Java; +using StellaOps.Scanner.Analyzers.Lang.Node; +using StellaOps.Scanner.Analyzers.Lang.Python; +using StellaOps.Scanner.Analyzers.Lang.Ruby; + +namespace StellaOps.Cli.Commands; + +internal static class CommandHandlers +{ + private const string KmsPassphraseEnvironmentVariable = "STELLAOPS_KMS_PASSPHRASE"; + 
private static readonly JsonSerializerOptions KmsJsonOptions = new(JsonSerializerDefaults.Web) + { + WriteIndented = true + }; + + public static async Task HandleScannerDownloadAsync( + IServiceProvider services, + string channel, + string? output, + bool overwrite, + bool install, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-download"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.scanner.download", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "scanner download"); + activity?.SetTag("stellaops.cli.channel", channel); + using var duration = CliMetrics.MeasureCommandDuration("scanner download"); + + try + { + var result = await client.DownloadScannerAsync(channel, output ?? 
string.Empty, overwrite, verbose, cancellationToken).ConfigureAwait(false); + + if (result.FromCache) + { + logger.LogInformation("Using cached scanner at {Path}.", result.Path); + } + else + { + logger.LogInformation("Scanner downloaded to {Path} ({Size} bytes).", result.Path, result.SizeBytes); + } + + CliMetrics.RecordScannerDownload(channel, result.FromCache); + + if (install) + { + var installer = scope.ServiceProvider.GetRequiredService(); + await installer.InstallAsync(result.Path, verbose, cancellationToken).ConfigureAwait(false); + CliMetrics.RecordScannerInstall(channel); + } + + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to download scanner bundle."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleTaskRunnerSimulateAsync( + IServiceProvider services, + string manifestPath, + string? inputsPath, + string? format, + string? outputPath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("task-runner-simulate"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.taskrunner.simulate", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "task-runner simulate"); + using var duration = CliMetrics.MeasureCommandDuration("task-runner simulate"); + + try + { + if (string.IsNullOrWhiteSpace(manifestPath)) + { + throw new ArgumentException("Manifest path must be provided.", nameof(manifestPath)); + } + + var manifestFullPath = Path.GetFullPath(manifestPath); + if (!File.Exists(manifestFullPath)) + { + throw new FileNotFoundException("Manifest file not found.", manifestFullPath); + } + + activity?.SetTag("stellaops.cli.manifest_path", manifestFullPath); + var manifest = await File.ReadAllTextAsync(manifestFullPath, cancellationToken).ConfigureAwait(false); + if (string.IsNullOrWhiteSpace(manifest)) + { + throw new InvalidOperationException("Manifest file was empty."); + } + + JsonObject? inputsObject = null; + if (!string.IsNullOrWhiteSpace(inputsPath)) + { + var inputsFullPath = Path.GetFullPath(inputsPath!); + if (!File.Exists(inputsFullPath)) + { + throw new FileNotFoundException("Inputs file not found.", inputsFullPath); + } + + await using var stream = File.OpenRead(inputsFullPath); + var parsed = await JsonNode.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); + if (parsed is JsonObject obj) + { + inputsObject = obj; + } + else + { + throw new InvalidOperationException("Simulation inputs must be a JSON object."); + } + + activity?.SetTag("stellaops.cli.inputs_path", inputsFullPath); + } + + var request = new TaskRunnerSimulationRequest(manifest, inputsObject); + var result = await client.SimulateTaskRunnerAsync(request, cancellationToken).ConfigureAwait(false); + + activity?.SetTag("stellaops.cli.plan_hash", result.PlanHash); + activity?.SetTag("stellaops.cli.pending_approvals", result.HasPendingApprovals); + activity?.SetTag("stellaops.cli.step_count", result.Steps.Count); + + 
var outputFormat = DetermineTaskRunnerSimulationFormat(format, outputPath); + var payload = BuildTaskRunnerSimulationPayload(result); + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + await WriteSimulationOutputAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Simulation payload written to {Path}.", Path.GetFullPath(outputPath!)); + } + + if (outputFormat == TaskRunnerSimulationOutputFormat.Json) + { + Console.WriteLine(JsonSerializer.Serialize(payload, SimulationJsonOptions)); + } + else + { + RenderTaskRunnerSimulationResult(result); + } + + var outcome = result.HasPendingApprovals ? "pending-approvals" : "ok"; + CliMetrics.RecordTaskRunnerSimulation(outcome); + Environment.ExitCode = 0; + } + catch (FileNotFoundException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordTaskRunnerSimulation("error"); + Environment.ExitCode = 66; + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordTaskRunnerSimulation("error"); + Environment.ExitCode = 64; + } + catch (InvalidOperationException ex) + { + logger.LogError(ex, "Task Runner simulation failed."); + CliMetrics.RecordTaskRunnerSimulation("error"); + Environment.ExitCode = 1; + } + catch (Exception ex) + { + logger.LogError(ex, "Task Runner simulation failed."); + CliMetrics.RecordTaskRunnerSimulation("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static void RenderEntryTrace(EntryTraceResponseModel result, bool includeNdjson) + { var console = AnsiConsole.Console; console.MarkupLine($"[bold]Scan[/]: {result.ScanId}"); console.MarkupLine($"Image: {result.ImageDigest}"); console.MarkupLine($"Generated: {result.GeneratedAt:O}"); console.MarkupLine($"Outcome: {result.Graph.Outcome}"); - - var planTable = new Table() - .AddColumn("Terminal") - .AddColumn("Runtime") - .AddColumn("Type") - .AddColumn("Confidence") - .AddColumn("User") - .AddColumn("Workdir"); - - 
foreach (var plan in result.Graph.Plans.OrderByDescending(p => p.Confidence)) - { - planTable.AddRow( - plan.TerminalPath, - plan.Runtime ?? "-", - plan.Type.ToString(), - plan.Confidence.ToString("F1", CultureInfo.InvariantCulture), - plan.User, - plan.WorkingDirectory); - } - - if (planTable.Rows.Count > 0) - { + + var planTable = new Table() + .AddColumn("Terminal") + .AddColumn("Runtime") + .AddColumn("Type") + .AddColumn("Confidence") + .AddColumn("User") + .AddColumn("Workdir"); + + foreach (var plan in result.Graph.Plans.OrderByDescending(p => p.Confidence)) + { + planTable.AddRow( + plan.TerminalPath, + plan.Runtime ?? "-", + plan.Type.ToString(), + plan.Confidence.ToString("F1", CultureInfo.InvariantCulture), + plan.User, + plan.WorkingDirectory); + } + + if (planTable.Rows.Count > 0) + { console.Write(planTable); } else @@ -257,20 +263,20 @@ internal static class CommandHandlers } if (result.Graph.Diagnostics.Length > 0) - { - var diagTable = new Table() - .AddColumn("Severity") - .AddColumn("Reason") - .AddColumn("Message"); - - foreach (var diagnostic in result.Graph.Diagnostics) - { - diagTable.AddRow( - diagnostic.Severity.ToString(), - diagnostic.Reason.ToString(), - diagnostic.Message); - } - + { + var diagTable = new Table() + .AddColumn("Severity") + .AddColumn("Reason") + .AddColumn("Message"); + + foreach (var diagnostic in result.Graph.Diagnostics) + { + diagTable.AddRow( + diagnostic.Severity.ToString(), + diagnostic.Reason.ToString(), + diagnostic.Message); + } + console.Write(diagTable); } @@ -282,129 +288,129 @@ internal static class CommandHandlers console.WriteLine(line); } } - } - - public static async Task HandleScannerRunAsync( - IServiceProvider services, - string runner, - string entry, - string targetDirectory, - IReadOnlyList arguments, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var executor = scope.ServiceProvider.GetRequiredService(); - var logger = 
scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-run"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.scan.run", ActivityKind.Internal); - activity?.SetTag("stellaops.cli.command", "scan run"); - activity?.SetTag("stellaops.cli.runner", runner); - activity?.SetTag("stellaops.cli.entry", entry); - activity?.SetTag("stellaops.cli.target", targetDirectory); - using var duration = CliMetrics.MeasureCommandDuration("scan run"); - - try - { - var options = scope.ServiceProvider.GetRequiredService(); - var resultsDirectory = options.ResultsDirectory; - - var executionResult = await executor.RunAsync( - runner, - entry, - targetDirectory, - resultsDirectory, - arguments, - verbose, - cancellationToken).ConfigureAwait(false); - - Environment.ExitCode = executionResult.ExitCode; - CliMetrics.RecordScanRun(runner, executionResult.ExitCode); - - if (executionResult.ExitCode == 0) - { - var backend = scope.ServiceProvider.GetRequiredService(); - logger.LogInformation("Uploading scan artefact {Path}...", executionResult.ResultsPath); - await backend.UploadScanResultsAsync(executionResult.ResultsPath, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Scan artefact uploaded."); - activity?.SetTag("stellaops.cli.results", executionResult.ResultsPath); - } - else - { - logger.LogWarning("Skipping automatic upload because scan exited with code {Code}.", executionResult.ExitCode); - } - - logger.LogInformation("Run metadata written to {Path}.", executionResult.RunMetadataPath); - activity?.SetTag("stellaops.cli.run_metadata", executionResult.RunMetadataPath); - } - catch (Exception ex) - { - logger.LogError(ex, "Scanner execution failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public 
static async Task HandleScanUploadAsync( - IServiceProvider services, - string file, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-upload"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.scan.upload", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "scan upload"); - activity?.SetTag("stellaops.cli.file", file); - using var duration = CliMetrics.MeasureCommandDuration("scan upload"); - - try - { - var pathFull = Path.GetFullPath(file); - await client.UploadScanResultsAsync(pathFull, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Scan results uploaded successfully."); - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to upload scan results."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleScanEntryTraceAsync( - IServiceProvider services, - string scanId, - bool includeNdjson, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scan-entrytrace"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.scan.entrytrace", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "scan entrytrace"); - activity?.SetTag("stellaops.cli.scan_id", scanId); - using var duration = CliMetrics.MeasureCommandDuration("scan entrytrace"); - - try - { + } + + public static async Task HandleScannerRunAsync( + IServiceProvider services, + string runner, + string entry, + string targetDirectory, + IReadOnlyList arguments, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var executor = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-run"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.scan.run", ActivityKind.Internal); + activity?.SetTag("stellaops.cli.command", "scan run"); + activity?.SetTag("stellaops.cli.runner", runner); + activity?.SetTag("stellaops.cli.entry", entry); + activity?.SetTag("stellaops.cli.target", targetDirectory); + using var duration = CliMetrics.MeasureCommandDuration("scan run"); + + try + { + var options = scope.ServiceProvider.GetRequiredService(); + var resultsDirectory = options.ResultsDirectory; + + var executionResult = await executor.RunAsync( + runner, + entry, + targetDirectory, + resultsDirectory, + arguments, + verbose, + cancellationToken).ConfigureAwait(false); + + Environment.ExitCode = executionResult.ExitCode; + CliMetrics.RecordScanRun(runner, executionResult.ExitCode); + + if (executionResult.ExitCode == 0) + { + var backend = scope.ServiceProvider.GetRequiredService(); + logger.LogInformation("Uploading scan artefact {Path}...", executionResult.ResultsPath); + await 
backend.UploadScanResultsAsync(executionResult.ResultsPath, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Scan artefact uploaded."); + activity?.SetTag("stellaops.cli.results", executionResult.ResultsPath); + } + else + { + logger.LogWarning("Skipping automatic upload because scan exited with code {Code}.", executionResult.ExitCode); + } + + logger.LogInformation("Run metadata written to {Path}.", executionResult.RunMetadataPath); + activity?.SetTag("stellaops.cli.run_metadata", executionResult.RunMetadataPath); + } + catch (Exception ex) + { + logger.LogError(ex, "Scanner execution failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleScanUploadAsync( + IServiceProvider services, + string file, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-upload"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.scan.upload", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "scan upload"); + activity?.SetTag("stellaops.cli.file", file); + using var duration = CliMetrics.MeasureCommandDuration("scan upload"); + + try + { + var pathFull = Path.GetFullPath(file); + await client.UploadScanResultsAsync(pathFull, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Scan results uploaded successfully."); + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to upload scan results."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleScanEntryTraceAsync( + IServiceProvider services, + string scanId, + bool includeNdjson, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scan-entrytrace"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.scan.entrytrace", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "scan entrytrace"); + activity?.SetTag("stellaops.cli.scan_id", scanId); + using var duration = CliMetrics.MeasureCommandDuration("scan entrytrace"); + + try + { var result = await client.GetEntryTraceAsync(scanId, cancellationToken).ConfigureAwait(false); if (result is null) { @@ -416,19 +422,19 @@ internal static class CommandHandlers Environment.ExitCode = 1; return; } - - RenderEntryTrace(result, includeNdjson); - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to fetch EntryTrace for scan {ScanId}.", scanId); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } + + RenderEntryTrace(result, includeNdjson); + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to fetch EntryTrace for scan {ScanId}.", scanId); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } } public static async Task HandleAdviseRunAsync( @@ -577,3822 +583,3822 @@ internal static class CommandHandlers bool dryRun, string source, string input, - string? tenantOverride, - string format, - bool disableColor, - string? output, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("sources-ingest"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - - using var activity = CliActivitySource.Instance.StartActivity("cli.sources.ingest.dry_run", ActivityKind.Client); - var statusMetric = "unknown"; - using var duration = CliMetrics.MeasureCommandDuration("sources ingest dry-run"); - - try - { - if (!dryRun) - { - statusMetric = "unsupported"; - logger.LogError("Only --dry-run mode is supported for 'stella sources ingest' at this time."); - Environment.ExitCode = 1; - return; - } - - source = source?.Trim() ?? string.Empty; - if (string.IsNullOrWhiteSpace(source)) - { - throw new InvalidOperationException("Source identifier must be provided."); - } - - var formatNormalized = string.IsNullOrWhiteSpace(format) - ? "table" - : format.Trim().ToLowerInvariant(); - - if (formatNormalized is not ("table" or "json")) - { - throw new InvalidOperationException("Format must be either 'table' or 'json'."); - } - - var tenant = ResolveTenant(tenantOverride); - if (string.IsNullOrWhiteSpace(tenant)) - { - throw new InvalidOperationException("Tenant must be provided via --tenant or STELLA_TENANT."); - } - - var payload = await LoadIngestInputAsync(services, input, cancellationToken).ConfigureAwait(false); - - logger.LogInformation("Executing ingestion dry-run for source {Source} using input {Input}.", source, payload.Name); - - activity?.SetTag("stellaops.cli.command", "sources ingest dry-run"); - activity?.SetTag("stellaops.cli.source", source); - activity?.SetTag("stellaops.cli.tenant", tenant); - activity?.SetTag("stellaops.cli.format", formatNormalized); - activity?.SetTag("stellaops.cli.input_kind", payload.Kind); - - var request = new AocIngestDryRunRequest - { - Tenant = tenant, - Source = source, - Document = new AocIngestDryRunDocument - { - Name = payload.Name, - Content = payload.Content, - ContentType = payload.ContentType, - ContentEncoding = payload.ContentEncoding - } - }; - - var response = await client.ExecuteAocIngestDryRunAsync(request, 
cancellationToken).ConfigureAwait(false); - activity?.SetTag("stellaops.cli.status", response.Status ?? "unknown"); - - if (!string.IsNullOrWhiteSpace(output)) - { - var reportPath = await WriteJsonReportAsync(response, output, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Dry-run report written to {Path}.", reportPath); - } - - if (formatNormalized == "json") - { - var json = JsonSerializer.Serialize(response, new JsonSerializerOptions - { - WriteIndented = true - }); - Console.WriteLine(json); - } - else - { - RenderDryRunTable(response, !disableColor); - } - - var exitCode = DetermineDryRunExitCode(response); - Environment.ExitCode = exitCode; - statusMetric = exitCode == 0 ? "ok" : "violation"; - activity?.SetTag("stellaops.cli.exit_code", exitCode); - } - catch (Exception ex) - { - statusMetric = "transport_error"; - logger.LogError(ex, "Dry-run ingestion failed."); - Environment.ExitCode = 70; - } - finally - { - verbosity.MinimumLevel = previousLevel; - CliMetrics.RecordSourcesDryRun(statusMetric); - } - } - - public static async Task HandleAocVerifyAsync( - IServiceProvider services, - string? sinceOption, - int? limitOption, - string? sourcesOption, - string? codesOption, - string format, - string? exportPath, - string? tenantOverride, - bool disableColor, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("aoc-verify"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - - using var activity = CliActivitySource.Instance.StartActivity("cli.aoc.verify", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("aoc verify"); - var outcome = "unknown"; - - try - { - var tenant = ResolveTenant(tenantOverride); - if (string.IsNullOrWhiteSpace(tenant)) - { - throw new InvalidOperationException("Tenant must be provided via --tenant or STELLA_TENANT."); - } - - var normalizedFormat = string.IsNullOrWhiteSpace(format) - ? "table" - : format.Trim().ToLowerInvariant(); - - if (normalizedFormat is not ("table" or "json")) - { - throw new InvalidOperationException("Format must be either 'table' or 'json'."); - } - - var since = DetermineVerificationSince(sinceOption); - var sinceIso = since.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); - var limit = NormalizeLimit(limitOption); - var sources = ParseCommaSeparatedList(sourcesOption); - var codes = ParseCommaSeparatedList(codesOption); - - var normalizedSources = sources.Count == 0 - ? Array.Empty() - : sources.Select(item => item.ToLowerInvariant()).ToArray(); - - var normalizedCodes = codes.Count == 0 - ? Array.Empty() - : codes.Select(item => item.ToUpperInvariant()).ToArray(); - - activity?.SetTag("stellaops.cli.command", "aoc verify"); - activity?.SetTag("stellaops.cli.tenant", tenant); - activity?.SetTag("stellaops.cli.since", sinceIso); - activity?.SetTag("stellaops.cli.limit", limit); - activity?.SetTag("stellaops.cli.format", normalizedFormat); - if (normalizedSources.Length > 0) - { - activity?.SetTag("stellaops.cli.sources", string.Join(",", normalizedSources)); - } - - if (normalizedCodes.Length > 0) - { - activity?.SetTag("stellaops.cli.codes", string.Join(",", normalizedCodes)); - } - - var request = new AocVerifyRequest - { - Tenant = tenant, - Since = sinceIso, - Limit = limit, - Sources = normalizedSources.Length == 0 ? null : normalizedSources, - Codes = normalizedCodes.Length == 0 ? 
null : normalizedCodes - }; - - var response = await client.ExecuteAocVerifyAsync(request, cancellationToken).ConfigureAwait(false); - - if (!string.IsNullOrWhiteSpace(exportPath)) - { - var reportPath = await WriteJsonReportAsync(response, exportPath, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Verification report written to {Path}.", reportPath); - } - - if (normalizedFormat == "json") - { - var json = JsonSerializer.Serialize(response, new JsonSerializerOptions - { - WriteIndented = true - }); - Console.WriteLine(json); - } - else - { - RenderAocVerifyTable(response, !disableColor, limit); - } - - var exitCode = DetermineVerifyExitCode(response); - Environment.ExitCode = exitCode; - activity?.SetTag("stellaops.cli.exit_code", exitCode); - outcome = exitCode switch - { - 0 => "ok", - >= 11 and <= 17 => "violations", - 18 => "truncated", - _ => "unknown" - }; - } - catch (InvalidOperationException ex) - { - outcome = "usage_error"; - logger.LogError(ex, "Verification failed: {Message}", ex.Message); - Console.Error.WriteLine(ex.Message); - Environment.ExitCode = 71; - activity?.SetStatus(ActivityStatusCode.Error, ex.Message); - } - catch (Exception ex) - { - outcome = "transport_error"; - logger.LogError(ex, "Verification request failed."); - Console.Error.WriteLine(ex.Message); - Environment.ExitCode = 70; - activity?.SetStatus(ActivityStatusCode.Error, ex.Message); - } - finally - { - verbosity.MinimumLevel = previousLevel; - CliMetrics.RecordAocVerify(outcome); - } - } - - public static async Task HandleConnectorJobAsync( - IServiceProvider services, - string source, - string stage, - string? 
mode, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-connector"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.db.fetch", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "db fetch"); - activity?.SetTag("stellaops.cli.source", source); - activity?.SetTag("stellaops.cli.stage", stage); - if (!string.IsNullOrWhiteSpace(mode)) - { - activity?.SetTag("stellaops.cli.mode", mode); - } - using var duration = CliMetrics.MeasureCommandDuration("db fetch"); - - try - { - var jobKind = $"source:{source}:{stage}"; - var parameters = new Dictionary(StringComparer.Ordinal); - if (!string.IsNullOrWhiteSpace(mode)) - { - parameters["mode"] = mode; - } - - await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - logger.LogError(ex, "Connector job failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleMergeJobAsync( - IServiceProvider services, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-merge"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.db.merge", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "db merge"); - using var duration = CliMetrics.MeasureCommandDuration("db merge"); - - try - { - await TriggerJobAsync(client, logger, "merge:reconcile", new Dictionary(StringComparer.Ordinal), cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - logger.LogError(ex, "Merge job failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleExportJobAsync( - IServiceProvider services, - string format, - bool delta, - bool? publishFull, - bool? publishDelta, - bool? includeFull, - bool? includeDelta, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-export"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.db.export", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "db export"); - activity?.SetTag("stellaops.cli.format", format); - activity?.SetTag("stellaops.cli.delta", delta); - using var duration = CliMetrics.MeasureCommandDuration("db export"); - activity?.SetTag("stellaops.cli.publish_full", publishFull); - activity?.SetTag("stellaops.cli.publish_delta", publishDelta); - activity?.SetTag("stellaops.cli.include_full", includeFull); - activity?.SetTag("stellaops.cli.include_delta", includeDelta); - - try - { - var jobKind = format switch - { - "trivy-db" or "trivy" => "export:trivy-db", - _ => "export:json" - }; - - var isTrivy = jobKind == "export:trivy-db"; - if (isTrivy - && !publishFull.HasValue - && !publishDelta.HasValue - && !includeFull.HasValue - && !includeDelta.HasValue - && AnsiConsole.Profile.Capabilities.Interactive) - { - var overrides = TrivyDbExportPrompt.PromptOverrides(); - publishFull = overrides.publishFull; - publishDelta = overrides.publishDelta; - includeFull = overrides.includeFull; - includeDelta = overrides.includeDelta; - } - - var parameters = new Dictionary(StringComparer.Ordinal) - { - ["delta"] = delta - }; - if (publishFull.HasValue) - { - parameters["publishFull"] = publishFull.Value; - } - if (publishDelta.HasValue) - { - parameters["publishDelta"] = publishDelta.Value; - } - if (includeFull.HasValue) - { - parameters["includeFull"] = includeFull.Value; - } - if (includeDelta.HasValue) - { - parameters["includeDelta"] = includeDelta.Value; - } - - await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - logger.LogError(ex, "Export job failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static Task HandleExcititorInitAsync( - IServiceProvider services, - 
IReadOnlyList providers, - bool resume, - bool verbose, - CancellationToken cancellationToken) - { - var normalizedProviders = NormalizeProviders(providers); - var payload = new Dictionary(StringComparer.Ordinal); - if (normalizedProviders.Count > 0) - { - payload["providers"] = normalizedProviders; - } - if (resume) - { - payload["resume"] = true; - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor init", - verbose, - new Dictionary - { - ["providers"] = normalizedProviders.Count, - ["resume"] = resume - }, - client => client.ExecuteExcititorOperationAsync("init", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static Task HandleExcititorPullAsync( - IServiceProvider services, - IReadOnlyList providers, - DateTimeOffset? since, - TimeSpan? window, - bool force, - bool verbose, - CancellationToken cancellationToken) - { - var normalizedProviders = NormalizeProviders(providers); - var payload = new Dictionary(StringComparer.Ordinal); - if (normalizedProviders.Count > 0) - { - payload["providers"] = normalizedProviders; - } - if (since.HasValue) - { - payload["since"] = since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); - } - if (window.HasValue) - { - payload["window"] = window.Value.ToString("c", CultureInfo.InvariantCulture); - } - if (force) - { - payload["force"] = true; - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor pull", - verbose, - new Dictionary - { - ["providers"] = normalizedProviders.Count, - ["force"] = force, - ["since"] = since?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture), - ["window"] = window?.ToString("c", CultureInfo.InvariantCulture) - }, - client => client.ExecuteExcititorOperationAsync("ingest/run", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static Task HandleExcititorResumeAsync( - IServiceProvider services, - IReadOnlyList 
providers, - string? checkpoint, - bool verbose, - CancellationToken cancellationToken) - { - var normalizedProviders = NormalizeProviders(providers); - var payload = new Dictionary(StringComparer.Ordinal); - if (normalizedProviders.Count > 0) - { - payload["providers"] = normalizedProviders; - } - if (!string.IsNullOrWhiteSpace(checkpoint)) - { - payload["checkpoint"] = checkpoint.Trim(); - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor resume", - verbose, - new Dictionary - { - ["providers"] = normalizedProviders.Count, - ["checkpoint"] = checkpoint - }, - client => client.ExecuteExcititorOperationAsync("ingest/resume", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static async Task HandleExcititorListProvidersAsync( - IServiceProvider services, - bool includeDisabled, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("excititor-list-providers"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.excititor.list-providers", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "excititor list-providers"); - activity?.SetTag("stellaops.cli.include_disabled", includeDisabled); - using var duration = CliMetrics.MeasureCommandDuration("excititor list-providers"); - - try - { - var providers = await client.GetExcititorProvidersAsync(includeDisabled, cancellationToken).ConfigureAwait(false); - Environment.ExitCode = 0; - logger.LogInformation("Providers returned: {Count}", providers.Count); - - if (providers.Count > 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table().Border(TableBorder.Rounded).AddColumns("Provider", "Kind", "Trust", "Enabled", "Last Ingested"); - foreach (var provider in providers) - { - table.AddRow( - provider.Id, - provider.Kind, - string.IsNullOrWhiteSpace(provider.TrustTier) ? "-" : provider.TrustTier, - provider.Enabled ? "yes" : "no", - provider.LastIngestedAt?.ToString("yyyy-MM-dd HH:mm:ss 'UTC'", CultureInfo.InvariantCulture) ?? "unknown"); - } - - AnsiConsole.Write(table); - } - else - { - foreach (var provider in providers) - { - logger.LogInformation("{ProviderId} [{Kind}] Enabled={Enabled} Trust={Trust} LastIngested={LastIngested}", - provider.Id, - provider.Kind, - provider.Enabled ? "yes" : "no", - string.IsNullOrWhiteSpace(provider.TrustTier) ? "-" : provider.TrustTier, - provider.LastIngestedAt?.ToString("O", CultureInfo.InvariantCulture) ?? "unknown"); - } - } - } - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to list Excititor providers."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleExcititorExportAsync( - IServiceProvider services, - string format, - bool delta, - string? scope, - DateTimeOffset? since, - string? provider, - string? 
outputPath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scopeHandle = services.CreateAsyncScope(); - var client = scopeHandle.ServiceProvider.GetRequiredService(); - var logger = scopeHandle.ServiceProvider.GetRequiredService().CreateLogger("excititor-export"); - var options = scopeHandle.ServiceProvider.GetRequiredService(); - var verbosity = scopeHandle.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.excititor.export", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "excititor export"); - activity?.SetTag("stellaops.cli.format", format); - activity?.SetTag("stellaops.cli.delta", delta); - if (!string.IsNullOrWhiteSpace(scope)) - { - activity?.SetTag("stellaops.cli.scope", scope); - } - if (since.HasValue) - { - activity?.SetTag("stellaops.cli.since", since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture)); - } - if (!string.IsNullOrWhiteSpace(provider)) - { - activity?.SetTag("stellaops.cli.provider", provider); - } - if (!string.IsNullOrWhiteSpace(outputPath)) - { - activity?.SetTag("stellaops.cli.output", outputPath); - } - using var duration = CliMetrics.MeasureCommandDuration("excititor export"); - - try - { - var payload = new Dictionary(StringComparer.Ordinal) - { - ["format"] = string.IsNullOrWhiteSpace(format) ? 
"openvex" : format.Trim(), - ["delta"] = delta - }; - - if (!string.IsNullOrWhiteSpace(scope)) - { - payload["scope"] = scope.Trim(); - } - if (since.HasValue) - { - payload["since"] = since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); - } - if (!string.IsNullOrWhiteSpace(provider)) - { - payload["provider"] = provider.Trim(); - } - - var result = await client.ExecuteExcititorOperationAsync( - "export", - HttpMethod.Post, - RemoveNullValues(payload), - cancellationToken).ConfigureAwait(false); - - if (!result.Success) - { - logger.LogError(string.IsNullOrWhiteSpace(result.Message) ? "Excititor export failed." : result.Message); - Environment.ExitCode = 1; - return; - } - - Environment.ExitCode = 0; - - var manifest = TryParseExportManifest(result.Payload); - if (!string.IsNullOrWhiteSpace(result.Message) - && (manifest is null || !string.Equals(result.Message, "ok", StringComparison.OrdinalIgnoreCase))) - { - logger.LogInformation(result.Message); - } - - if (manifest is not null) - { - activity?.SetTag("stellaops.cli.export_id", manifest.ExportId); - if (!string.IsNullOrWhiteSpace(manifest.Format)) - { - activity?.SetTag("stellaops.cli.export_format", manifest.Format); - } - if (manifest.FromCache.HasValue) - { - activity?.SetTag("stellaops.cli.export_cached", manifest.FromCache.Value); - } - if (manifest.SizeBytes.HasValue) - { - activity?.SetTag("stellaops.cli.export_size", manifest.SizeBytes.Value); - } - - if (manifest.FromCache == true) - { - logger.LogInformation("Reusing cached export {ExportId} ({Format}).", manifest.ExportId, manifest.Format ?? "unknown"); - } - else - { - logger.LogInformation("Export ready: {ExportId} ({Format}).", manifest.ExportId, manifest.Format ?? 
"unknown"); - } - - if (manifest.CreatedAt.HasValue) - { - logger.LogInformation("Created at {CreatedAt}.", manifest.CreatedAt.Value.ToString("u", CultureInfo.InvariantCulture)); - } - - if (!string.IsNullOrWhiteSpace(manifest.Digest)) - { - var digestDisplay = BuildDigestDisplay(manifest.Algorithm, manifest.Digest); - if (manifest.SizeBytes.HasValue) - { - logger.LogInformation("Digest {Digest} ({Size}).", digestDisplay, FormatSize(manifest.SizeBytes.Value)); - } - else - { - logger.LogInformation("Digest {Digest}.", digestDisplay); - } - } - - if (!string.IsNullOrWhiteSpace(manifest.RekorLocation)) - { - if (!string.IsNullOrWhiteSpace(manifest.RekorIndex)) - { - logger.LogInformation("Rekor entry: {Location} (index {Index}).", manifest.RekorLocation, manifest.RekorIndex); - } - else - { - logger.LogInformation("Rekor entry: {Location}.", manifest.RekorLocation); - } - } - - if (!string.IsNullOrWhiteSpace(manifest.RekorInclusionUrl) - && !string.Equals(manifest.RekorInclusionUrl, manifest.RekorLocation, StringComparison.OrdinalIgnoreCase)) - { - logger.LogInformation("Rekor inclusion proof: {Url}.", manifest.RekorInclusionUrl); - } - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - var resolvedPath = ResolveExportOutputPath(outputPath!, manifest); - var download = await client.DownloadExcititorExportAsync( - manifest.ExportId, - resolvedPath, - manifest.Algorithm, - manifest.Digest, - cancellationToken).ConfigureAwait(false); - - activity?.SetTag("stellaops.cli.export_path", download.Path); - - if (download.FromCache) - { - logger.LogInformation("Export already cached at {Path} ({Size}).", download.Path, FormatSize(download.SizeBytes)); - } - else - { - logger.LogInformation("Export saved to {Path} ({Size}).", download.Path, FormatSize(download.SizeBytes)); - } - } - else if (!string.IsNullOrWhiteSpace(result.Location)) - { - var downloadUrl = ResolveLocationUrl(options, result.Location); - if (!string.IsNullOrWhiteSpace(downloadUrl)) - { - 
logger.LogInformation("Download URL: {Url}", downloadUrl); - } - else - { - logger.LogInformation("Download location: {Location}", result.Location); - } - } - } - else - { - if (!string.IsNullOrWhiteSpace(result.Location)) - { - var downloadUrl = ResolveLocationUrl(options, result.Location); - if (!string.IsNullOrWhiteSpace(downloadUrl)) - { - logger.LogInformation("Download URL: {Url}", downloadUrl); - } - else - { - logger.LogInformation("Location: {Location}", result.Location); - } - } - else if (string.IsNullOrWhiteSpace(result.Message)) - { - logger.LogInformation("Export request accepted."); - } - } - } - catch (Exception ex) - { - logger.LogError(ex, "Excititor export failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static Task HandleExcititorBackfillStatementsAsync( - IServiceProvider services, - DateTimeOffset? retrievedSince, - bool force, - int batchSize, - int? maxDocuments, - bool verbose, - CancellationToken cancellationToken) - { - if (batchSize <= 0) - { - throw new ArgumentOutOfRangeException(nameof(batchSize), "Batch size must be greater than zero."); - } - - if (maxDocuments.HasValue && maxDocuments.Value <= 0) - { - throw new ArgumentOutOfRangeException(nameof(maxDocuments), "Max documents must be greater than zero when specified."); - } - - var payload = new Dictionary(StringComparer.Ordinal) - { - ["force"] = force, - ["batchSize"] = batchSize, - ["maxDocuments"] = maxDocuments - }; - - if (retrievedSince.HasValue) - { - payload["retrievedSince"] = retrievedSince.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); - } - - var activityTags = new Dictionary(StringComparer.Ordinal) - { - ["stellaops.cli.force"] = force, - ["stellaops.cli.batch_size"] = batchSize, - ["stellaops.cli.max_documents"] = maxDocuments - }; - - if (retrievedSince.HasValue) - { - activityTags["stellaops.cli.retrieved_since"] = retrievedSince.Value.ToUniversalTime().ToString("O", 
CultureInfo.InvariantCulture); - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor backfill-statements", - verbose, - activityTags, - client => client.ExecuteExcititorOperationAsync( - "admin/backfill-statements", - HttpMethod.Post, - RemoveNullValues(payload), - cancellationToken), - cancellationToken); - } - - public static Task HandleExcititorVerifyAsync( - IServiceProvider services, - string? exportId, - string? digest, - string? attestationPath, - bool verbose, - CancellationToken cancellationToken) - { - if (string.IsNullOrWhiteSpace(exportId) && string.IsNullOrWhiteSpace(digest) && string.IsNullOrWhiteSpace(attestationPath)) - { - var logger = services.GetRequiredService().CreateLogger("excititor-verify"); - logger.LogError("At least one of --export-id, --digest, or --attestation must be provided."); - Environment.ExitCode = 1; - return Task.CompletedTask; - } - - var payload = new Dictionary(StringComparer.Ordinal); - if (!string.IsNullOrWhiteSpace(exportId)) - { - payload["exportId"] = exportId.Trim(); - } - if (!string.IsNullOrWhiteSpace(digest)) - { - payload["digest"] = digest.Trim(); - } - if (!string.IsNullOrWhiteSpace(attestationPath)) - { - var fullPath = Path.GetFullPath(attestationPath); - if (!File.Exists(fullPath)) - { - var logger = services.GetRequiredService().CreateLogger("excititor-verify"); - logger.LogError("Attestation file not found at {Path}.", fullPath); - Environment.ExitCode = 1; - return Task.CompletedTask; - } - - var bytes = File.ReadAllBytes(fullPath); - payload["attestation"] = new Dictionary(StringComparer.Ordinal) - { - ["fileName"] = Path.GetFileName(fullPath), - ["base64"] = Convert.ToBase64String(bytes) - }; - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor verify", - verbose, - new Dictionary - { - ["export_id"] = exportId, - ["digest"] = digest, - ["attestation_path"] = attestationPath - }, - client => client.ExecuteExcititorOperationAsync("verify", 
HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static Task HandleExcititorReconcileAsync( - IServiceProvider services, - IReadOnlyList providers, - TimeSpan? maxAge, - bool verbose, - CancellationToken cancellationToken) - { - var normalizedProviders = NormalizeProviders(providers); - var payload = new Dictionary(StringComparer.Ordinal); - if (normalizedProviders.Count > 0) - { - payload["providers"] = normalizedProviders; - } - if (maxAge.HasValue) - { - payload["maxAge"] = maxAge.Value.ToString("c", CultureInfo.InvariantCulture); - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor reconcile", - verbose, - new Dictionary - { - ["providers"] = normalizedProviders.Count, - ["max_age"] = maxAge?.ToString("c", CultureInfo.InvariantCulture) - }, - client => client.ExecuteExcititorOperationAsync("reconcile", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static async Task HandleRuntimePolicyTestAsync( - IServiceProvider services, - string? namespaceValue, - IReadOnlyList imageArguments, - string? filePath, - IReadOnlyList labelArguments, - bool outputJson, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("runtime-policy-test"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.runtime.policy.test", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "runtime policy test"); - if (!string.IsNullOrWhiteSpace(namespaceValue)) - { - activity?.SetTag("stellaops.cli.namespace", namespaceValue); - } - using var duration = CliMetrics.MeasureCommandDuration("runtime policy test"); - - try - { - IReadOnlyList images; - try - { - images = await GatherImageDigestsAsync(imageArguments, filePath, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or ArgumentException or FileNotFoundException) - { - logger.LogError(ex, "Failed to gather image digests: {Message}", ex.Message); - Environment.ExitCode = 9; - return; - } - - if (images.Count == 0) - { - logger.LogError("No image digests provided. Use --image, --file, or pipe digests via stdin."); - Environment.ExitCode = 9; - return; - } - - IReadOnlyDictionary labels; - try - { - labels = ParseLabelSelectors(labelArguments); - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - Environment.ExitCode = 9; - return; - } - - activity?.SetTag("stellaops.cli.images", images.Count); - activity?.SetTag("stellaops.cli.labels", labels.Count); - - var request = new RuntimePolicyEvaluationRequest(namespaceValue, labels, images); - var result = await client.EvaluateRuntimePolicyAsync(request, cancellationToken).ConfigureAwait(false); - - activity?.SetTag("stellaops.cli.ttl_seconds", result.TtlSeconds); - Environment.ExitCode = 0; - - if (outputJson) - { - var json = BuildRuntimePolicyJson(result, images); - Console.WriteLine(json); - return; - } - - if (result.ExpiresAtUtc.HasValue) - { - logger.LogInformation("Decision TTL: {TtlSeconds}s (expires {ExpiresAt})", result.TtlSeconds, result.ExpiresAtUtc.Value.ToString("u", CultureInfo.InvariantCulture)); - } - else - { - logger.LogInformation("Decision TTL: 
{TtlSeconds}s", result.TtlSeconds); - } - - if (!string.IsNullOrWhiteSpace(result.PolicyRevision)) - { - logger.LogInformation("Policy revision: {Revision}", result.PolicyRevision); - } - - DisplayRuntimePolicyResults(logger, result, images); - } - catch (Exception ex) - { - logger.LogError(ex, "Runtime policy evaluation failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleAuthLoginAsync( - IServiceProvider services, - StellaOpsCliOptions options, - bool verbose, - bool force, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-login"); - Environment.ExitCode = 0; - - if (string.IsNullOrWhiteSpace(options.Authority?.Url)) - { - logger.LogError("Authority URL is not configured. Set STELLAOPS_AUTHORITY_URL or update your configuration."); - Environment.ExitCode = 1; - return; - } - - var tokenClient = scope.ServiceProvider.GetService(); - if (tokenClient is null) - { - logger.LogError("Authority client is not available. 
Ensure AddStellaOpsAuthClient is registered in Program.cs."); - Environment.ExitCode = 1; - return; - } - - var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); - if (string.IsNullOrWhiteSpace(cacheKey)) - { - logger.LogError("Authority configuration is incomplete; unable to determine cache key."); - Environment.ExitCode = 1; - return; - } - - try - { - if (force) - { - await tokenClient.ClearCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - } - - var scopeName = AuthorityTokenUtilities.ResolveScope(options); - StellaOpsTokenResult token; - - if (!string.IsNullOrWhiteSpace(options.Authority.Username)) - { - if (string.IsNullOrWhiteSpace(options.Authority.Password)) - { - logger.LogError("Authority password must be provided when username is configured."); - Environment.ExitCode = 1; - return; - } - - token = await tokenClient.RequestPasswordTokenAsync( - options.Authority.Username, - options.Authority.Password!, - scopeName, - null, - cancellationToken).ConfigureAwait(false); - } - else - { - token = await tokenClient.RequestClientCredentialsTokenAsync(scopeName, null, cancellationToken).ConfigureAwait(false); - } - - await tokenClient.CacheTokenAsync(cacheKey, token.ToCacheEntry(), cancellationToken).ConfigureAwait(false); - - if (verbose) - { - logger.LogInformation("Authenticated with {Authority} (scopes: {Scopes}).", options.Authority.Url, string.Join(", ", token.Scopes)); - } - - logger.LogInformation("Login successful. 
Access token expires at {Expires}.", token.ExpiresAtUtc.ToString("u")); - } - catch (Exception ex) - { - logger.LogError(ex, "Authentication failed: {Message}", ex.Message); - Environment.ExitCode = 1; - } - } - - public static async Task HandleAuthLogoutAsync( - IServiceProvider services, - StellaOpsCliOptions options, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-logout"); - Environment.ExitCode = 0; - - var tokenClient = scope.ServiceProvider.GetService(); - if (tokenClient is null) - { - logger.LogInformation("No authority client registered; nothing to remove."); - return; - } - - var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); - if (string.IsNullOrWhiteSpace(cacheKey)) - { - logger.LogInformation("Authority configuration missing; no cached tokens to remove."); - return; - } - - await tokenClient.ClearCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - if (verbose) - { - logger.LogInformation("Cleared cached token for {Authority}.", options.Authority?.Url ?? "authority"); - } - } - - public static async Task HandleAuthStatusAsync( - IServiceProvider services, - StellaOpsCliOptions options, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-status"); - Environment.ExitCode = 0; - - if (string.IsNullOrWhiteSpace(options.Authority?.Url)) - { - logger.LogInformation("Authority URL not configured. 
Set STELLAOPS_AUTHORITY_URL and run 'auth login'."); - Environment.ExitCode = 1; - return; - } - - var tokenClient = scope.ServiceProvider.GetService(); - if (tokenClient is null) - { - logger.LogInformation("Authority client not registered; no cached tokens available."); - Environment.ExitCode = 1; - return; - } - - var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); - if (string.IsNullOrWhiteSpace(cacheKey)) - { - logger.LogInformation("Authority configuration incomplete; no cached tokens available."); - Environment.ExitCode = 1; - return; - } - - var entry = await tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - if (entry is null) - { - logger.LogInformation("No cached token for {Authority}. Run 'auth login' to authenticate.", options.Authority.Url); - Environment.ExitCode = 1; - return; - } - - logger.LogInformation("Cached token for {Authority} expires at {Expires}.", options.Authority.Url, entry.ExpiresAtUtc.ToString("u")); - if (verbose) - { - logger.LogInformation("Scopes: {Scopes}", string.Join(", ", entry.Scopes)); - } - } - - public static async Task HandleAuthWhoAmIAsync( - IServiceProvider services, - StellaOpsCliOptions options, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-whoami"); - Environment.ExitCode = 0; - - if (string.IsNullOrWhiteSpace(options.Authority?.Url)) - { - logger.LogInformation("Authority URL not configured. 
Set STELLAOPS_AUTHORITY_URL and run 'auth login'."); - Environment.ExitCode = 1; - return; - } - - var tokenClient = scope.ServiceProvider.GetService(); - if (tokenClient is null) - { - logger.LogInformation("Authority client not registered; no cached tokens available."); - Environment.ExitCode = 1; - return; - } - - var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); - if (string.IsNullOrWhiteSpace(cacheKey)) - { - logger.LogInformation("Authority configuration incomplete; no cached tokens available."); - Environment.ExitCode = 1; - return; - } - - var entry = await tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - if (entry is null) - { - logger.LogInformation("No cached token for {Authority}. Run 'auth login' to authenticate.", options.Authority.Url); - Environment.ExitCode = 1; - return; - } - - var grantType = string.IsNullOrWhiteSpace(options.Authority.Username) ? "client_credentials" : "password"; - var now = DateTimeOffset.UtcNow; - var remaining = entry.ExpiresAtUtc - now; - if (remaining < TimeSpan.Zero) - { - remaining = TimeSpan.Zero; - } - - logger.LogInformation("Authority: {Authority}", options.Authority.Url); - logger.LogInformation("Grant type: {GrantType}", grantType); - logger.LogInformation("Token type: {TokenType}", entry.TokenType); - logger.LogInformation("Expires: {Expires} ({Remaining})", entry.ExpiresAtUtc.ToString("u"), FormatDuration(remaining)); - - if (entry.Scopes.Count > 0) - { - logger.LogInformation("Scopes: {Scopes}", string.Join(", ", entry.Scopes)); - } - - if (TryExtractJwtClaims(entry.AccessToken, out var claims, out var issuedAt, out var notBefore)) - { - if (claims.TryGetValue("sub", out var subject) && !string.IsNullOrWhiteSpace(subject)) - { - logger.LogInformation("Subject: {Subject}", subject); - } - - if (claims.TryGetValue("client_id", out var clientId) && !string.IsNullOrWhiteSpace(clientId)) - { - logger.LogInformation("Client ID (token): {ClientId}", clientId); - } - - 
if (claims.TryGetValue("aud", out var audience) && !string.IsNullOrWhiteSpace(audience)) - { - logger.LogInformation("Audience: {Audience}", audience); - } - - if (claims.TryGetValue("iss", out var issuer) && !string.IsNullOrWhiteSpace(issuer)) - { - logger.LogInformation("Issuer: {Issuer}", issuer); - } - - if (issuedAt is not null) - { - logger.LogInformation("Issued at: {IssuedAt}", issuedAt.Value.ToString("u")); - } - - if (notBefore is not null) - { - logger.LogInformation("Not before: {NotBefore}", notBefore.Value.ToString("u")); - } - - var extraClaims = CollectAdditionalClaims(claims); - if (extraClaims.Count > 0 && verbose) - { - logger.LogInformation("Additional claims: {Claims}", string.Join(", ", extraClaims)); - } - } - else - { - logger.LogInformation("Access token appears opaque; claims are unavailable."); - } - } - - public static async Task HandleAuthRevokeExportAsync( - IServiceProvider services, - StellaOpsCliOptions options, - string? outputDirectory, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-revoke-export"); - Environment.ExitCode = 0; - - try - { - var client = scope.ServiceProvider.GetRequiredService(); - var result = await client.ExportAsync(verbose, cancellationToken).ConfigureAwait(false); - - var directory = string.IsNullOrWhiteSpace(outputDirectory) - ? 
Directory.GetCurrentDirectory() - : Path.GetFullPath(outputDirectory); - - Directory.CreateDirectory(directory); - - var bundlePath = Path.Combine(directory, "revocation-bundle.json"); - var signaturePath = Path.Combine(directory, "revocation-bundle.json.jws"); - var digestPath = Path.Combine(directory, "revocation-bundle.json.sha256"); - - await File.WriteAllBytesAsync(bundlePath, result.BundleBytes, cancellationToken).ConfigureAwait(false); - await File.WriteAllTextAsync(signaturePath, result.Signature, cancellationToken).ConfigureAwait(false); - await File.WriteAllTextAsync(digestPath, $"sha256:{result.Digest}", cancellationToken).ConfigureAwait(false); - - var computedDigest = Convert.ToHexString(SHA256.HashData(result.BundleBytes)).ToLowerInvariant(); - if (!string.Equals(computedDigest, result.Digest, StringComparison.OrdinalIgnoreCase)) - { - logger.LogError("Digest mismatch. Expected {Expected} but computed {Actual}.", result.Digest, computedDigest); - Environment.ExitCode = 1; - return; - } - - logger.LogInformation( - "Revocation bundle exported to {Directory} (sequence {Sequence}, issued {Issued:u}, signing key {KeyId}, provider {Provider}).", - directory, - result.Sequence, - result.IssuedAt, - string.IsNullOrWhiteSpace(result.SigningKeyId) ? "" : result.SigningKeyId, - string.IsNullOrWhiteSpace(result.SigningProvider) ? 
"default" : result.SigningProvider); - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to export revocation bundle."); - Environment.ExitCode = 1; - } - } - - public static async Task HandleAuthRevokeVerifyAsync( - string bundlePath, - string signaturePath, - string keyPath, - bool verbose, - CancellationToken cancellationToken) - { - var loggerFactory = LoggerFactory.Create(builder => builder.AddSimpleConsole(options => - { - options.SingleLine = true; - options.TimestampFormat = "HH:mm:ss "; - })); - var logger = loggerFactory.CreateLogger("auth-revoke-verify"); - Environment.ExitCode = 0; - - try - { - if (string.IsNullOrWhiteSpace(bundlePath) || string.IsNullOrWhiteSpace(signaturePath) || string.IsNullOrWhiteSpace(keyPath)) - { - logger.LogError("Arguments --bundle, --signature, and --key are required."); - Environment.ExitCode = 1; - return; - } - - var bundleBytes = await File.ReadAllBytesAsync(bundlePath, cancellationToken).ConfigureAwait(false); - var signatureContent = (await File.ReadAllTextAsync(signaturePath, cancellationToken).ConfigureAwait(false)).Trim(); - var keyPem = await File.ReadAllTextAsync(keyPath, cancellationToken).ConfigureAwait(false); - - var digest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant(); - logger.LogInformation("Bundle digest sha256:{Digest}", digest); - - if (!TryParseDetachedJws(signatureContent, out var encodedHeader, out var encodedSignature)) - { - logger.LogError("Signature is not in detached JWS format."); - Environment.ExitCode = 1; - return; - } - - var headerJson = Encoding.UTF8.GetString(Base64UrlDecode(encodedHeader)); - using var headerDocument = JsonDocument.Parse(headerJson); - var header = headerDocument.RootElement; - - if (!header.TryGetProperty("b64", out var b64Element) || b64Element.GetBoolean()) - { - logger.LogError("Detached JWS header must include '\"b64\": false'."); - Environment.ExitCode = 1; - return; - } - - var algorithm = header.TryGetProperty("alg", out var 
algElement) ? algElement.GetString() : SignatureAlgorithms.Es256; - if (string.IsNullOrWhiteSpace(algorithm)) - { - algorithm = SignatureAlgorithms.Es256; - } - - var providerHint = header.TryGetProperty("provider", out var providerElement) - ? providerElement.GetString() - : null; - - var keyId = header.TryGetProperty("kid", out var kidElement) ? kidElement.GetString() : null; - if (string.IsNullOrWhiteSpace(keyId)) - { - keyId = Path.GetFileNameWithoutExtension(keyPath); - logger.LogWarning("JWS header missing 'kid'; using fallback key id {KeyId}.", keyId); - } - - CryptoSigningKey signingKey; - try - { - signingKey = CreateVerificationSigningKey(keyId!, algorithm!, providerHint, keyPem, keyPath); - } - catch (Exception ex) when (ex is InvalidOperationException or CryptographicException) - { - logger.LogError(ex, "Failed to load verification key material."); - Environment.ExitCode = 1; - return; - } - - var providers = new List - { - new DefaultCryptoProvider() - }; - -#if STELLAOPS_CRYPTO_SODIUM - providers.Add(new LibsodiumCryptoProvider()); -#endif - - foreach (var provider in providers) - { - if (provider.Supports(CryptoCapability.Verification, algorithm!)) - { - provider.UpsertSigningKey(signingKey); - } - } - - var preferredOrder = !string.IsNullOrWhiteSpace(providerHint) - ? new[] { providerHint! 
} - : Array.Empty(); - var registry = new CryptoProviderRegistry(providers, preferredOrder); - CryptoSignerResolution resolution; - try - { - resolution = registry.ResolveSigner( - CryptoCapability.Verification, - algorithm!, - signingKey.Reference, - providerHint); - } - catch (Exception ex) - { - logger.LogError(ex, "No crypto provider available for verification (algorithm {Algorithm}).", algorithm); - Environment.ExitCode = 1; - return; - } - - var signingInputLength = encodedHeader.Length + 1 + bundleBytes.Length; - var buffer = ArrayPool.Shared.Rent(signingInputLength); - try - { - var headerBytes = Encoding.ASCII.GetBytes(encodedHeader); - Buffer.BlockCopy(headerBytes, 0, buffer, 0, headerBytes.Length); - buffer[headerBytes.Length] = (byte)'.'; - Buffer.BlockCopy(bundleBytes, 0, buffer, headerBytes.Length + 1, bundleBytes.Length); - - var signatureBytes = Base64UrlDecode(encodedSignature); - var verified = await resolution.Signer.VerifyAsync( - new ReadOnlyMemory(buffer, 0, signingInputLength), - signatureBytes, - cancellationToken).ConfigureAwait(false); - - if (!verified) - { - logger.LogError("Signature verification failed."); - Environment.ExitCode = 1; - return; - } - } - finally - { - ArrayPool.Shared.Return(buffer); - } - - if (!string.IsNullOrWhiteSpace(providerHint) && !string.Equals(providerHint, resolution.ProviderName, StringComparison.OrdinalIgnoreCase)) - { - logger.LogWarning( - "Preferred provider '{Preferred}' unavailable; verification used '{Provider}'.", - providerHint, - resolution.ProviderName); - } - - logger.LogInformation( - "Signature verified using algorithm {Algorithm} via provider {Provider} (kid {KeyId}).", - algorithm, - resolution.ProviderName, - signingKey.Reference.KeyId); - - if (verbose) - { - logger.LogInformation("JWS header: {Header}", headerJson); - } - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to verify revocation bundle."); - Environment.ExitCode = 1; - } - finally - { - loggerFactory.Dispose(); - } - 
} - - public static async Task HandleVulnObservationsAsync( - IServiceProvider services, - string tenant, - IReadOnlyList observationIds, - IReadOnlyList aliases, - IReadOnlyList purls, - IReadOnlyList cpes, - int? limit, - string? cursor, - bool emitJson, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vuln-observations"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.vuln.observations", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "vuln observations"); - activity?.SetTag("stellaops.cli.tenant", tenant); - using var duration = CliMetrics.MeasureCommandDuration("vuln observations"); - - try - { - tenant = tenant?.Trim().ToLowerInvariant() ?? string.Empty; - if (string.IsNullOrWhiteSpace(tenant)) - { - throw new InvalidOperationException("Tenant must be provided."); - } - - var query = new AdvisoryObservationsQuery( - tenant, - NormalizeSet(observationIds, toLower: false), - NormalizeSet(aliases, toLower: true), - NormalizeSet(purls, toLower: false), - NormalizeSet(cpes, toLower: false), - limit, - cursor); - - var response = await client.GetObservationsAsync(query, cancellationToken).ConfigureAwait(false); - - if (emitJson) - { - var json = JsonSerializer.Serialize(response, new JsonSerializerOptions - { - WriteIndented = true - }); - Console.WriteLine(json); - Environment.ExitCode = 0; - return; - } - - RenderObservationTable(response); - if (!emitJson && response.HasMore && !string.IsNullOrWhiteSpace(response.NextCursor)) - { - var escapedCursor = Markup.Escape(response.NextCursor); - AnsiConsole.MarkupLine($"[yellow]More observations available. 
Continue with[/] [cyan]--cursor[/] [grey]{escapedCursor}[/]"); - } - Environment.ExitCode = 0; - } - catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) - { - logger.LogWarning("Operation cancelled by user."); - Environment.ExitCode = 130; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to fetch observations from Concelier."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - - static IReadOnlyList NormalizeSet(IReadOnlyList values, bool toLower) - { - if (values is null || values.Count == 0) - { - return Array.Empty(); - } - - var set = new HashSet(StringComparer.Ordinal); - foreach (var raw in values) - { - if (string.IsNullOrWhiteSpace(raw)) - { - continue; - } - - var normalized = raw.Trim(); - if (toLower) - { - normalized = normalized.ToLowerInvariant(); - } - - set.Add(normalized); - } - - return set.Count == 0 ? Array.Empty() : set.ToArray(); - } - - static void RenderObservationTable(AdvisoryObservationsResponse response) - { - var observations = response.Observations ?? Array.Empty(); - if (observations.Count == 0) - { - AnsiConsole.MarkupLine("[yellow]No observations matched the provided filters.[/]"); - return; - } - - var table = new Table() - .Centered() - .Border(TableBorder.Rounded); - - table.AddColumn("Observation"); - table.AddColumn("Source"); - table.AddColumn("Upstream Id"); - table.AddColumn("Aliases"); - table.AddColumn("PURLs"); - table.AddColumn("CPEs"); - table.AddColumn("Created (UTC)"); - - foreach (var observation in observations) - { - var sourceVendor = observation.Source?.Vendor ?? "(unknown)"; - var upstreamId = observation.Upstream?.UpstreamId ?? 
"(unknown)"; - var aliasesText = FormatList(observation.Linkset?.Aliases); - var purlsText = FormatList(observation.Linkset?.Purls); - var cpesText = FormatList(observation.Linkset?.Cpes); - - table.AddRow( - Markup.Escape(observation.ObservationId), - Markup.Escape(sourceVendor), - Markup.Escape(upstreamId), - Markup.Escape(aliasesText), - Markup.Escape(purlsText), - Markup.Escape(cpesText), - observation.CreatedAt.ToUniversalTime().ToString("u", CultureInfo.InvariantCulture)); - } - - AnsiConsole.Write(table); - AnsiConsole.MarkupLine( - "[green]{0}[/] observation(s). Aliases: [green]{1}[/], PURLs: [green]{2}[/], CPEs: [green]{3}[/].", - observations.Count, - response.Linkset?.Aliases?.Count ?? 0, - response.Linkset?.Purls?.Count ?? 0, - response.Linkset?.Cpes?.Count ?? 0); - } - - static string FormatList(IReadOnlyList? values) - { - if (values is null || values.Count == 0) - { - return "(none)"; - } - - const int MaxItems = 3; - if (values.Count <= MaxItems) - { - return string.Join(", ", values); - } - - var preview = values.Take(MaxItems); - return $"{string.Join(", ", preview)} (+{values.Count - MaxItems})"; - } - } - - public static async Task HandleOfflineKitPullAsync( - IServiceProvider services, - string? bundleId, - string? destinationDirectory, - bool overwrite, - bool resume, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var options = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-pull"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.pull", ActivityKind.Client); - activity?.SetTag("stellaops.cli.bundle_id", string.IsNullOrWhiteSpace(bundleId) ? "latest" : bundleId); - using var duration = CliMetrics.MeasureCommandDuration("offline kit pull"); - - try - { - var targetDirectory = string.IsNullOrWhiteSpace(destinationDirectory) - ? options.Offline?.KitsDirectory ?? Path.Combine(Environment.CurrentDirectory, "offline-kits") - : destinationDirectory; - - targetDirectory = Path.GetFullPath(targetDirectory); - Directory.CreateDirectory(targetDirectory); - - var result = await client.DownloadOfflineKitAsync(bundleId, targetDirectory, overwrite, resume, cancellationToken).ConfigureAwait(false); - - logger.LogInformation( - "Bundle {BundleId} stored at {Path} (captured {Captured:u}, sha256:{Digest}).", - result.Descriptor.BundleId, - result.BundlePath, - result.Descriptor.CapturedAt, - result.Descriptor.BundleSha256); - - logger.LogInformation("Manifest saved to {Manifest}.", result.ManifestPath); - - if (!string.IsNullOrWhiteSpace(result.MetadataPath)) - { - logger.LogDebug("Metadata recorded at {Metadata}.", result.MetadataPath); - } - - if (result.BundleSignaturePath is not null) - { - logger.LogInformation("Bundle signature saved to {Signature}.", result.BundleSignaturePath); - } - - if (result.ManifestSignaturePath is not null) - { - logger.LogInformation("Manifest signature saved to {Signature}.", result.ManifestSignaturePath); - } - - CliMetrics.RecordOfflineKitDownload(result.Descriptor.Kind ?? 
"unknown", result.FromCache); - activity?.SetTag("stellaops.cli.bundle_cache", result.FromCache); - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to download offline kit bundle."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicyFindingsListAsync( - IServiceProvider services, - string policyId, - string[] sbomFilters, - string[] statusFilters, - string[] severityFilters, - string? since, - string? cursor, - int? page, - int? pageSize, - string? format, - string? outputPath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-ls"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.list", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("policy findings list"); - - try - { - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (page.HasValue && page.Value < 1) - { - throw new ArgumentException("--page must be greater than or equal to 1.", nameof(page)); - } - - if (pageSize.HasValue && (pageSize.Value < 1 || pageSize.Value > 500)) - { - throw new ArgumentException("--page-size must be between 1 and 500.", nameof(pageSize)); - } - - var normalizedPolicyId = policyId.Trim(); - var sboms = NormalizePolicyFilterValues(sbomFilters); - var statuses = NormalizePolicyFilterValues(statusFilters, toLower: true); - var severities = NormalizePolicyFilterValues(severityFilters); - var sinceValue = ParsePolicySince(since); - var cursorValue = string.IsNullOrWhiteSpace(cursor) ? 
null : cursor.Trim(); - - var query = new PolicyFindingsQuery( - normalizedPolicyId, - sboms, - statuses, - severities, - cursorValue, - page, - pageSize, - sinceValue); - - activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); - if (sboms.Count > 0) - { - activity?.SetTag("stellaops.cli.findings.sbom_filters", string.Join(",", sboms)); - } - - if (statuses.Count > 0) - { - activity?.SetTag("stellaops.cli.findings.status_filters", string.Join(",", statuses)); - } - - if (severities.Count > 0) - { - activity?.SetTag("stellaops.cli.findings.severity_filters", string.Join(",", severities)); - } - - if (!string.IsNullOrWhiteSpace(cursorValue)) - { - activity?.SetTag("stellaops.cli.findings.cursor", cursorValue); - } - - if (page.HasValue) - { - activity?.SetTag("stellaops.cli.findings.page", page.Value); - } - - if (pageSize.HasValue) - { - activity?.SetTag("stellaops.cli.findings.page_size", pageSize.Value); - } - - if (sinceValue.HasValue) - { - activity?.SetTag("stellaops.cli.findings.since", sinceValue.Value.ToString("o", CultureInfo.InvariantCulture)); - } - - var result = await client.GetPolicyFindingsAsync(query, cancellationToken).ConfigureAwait(false); - activity?.SetTag("stellaops.cli.findings.count", result.Items.Count); - if (!string.IsNullOrWhiteSpace(result.NextCursor)) - { - activity?.SetTag("stellaops.cli.findings.next_cursor", result.NextCursor); - } - - var payload = BuildPolicyFindingsPayload(normalizedPolicyId, query, result); - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Results written to {Path}.", Path.GetFullPath(outputPath!)); - } - - var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); - if (outputFormat == PolicyFindingsOutputFormat.Json) - { - var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); - Console.WriteLine(json); - } - else - { - RenderPolicyFindingsTable(logger, 
result); - } - - CliMetrics.RecordPolicyFindingsList(result.Items.Count == 0 ? "empty" : "ok"); - Environment.ExitCode = 0; - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordPolicyFindingsList("error"); - Environment.ExitCode = 64; - } - catch (PolicyApiException ex) - { - HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsList); - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to list policy findings."); - CliMetrics.RecordPolicyFindingsList("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicyFindingsGetAsync( - IServiceProvider services, - string policyId, - string findingId, - string? format, - string? outputPath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-get"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.get", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("policy findings get"); - - try - { - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (string.IsNullOrWhiteSpace(findingId)) - { - throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); - } - - var normalizedPolicyId = policyId.Trim(); - var normalizedFindingId = findingId.Trim(); - activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); - activity?.SetTag("stellaops.cli.finding_id", normalizedFindingId); - - var result = await client.GetPolicyFindingAsync(normalizedPolicyId, normalizedFindingId, cancellationToken).ConfigureAwait(false); - var payload = BuildPolicyFindingPayload(normalizedPolicyId, result); - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Finding written to {Path}.", Path.GetFullPath(outputPath!)); - } - - var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); - if (outputFormat == PolicyFindingsOutputFormat.Json) - { - Console.WriteLine(JsonSerializer.Serialize(payload, SimulationJsonOptions)); - } - else - { - RenderPolicyFindingDetails(logger, result); - } - - var outcome = string.IsNullOrWhiteSpace(result.Status) ? 
"unknown" : result.Status.ToLowerInvariant(); - CliMetrics.RecordPolicyFindingsGet(outcome); - Environment.ExitCode = 0; - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordPolicyFindingsGet("error"); - Environment.ExitCode = 64; - } - catch (PolicyApiException ex) - { - HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsGet); - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to retrieve policy finding."); - CliMetrics.RecordPolicyFindingsGet("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicyFindingsExplainAsync( - IServiceProvider services, - string policyId, - string findingId, - string? mode, - string? format, - string? outputPath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-explain"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.explain", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("policy findings explain"); - - try - { - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (string.IsNullOrWhiteSpace(findingId)) - { - throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); - } - - var normalizedPolicyId = policyId.Trim(); - var normalizedFindingId = findingId.Trim(); - var normalizedMode = NormalizeExplainMode(mode); - - activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); - activity?.SetTag("stellaops.cli.finding_id", normalizedFindingId); - if (!string.IsNullOrWhiteSpace(normalizedMode)) - { - activity?.SetTag("stellaops.cli.findings.mode", normalizedMode); - } - - var result = await client.GetPolicyFindingExplainAsync(normalizedPolicyId, normalizedFindingId, normalizedMode, cancellationToken).ConfigureAwait(false); - activity?.SetTag("stellaops.cli.findings.step_count", result.Steps.Count); - - var payload = BuildPolicyFindingExplainPayload(normalizedPolicyId, normalizedFindingId, normalizedMode, result); - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Explain trace written to {Path}.", Path.GetFullPath(outputPath!)); - } - - var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); - if (outputFormat == PolicyFindingsOutputFormat.Json) - { - Console.WriteLine(JsonSerializer.Serialize(payload, SimulationJsonOptions)); - } - else - { - RenderPolicyFindingExplain(logger, result); - } - - CliMetrics.RecordPolicyFindingsExplain(result.Steps.Count == 0 ? 
"empty" : "ok"); - Environment.ExitCode = 0; - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordPolicyFindingsExplain("error"); - Environment.ExitCode = 64; - } - catch (PolicyApiException ex) - { - HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsExplain); - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to fetch policy explain trace."); - CliMetrics.RecordPolicyFindingsExplain("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicyActivateAsync( - IServiceProvider services, - string policyId, - int version, - string? note, - bool runNow, - string? scheduledAt, - string? priority, - bool rollback, - string? incidentId, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-activate"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.policy.activate", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "policy activate"); - using var duration = CliMetrics.MeasureCommandDuration("policy activate"); - - try - { - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (version <= 0) - { - throw new ArgumentOutOfRangeException(nameof(version), "Version must be greater than zero."); - } - - var normalizedPolicyId = policyId.Trim(); - DateTimeOffset? 
scheduled = null; - if (!string.IsNullOrWhiteSpace(scheduledAt)) - { - if (!DateTimeOffset.TryParse(scheduledAt, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed)) - { - throw new ArgumentException("Scheduled timestamp must be a valid ISO-8601 value.", nameof(scheduledAt)); - } - - scheduled = parsed; - } - - var request = new PolicyActivationRequest( - runNow, - scheduled, - NormalizePolicyPriority(priority), - rollback, - string.IsNullOrWhiteSpace(incidentId) ? null : incidentId.Trim(), - string.IsNullOrWhiteSpace(note) ? null : note.Trim()); - - activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); - activity?.SetTag("stellaops.cli.policy_version", version); - if (request.RunNow) - { - activity?.SetTag("stellaops.cli.policy_run_now", true); - } - - if (request.ScheduledAt.HasValue) - { - activity?.SetTag("stellaops.cli.policy_scheduled_at", request.ScheduledAt.Value.ToString("o", CultureInfo.InvariantCulture)); - } - - if (!string.IsNullOrWhiteSpace(request.Priority)) - { - activity?.SetTag("stellaops.cli.policy_priority", request.Priority); - } - - if (request.Rollback) - { - activity?.SetTag("stellaops.cli.policy_rollback", true); - } - - var result = await client.ActivatePolicyRevisionAsync(normalizedPolicyId, version, request, cancellationToken).ConfigureAwait(false); - - var outcome = NormalizePolicyActivationOutcome(result.Status); - CliMetrics.RecordPolicyActivation(outcome); - RenderPolicyActivationResult(result, request); - - var exitCode = DeterminePolicyActivationExitCode(outcome); - Environment.ExitCode = exitCode; - - if (exitCode == 0) - { - logger.LogInformation("Policy {PolicyId} v{Version} activation status: {Status}.", result.Revision.PolicyId, result.Revision.Version, outcome); - } - else - { - logger.LogWarning("Policy {PolicyId} v{Version} requires additional approval (status: {Status}).", result.Revision.PolicyId, result.Revision.Version, outcome); - } - } - catch 
(ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordPolicyActivation("error"); - Environment.ExitCode = 64; - } - catch (PolicyApiException ex) - { - HandlePolicyActivationFailure(ex, logger); - } - catch (Exception ex) - { - logger.LogError(ex, "Policy activation failed."); - CliMetrics.RecordPolicyActivation("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicySimulateAsync( - IServiceProvider services, - string policyId, - int? baseVersion, - int? candidateVersion, - IReadOnlyList sbomArguments, - IReadOnlyList environmentArguments, - string? format, - string? outputPath, - bool explain, - bool failOnDiff, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-simulate"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
/// <summary>
/// Imports an offline kit bundle: resolves sidecar metadata, infers/validates the manifest and
/// detached signatures, verifies SHA-256 digests against the metadata, and submits the import
/// request to the backend. Exit code 0 on success, 1 on any failure.
/// </summary>
public static async Task HandleOfflineKitImportAsync(
    IServiceProvider services,
    string bundlePath,
    string? manifestPath,
    string? bundleSignaturePath,
    string? manifestSignaturePath,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    // NOTE(review): generic type arguments were stripped by patch mangling; reconstructed
    // from usage — confirm against the original file.
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var options = scope.ServiceProvider.GetRequiredService<StellaOpsCliOptions>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("offline-kit-import");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.import", ActivityKind.Client);
    using var duration = CliMetrics.MeasureCommandDuration("offline kit import");

    try
    {
        if (string.IsNullOrWhiteSpace(bundlePath))
        {
            logger.LogError("Bundle path is required.");
            Environment.ExitCode = 1;
            return;
        }

        bundlePath = Path.GetFullPath(bundlePath);
        if (!File.Exists(bundlePath))
        {
            logger.LogError("Bundle file {Path} not found.", bundlePath);
            Environment.ExitCode = 1;
            return;
        }

        // Sidecar metadata (<bundle>.metadata.json) supplies defaults for any path the
        // caller did not pass explicitly.
        var metadata = await LoadOfflineKitMetadataAsync(bundlePath, cancellationToken).ConfigureAwait(false);
        if (metadata is not null)
        {
            manifestPath ??= metadata.ManifestPath;
            bundleSignaturePath ??= metadata.BundleSignaturePath;
            manifestSignaturePath ??= metadata.ManifestSignaturePath;
        }

        manifestPath = NormalizeFilePath(manifestPath);
        bundleSignaturePath = NormalizeFilePath(bundleSignaturePath);
        manifestSignaturePath = NormalizeFilePath(manifestSignaturePath);

        if (manifestPath is null)
        {
            manifestPath = TryInferManifestPath(bundlePath);
            if (manifestPath is not null)
            {
                logger.LogDebug("Using inferred manifest path {Path}.", manifestPath);
            }
        }

        if (manifestPath is not null && !File.Exists(manifestPath))
        {
            logger.LogError("Manifest file {Path} not found.", manifestPath);
            Environment.ExitCode = 1;
            return;
        }

        // Missing signatures are soft failures: warn and continue unsigned.
        if (bundleSignaturePath is not null && !File.Exists(bundleSignaturePath))
        {
            logger.LogWarning("Bundle signature {Path} not found; skipping.", bundleSignaturePath);
            bundleSignaturePath = null;
        }

        if (manifestSignaturePath is not null && !File.Exists(manifestSignaturePath))
        {
            logger.LogWarning("Manifest signature {Path} not found; skipping.", manifestSignaturePath);
            manifestSignaturePath = null;
        }

        if (metadata is not null)
        {
            var computedBundleDigest = await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false);
            if (!DigestsEqual(computedBundleDigest, metadata.BundleSha256))
            {
                logger.LogError("Bundle digest mismatch. Expected sha256:{Expected} but computed sha256:{Actual}.", metadata.BundleSha256, computedBundleDigest);
                Environment.ExitCode = 1;
                return;
            }

            if (manifestPath is not null)
            {
                var computedManifestDigest = await ComputeSha256Async(manifestPath, cancellationToken).ConfigureAwait(false);
                if (!DigestsEqual(computedManifestDigest, metadata.ManifestSha256))
                {
                    logger.LogError("Manifest digest mismatch. Expected sha256:{Expected} but computed sha256:{Actual}.", metadata.ManifestSha256, computedManifestDigest);
                    Environment.ExitCode = 1;
                    return;
                }
            }
        }

        var request = new OfflineKitImportRequest(
            bundlePath,
            manifestPath,
            bundleSignaturePath,
            manifestSignaturePath,
            metadata?.BundleId,
            metadata?.BundleSha256,
            metadata?.BundleSize,
            metadata?.CapturedAt,
            metadata?.Channel,
            metadata?.Kind,
            metadata?.IsDelta,
            metadata?.BaseBundleId,
            metadata?.ManifestSha256,
            metadata?.ManifestSize);

        var result = await client.ImportOfflineKitAsync(request, cancellationToken).ConfigureAwait(false);
        CliMetrics.RecordOfflineKitImport(result.Status);

        // NOTE(review): the empty-string fallbacks below look like placeholder tokens (e.g.
        // "<unknown>") that were stripped by the same mangling — verify against the original.
        logger.LogInformation(
            "Import {ImportId} submitted at {Submitted:u} with status {Status}.",
            string.IsNullOrWhiteSpace(result.ImportId) ? "" : result.ImportId,
            result.SubmittedAt,
            string.IsNullOrWhiteSpace(result.Status) ? "queued" : result.Status);

        if (!string.IsNullOrWhiteSpace(result.Message))
        {
            logger.LogInformation(result.Message);
        }

        Environment.ExitCode = 0;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Offline kit import failed.");
        Environment.ExitCode = 1;
    }
    finally
    {
        verbosity.MinimumLevel = previousLevel;
    }
}

/// <summary>
/// Shows the currently imported offline kit bundle and its components, either as indented
/// JSON (--json) or as log lines plus a Spectre.Console table.
/// </summary>
public static async Task HandleOfflineKitStatusAsync(
    IServiceProvider services,
    bool asJson,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    // NOTE(review): reconstructed generic arguments — confirm against the original file.
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("offline-kit-status");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.status", ActivityKind.Client);
    using var duration = CliMetrics.MeasureCommandDuration("offline kit status");

    try
    {
        var status = await client.GetOfflineKitStatusAsync(cancellationToken).ConfigureAwait(false);

        if (asJson)
        {
            var payload = new
            {
                bundleId = status.BundleId,
                channel = status.Channel,
                kind = status.Kind,
                isDelta = status.IsDelta,
                baseBundleId = status.BaseBundleId,
                capturedAt = status.CapturedAt,
                importedAt = status.ImportedAt,
                sha256 = status.BundleSha256,
                sizeBytes = status.BundleSize,
                components = status.Components.Select(component => new
                {
                    component.Name,
                    component.Version,
                    component.Digest,
                    component.CapturedAt,
                    component.SizeBytes
                })
            };

            var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true });
            Console.WriteLine(json);
        }
        else
        {
            if (string.IsNullOrWhiteSpace(status.BundleId))
            {
                logger.LogInformation("No offline kit bundle has been imported yet.");
            }
            else
            {
                // NOTE(review): empty-string fallbacks likely lost "<...>" placeholder text
                // to mangling — verify against the original.
                logger.LogInformation(
                    "Current bundle {BundleId} ({Kind}) captured {Captured:u}, imported {Imported:u}, sha256:{Digest}, size {Size}.",
                    status.BundleId,
                    status.Kind ?? "unknown",
                    status.CapturedAt ?? default,
                    status.ImportedAt ?? default,
                    status.BundleSha256 ?? "",
                    status.BundleSize.HasValue ? status.BundleSize.Value.ToString("N0", CultureInfo.InvariantCulture) : "");
            }

            if (status.Components.Count > 0)
            {
                var table = new Table().AddColumns("Component", "Version", "Digest", "Captured", "Size (bytes)");
                foreach (var component in status.Components)
                {
                    table.AddRow(
                        component.Name,
                        string.IsNullOrWhiteSpace(component.Version) ? "-" : component.Version!,
                        string.IsNullOrWhiteSpace(component.Digest) ? "-" : $"sha256:{component.Digest}",
                        component.CapturedAt?.ToString("u", CultureInfo.InvariantCulture) ?? "-",
                        component.SizeBytes.HasValue ? component.SizeBytes.Value.ToString("N0", CultureInfo.InvariantCulture) : "-");
                }

                AnsiConsole.Write(table);
            }
        }

        Environment.ExitCode = 0;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Failed to read offline kit status.");
        Environment.ExitCode = 1;
    }
    finally
    {
        verbosity.MinimumLevel = previousLevel;
    }
}
/// <summary>
/// Loads the optional "&lt;bundle&gt;.metadata.json" sidecar next to an offline kit bundle.
/// Returns null when the file is absent or cannot be parsed (best-effort by design —
/// callers fall back to explicit/ inferred paths).
/// </summary>
private static async Task<OfflineKitMetadata?> LoadOfflineKitMetadataAsync(string bundlePath, CancellationToken cancellationToken)
{
    // NOTE(review): the deserialization target's generic argument was stripped by patch
    // mangling; "OfflineKitMetadata" is reconstructed from property usage — confirm the
    // actual type name against the original file.
    var metadataPath = bundlePath + ".metadata.json";
    if (!File.Exists(metadataPath))
    {
        return null;
    }

    try
    {
        await using var stream = File.OpenRead(metadataPath);
        return await JsonSerializer.DeserializeAsync<OfflineKitMetadata>(stream, cancellationToken: cancellationToken).ConfigureAwait(false);
    }
    catch
    {
        // Deliberate best-effort: malformed sidecar metadata is treated as absent.
        return null;
    }
}

/// <summary>Returns the absolute form of <paramref name="path"/>, or null for blank input.</summary>
private static string? NormalizeFilePath(string? path)
{
    if (string.IsNullOrWhiteSpace(path))
    {
        return null;
    }

    return Path.GetFullPath(path);
}

/// <summary>
/// Looks for an offline manifest next to the bundle: first "offline-manifest-&lt;name&gt;.json",
/// then "offline-manifest.json", finally any "offline-manifest*.json" in the directory.
/// Strips a trailing ".tar" from compound extensions such as ".tar.gz".
/// </summary>
private static string? TryInferManifestPath(string bundlePath)
{
    var directory = Path.GetDirectoryName(bundlePath);
    if (string.IsNullOrWhiteSpace(directory))
    {
        return null;
    }

    var baseName = Path.GetFileName(bundlePath);
    if (string.IsNullOrWhiteSpace(baseName))
    {
        return null;
    }

    baseName = Path.GetFileNameWithoutExtension(baseName);
    if (baseName.EndsWith(".tar", StringComparison.OrdinalIgnoreCase))
    {
        baseName = Path.GetFileNameWithoutExtension(baseName);
    }

    var candidates = new[]
    {
        Path.Combine(directory, $"offline-manifest-{baseName}.json"),
        Path.Combine(directory, "offline-manifest.json")
    };

    foreach (var candidate in candidates)
    {
        if (File.Exists(candidate))
        {
            return Path.GetFullPath(candidate);
        }
    }

    return Directory.EnumerateFiles(directory, "offline-manifest*.json").FirstOrDefault();
}

/// <summary>
/// Compares two digests after normalization. A blank expected value counts as a match
/// (metadata may legitimately omit a digest).
/// </summary>
private static bool DigestsEqual(string computed, string? expected)
{
    if (string.IsNullOrWhiteSpace(expected))
    {
        return true;
    }

    return string.Equals(NormalizeDigest(computed), NormalizeDigest(expected), StringComparison.OrdinalIgnoreCase);
}

/// <summary>Trims, strips an optional "sha256:" prefix, and lower-cases a digest string.</summary>
private static string NormalizeDigest(string digest)
{
    var value = digest.Trim();
    if (value.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
    {
        value = value.Substring("sha256:".Length);
    }

    return value.ToLowerInvariant();
}

/// <summary>Computes the SHA-256 of a file as a lowercase hex string (no prefix).</summary>
private static async Task<string> ComputeSha256Async(string path, CancellationToken cancellationToken)
{
    await using var stream = File.OpenRead(path);
    var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
    return Convert.ToHexString(hash).ToLowerInvariant();
}

/// <summary>
/// Splits a detached JWS ("header..signature" — empty middle segment per RFC 7515 Appendix F)
/// into its encoded header and signature. Returns true only when the payload segment is empty.
/// </summary>
private static bool TryParseDetachedJws(string value, out string encodedHeader, out string encodedSignature)
{
    encodedHeader = string.Empty;
    encodedSignature = string.Empty;

    if (string.IsNullOrWhiteSpace(value))
    {
        return false;
    }

    var parts = value.Split('.');
    if (parts.Length != 3)
    {
        return false;
    }

    encodedHeader = parts[0];
    encodedSignature = parts[2];
    return parts[1].Length == 0;
}

/// <summary>
/// Decodes a Base64Url value, restoring padding; throws <see cref="FormatException"/> on
/// an impossible length (remainder 1). Throwing variant of <c>TryDecodeBase64Url</c>.
/// </summary>
private static byte[] Base64UrlDecode(string value)
{
    var normalized = value.Replace('-', '+').Replace('_', '/');
    var padding = normalized.Length % 4;
    if (padding == 2)
    {
        normalized += "==";
    }
    else if (padding == 3)
    {
        normalized += "=";
    }
    else if (padding == 1)
    {
        throw new FormatException("Invalid Base64Url value.");
    }

    return Convert.FromBase64String(normalized);
}

/// <summary>
/// Builds a verification-only signing key from PEM material. The private scalar D is stubbed
/// with a single non-zero byte because <c>CryptoSigningKey</c> requires one even though the
/// key is only ever used for verification (flagged via the "verificationOnly" metadata entry).
/// </summary>
private static CryptoSigningKey CreateVerificationSigningKey(
    string keyId,
    string algorithm,
    string? providerHint,
    string keyPem,
    string keyPath)
{
    if (string.IsNullOrWhiteSpace(keyPem))
    {
        throw new InvalidOperationException("Verification key PEM content is empty.");
    }

    using var ecdsa = ECDsa.Create();
    ecdsa.ImportFromPem(keyPem);

    var parameters = ecdsa.ExportParameters(includePrivateParameters: false);
    if (parameters.D is null || parameters.D.Length == 0)
    {
        parameters.D = new byte[] { 0x01 };
    }

    // NOTE(review): dictionary type arguments were stripped by patch mangling;
    // reconstructed as string->string from usage.
    var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
    {
        ["source"] = Path.GetFullPath(keyPath),
        ["verificationOnly"] = "true"
    };

    return new CryptoSigningKey(
        new CryptoKeyReference(keyId, providerHint),
        algorithm,
        in parameters,
        DateTimeOffset.UtcNow,
        metadata: metadata);
}

/// <summary>
/// Formats a remaining duration for display: "expired", "Nd [Nh]", "Nh Nm", "Nm Ns", or "Ns",
/// choosing the two most significant units.
/// </summary>
private static string FormatDuration(TimeSpan duration)
{
    if (duration <= TimeSpan.Zero)
    {
        return "expired";
    }

    if (duration.TotalDays >= 1)
    {
        var days = (int)duration.TotalDays;
        var hours = duration.Hours;
        return hours > 0
            ? FormattableString.Invariant($"{days}d {hours}h")
            : FormattableString.Invariant($"{days}d");
    }

    if (duration.TotalHours >= 1)
    {
        return FormattableString.Invariant($"{(int)duration.TotalHours}h {duration.Minutes}m");
    }

    if (duration.TotalMinutes >= 1)
    {
        return FormattableString.Invariant($"{(int)duration.TotalMinutes}m {duration.Seconds}s");
    }

    return FormattableString.Invariant($"{duration.Seconds}s");
}

/// <summary>
/// Decodes the payload segment of a JWT (without signature verification) into a flat
/// string map, extracting "iat"/"nbf" as timestamps when present. Returns false and leaves
/// outputs empty/null on any structural or JSON failure.
/// </summary>
private static bool TryExtractJwtClaims(
    string accessToken,
    out Dictionary<string, string> claims,
    out DateTimeOffset? issuedAt,
    out DateTimeOffset? notBefore)
{
    claims = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
    issuedAt = null;
    notBefore = null;

    if (string.IsNullOrWhiteSpace(accessToken))
    {
        return false;
    }

    var parts = accessToken.Split('.');
    if (parts.Length < 2)
    {
        return false;
    }

    if (!TryDecodeBase64Url(parts[1], out var payloadBytes))
    {
        return false;
    }

    try
    {
        using var document = JsonDocument.Parse(payloadBytes);
        foreach (var property in document.RootElement.EnumerateObject())
        {
            var value = FormatJsonValue(property.Value);
            claims[property.Name] = value;

            if (issuedAt is null && property.NameEquals("iat") && TryParseUnixSeconds(property.Value, out var parsedIat))
            {
                issuedAt = parsedIat;
            }

            if (notBefore is null && property.NameEquals("nbf") && TryParseUnixSeconds(property.Value, out var parsedNbf))
            {
                notBefore = parsedNbf;
            }
        }

        return true;
    }
    catch (JsonException)
    {
        claims.Clear();
        issuedAt = null;
        notBefore = null;
        return false;
    }
}

/// <summary>
/// Non-throwing Base64Url decoder; returns false (empty output) for blank input,
/// an impossible length remainder of 1, or invalid Base64 content.
/// </summary>
private static bool TryDecodeBase64Url(string value, out byte[] bytes)
{
    bytes = Array.Empty<byte>();

    if (string.IsNullOrWhiteSpace(value))
    {
        return false;
    }

    var normalized = value.Replace('-', '+').Replace('_', '/');
    var padding = normalized.Length % 4;
    if (padding is 2 or 3)
    {
        normalized = normalized.PadRight(normalized.Length + (4 - padding), '=');
    }
    else if (padding == 1)
    {
        return false;
    }

    try
    {
        bytes = Convert.FromBase64String(normalized);
        return true;
    }
    catch (FormatException)
    {
        return false;
    }
}
/// <summary>
/// Flattens a JSON value to display text: scalars verbatim, arrays as ", "-joined items,
/// objects as raw JSON. Numbers prefer Int64 before falling back to double (invariant culture).
/// </summary>
private static string FormatJsonValue(JsonElement element)
{
    return element.ValueKind switch
    {
        JsonValueKind.String => element.GetString() ?? string.Empty,
        JsonValueKind.Number => element.TryGetInt64(out var longValue)
            ? longValue.ToString(CultureInfo.InvariantCulture)
            : element.GetDouble().ToString(CultureInfo.InvariantCulture),
        JsonValueKind.True => "true",
        JsonValueKind.False => "false",
        JsonValueKind.Null => "null",
        JsonValueKind.Array => FormatArray(element),
        JsonValueKind.Object => element.GetRawText(),
        _ => element.GetRawText()
    };
}

/// <summary>Joins the formatted items of a JSON array with ", ".</summary>
private static string FormatArray(JsonElement array)
{
    var values = new List<string>();
    foreach (var item in array.EnumerateArray())
    {
        values.Add(FormatJsonValue(item));
    }

    return string.Join(", ", values);
}

/// <summary>
/// Parses a Unix-seconds timestamp from a JSON number or a numeric string. Fractional
/// numbers are truncated toward zero.
/// </summary>
private static bool TryParseUnixSeconds(JsonElement element, out DateTimeOffset value)
{
    value = default;

    if (element.ValueKind == JsonValueKind.Number)
    {
        if (element.TryGetInt64(out var seconds))
        {
            value = DateTimeOffset.FromUnixTimeSeconds(seconds);
            return true;
        }

        if (element.TryGetDouble(out var doubleValue))
        {
            value = DateTimeOffset.FromUnixTimeSeconds((long)doubleValue);
            return true;
        }
    }

    if (element.ValueKind == JsonValueKind.String)
    {
        var text = element.GetString();
        if (!string.IsNullOrWhiteSpace(text) && long.TryParse(text, NumberStyles.Integer, CultureInfo.InvariantCulture, out var seconds))
        {
            value = DateTimeOffset.FromUnixTimeSeconds(seconds);
            return true;
        }
    }

    return false;
}

/// <summary>
/// Returns "key=value" strings for every claim not in <see cref="CommonClaimNames"/>,
/// sorted case-insensitively for stable output.
/// </summary>
private static List<string> CollectAdditionalClaims(Dictionary<string, string> claims)
{
    var result = new List<string>();
    foreach (var pair in claims)
    {
        if (CommonClaimNames.Contains(pair.Key))
        {
            continue;
        }

        result.Add(FormattableString.Invariant($"{pair.Key}={pair.Value}"));
    }

    result.Sort(StringComparer.OrdinalIgnoreCase);
    return result;
}

// Well-known JWT claims that are rendered elsewhere and therefore excluded from the
// "additional claims" listing.
private static readonly HashSet<string> CommonClaimNames = new(StringComparer.OrdinalIgnoreCase)
{
    "aud",
    "client_id",
    "exp",
    "iat",
    "iss",
    "nbf",
    "scope",
    "scopes",
    "sub",
    "token_type",
    "jti"
};

/// <summary>
/// Shared scaffolding for Excititor commands: resolves services, adjusts verbosity, opens an
/// activity with optional extra tags, times the command, runs <paramref name="operation"/>,
/// and maps its result to logging and an exit code (0 success / 1 failure).
/// </summary>
private static async Task ExecuteExcititorCommandAsync(
    IServiceProvider services,
    string commandName,
    bool verbose,
    IDictionary<string, object?>? activityTags,
    // NOTE(review): delegate type arguments were stripped by patch mangling; reconstructed
    // from usage (result exposes Success/Message/Location/Payload) — confirm.
    Func<IBackendOperationsClient, Task<ExcititorOperationResult>> operation,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger(commandName.Replace(' ', '-'));
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity($"cli.{commandName.Replace(' ', '.')}" , ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", commandName);
    if (activityTags is not null)
    {
        foreach (var tag in activityTags)
        {
            activity?.SetTag(tag.Key, tag.Value);
        }
    }
    using var duration = CliMetrics.MeasureCommandDuration(commandName);

    try
    {
        var result = await operation(client).ConfigureAwait(false);
        if (result.Success)
        {
            if (!string.IsNullOrWhiteSpace(result.Message))
            {
                logger.LogInformation(result.Message);
            }
            else
            {
                logger.LogInformation("Operation completed successfully.");
            }

            if (!string.IsNullOrWhiteSpace(result.Location))
            {
                logger.LogInformation("Location: {Location}", result.Location);
            }

            if (result.Payload is JsonElement payload && payload.ValueKind is not JsonValueKind.Undefined and not JsonValueKind.Null)
            {
                logger.LogDebug("Response payload: {Payload}", payload.ToString());
            }

            Environment.ExitCode = 0;
        }
        else
        {
            logger.LogError(string.IsNullOrWhiteSpace(result.Message) ? "Operation failed." : result.Message);
            Environment.ExitCode = 1;
        }
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Excititor operation failed.");
        Environment.ExitCode = 1;
    }
    finally
    {
        verbosity.MinimumLevel = previousLevel;
    }
}

/// <summary>
/// Collects unique image digests from inline arguments, an optional input file, and
/// redirected stdin, preserving first-seen order. Comments (#) and multi-token lines are
/// handled by <see cref="SplitImageCandidates"/>.
/// </summary>
private static async Task<IReadOnlyList<string>> GatherImageDigestsAsync(
    IReadOnlyList<string> inline,
    string? filePath,
    CancellationToken cancellationToken)
{
    var results = new List<string>();
    var seen = new HashSet<string>(StringComparer.Ordinal);

    void AddCandidates(string? candidate)
    {
        foreach (var image in SplitImageCandidates(candidate))
        {
            if (seen.Add(image))
            {
                results.Add(image);
            }
        }
    }

    if (inline is not null)
    {
        foreach (var entry in inline)
        {
            AddCandidates(entry);
        }
    }

    if (!string.IsNullOrWhiteSpace(filePath))
    {
        var path = Path.GetFullPath(filePath);
        if (!File.Exists(path))
        {
            throw new FileNotFoundException("Input file not found.", path);
        }

        foreach (var line in File.ReadLines(path))
        {
            cancellationToken.ThrowIfCancellationRequested();
            AddCandidates(line);
        }
    }

    // Only consume stdin when it is redirected (e.g. piped digests), never interactively.
    if (Console.IsInputRedirected)
    {
        while (!cancellationToken.IsCancellationRequested)
        {
            var line = await Console.In.ReadLineAsync().ConfigureAwait(false);
            if (line is null)
            {
                break;
            }

            AddCandidates(line);
        }
    }

    return new ReadOnlyCollection<string>(results);
}

/// <summary>
/// Yields image references from one raw line: strips a trailing "#" comment, then splits on
/// commas and whitespace, skipping empties.
/// </summary>
private static IEnumerable<string> SplitImageCandidates(string? raw)
{
    if (string.IsNullOrWhiteSpace(raw))
    {
        yield break;
    }

    var candidate = raw.Trim();
    var commentIndex = candidate.IndexOf('#');
    if (commentIndex >= 0)
    {
        candidate = candidate[..commentIndex].Trim();
    }

    if (candidate.Length == 0)
    {
        yield break;
    }

    var tokens = candidate.Split(new[] { ',', ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries);
    foreach (var token in tokens)
    {
        var trimmed = token.Trim();
        if (trimmed.Length > 0)
        {
            yield return trimmed;
        }
    }
}

/// <summary>
/// Parses "key=value" label selector arguments into a case-insensitive read-only map.
/// Throws <see cref="ArgumentException"/> for entries without "=", an empty key, or an
/// empty value; later duplicates overwrite earlier ones.
/// </summary>
private static IReadOnlyDictionary<string, string> ParseLabelSelectors(IReadOnlyList<string> labelArguments)
{
    if (labelArguments is null || labelArguments.Count == 0)
    {
        return EmptyLabelSelectors;
    }

    var labels = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
    foreach (var raw in labelArguments)
    {
        if (string.IsNullOrWhiteSpace(raw))
        {
            continue;
        }

        var trimmed = raw.Trim();
        var delimiter = trimmed.IndexOf('=');
        if (delimiter <= 0 || delimiter == trimmed.Length - 1)
        {
            throw new ArgumentException($"Invalid label '{raw}'. Expected key=value format.");
        }

        var key = trimmed[..delimiter].Trim();
        var value = trimmed[(delimiter + 1)..].Trim();
        if (key.Length == 0)
        {
            throw new ArgumentException($"Invalid label '{raw}'. Label key cannot be empty.");
        }

        labels[key] = value;
    }

    return labels.Count == 0 ? EmptyLabelSelectors : new ReadOnlyDictionary<string, string>(labels);
}

/// <summary>Flattened view of an Excititor export manifest returned by the backend.</summary>
private sealed record ExcititorExportManifestSummary(
    string ExportId,
    string? Format,
    string? Algorithm,
    string? Digest,
    long? SizeBytes,
    bool? FromCache,
    DateTimeOffset? CreatedAt,
    string? RekorLocation,
    string? RekorIndex,
    string? RekorInclusionUrl);

/// <summary>
/// Extracts an <see cref="ExcititorExportManifestSummary"/> from a response payload.
/// Returns null when the payload is absent or has no "exportId". All property lookups are
/// case-insensitive; artifact digest and Rekor attestation details are optional.
/// </summary>
private static ExcititorExportManifestSummary? TryParseExportManifest(JsonElement? payload)
{
    if (payload is null || payload.Value.ValueKind is JsonValueKind.Undefined or JsonValueKind.Null)
    {
        return null;
    }

    var element = payload.Value;
    var exportId = GetStringProperty(element, "exportId");
    if (string.IsNullOrWhiteSpace(exportId))
    {
        return null;
    }

    var format = GetStringProperty(element, "format");
    var algorithm = default(string?);
    var digest = default(string?);

    if (TryGetPropertyCaseInsensitive(element, "artifact", out var artifact) && artifact.ValueKind == JsonValueKind.Object)
    {
        algorithm = GetStringProperty(artifact, "algorithm");
        digest = GetStringProperty(artifact, "digest");
    }

    var sizeBytes = GetInt64Property(element, "sizeBytes");
    var fromCache = GetBooleanProperty(element, "fromCache");
    var createdAt = GetDateTimeOffsetProperty(element, "createdAt");

    string? rekorLocation = null;
    string? rekorIndex = null;
    string? rekorInclusion = null;

    if (TryGetPropertyCaseInsensitive(element, "attestation", out var attestation) && attestation.ValueKind == JsonValueKind.Object)
    {
        if (TryGetPropertyCaseInsensitive(attestation, "rekor", out var rekor) && rekor.ValueKind == JsonValueKind.Object)
        {
            rekorLocation = GetStringProperty(rekor, "location");
            rekorIndex = GetStringProperty(rekor, "logIndex");
            var inclusion = GetStringProperty(rekor, "inclusionProofUri");
            if (!string.IsNullOrWhiteSpace(inclusion))
            {
                rekorInclusion = inclusion;
            }
        }
    }

    return new ExcititorExportManifestSummary(
        exportId.Trim(),
        format,
        algorithm,
        digest,
        sizeBytes,
        fromCache,
        createdAt,
        rekorLocation,
        rekorIndex,
        rekorInclusion);
}
/// <summary>
/// Looks up an object property by name, trying an exact match first and then a
/// case-insensitive scan over the object's properties.
/// </summary>
private static bool TryGetPropertyCaseInsensitive(JsonElement element, string propertyName, out JsonElement property)
{
    if (element.ValueKind == JsonValueKind.Object && element.TryGetProperty(propertyName, out property))
    {
        return true;
    }

    if (element.ValueKind == JsonValueKind.Object)
    {
        foreach (var candidate in element.EnumerateObject())
        {
            if (string.Equals(candidate.Name, propertyName, StringComparison.OrdinalIgnoreCase))
            {
                property = candidate.Value;
                return true;
            }
        }
    }

    property = default;
    return false;
}

/// <summary>Reads a property as a string; numbers are stringified, other kinds yield null.</summary>
private static string? GetStringProperty(JsonElement element, string propertyName)
{
    if (TryGetPropertyCaseInsensitive(element, propertyName, out var property))
    {
        return property.ValueKind switch
        {
            JsonValueKind.String => property.GetString(),
            JsonValueKind.Number => property.ToString(),
            _ => null
        };
    }

    return null;
}

/// <summary>Reads a property as a bool, accepting JSON booleans or parseable strings.</summary>
private static bool? GetBooleanProperty(JsonElement element, string propertyName)
{
    if (TryGetPropertyCaseInsensitive(element, propertyName, out var property))
    {
        return property.ValueKind switch
        {
            JsonValueKind.True => true,
            JsonValueKind.False => false,
            JsonValueKind.String when bool.TryParse(property.GetString(), out var parsed) => parsed,
            _ => null
        };
    }

    return null;
}

/// <summary>Reads a property as an Int64, accepting JSON numbers or invariant numeric strings.</summary>
private static long? GetInt64Property(JsonElement element, string propertyName)
{
    if (TryGetPropertyCaseInsensitive(element, propertyName, out var property))
    {
        if (property.ValueKind == JsonValueKind.Number && property.TryGetInt64(out var value))
        {
            return value;
        }

        if (property.ValueKind == JsonValueKind.String
            && long.TryParse(property.GetString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }
    }

    return null;
}

/// <summary>Reads a string property as a UTC <see cref="DateTimeOffset"/>, or null.</summary>
private static DateTimeOffset? GetDateTimeOffsetProperty(JsonElement element, string propertyName)
{
    if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)
        && property.ValueKind == JsonValueKind.String
        && DateTimeOffset.TryParse(property.GetString(), CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var value))
    {
        return value.ToUniversalTime();
    }

    return null;
}

/// <summary>
/// Renders "algorithm:digest" for display. A digest that already carries a prefix is kept
/// verbatim; a missing algorithm defaults to sha256.
/// </summary>
private static string BuildDigestDisplay(string? algorithm, string digest)
{
    if (string.IsNullOrWhiteSpace(digest))
    {
        return string.Empty;
    }

    if (digest.Contains(':', StringComparison.Ordinal))
    {
        return digest;
    }

    if (string.IsNullOrWhiteSpace(algorithm) || algorithm.Equals("sha256", StringComparison.OrdinalIgnoreCase))
    {
        return $"sha256:{digest}";
    }

    return $"{algorithm}:{digest}";
}

/// <summary>
/// Human-readable byte size using 1024-based units up to TB; negative and sub-KB values are
/// reported as raw bytes.
/// </summary>
private static string FormatSize(long sizeBytes)
{
    if (sizeBytes < 0)
    {
        return $"{sizeBytes} bytes";
    }

    string[] units = { "bytes", "KB", "MB", "GB", "TB" };
    double size = sizeBytes;
    var unit = 0;

    while (size >= 1024 && unit < units.Length - 1)
    {
        size /= 1024;
        unit++;
    }

    return unit == 0 ? $"{sizeBytes} bytes" : $"{size:0.##} {units[unit]}";
}

/// <summary>
/// Resolves the final file path for an export download. A directory target (existing dir or
/// trailing separator) gets a generated file name; otherwise missing parent directories are
/// created and the path is used as-is.
/// </summary>
private static string ResolveExportOutputPath(string outputPath, ExcititorExportManifestSummary manifest)
{
    if (string.IsNullOrWhiteSpace(outputPath))
    {
        throw new ArgumentException("Output path must be provided.", nameof(outputPath));
    }

    var fullPath = Path.GetFullPath(outputPath);
    if (Directory.Exists(fullPath)
        || outputPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal)
        || outputPath.EndsWith(Path.AltDirectorySeparatorChar.ToString(), StringComparison.Ordinal))
    {
        return Path.Combine(fullPath, BuildExportFileName(manifest));
    }

    var directory = Path.GetDirectoryName(fullPath);
    if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory))
    {
        Directory.CreateDirectory(directory);
    }

    return fullPath;
}

/// <summary>
/// Builds "stellaops-excititor-&lt;token&gt;&lt;ext&gt;" where the token is the sanitized digest
/// (preferred) or export id, capped at 40 characters.
/// </summary>
private static string BuildExportFileName(ExcititorExportManifestSummary manifest)
{
    var token = !string.IsNullOrWhiteSpace(manifest.Digest)
        ? manifest.Digest!
        : manifest.ExportId;

    token = SanitizeToken(token);
    if (token.Length > 40)
    {
        token = token[..40];
    }

    var extension = DetermineExportExtension(manifest.Format);
    return $"stellaops-excititor-{token}{extension}";
}

/// <summary>Maps an export format name to a file extension; unknown formats get ".bin".</summary>
private static string DetermineExportExtension(string? format)
{
    if (string.IsNullOrWhiteSpace(format))
    {
        return ".bin";
    }

    return format switch
    {
        not null when format.Equals("jsonl", StringComparison.OrdinalIgnoreCase) => ".jsonl",
        not null when format.Equals("json", StringComparison.OrdinalIgnoreCase) => ".json",
        not null when format.Equals("openvex", StringComparison.OrdinalIgnoreCase) => ".json",
        not null when format.Equals("csaf", StringComparison.OrdinalIgnoreCase) => ".json",
        _ => ".bin"
    };
}

/// <summary>
/// Lower-cases and strips every non-alphanumeric character from a token for safe use in a
/// file name; falls back to "export" when nothing survives.
/// </summary>
private static string SanitizeToken(string token)
{
    var builder = new StringBuilder(token.Length);
    foreach (var ch in token)
    {
        if (char.IsLetterOrDigit(ch))
        {
            builder.Append(char.ToLowerInvariant(ch));
        }
    }

    if (builder.Length == 0)
    {
        builder.Append("export");
    }

    return builder.ToString();
}

private static string?
ResolveLocationUrl(StellaOpsCliOptions options, string location) - { - if (string.IsNullOrWhiteSpace(location)) - { - return null; - } - - if (Uri.TryCreate(location, UriKind.Absolute, out var absolute)) - { - return absolute.ToString(); - } - - if (!string.IsNullOrWhiteSpace(options?.BackendUrl) && Uri.TryCreate(options.BackendUrl, UriKind.Absolute, out var baseUri)) - { - if (!location.StartsWith("/", StringComparison.Ordinal)) - { - location = "/" + location; - } - - return new Uri(baseUri, location).ToString(); - } - - return location; - } - - private static string BuildRuntimePolicyJson(RuntimePolicyEvaluationResult result, IReadOnlyList requestedImages) - { - var orderedImages = BuildImageOrder(requestedImages, result.Decisions.Keys); - var results = new Dictionary(StringComparer.Ordinal); - - foreach (var image in orderedImages) - { - if (result.Decisions.TryGetValue(image, out var decision)) - { - results[image] = BuildDecisionMap(decision); - } - } - - var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) - { - WriteIndented = true, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - var payload = new Dictionary(StringComparer.Ordinal) - { - ["ttlSeconds"] = result.TtlSeconds, - ["expiresAtUtc"] = result.ExpiresAtUtc?.ToString("O", CultureInfo.InvariantCulture), - ["policyRevision"] = result.PolicyRevision, - ["results"] = results - }; - - return JsonSerializer.Serialize(payload, options); - } - - private static IDictionary BuildDecisionMap(RuntimePolicyImageDecision decision) - { - var map = new Dictionary(StringComparer.Ordinal) - { - ["policyVerdict"] = decision.PolicyVerdict, - ["signed"] = decision.Signed, - ["hasSbomReferrers"] = decision.HasSbomReferrers - }; - - if (decision.Reasons.Count > 0) - { - map["reasons"] = decision.Reasons; - } - - if (decision.Rekor is not null) - { - var rekorMap = new Dictionary(StringComparer.Ordinal); - if (!string.IsNullOrWhiteSpace(decision.Rekor.Uuid)) - { - rekorMap["uuid"] = 
decision.Rekor.Uuid; - } - - if (!string.IsNullOrWhiteSpace(decision.Rekor.Url)) - { - rekorMap["url"] = decision.Rekor.Url; - } - - if (decision.Rekor.Verified.HasValue) - { - rekorMap["verified"] = decision.Rekor.Verified; - } - - if (rekorMap.Count > 0) - { - map["rekor"] = rekorMap; - } - } - - foreach (var kvp in decision.AdditionalProperties) - { - map[kvp.Key] = kvp.Value; - } - - return map; - } - - private static void DisplayRuntimePolicyResults(ILogger logger, RuntimePolicyEvaluationResult result, IReadOnlyList requestedImages) - { - var orderedImages = BuildImageOrder(requestedImages, result.Decisions.Keys); - var summary = new Dictionary(StringComparer.OrdinalIgnoreCase); - - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table().Border(TableBorder.Rounded) - .AddColumns("Image", "Verdict", "Signed", "SBOM Ref", "Quieted", "Confidence", "Reasons", "Attestation"); - - foreach (var image in orderedImages) - { - if (result.Decisions.TryGetValue(image, out var decision)) - { - table.AddRow( - image, - decision.PolicyVerdict, - FormatBoolean(decision.Signed), - FormatBoolean(decision.HasSbomReferrers), - FormatQuietedDisplay(decision.AdditionalProperties), - FormatConfidenceDisplay(decision.AdditionalProperties), - decision.Reasons.Count > 0 ? string.Join(Environment.NewLine, decision.Reasons) : "-", - FormatAttestation(decision.Rekor)); - - summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? 
count + 1 : 1; - - if (decision.AdditionalProperties.Count > 0) - { - var metadata = string.Join(", ", decision.AdditionalProperties.Select(kvp => $"{kvp.Key}={FormatAdditionalValue(kvp.Value)}")); - logger.LogDebug("Metadata for {Image}: {Metadata}", image, metadata); - } - } - else - { - table.AddRow(image, "", "-", "-", "-", "-", "-", "-"); - } - } - - AnsiConsole.Write(table); - } - else - { - foreach (var image in orderedImages) - { - if (result.Decisions.TryGetValue(image, out var decision)) - { - var reasons = decision.Reasons.Count > 0 ? string.Join(", ", decision.Reasons) : "none"; - logger.LogInformation( - "{Image} -> verdict={Verdict} signed={Signed} sbomRef={Sbom} quieted={Quieted} confidence={Confidence} attestation={Attestation} reasons={Reasons}", - image, - decision.PolicyVerdict, - FormatBoolean(decision.Signed), - FormatBoolean(decision.HasSbomReferrers), - FormatQuietedDisplay(decision.AdditionalProperties), - FormatConfidenceDisplay(decision.AdditionalProperties), - FormatAttestation(decision.Rekor), - reasons); - - summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? 
count + 1 : 1; - - if (decision.AdditionalProperties.Count > 0) - { - var metadata = string.Join(", ", decision.AdditionalProperties.Select(kvp => $"{kvp.Key}={FormatAdditionalValue(kvp.Value)}")); - logger.LogDebug("Metadata for {Image}: {Metadata}", image, metadata); - } - } - else - { - logger.LogWarning("{Image} -> no decision returned by backend.", image); - } - } - } - - if (summary.Count > 0) - { - var summaryText = string.Join(", ", summary.Select(kvp => $"{kvp.Key}:{kvp.Value}")); - logger.LogInformation("Verdict summary: {Summary}", summaryText); - } - } - - private static IReadOnlyList BuildImageOrder(IReadOnlyList requestedImages, IEnumerable actual) - { - var order = new List(); - var seen = new HashSet(StringComparer.Ordinal); - - if (requestedImages is not null) - { - foreach (var image in requestedImages) - { - if (!string.IsNullOrWhiteSpace(image)) - { - var trimmed = image.Trim(); - if (seen.Add(trimmed)) - { - order.Add(trimmed); - } - } - } - } - - foreach (var image in actual) - { - if (!string.IsNullOrWhiteSpace(image)) - { - var trimmed = image.Trim(); - if (seen.Add(trimmed)) - { - order.Add(trimmed); - } - } - } - - return new ReadOnlyCollection(order); - } - - private static string FormatBoolean(bool? value) - => value is null ? "unknown" : value.Value ? "yes" : "no"; - - private static string FormatQuietedDisplay(IReadOnlyDictionary metadata) - { - var quieted = GetMetadataBoolean(metadata, "quieted", "quiet"); - var quietedBy = GetMetadataString(metadata, "quietedBy", "quietedReason"); - - if (quieted is true) - { - return string.IsNullOrWhiteSpace(quietedBy) ? "yes" : $"yes ({quietedBy})"; - } - - if (quieted is false) - { - return "no"; - } - - return string.IsNullOrWhiteSpace(quietedBy) ? "-" : $"? 
({quietedBy})"; - } - - private static string FormatConfidenceDisplay(IReadOnlyDictionary metadata) - { - var confidence = GetMetadataDouble(metadata, "confidence"); - var confidenceBand = GetMetadataString(metadata, "confidenceBand", "confidenceTier"); - - if (confidence.HasValue && !string.IsNullOrWhiteSpace(confidenceBand)) - { - return string.Format(CultureInfo.InvariantCulture, "{0:0.###} ({1})", confidence.Value, confidenceBand); - } - - if (confidence.HasValue) - { - return confidence.Value.ToString("0.###", CultureInfo.InvariantCulture); - } - - if (!string.IsNullOrWhiteSpace(confidenceBand)) - { - return confidenceBand!; - } - - return "-"; - } - - private static string FormatAttestation(RuntimePolicyRekorReference? rekor) - { - if (rekor is null) - { - return "-"; - } - - var uuid = string.IsNullOrWhiteSpace(rekor.Uuid) ? null : rekor.Uuid; - var url = string.IsNullOrWhiteSpace(rekor.Url) ? null : rekor.Url; - var verified = rekor.Verified; - - var core = uuid ?? url; - if (!string.IsNullOrEmpty(core)) - { - if (verified.HasValue) - { - var suffix = verified.Value ? " (verified)" : " (unverified)"; - return core + suffix; - } - - return core!; - } - - if (verified.HasValue) - { - return verified.Value ? "verified" : "unverified"; - } - - return "-"; - } - - private static bool? GetMetadataBoolean(IReadOnlyDictionary metadata, params string[] keys) - { - foreach (var key in keys) - { - if (metadata.TryGetValue(key, out var value) && value is not null) - { - switch (value) - { - case bool b: - return b; - case string s when bool.TryParse(s, out var parsed): - return parsed; - } - } - } - - return null; - } - - private static string? GetMetadataString(IReadOnlyDictionary metadata, params string[] keys) - { - foreach (var key in keys) - { - if (metadata.TryGetValue(key, out var value) && value is not null) - { - if (value is string s) - { - return string.IsNullOrWhiteSpace(s) ? null : s; - } - } - } - - return null; - } - - private static double? 
GetMetadataDouble(IReadOnlyDictionary metadata, params string[] keys) - { - foreach (var key in keys) - { - if (metadata.TryGetValue(key, out var value) && value is not null) - { - switch (value) - { - case double d: - return d; - case float f: - return f; - case decimal m: - return (double)m; - case long l: - return l; - case int i: - return i; - case string s when double.TryParse(s, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var parsed): - return parsed; - } - } - } - - return null; - } - - private static TaskRunnerSimulationOutputFormat DetermineTaskRunnerSimulationFormat(string? value, string? outputPath) - { - if (!string.IsNullOrWhiteSpace(value)) - { - return value.Trim().ToLowerInvariant() switch - { - "table" => TaskRunnerSimulationOutputFormat.Table, - "json" => TaskRunnerSimulationOutputFormat.Json, - _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.") - }; - } - + string? tenantOverride, + string format, + bool disableColor, + string? output, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("sources-ingest"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + + using var activity = CliActivitySource.Instance.StartActivity("cli.sources.ingest.dry_run", ActivityKind.Client); + var statusMetric = "unknown"; + using var duration = CliMetrics.MeasureCommandDuration("sources ingest dry-run"); + + try + { + if (!dryRun) + { + statusMetric = "unsupported"; + logger.LogError("Only --dry-run mode is supported for 'stella sources ingest' at this time."); + Environment.ExitCode = 1; + return; + } + + source = source?.Trim() ?? 
string.Empty; + if (string.IsNullOrWhiteSpace(source)) + { + throw new InvalidOperationException("Source identifier must be provided."); + } + + var formatNormalized = string.IsNullOrWhiteSpace(format) + ? "table" + : format.Trim().ToLowerInvariant(); + + if (formatNormalized is not ("table" or "json")) + { + throw new InvalidOperationException("Format must be either 'table' or 'json'."); + } + + var tenant = ResolveTenant(tenantOverride); + if (string.IsNullOrWhiteSpace(tenant)) + { + throw new InvalidOperationException("Tenant must be provided via --tenant or STELLA_TENANT."); + } + + var payload = await LoadIngestInputAsync(services, input, cancellationToken).ConfigureAwait(false); + + logger.LogInformation("Executing ingestion dry-run for source {Source} using input {Input}.", source, payload.Name); + + activity?.SetTag("stellaops.cli.command", "sources ingest dry-run"); + activity?.SetTag("stellaops.cli.source", source); + activity?.SetTag("stellaops.cli.tenant", tenant); + activity?.SetTag("stellaops.cli.format", formatNormalized); + activity?.SetTag("stellaops.cli.input_kind", payload.Kind); + + var request = new AocIngestDryRunRequest + { + Tenant = tenant, + Source = source, + Document = new AocIngestDryRunDocument + { + Name = payload.Name, + Content = payload.Content, + ContentType = payload.ContentType, + ContentEncoding = payload.ContentEncoding + } + }; + + var response = await client.ExecuteAocIngestDryRunAsync(request, cancellationToken).ConfigureAwait(false); + activity?.SetTag("stellaops.cli.status", response.Status ?? 
"unknown"); + + if (!string.IsNullOrWhiteSpace(output)) + { + var reportPath = await WriteJsonReportAsync(response, output, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Dry-run report written to {Path}.", reportPath); + } + + if (formatNormalized == "json") + { + var json = JsonSerializer.Serialize(response, new JsonSerializerOptions + { + WriteIndented = true + }); + Console.WriteLine(json); + } + else + { + RenderDryRunTable(response, !disableColor); + } + + var exitCode = DetermineDryRunExitCode(response); + Environment.ExitCode = exitCode; + statusMetric = exitCode == 0 ? "ok" : "violation"; + activity?.SetTag("stellaops.cli.exit_code", exitCode); + } + catch (Exception ex) + { + statusMetric = "transport_error"; + logger.LogError(ex, "Dry-run ingestion failed."); + Environment.ExitCode = 70; + } + finally + { + verbosity.MinimumLevel = previousLevel; + CliMetrics.RecordSourcesDryRun(statusMetric); + } + } + + public static async Task HandleAocVerifyAsync( + IServiceProvider services, + string? sinceOption, + int? limitOption, + string? sourcesOption, + string? codesOption, + string format, + string? exportPath, + string? tenantOverride, + bool disableColor, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("aoc-verify"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + + using var activity = CliActivitySource.Instance.StartActivity("cli.aoc.verify", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("aoc verify"); + var outcome = "unknown"; + + try + { + var tenant = ResolveTenant(tenantOverride); + if (string.IsNullOrWhiteSpace(tenant)) + { + throw new InvalidOperationException("Tenant must be provided via --tenant or STELLA_TENANT."); + } + + var normalizedFormat = string.IsNullOrWhiteSpace(format) + ? "table" + : format.Trim().ToLowerInvariant(); + + if (normalizedFormat is not ("table" or "json")) + { + throw new InvalidOperationException("Format must be either 'table' or 'json'."); + } + + var since = DetermineVerificationSince(sinceOption); + var sinceIso = since.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); + var limit = NormalizeLimit(limitOption); + var sources = ParseCommaSeparatedList(sourcesOption); + var codes = ParseCommaSeparatedList(codesOption); + + var normalizedSources = sources.Count == 0 + ? Array.Empty() + : sources.Select(item => item.ToLowerInvariant()).ToArray(); + + var normalizedCodes = codes.Count == 0 + ? Array.Empty() + : codes.Select(item => item.ToUpperInvariant()).ToArray(); + + activity?.SetTag("stellaops.cli.command", "aoc verify"); + activity?.SetTag("stellaops.cli.tenant", tenant); + activity?.SetTag("stellaops.cli.since", sinceIso); + activity?.SetTag("stellaops.cli.limit", limit); + activity?.SetTag("stellaops.cli.format", normalizedFormat); + if (normalizedSources.Length > 0) + { + activity?.SetTag("stellaops.cli.sources", string.Join(",", normalizedSources)); + } + + if (normalizedCodes.Length > 0) + { + activity?.SetTag("stellaops.cli.codes", string.Join(",", normalizedCodes)); + } + + var request = new AocVerifyRequest + { + Tenant = tenant, + Since = sinceIso, + Limit = limit, + Sources = normalizedSources.Length == 0 ? null : normalizedSources, + Codes = normalizedCodes.Length == 0 ? 
null : normalizedCodes + }; + + var response = await client.ExecuteAocVerifyAsync(request, cancellationToken).ConfigureAwait(false); + + if (!string.IsNullOrWhiteSpace(exportPath)) + { + var reportPath = await WriteJsonReportAsync(response, exportPath, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Verification report written to {Path}.", reportPath); + } + + if (normalizedFormat == "json") + { + var json = JsonSerializer.Serialize(response, new JsonSerializerOptions + { + WriteIndented = true + }); + Console.WriteLine(json); + } + else + { + RenderAocVerifyTable(response, !disableColor, limit); + } + + var exitCode = DetermineVerifyExitCode(response); + Environment.ExitCode = exitCode; + activity?.SetTag("stellaops.cli.exit_code", exitCode); + outcome = exitCode switch + { + 0 => "ok", + >= 11 and <= 17 => "violations", + 18 => "truncated", + _ => "unknown" + }; + } + catch (InvalidOperationException ex) + { + outcome = "usage_error"; + logger.LogError(ex, "Verification failed: {Message}", ex.Message); + Console.Error.WriteLine(ex.Message); + Environment.ExitCode = 71; + activity?.SetStatus(ActivityStatusCode.Error, ex.Message); + } + catch (Exception ex) + { + outcome = "transport_error"; + logger.LogError(ex, "Verification request failed."); + Console.Error.WriteLine(ex.Message); + Environment.ExitCode = 70; + activity?.SetStatus(ActivityStatusCode.Error, ex.Message); + } + finally + { + verbosity.MinimumLevel = previousLevel; + CliMetrics.RecordAocVerify(outcome); + } + } + + public static async Task HandleConnectorJobAsync( + IServiceProvider services, + string source, + string stage, + string? 
mode, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-connector"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.db.fetch", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "db fetch"); + activity?.SetTag("stellaops.cli.source", source); + activity?.SetTag("stellaops.cli.stage", stage); + if (!string.IsNullOrWhiteSpace(mode)) + { + activity?.SetTag("stellaops.cli.mode", mode); + } + using var duration = CliMetrics.MeasureCommandDuration("db fetch"); + + try + { + var jobKind = $"source:{source}:{stage}"; + var parameters = new Dictionary(StringComparer.Ordinal); + if (!string.IsNullOrWhiteSpace(mode)) + { + parameters["mode"] = mode; + } + + await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + logger.LogError(ex, "Connector job failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleMergeJobAsync( + IServiceProvider services, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-merge"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.db.merge", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "db merge"); + using var duration = CliMetrics.MeasureCommandDuration("db merge"); + + try + { + await TriggerJobAsync(client, logger, "merge:reconcile", new Dictionary(StringComparer.Ordinal), cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + logger.LogError(ex, "Merge job failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleExportJobAsync( + IServiceProvider services, + string format, + bool delta, + bool? publishFull, + bool? publishDelta, + bool? includeFull, + bool? includeDelta, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-export"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.db.export", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "db export"); + activity?.SetTag("stellaops.cli.format", format); + activity?.SetTag("stellaops.cli.delta", delta); + using var duration = CliMetrics.MeasureCommandDuration("db export"); + activity?.SetTag("stellaops.cli.publish_full", publishFull); + activity?.SetTag("stellaops.cli.publish_delta", publishDelta); + activity?.SetTag("stellaops.cli.include_full", includeFull); + activity?.SetTag("stellaops.cli.include_delta", includeDelta); + + try + { + var jobKind = format switch + { + "trivy-db" or "trivy" => "export:trivy-db", + _ => "export:json" + }; + + var isTrivy = jobKind == "export:trivy-db"; + if (isTrivy + && !publishFull.HasValue + && !publishDelta.HasValue + && !includeFull.HasValue + && !includeDelta.HasValue + && AnsiConsole.Profile.Capabilities.Interactive) + { + var overrides = TrivyDbExportPrompt.PromptOverrides(); + publishFull = overrides.publishFull; + publishDelta = overrides.publishDelta; + includeFull = overrides.includeFull; + includeDelta = overrides.includeDelta; + } + + var parameters = new Dictionary(StringComparer.Ordinal) + { + ["delta"] = delta + }; + if (publishFull.HasValue) + { + parameters["publishFull"] = publishFull.Value; + } + if (publishDelta.HasValue) + { + parameters["publishDelta"] = publishDelta.Value; + } + if (includeFull.HasValue) + { + parameters["includeFull"] = includeFull.Value; + } + if (includeDelta.HasValue) + { + parameters["includeDelta"] = includeDelta.Value; + } + + await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + logger.LogError(ex, "Export job failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static Task HandleExcititorInitAsync( + IServiceProvider services, + 
IReadOnlyList providers, + bool resume, + bool verbose, + CancellationToken cancellationToken) + { + var normalizedProviders = NormalizeProviders(providers); + var payload = new Dictionary(StringComparer.Ordinal); + if (normalizedProviders.Count > 0) + { + payload["providers"] = normalizedProviders; + } + if (resume) + { + payload["resume"] = true; + } + + return ExecuteExcititorCommandAsync( + services, + commandName: "excititor init", + verbose, + new Dictionary + { + ["providers"] = normalizedProviders.Count, + ["resume"] = resume + }, + client => client.ExecuteExcititorOperationAsync("init", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), + cancellationToken); + } + + public static Task HandleExcititorPullAsync( + IServiceProvider services, + IReadOnlyList providers, + DateTimeOffset? since, + TimeSpan? window, + bool force, + bool verbose, + CancellationToken cancellationToken) + { + var normalizedProviders = NormalizeProviders(providers); + var payload = new Dictionary(StringComparer.Ordinal); + if (normalizedProviders.Count > 0) + { + payload["providers"] = normalizedProviders; + } + if (since.HasValue) + { + payload["since"] = since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); + } + if (window.HasValue) + { + payload["window"] = window.Value.ToString("c", CultureInfo.InvariantCulture); + } + if (force) + { + payload["force"] = true; + } + + return ExecuteExcititorCommandAsync( + services, + commandName: "excititor pull", + verbose, + new Dictionary + { + ["providers"] = normalizedProviders.Count, + ["force"] = force, + ["since"] = since?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture), + ["window"] = window?.ToString("c", CultureInfo.InvariantCulture) + }, + client => client.ExecuteExcititorOperationAsync("ingest/run", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), + cancellationToken); + } + + public static Task HandleExcititorResumeAsync( + IServiceProvider services, + IReadOnlyList 
providers, + string? checkpoint, + bool verbose, + CancellationToken cancellationToken) + { + var normalizedProviders = NormalizeProviders(providers); + var payload = new Dictionary(StringComparer.Ordinal); + if (normalizedProviders.Count > 0) + { + payload["providers"] = normalizedProviders; + } + if (!string.IsNullOrWhiteSpace(checkpoint)) + { + payload["checkpoint"] = checkpoint.Trim(); + } + + return ExecuteExcititorCommandAsync( + services, + commandName: "excititor resume", + verbose, + new Dictionary + { + ["providers"] = normalizedProviders.Count, + ["checkpoint"] = checkpoint + }, + client => client.ExecuteExcititorOperationAsync("ingest/resume", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), + cancellationToken); + } + + public static async Task HandleExcititorListProvidersAsync( + IServiceProvider services, + bool includeDisabled, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("excititor-list-providers"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.excititor.list-providers", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "excititor list-providers"); + activity?.SetTag("stellaops.cli.include_disabled", includeDisabled); + using var duration = CliMetrics.MeasureCommandDuration("excititor list-providers"); + + try + { + var providers = await client.GetExcititorProvidersAsync(includeDisabled, cancellationToken).ConfigureAwait(false); + Environment.ExitCode = 0; + logger.LogInformation("Providers returned: {Count}", providers.Count); + + if (providers.Count > 0) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table().Border(TableBorder.Rounded).AddColumns("Provider", "Kind", "Trust", "Enabled", "Last Ingested"); + foreach (var provider in providers) + { + table.AddRow( + provider.Id, + provider.Kind, + string.IsNullOrWhiteSpace(provider.TrustTier) ? "-" : provider.TrustTier, + provider.Enabled ? "yes" : "no", + provider.LastIngestedAt?.ToString("yyyy-MM-dd HH:mm:ss 'UTC'", CultureInfo.InvariantCulture) ?? "unknown"); + } + + AnsiConsole.Write(table); + } + else + { + foreach (var provider in providers) + { + logger.LogInformation("{ProviderId} [{Kind}] Enabled={Enabled} Trust={Trust} LastIngested={LastIngested}", + provider.Id, + provider.Kind, + provider.Enabled ? "yes" : "no", + string.IsNullOrWhiteSpace(provider.TrustTier) ? "-" : provider.TrustTier, + provider.LastIngestedAt?.ToString("O", CultureInfo.InvariantCulture) ?? "unknown"); + } + } + } + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to list Excititor providers."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleExcititorExportAsync( + IServiceProvider services, + string format, + bool delta, + string? scope, + DateTimeOffset? since, + string? provider, + string? 
outputPath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scopeHandle = services.CreateAsyncScope(); + var client = scopeHandle.ServiceProvider.GetRequiredService(); + var logger = scopeHandle.ServiceProvider.GetRequiredService().CreateLogger("excititor-export"); + var options = scopeHandle.ServiceProvider.GetRequiredService(); + var verbosity = scopeHandle.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.excititor.export", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "excititor export"); + activity?.SetTag("stellaops.cli.format", format); + activity?.SetTag("stellaops.cli.delta", delta); + if (!string.IsNullOrWhiteSpace(scope)) + { + activity?.SetTag("stellaops.cli.scope", scope); + } + if (since.HasValue) + { + activity?.SetTag("stellaops.cli.since", since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrWhiteSpace(provider)) + { + activity?.SetTag("stellaops.cli.provider", provider); + } + if (!string.IsNullOrWhiteSpace(outputPath)) + { + activity?.SetTag("stellaops.cli.output", outputPath); + } + using var duration = CliMetrics.MeasureCommandDuration("excititor export"); + + try + { + var payload = new Dictionary(StringComparer.Ordinal) + { + ["format"] = string.IsNullOrWhiteSpace(format) ? 
"openvex" : format.Trim(), + ["delta"] = delta + }; + + if (!string.IsNullOrWhiteSpace(scope)) + { + payload["scope"] = scope.Trim(); + } + if (since.HasValue) + { + payload["since"] = since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); + } + if (!string.IsNullOrWhiteSpace(provider)) + { + payload["provider"] = provider.Trim(); + } + + var result = await client.ExecuteExcititorOperationAsync( + "export", + HttpMethod.Post, + RemoveNullValues(payload), + cancellationToken).ConfigureAwait(false); + + if (!result.Success) + { + logger.LogError(string.IsNullOrWhiteSpace(result.Message) ? "Excititor export failed." : result.Message); + Environment.ExitCode = 1; + return; + } + + Environment.ExitCode = 0; + + var manifest = TryParseExportManifest(result.Payload); + if (!string.IsNullOrWhiteSpace(result.Message) + && (manifest is null || !string.Equals(result.Message, "ok", StringComparison.OrdinalIgnoreCase))) + { + logger.LogInformation(result.Message); + } + + if (manifest is not null) + { + activity?.SetTag("stellaops.cli.export_id", manifest.ExportId); + if (!string.IsNullOrWhiteSpace(manifest.Format)) + { + activity?.SetTag("stellaops.cli.export_format", manifest.Format); + } + if (manifest.FromCache.HasValue) + { + activity?.SetTag("stellaops.cli.export_cached", manifest.FromCache.Value); + } + if (manifest.SizeBytes.HasValue) + { + activity?.SetTag("stellaops.cli.export_size", manifest.SizeBytes.Value); + } + + if (manifest.FromCache == true) + { + logger.LogInformation("Reusing cached export {ExportId} ({Format}).", manifest.ExportId, manifest.Format ?? "unknown"); + } + else + { + logger.LogInformation("Export ready: {ExportId} ({Format}).", manifest.ExportId, manifest.Format ?? 
"unknown"); + } + + if (manifest.CreatedAt.HasValue) + { + logger.LogInformation("Created at {CreatedAt}.", manifest.CreatedAt.Value.ToString("u", CultureInfo.InvariantCulture)); + } + + if (!string.IsNullOrWhiteSpace(manifest.Digest)) + { + var digestDisplay = BuildDigestDisplay(manifest.Algorithm, manifest.Digest); + if (manifest.SizeBytes.HasValue) + { + logger.LogInformation("Digest {Digest} ({Size}).", digestDisplay, FormatSize(manifest.SizeBytes.Value)); + } + else + { + logger.LogInformation("Digest {Digest}.", digestDisplay); + } + } + + if (!string.IsNullOrWhiteSpace(manifest.RekorLocation)) + { + if (!string.IsNullOrWhiteSpace(manifest.RekorIndex)) + { + logger.LogInformation("Rekor entry: {Location} (index {Index}).", manifest.RekorLocation, manifest.RekorIndex); + } + else + { + logger.LogInformation("Rekor entry: {Location}.", manifest.RekorLocation); + } + } + + if (!string.IsNullOrWhiteSpace(manifest.RekorInclusionUrl) + && !string.Equals(manifest.RekorInclusionUrl, manifest.RekorLocation, StringComparison.OrdinalIgnoreCase)) + { + logger.LogInformation("Rekor inclusion proof: {Url}.", manifest.RekorInclusionUrl); + } + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + var resolvedPath = ResolveExportOutputPath(outputPath!, manifest); + var download = await client.DownloadExcititorExportAsync( + manifest.ExportId, + resolvedPath, + manifest.Algorithm, + manifest.Digest, + cancellationToken).ConfigureAwait(false); + + activity?.SetTag("stellaops.cli.export_path", download.Path); + + if (download.FromCache) + { + logger.LogInformation("Export already cached at {Path} ({Size}).", download.Path, FormatSize(download.SizeBytes)); + } + else + { + logger.LogInformation("Export saved to {Path} ({Size}).", download.Path, FormatSize(download.SizeBytes)); + } + } + else if (!string.IsNullOrWhiteSpace(result.Location)) + { + var downloadUrl = ResolveLocationUrl(options, result.Location); + if (!string.IsNullOrWhiteSpace(downloadUrl)) + { + 
logger.LogInformation("Download URL: {Url}", downloadUrl); + } + else + { + logger.LogInformation("Download location: {Location}", result.Location); + } + } + } + else + { + if (!string.IsNullOrWhiteSpace(result.Location)) + { + var downloadUrl = ResolveLocationUrl(options, result.Location); + if (!string.IsNullOrWhiteSpace(downloadUrl)) + { + logger.LogInformation("Download URL: {Url}", downloadUrl); + } + else + { + logger.LogInformation("Location: {Location}", result.Location); + } + } + else if (string.IsNullOrWhiteSpace(result.Message)) + { + logger.LogInformation("Export request accepted."); + } + } + } + catch (Exception ex) + { + logger.LogError(ex, "Excititor export failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static Task HandleExcititorBackfillStatementsAsync( + IServiceProvider services, + DateTimeOffset? retrievedSince, + bool force, + int batchSize, + int? maxDocuments, + bool verbose, + CancellationToken cancellationToken) + { + if (batchSize <= 0) + { + throw new ArgumentOutOfRangeException(nameof(batchSize), "Batch size must be greater than zero."); + } + + if (maxDocuments.HasValue && maxDocuments.Value <= 0) + { + throw new ArgumentOutOfRangeException(nameof(maxDocuments), "Max documents must be greater than zero when specified."); + } + + var payload = new Dictionary(StringComparer.Ordinal) + { + ["force"] = force, + ["batchSize"] = batchSize, + ["maxDocuments"] = maxDocuments + }; + + if (retrievedSince.HasValue) + { + payload["retrievedSince"] = retrievedSince.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); + } + + var activityTags = new Dictionary(StringComparer.Ordinal) + { + ["stellaops.cli.force"] = force, + ["stellaops.cli.batch_size"] = batchSize, + ["stellaops.cli.max_documents"] = maxDocuments + }; + + if (retrievedSince.HasValue) + { + activityTags["stellaops.cli.retrieved_since"] = retrievedSince.Value.ToUniversalTime().ToString("O", 
CultureInfo.InvariantCulture); + } + + return ExecuteExcititorCommandAsync( + services, + commandName: "excititor backfill-statements", + verbose, + activityTags, + client => client.ExecuteExcititorOperationAsync( + "admin/backfill-statements", + HttpMethod.Post, + RemoveNullValues(payload), + cancellationToken), + cancellationToken); + } + + public static Task HandleExcititorVerifyAsync( + IServiceProvider services, + string? exportId, + string? digest, + string? attestationPath, + bool verbose, + CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(exportId) && string.IsNullOrWhiteSpace(digest) && string.IsNullOrWhiteSpace(attestationPath)) + { + var logger = services.GetRequiredService().CreateLogger("excititor-verify"); + logger.LogError("At least one of --export-id, --digest, or --attestation must be provided."); + Environment.ExitCode = 1; + return Task.CompletedTask; + } + + var payload = new Dictionary(StringComparer.Ordinal); + if (!string.IsNullOrWhiteSpace(exportId)) + { + payload["exportId"] = exportId.Trim(); + } + if (!string.IsNullOrWhiteSpace(digest)) + { + payload["digest"] = digest.Trim(); + } + if (!string.IsNullOrWhiteSpace(attestationPath)) + { + var fullPath = Path.GetFullPath(attestationPath); + if (!File.Exists(fullPath)) + { + var logger = services.GetRequiredService().CreateLogger("excititor-verify"); + logger.LogError("Attestation file not found at {Path}.", fullPath); + Environment.ExitCode = 1; + return Task.CompletedTask; + } + + var bytes = File.ReadAllBytes(fullPath); + payload["attestation"] = new Dictionary(StringComparer.Ordinal) + { + ["fileName"] = Path.GetFileName(fullPath), + ["base64"] = Convert.ToBase64String(bytes) + }; + } + + return ExecuteExcititorCommandAsync( + services, + commandName: "excititor verify", + verbose, + new Dictionary + { + ["export_id"] = exportId, + ["digest"] = digest, + ["attestation_path"] = attestationPath + }, + client => client.ExecuteExcititorOperationAsync("verify", 
HttpMethod.Post, RemoveNullValues(payload), cancellationToken), + cancellationToken); + } + + public static Task HandleExcititorReconcileAsync( + IServiceProvider services, + IReadOnlyList providers, + TimeSpan? maxAge, + bool verbose, + CancellationToken cancellationToken) + { + var normalizedProviders = NormalizeProviders(providers); + var payload = new Dictionary(StringComparer.Ordinal); + if (normalizedProviders.Count > 0) + { + payload["providers"] = normalizedProviders; + } + if (maxAge.HasValue) + { + payload["maxAge"] = maxAge.Value.ToString("c", CultureInfo.InvariantCulture); + } + + return ExecuteExcititorCommandAsync( + services, + commandName: "excititor reconcile", + verbose, + new Dictionary + { + ["providers"] = normalizedProviders.Count, + ["max_age"] = maxAge?.ToString("c", CultureInfo.InvariantCulture) + }, + client => client.ExecuteExcititorOperationAsync("reconcile", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), + cancellationToken); + } + + public static async Task HandleRuntimePolicyTestAsync( + IServiceProvider services, + string? namespaceValue, + IReadOnlyList imageArguments, + string? filePath, + IReadOnlyList labelArguments, + bool outputJson, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("runtime-policy-test"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.runtime.policy.test", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "runtime policy test"); + if (!string.IsNullOrWhiteSpace(namespaceValue)) + { + activity?.SetTag("stellaops.cli.namespace", namespaceValue); + } + using var duration = CliMetrics.MeasureCommandDuration("runtime policy test"); + + try + { + IReadOnlyList images; + try + { + images = await GatherImageDigestsAsync(imageArguments, filePath, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or ArgumentException or FileNotFoundException) + { + logger.LogError(ex, "Failed to gather image digests: {Message}", ex.Message); + Environment.ExitCode = 9; + return; + } + + if (images.Count == 0) + { + logger.LogError("No image digests provided. Use --image, --file, or pipe digests via stdin."); + Environment.ExitCode = 9; + return; + } + + IReadOnlyDictionary labels; + try + { + labels = ParseLabelSelectors(labelArguments); + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + Environment.ExitCode = 9; + return; + } + + activity?.SetTag("stellaops.cli.images", images.Count); + activity?.SetTag("stellaops.cli.labels", labels.Count); + + var request = new RuntimePolicyEvaluationRequest(namespaceValue, labels, images); + var result = await client.EvaluateRuntimePolicyAsync(request, cancellationToken).ConfigureAwait(false); + + activity?.SetTag("stellaops.cli.ttl_seconds", result.TtlSeconds); + Environment.ExitCode = 0; + + if (outputJson) + { + var json = BuildRuntimePolicyJson(result, images); + Console.WriteLine(json); + return; + } + + if (result.ExpiresAtUtc.HasValue) + { + logger.LogInformation("Decision TTL: {TtlSeconds}s (expires {ExpiresAt})", result.TtlSeconds, result.ExpiresAtUtc.Value.ToString("u", CultureInfo.InvariantCulture)); + } + else + { + logger.LogInformation("Decision TTL: 
{TtlSeconds}s", result.TtlSeconds); + } + + if (!string.IsNullOrWhiteSpace(result.PolicyRevision)) + { + logger.LogInformation("Policy revision: {Revision}", result.PolicyRevision); + } + + DisplayRuntimePolicyResults(logger, result, images); + } + catch (Exception ex) + { + logger.LogError(ex, "Runtime policy evaluation failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleAuthLoginAsync( + IServiceProvider services, + StellaOpsCliOptions options, + bool verbose, + bool force, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-login"); + Environment.ExitCode = 0; + + if (string.IsNullOrWhiteSpace(options.Authority?.Url)) + { + logger.LogError("Authority URL is not configured. Set STELLAOPS_AUTHORITY_URL or update your configuration."); + Environment.ExitCode = 1; + return; + } + + var tokenClient = scope.ServiceProvider.GetService(); + if (tokenClient is null) + { + logger.LogError("Authority client is not available. 
Ensure AddStellaOpsAuthClient is registered in Program.cs."); + Environment.ExitCode = 1; + return; + } + + var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); + if (string.IsNullOrWhiteSpace(cacheKey)) + { + logger.LogError("Authority configuration is incomplete; unable to determine cache key."); + Environment.ExitCode = 1; + return; + } + + try + { + if (force) + { + await tokenClient.ClearCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); + } + + var scopeName = AuthorityTokenUtilities.ResolveScope(options); + StellaOpsTokenResult token; + + if (!string.IsNullOrWhiteSpace(options.Authority.Username)) + { + if (string.IsNullOrWhiteSpace(options.Authority.Password)) + { + logger.LogError("Authority password must be provided when username is configured."); + Environment.ExitCode = 1; + return; + } + + token = await tokenClient.RequestPasswordTokenAsync( + options.Authority.Username, + options.Authority.Password!, + scopeName, + null, + cancellationToken).ConfigureAwait(false); + } + else + { + token = await tokenClient.RequestClientCredentialsTokenAsync(scopeName, null, cancellationToken).ConfigureAwait(false); + } + + await tokenClient.CacheTokenAsync(cacheKey, token.ToCacheEntry(), cancellationToken).ConfigureAwait(false); + + if (verbose) + { + logger.LogInformation("Authenticated with {Authority} (scopes: {Scopes}).", options.Authority.Url, string.Join(", ", token.Scopes)); + } + + logger.LogInformation("Login successful. 
Access token expires at {Expires}.", token.ExpiresAtUtc.ToString("u")); + } + catch (Exception ex) + { + logger.LogError(ex, "Authentication failed: {Message}", ex.Message); + Environment.ExitCode = 1; + } + } + + public static async Task HandleAuthLogoutAsync( + IServiceProvider services, + StellaOpsCliOptions options, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-logout"); + Environment.ExitCode = 0; + + var tokenClient = scope.ServiceProvider.GetService(); + if (tokenClient is null) + { + logger.LogInformation("No authority client registered; nothing to remove."); + return; + } + + var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); + if (string.IsNullOrWhiteSpace(cacheKey)) + { + logger.LogInformation("Authority configuration missing; no cached tokens to remove."); + return; + } + + await tokenClient.ClearCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); + if (verbose) + { + logger.LogInformation("Cleared cached token for {Authority}.", options.Authority?.Url ?? "authority"); + } + } + + public static async Task HandleAuthStatusAsync( + IServiceProvider services, + StellaOpsCliOptions options, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-status"); + Environment.ExitCode = 0; + + if (string.IsNullOrWhiteSpace(options.Authority?.Url)) + { + logger.LogInformation("Authority URL not configured. 
Set STELLAOPS_AUTHORITY_URL and run 'auth login'."); + Environment.ExitCode = 1; + return; + } + + var tokenClient = scope.ServiceProvider.GetService(); + if (tokenClient is null) + { + logger.LogInformation("Authority client not registered; no cached tokens available."); + Environment.ExitCode = 1; + return; + } + + var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); + if (string.IsNullOrWhiteSpace(cacheKey)) + { + logger.LogInformation("Authority configuration incomplete; no cached tokens available."); + Environment.ExitCode = 1; + return; + } + + var entry = await tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); + if (entry is null) + { + logger.LogInformation("No cached token for {Authority}. Run 'auth login' to authenticate.", options.Authority.Url); + Environment.ExitCode = 1; + return; + } + + logger.LogInformation("Cached token for {Authority} expires at {Expires}.", options.Authority.Url, entry.ExpiresAtUtc.ToString("u")); + if (verbose) + { + logger.LogInformation("Scopes: {Scopes}", string.Join(", ", entry.Scopes)); + } + } + + public static async Task HandleAuthWhoAmIAsync( + IServiceProvider services, + StellaOpsCliOptions options, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-whoami"); + Environment.ExitCode = 0; + + if (string.IsNullOrWhiteSpace(options.Authority?.Url)) + { + logger.LogInformation("Authority URL not configured. 
Set STELLAOPS_AUTHORITY_URL and run 'auth login'."); + Environment.ExitCode = 1; + return; + } + + var tokenClient = scope.ServiceProvider.GetService(); + if (tokenClient is null) + { + logger.LogInformation("Authority client not registered; no cached tokens available."); + Environment.ExitCode = 1; + return; + } + + var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); + if (string.IsNullOrWhiteSpace(cacheKey)) + { + logger.LogInformation("Authority configuration incomplete; no cached tokens available."); + Environment.ExitCode = 1; + return; + } + + var entry = await tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); + if (entry is null) + { + logger.LogInformation("No cached token for {Authority}. Run 'auth login' to authenticate.", options.Authority.Url); + Environment.ExitCode = 1; + return; + } + + var grantType = string.IsNullOrWhiteSpace(options.Authority.Username) ? "client_credentials" : "password"; + var now = DateTimeOffset.UtcNow; + var remaining = entry.ExpiresAtUtc - now; + if (remaining < TimeSpan.Zero) + { + remaining = TimeSpan.Zero; + } + + logger.LogInformation("Authority: {Authority}", options.Authority.Url); + logger.LogInformation("Grant type: {GrantType}", grantType); + logger.LogInformation("Token type: {TokenType}", entry.TokenType); + logger.LogInformation("Expires: {Expires} ({Remaining})", entry.ExpiresAtUtc.ToString("u"), FormatDuration(remaining)); + + if (entry.Scopes.Count > 0) + { + logger.LogInformation("Scopes: {Scopes}", string.Join(", ", entry.Scopes)); + } + + if (TryExtractJwtClaims(entry.AccessToken, out var claims, out var issuedAt, out var notBefore)) + { + if (claims.TryGetValue("sub", out var subject) && !string.IsNullOrWhiteSpace(subject)) + { + logger.LogInformation("Subject: {Subject}", subject); + } + + if (claims.TryGetValue("client_id", out var clientId) && !string.IsNullOrWhiteSpace(clientId)) + { + logger.LogInformation("Client ID (token): {ClientId}", clientId); + } + + 
if (claims.TryGetValue("aud", out var audience) && !string.IsNullOrWhiteSpace(audience)) + { + logger.LogInformation("Audience: {Audience}", audience); + } + + if (claims.TryGetValue("iss", out var issuer) && !string.IsNullOrWhiteSpace(issuer)) + { + logger.LogInformation("Issuer: {Issuer}", issuer); + } + + if (issuedAt is not null) + { + logger.LogInformation("Issued at: {IssuedAt}", issuedAt.Value.ToString("u")); + } + + if (notBefore is not null) + { + logger.LogInformation("Not before: {NotBefore}", notBefore.Value.ToString("u")); + } + + var extraClaims = CollectAdditionalClaims(claims); + if (extraClaims.Count > 0 && verbose) + { + logger.LogInformation("Additional claims: {Claims}", string.Join(", ", extraClaims)); + } + } + else + { + logger.LogInformation("Access token appears opaque; claims are unavailable."); + } + } + + public static async Task HandleAuthRevokeExportAsync( + IServiceProvider services, + StellaOpsCliOptions options, + string? outputDirectory, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-revoke-export"); + Environment.ExitCode = 0; + + try + { + var client = scope.ServiceProvider.GetRequiredService(); + var result = await client.ExportAsync(verbose, cancellationToken).ConfigureAwait(false); + + var directory = string.IsNullOrWhiteSpace(outputDirectory) + ? 
Directory.GetCurrentDirectory() + : Path.GetFullPath(outputDirectory); + + Directory.CreateDirectory(directory); + + var bundlePath = Path.Combine(directory, "revocation-bundle.json"); + var signaturePath = Path.Combine(directory, "revocation-bundle.json.jws"); + var digestPath = Path.Combine(directory, "revocation-bundle.json.sha256"); + + await File.WriteAllBytesAsync(bundlePath, result.BundleBytes, cancellationToken).ConfigureAwait(false); + await File.WriteAllTextAsync(signaturePath, result.Signature, cancellationToken).ConfigureAwait(false); + await File.WriteAllTextAsync(digestPath, $"sha256:{result.Digest}", cancellationToken).ConfigureAwait(false); + + var computedDigest = Convert.ToHexString(SHA256.HashData(result.BundleBytes)).ToLowerInvariant(); + if (!string.Equals(computedDigest, result.Digest, StringComparison.OrdinalIgnoreCase)) + { + logger.LogError("Digest mismatch. Expected {Expected} but computed {Actual}.", result.Digest, computedDigest); + Environment.ExitCode = 1; + return; + } + + logger.LogInformation( + "Revocation bundle exported to {Directory} (sequence {Sequence}, issued {Issued:u}, signing key {KeyId}, provider {Provider}).", + directory, + result.Sequence, + result.IssuedAt, + string.IsNullOrWhiteSpace(result.SigningKeyId) ? "" : result.SigningKeyId, + string.IsNullOrWhiteSpace(result.SigningProvider) ? 
"default" : result.SigningProvider); + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to export revocation bundle."); + Environment.ExitCode = 1; + } + } + + public static async Task HandleAuthRevokeVerifyAsync( + string bundlePath, + string signaturePath, + string keyPath, + bool verbose, + CancellationToken cancellationToken) + { + var loggerFactory = LoggerFactory.Create(builder => builder.AddSimpleConsole(options => + { + options.SingleLine = true; + options.TimestampFormat = "HH:mm:ss "; + })); + var logger = loggerFactory.CreateLogger("auth-revoke-verify"); + Environment.ExitCode = 0; + + try + { + if (string.IsNullOrWhiteSpace(bundlePath) || string.IsNullOrWhiteSpace(signaturePath) || string.IsNullOrWhiteSpace(keyPath)) + { + logger.LogError("Arguments --bundle, --signature, and --key are required."); + Environment.ExitCode = 1; + return; + } + + var bundleBytes = await File.ReadAllBytesAsync(bundlePath, cancellationToken).ConfigureAwait(false); + var signatureContent = (await File.ReadAllTextAsync(signaturePath, cancellationToken).ConfigureAwait(false)).Trim(); + var keyPem = await File.ReadAllTextAsync(keyPath, cancellationToken).ConfigureAwait(false); + + var digest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant(); + logger.LogInformation("Bundle digest sha256:{Digest}", digest); + + if (!TryParseDetachedJws(signatureContent, out var encodedHeader, out var encodedSignature)) + { + logger.LogError("Signature is not in detached JWS format."); + Environment.ExitCode = 1; + return; + } + + var headerJson = Encoding.UTF8.GetString(Base64UrlDecode(encodedHeader)); + using var headerDocument = JsonDocument.Parse(headerJson); + var header = headerDocument.RootElement; + + if (!header.TryGetProperty("b64", out var b64Element) || b64Element.GetBoolean()) + { + logger.LogError("Detached JWS header must include '\"b64\": false'."); + Environment.ExitCode = 1; + return; + } + + var algorithm = header.TryGetProperty("alg", out var 
algElement) ? algElement.GetString() : SignatureAlgorithms.Es256; + if (string.IsNullOrWhiteSpace(algorithm)) + { + algorithm = SignatureAlgorithms.Es256; + } + + var providerHint = header.TryGetProperty("provider", out var providerElement) + ? providerElement.GetString() + : null; + + var keyId = header.TryGetProperty("kid", out var kidElement) ? kidElement.GetString() : null; + if (string.IsNullOrWhiteSpace(keyId)) + { + keyId = Path.GetFileNameWithoutExtension(keyPath); + logger.LogWarning("JWS header missing 'kid'; using fallback key id {KeyId}.", keyId); + } + + CryptoSigningKey signingKey; + try + { + signingKey = CreateVerificationSigningKey(keyId!, algorithm!, providerHint, keyPem, keyPath); + } + catch (Exception ex) when (ex is InvalidOperationException or CryptographicException) + { + logger.LogError(ex, "Failed to load verification key material."); + Environment.ExitCode = 1; + return; + } + + var providers = new List + { + new DefaultCryptoProvider() + }; + +#if STELLAOPS_CRYPTO_SODIUM + providers.Add(new LibsodiumCryptoProvider()); +#endif + + foreach (var provider in providers) + { + if (provider.Supports(CryptoCapability.Verification, algorithm!)) + { + provider.UpsertSigningKey(signingKey); + } + } + + var preferredOrder = !string.IsNullOrWhiteSpace(providerHint) + ? new[] { providerHint! 
} + : Array.Empty(); + var registry = new CryptoProviderRegistry(providers, preferredOrder); + CryptoSignerResolution resolution; + try + { + resolution = registry.ResolveSigner( + CryptoCapability.Verification, + algorithm!, + signingKey.Reference, + providerHint); + } + catch (Exception ex) + { + logger.LogError(ex, "No crypto provider available for verification (algorithm {Algorithm}).", algorithm); + Environment.ExitCode = 1; + return; + } + + var signingInputLength = encodedHeader.Length + 1 + bundleBytes.Length; + var buffer = ArrayPool.Shared.Rent(signingInputLength); + try + { + var headerBytes = Encoding.ASCII.GetBytes(encodedHeader); + Buffer.BlockCopy(headerBytes, 0, buffer, 0, headerBytes.Length); + buffer[headerBytes.Length] = (byte)'.'; + Buffer.BlockCopy(bundleBytes, 0, buffer, headerBytes.Length + 1, bundleBytes.Length); + + var signatureBytes = Base64UrlDecode(encodedSignature); + var verified = await resolution.Signer.VerifyAsync( + new ReadOnlyMemory(buffer, 0, signingInputLength), + signatureBytes, + cancellationToken).ConfigureAwait(false); + + if (!verified) + { + logger.LogError("Signature verification failed."); + Environment.ExitCode = 1; + return; + } + } + finally + { + ArrayPool.Shared.Return(buffer); + } + + if (!string.IsNullOrWhiteSpace(providerHint) && !string.Equals(providerHint, resolution.ProviderName, StringComparison.OrdinalIgnoreCase)) + { + logger.LogWarning( + "Preferred provider '{Preferred}' unavailable; verification used '{Provider}'.", + providerHint, + resolution.ProviderName); + } + + logger.LogInformation( + "Signature verified using algorithm {Algorithm} via provider {Provider} (kid {KeyId}).", + algorithm, + resolution.ProviderName, + signingKey.Reference.KeyId); + + if (verbose) + { + logger.LogInformation("JWS header: {Header}", headerJson); + } + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to verify revocation bundle."); + Environment.ExitCode = 1; + } + finally + { + loggerFactory.Dispose(); + } + 
} + + public static async Task HandleVulnObservationsAsync( + IServiceProvider services, + string tenant, + IReadOnlyList observationIds, + IReadOnlyList aliases, + IReadOnlyList purls, + IReadOnlyList cpes, + int? limit, + string? cursor, + bool emitJson, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vuln-observations"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.vuln.observations", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "vuln observations"); + activity?.SetTag("stellaops.cli.tenant", tenant); + using var duration = CliMetrics.MeasureCommandDuration("vuln observations"); + + try + { + tenant = tenant?.Trim().ToLowerInvariant() ?? string.Empty; + if (string.IsNullOrWhiteSpace(tenant)) + { + throw new InvalidOperationException("Tenant must be provided."); + } + + var query = new AdvisoryObservationsQuery( + tenant, + NormalizeSet(observationIds, toLower: false), + NormalizeSet(aliases, toLower: true), + NormalizeSet(purls, toLower: false), + NormalizeSet(cpes, toLower: false), + limit, + cursor); + + var response = await client.GetObservationsAsync(query, cancellationToken).ConfigureAwait(false); + + if (emitJson) + { + var json = JsonSerializer.Serialize(response, new JsonSerializerOptions + { + WriteIndented = true + }); + Console.WriteLine(json); + Environment.ExitCode = 0; + return; + } + + RenderObservationTable(response); + if (!emitJson && response.HasMore && !string.IsNullOrWhiteSpace(response.NextCursor)) + { + var escapedCursor = Markup.Escape(response.NextCursor); + AnsiConsole.MarkupLine($"[yellow]More observations available. 
Continue with[/] [cyan]--cursor[/] [grey]{escapedCursor}[/]"); + } + Environment.ExitCode = 0; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + logger.LogWarning("Operation cancelled by user."); + Environment.ExitCode = 130; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to fetch observations from Concelier."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + + static IReadOnlyList NormalizeSet(IReadOnlyList values, bool toLower) + { + if (values is null || values.Count == 0) + { + return Array.Empty(); + } + + var set = new HashSet(StringComparer.Ordinal); + foreach (var raw in values) + { + if (string.IsNullOrWhiteSpace(raw)) + { + continue; + } + + var normalized = raw.Trim(); + if (toLower) + { + normalized = normalized.ToLowerInvariant(); + } + + set.Add(normalized); + } + + return set.Count == 0 ? Array.Empty() : set.ToArray(); + } + + static void RenderObservationTable(AdvisoryObservationsResponse response) + { + var observations = response.Observations ?? Array.Empty(); + if (observations.Count == 0) + { + AnsiConsole.MarkupLine("[yellow]No observations matched the provided filters.[/]"); + return; + } + + var table = new Table() + .Centered() + .Border(TableBorder.Rounded); + + table.AddColumn("Observation"); + table.AddColumn("Source"); + table.AddColumn("Upstream Id"); + table.AddColumn("Aliases"); + table.AddColumn("PURLs"); + table.AddColumn("CPEs"); + table.AddColumn("Created (UTC)"); + + foreach (var observation in observations) + { + var sourceVendor = observation.Source?.Vendor ?? "(unknown)"; + var upstreamId = observation.Upstream?.UpstreamId ?? 
"(unknown)"; + var aliasesText = FormatList(observation.Linkset?.Aliases); + var purlsText = FormatList(observation.Linkset?.Purls); + var cpesText = FormatList(observation.Linkset?.Cpes); + + table.AddRow( + Markup.Escape(observation.ObservationId), + Markup.Escape(sourceVendor), + Markup.Escape(upstreamId), + Markup.Escape(aliasesText), + Markup.Escape(purlsText), + Markup.Escape(cpesText), + observation.CreatedAt.ToUniversalTime().ToString("u", CultureInfo.InvariantCulture)); + } + + AnsiConsole.Write(table); + AnsiConsole.MarkupLine( + "[green]{0}[/] observation(s). Aliases: [green]{1}[/], PURLs: [green]{2}[/], CPEs: [green]{3}[/].", + observations.Count, + response.Linkset?.Aliases?.Count ?? 0, + response.Linkset?.Purls?.Count ?? 0, + response.Linkset?.Cpes?.Count ?? 0); + } + + static string FormatList(IReadOnlyList? values) + { + if (values is null || values.Count == 0) + { + return "(none)"; + } + + const int MaxItems = 3; + if (values.Count <= MaxItems) + { + return string.Join(", ", values); + } + + var preview = values.Take(MaxItems); + return $"{string.Join(", ", preview)} (+{values.Count - MaxItems})"; + } + } + + public static async Task HandleOfflineKitPullAsync( + IServiceProvider services, + string? bundleId, + string? destinationDirectory, + bool overwrite, + bool resume, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var options = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-pull"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.pull", ActivityKind.Client); + activity?.SetTag("stellaops.cli.bundle_id", string.IsNullOrWhiteSpace(bundleId) ? "latest" : bundleId); + using var duration = CliMetrics.MeasureCommandDuration("offline kit pull"); + + try + { + var targetDirectory = string.IsNullOrWhiteSpace(destinationDirectory) + ? options.Offline?.KitsDirectory ?? Path.Combine(Environment.CurrentDirectory, "offline-kits") + : destinationDirectory; + + targetDirectory = Path.GetFullPath(targetDirectory); + Directory.CreateDirectory(targetDirectory); + + var result = await client.DownloadOfflineKitAsync(bundleId, targetDirectory, overwrite, resume, cancellationToken).ConfigureAwait(false); + + logger.LogInformation( + "Bundle {BundleId} stored at {Path} (captured {Captured:u}, sha256:{Digest}).", + result.Descriptor.BundleId, + result.BundlePath, + result.Descriptor.CapturedAt, + result.Descriptor.BundleSha256); + + logger.LogInformation("Manifest saved to {Manifest}.", result.ManifestPath); + + if (!string.IsNullOrWhiteSpace(result.MetadataPath)) + { + logger.LogDebug("Metadata recorded at {Metadata}.", result.MetadataPath); + } + + if (result.BundleSignaturePath is not null) + { + logger.LogInformation("Bundle signature saved to {Signature}.", result.BundleSignaturePath); + } + + if (result.ManifestSignaturePath is not null) + { + logger.LogInformation("Manifest signature saved to {Signature}.", result.ManifestSignaturePath); + } + + CliMetrics.RecordOfflineKitDownload(result.Descriptor.Kind ?? 
"unknown", result.FromCache); + activity?.SetTag("stellaops.cli.bundle_cache", result.FromCache); + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to download offline kit bundle."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicyFindingsListAsync( + IServiceProvider services, + string policyId, + string[] sbomFilters, + string[] statusFilters, + string[] severityFilters, + string? since, + string? cursor, + int? page, + int? pageSize, + string? format, + string? outputPath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-ls"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.list", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("policy findings list"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (page.HasValue && page.Value < 1) + { + throw new ArgumentException("--page must be greater than or equal to 1.", nameof(page)); + } + + if (pageSize.HasValue && (pageSize.Value < 1 || pageSize.Value > 500)) + { + throw new ArgumentException("--page-size must be between 1 and 500.", nameof(pageSize)); + } + + var normalizedPolicyId = policyId.Trim(); + var sboms = NormalizePolicyFilterValues(sbomFilters); + var statuses = NormalizePolicyFilterValues(statusFilters, toLower: true); + var severities = NormalizePolicyFilterValues(severityFilters); + var sinceValue = ParsePolicySince(since); + var cursorValue = string.IsNullOrWhiteSpace(cursor) ? 
null : cursor.Trim(); + + var query = new PolicyFindingsQuery( + normalizedPolicyId, + sboms, + statuses, + severities, + cursorValue, + page, + pageSize, + sinceValue); + + activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); + if (sboms.Count > 0) + { + activity?.SetTag("stellaops.cli.findings.sbom_filters", string.Join(",", sboms)); + } + + if (statuses.Count > 0) + { + activity?.SetTag("stellaops.cli.findings.status_filters", string.Join(",", statuses)); + } + + if (severities.Count > 0) + { + activity?.SetTag("stellaops.cli.findings.severity_filters", string.Join(",", severities)); + } + + if (!string.IsNullOrWhiteSpace(cursorValue)) + { + activity?.SetTag("stellaops.cli.findings.cursor", cursorValue); + } + + if (page.HasValue) + { + activity?.SetTag("stellaops.cli.findings.page", page.Value); + } + + if (pageSize.HasValue) + { + activity?.SetTag("stellaops.cli.findings.page_size", pageSize.Value); + } + + if (sinceValue.HasValue) + { + activity?.SetTag("stellaops.cli.findings.since", sinceValue.Value.ToString("o", CultureInfo.InvariantCulture)); + } + + var result = await client.GetPolicyFindingsAsync(query, cancellationToken).ConfigureAwait(false); + activity?.SetTag("stellaops.cli.findings.count", result.Items.Count); + if (!string.IsNullOrWhiteSpace(result.NextCursor)) + { + activity?.SetTag("stellaops.cli.findings.next_cursor", result.NextCursor); + } + + var payload = BuildPolicyFindingsPayload(normalizedPolicyId, query, result); + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Results written to {Path}.", Path.GetFullPath(outputPath!)); + } + + var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); + if (outputFormat == PolicyFindingsOutputFormat.Json) + { + var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); + Console.WriteLine(json); + } + else + { + RenderPolicyFindingsTable(logger, 
result); + } + + CliMetrics.RecordPolicyFindingsList(result.Items.Count == 0 ? "empty" : "ok"); + Environment.ExitCode = 0; + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicyFindingsList("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsList); + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to list policy findings."); + CliMetrics.RecordPolicyFindingsList("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicyFindingsGetAsync( + IServiceProvider services, + string policyId, + string findingId, + string? format, + string? outputPath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-get"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.get", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("policy findings get"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (string.IsNullOrWhiteSpace(findingId)) + { + throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); + } + + var normalizedPolicyId = policyId.Trim(); + var normalizedFindingId = findingId.Trim(); + activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); + activity?.SetTag("stellaops.cli.finding_id", normalizedFindingId); + + var result = await client.GetPolicyFindingAsync(normalizedPolicyId, normalizedFindingId, cancellationToken).ConfigureAwait(false); + var payload = BuildPolicyFindingPayload(normalizedPolicyId, result); + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Finding written to {Path}.", Path.GetFullPath(outputPath!)); + } + + var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); + if (outputFormat == PolicyFindingsOutputFormat.Json) + { + Console.WriteLine(JsonSerializer.Serialize(payload, SimulationJsonOptions)); + } + else + { + RenderPolicyFindingDetails(logger, result); + } + + var outcome = string.IsNullOrWhiteSpace(result.Status) ? 
"unknown" : result.Status.ToLowerInvariant(); + CliMetrics.RecordPolicyFindingsGet(outcome); + Environment.ExitCode = 0; + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicyFindingsGet("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsGet); + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to retrieve policy finding."); + CliMetrics.RecordPolicyFindingsGet("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicyFindingsExplainAsync( + IServiceProvider services, + string policyId, + string findingId, + string? mode, + string? format, + string? outputPath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-explain"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.explain", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("policy findings explain"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (string.IsNullOrWhiteSpace(findingId)) + { + throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); + } + + var normalizedPolicyId = policyId.Trim(); + var normalizedFindingId = findingId.Trim(); + var normalizedMode = NormalizeExplainMode(mode); + + activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); + activity?.SetTag("stellaops.cli.finding_id", normalizedFindingId); + if (!string.IsNullOrWhiteSpace(normalizedMode)) + { + activity?.SetTag("stellaops.cli.findings.mode", normalizedMode); + } + + var result = await client.GetPolicyFindingExplainAsync(normalizedPolicyId, normalizedFindingId, normalizedMode, cancellationToken).ConfigureAwait(false); + activity?.SetTag("stellaops.cli.findings.step_count", result.Steps.Count); + + var payload = BuildPolicyFindingExplainPayload(normalizedPolicyId, normalizedFindingId, normalizedMode, result); + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Explain trace written to {Path}.", Path.GetFullPath(outputPath!)); + } + + var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); + if (outputFormat == PolicyFindingsOutputFormat.Json) + { + Console.WriteLine(JsonSerializer.Serialize(payload, SimulationJsonOptions)); + } + else + { + RenderPolicyFindingExplain(logger, result); + } + + CliMetrics.RecordPolicyFindingsExplain(result.Steps.Count == 0 ? 
"empty" : "ok"); + Environment.ExitCode = 0; + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicyFindingsExplain("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsExplain); + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to fetch policy explain trace."); + CliMetrics.RecordPolicyFindingsExplain("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicyActivateAsync( + IServiceProvider services, + string policyId, + int version, + string? note, + bool runNow, + string? scheduledAt, + string? priority, + bool rollback, + string? incidentId, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-activate"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.activate", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "policy activate"); + using var duration = CliMetrics.MeasureCommandDuration("policy activate"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (version <= 0) + { + throw new ArgumentOutOfRangeException(nameof(version), "Version must be greater than zero."); + } + + var normalizedPolicyId = policyId.Trim(); + DateTimeOffset? 
scheduled = null; + if (!string.IsNullOrWhiteSpace(scheduledAt)) + { + if (!DateTimeOffset.TryParse(scheduledAt, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed)) + { + throw new ArgumentException("Scheduled timestamp must be a valid ISO-8601 value.", nameof(scheduledAt)); + } + + scheduled = parsed; + } + + var request = new PolicyActivationRequest( + runNow, + scheduled, + NormalizePolicyPriority(priority), + rollback, + string.IsNullOrWhiteSpace(incidentId) ? null : incidentId.Trim(), + string.IsNullOrWhiteSpace(note) ? null : note.Trim()); + + activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); + activity?.SetTag("stellaops.cli.policy_version", version); + if (request.RunNow) + { + activity?.SetTag("stellaops.cli.policy_run_now", true); + } + + if (request.ScheduledAt.HasValue) + { + activity?.SetTag("stellaops.cli.policy_scheduled_at", request.ScheduledAt.Value.ToString("o", CultureInfo.InvariantCulture)); + } + + if (!string.IsNullOrWhiteSpace(request.Priority)) + { + activity?.SetTag("stellaops.cli.policy_priority", request.Priority); + } + + if (request.Rollback) + { + activity?.SetTag("stellaops.cli.policy_rollback", true); + } + + var result = await client.ActivatePolicyRevisionAsync(normalizedPolicyId, version, request, cancellationToken).ConfigureAwait(false); + + var outcome = NormalizePolicyActivationOutcome(result.Status); + CliMetrics.RecordPolicyActivation(outcome); + RenderPolicyActivationResult(result, request); + + var exitCode = DeterminePolicyActivationExitCode(outcome); + Environment.ExitCode = exitCode; + + if (exitCode == 0) + { + logger.LogInformation("Policy {PolicyId} v{Version} activation status: {Status}.", result.Revision.PolicyId, result.Revision.Version, outcome); + } + else + { + logger.LogWarning("Policy {PolicyId} v{Version} requires additional approval (status: {Status}).", result.Revision.PolicyId, result.Revision.Version, outcome); + } + } + catch 
(ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicyActivation("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicyActivationFailure(ex, logger); + } + catch (Exception ex) + { + logger.LogError(ex, "Policy activation failed."); + CliMetrics.RecordPolicyActivation("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicySimulateAsync( + IServiceProvider services, + string policyId, + int? baseVersion, + int? candidateVersion, + IReadOnlyList sbomArguments, + IReadOnlyList environmentArguments, + string? format, + string? outputPath, + bool explain, + bool failOnDiff, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-simulate"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.simulate", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "policy simulate"); + activity?.SetTag("stellaops.cli.policy_id", policyId); + if (baseVersion.HasValue) + { + activity?.SetTag("stellaops.cli.base_version", baseVersion.Value); + } + if (candidateVersion.HasValue) + { + activity?.SetTag("stellaops.cli.candidate_version", candidateVersion.Value); + } + using var duration = CliMetrics.MeasureCommandDuration("policy simulate"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + var normalizedPolicyId = policyId.Trim(); + var sbomSet = NormalizePolicySbomSet(sbomArguments); + var environment = ParsePolicyEnvironment(environmentArguments); + + var input = new PolicySimulationInput( + baseVersion, + candidateVersion, + sbomSet, + environment, + explain); + + var result = await client.SimulatePolicyAsync(normalizedPolicyId, input, cancellationToken).ConfigureAwait(false); + + activity?.SetTag("stellaops.cli.diff_added", result.Diff.Added); + activity?.SetTag("stellaops.cli.diff_removed", result.Diff.Removed); + if (result.Diff.BySeverity.Count > 0) + { + activity?.SetTag("stellaops.cli.severity_buckets", result.Diff.BySeverity.Count); + } + + var outputFormat = DeterminePolicySimulationFormat(format, outputPath); + var payload = BuildPolicySimulationPayload(normalizedPolicyId, baseVersion, candidateVersion, sbomSet, environment, result); + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + await WriteSimulationOutputAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Simulation results written to {Path}.", Path.GetFullPath(outputPath!)); + } + + RenderPolicySimulationResult(logger, payload, result, outputFormat); + + var exitCode = DetermineSimulationExitCode(result, failOnDiff); + 
Environment.ExitCode = exitCode; + + var outcome = exitCode == 20 + ? "diff_blocked" + : (result.Diff.Added + result.Diff.Removed) > 0 ? "diff" : "clean"; + CliMetrics.RecordPolicySimulation(outcome); + + if (exitCode == 20) + { + logger.LogWarning("Differences detected; exiting with code 20 due to --fail-on-diff."); + } + + if (!string.IsNullOrWhiteSpace(result.ExplainUri)) + { + activity?.SetTag("stellaops.cli.explain_uri", result.ExplainUri); + } + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicySimulation("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicySimulationFailure(ex, logger); + } + catch (Exception ex) + { + logger.LogError(ex, "Policy simulation failed."); + CliMetrics.RecordPolicySimulation("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleOfflineKitImportAsync( + IServiceProvider services, + string bundlePath, + string? manifestPath, + string? bundleSignaturePath, + string? manifestSignaturePath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var options = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-import"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.import", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("offline kit import"); + + try + { + if (string.IsNullOrWhiteSpace(bundlePath)) + { + logger.LogError("Bundle path is required."); + Environment.ExitCode = 1; + return; + } + + bundlePath = Path.GetFullPath(bundlePath); + if (!File.Exists(bundlePath)) + { + logger.LogError("Bundle file {Path} not found.", bundlePath); + Environment.ExitCode = 1; + return; + } + + var metadata = await LoadOfflineKitMetadataAsync(bundlePath, cancellationToken).ConfigureAwait(false); + if (metadata is not null) + { + manifestPath ??= metadata.ManifestPath; + bundleSignaturePath ??= metadata.BundleSignaturePath; + manifestSignaturePath ??= metadata.ManifestSignaturePath; + } + + manifestPath = NormalizeFilePath(manifestPath); + bundleSignaturePath = NormalizeFilePath(bundleSignaturePath); + manifestSignaturePath = NormalizeFilePath(manifestSignaturePath); + + if (manifestPath is null) + { + manifestPath = TryInferManifestPath(bundlePath); + if (manifestPath is not null) + { + logger.LogDebug("Using inferred manifest path {Path}.", manifestPath); + } + } + + if (manifestPath is not null && !File.Exists(manifestPath)) + { + logger.LogError("Manifest file {Path} not found.", manifestPath); + Environment.ExitCode = 1; + return; + } + + if (bundleSignaturePath is not null && !File.Exists(bundleSignaturePath)) + { + logger.LogWarning("Bundle signature {Path} not found; skipping.", bundleSignaturePath); + bundleSignaturePath = null; + } + + if (manifestSignaturePath is not null && !File.Exists(manifestSignaturePath)) + { + logger.LogWarning("Manifest signature {Path} not found; skipping.", manifestSignaturePath); + manifestSignaturePath = null; + } + + if (metadata is not null) + { + var computedBundleDigest = await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false); 
+ if (!DigestsEqual(computedBundleDigest, metadata.BundleSha256)) + { + logger.LogError("Bundle digest mismatch. Expected sha256:{Expected} but computed sha256:{Actual}.", metadata.BundleSha256, computedBundleDigest); + Environment.ExitCode = 1; + return; + } + + if (manifestPath is not null) + { + var computedManifestDigest = await ComputeSha256Async(manifestPath, cancellationToken).ConfigureAwait(false); + if (!DigestsEqual(computedManifestDigest, metadata.ManifestSha256)) + { + logger.LogError("Manifest digest mismatch. Expected sha256:{Expected} but computed sha256:{Actual}.", metadata.ManifestSha256, computedManifestDigest); + Environment.ExitCode = 1; + return; + } + } + } + + var request = new OfflineKitImportRequest( + bundlePath, + manifestPath, + bundleSignaturePath, + manifestSignaturePath, + metadata?.BundleId, + metadata?.BundleSha256, + metadata?.BundleSize, + metadata?.CapturedAt, + metadata?.Channel, + metadata?.Kind, + metadata?.IsDelta, + metadata?.BaseBundleId, + metadata?.ManifestSha256, + metadata?.ManifestSize); + + var result = await client.ImportOfflineKitAsync(request, cancellationToken).ConfigureAwait(false); + CliMetrics.RecordOfflineKitImport(result.Status); + + logger.LogInformation( + "Import {ImportId} submitted at {Submitted:u} with status {Status}.", + string.IsNullOrWhiteSpace(result.ImportId) ? "" : result.ImportId, + result.SubmittedAt, + string.IsNullOrWhiteSpace(result.Status) ? 
"queued" : result.Status); + + if (!string.IsNullOrWhiteSpace(result.Message)) + { + logger.LogInformation(result.Message); + } + + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Offline kit import failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleOfflineKitStatusAsync( + IServiceProvider services, + bool asJson, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-status"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.status", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("offline kit status"); + + try + { + var status = await client.GetOfflineKitStatusAsync(cancellationToken).ConfigureAwait(false); + + if (asJson) + { + var payload = new + { + bundleId = status.BundleId, + channel = status.Channel, + kind = status.Kind, + isDelta = status.IsDelta, + baseBundleId = status.BaseBundleId, + capturedAt = status.CapturedAt, + importedAt = status.ImportedAt, + sha256 = status.BundleSha256, + sizeBytes = status.BundleSize, + components = status.Components.Select(component => new + { + component.Name, + component.Version, + component.Digest, + component.CapturedAt, + component.SizeBytes + }) + }; + + var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true }); + Console.WriteLine(json); + } + else + { + if (string.IsNullOrWhiteSpace(status.BundleId)) + { + logger.LogInformation("No offline kit bundle has been imported yet."); + } + else 
+ { + logger.LogInformation( + "Current bundle {BundleId} ({Kind}) captured {Captured:u}, imported {Imported:u}, sha256:{Digest}, size {Size}.", + status.BundleId, + status.Kind ?? "unknown", + status.CapturedAt ?? default, + status.ImportedAt ?? default, + status.BundleSha256 ?? "", + status.BundleSize.HasValue ? status.BundleSize.Value.ToString("N0", CultureInfo.InvariantCulture) : ""); + } + + if (status.Components.Count > 0) + { + var table = new Table().AddColumns("Component", "Version", "Digest", "Captured", "Size (bytes)"); + foreach (var component in status.Components) + { + table.AddRow( + component.Name, + string.IsNullOrWhiteSpace(component.Version) ? "-" : component.Version!, + string.IsNullOrWhiteSpace(component.Digest) ? "-" : $"sha256:{component.Digest}", + component.CapturedAt?.ToString("u", CultureInfo.InvariantCulture) ?? "-", + component.SizeBytes.HasValue ? component.SizeBytes.Value.ToString("N0", CultureInfo.InvariantCulture) : "-"); + } + + AnsiConsole.Write(table); + } + } + + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to read offline kit status."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static async Task LoadOfflineKitMetadataAsync(string bundlePath, CancellationToken cancellationToken) + { + var metadataPath = bundlePath + ".metadata.json"; + if (!File.Exists(metadataPath)) + { + return null; + } + + try + { + await using var stream = File.OpenRead(metadataPath); + return await JsonSerializer.DeserializeAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch + { + return null; + } + } + + private static string? NormalizeFilePath(string? path) + { + if (string.IsNullOrWhiteSpace(path)) + { + return null; + } + + return Path.GetFullPath(path); + } + + private static string? 
TryInferManifestPath(string bundlePath) + { + var directory = Path.GetDirectoryName(bundlePath); + if (string.IsNullOrWhiteSpace(directory)) + { + return null; + } + + var baseName = Path.GetFileName(bundlePath); + if (string.IsNullOrWhiteSpace(baseName)) + { + return null; + } + + baseName = Path.GetFileNameWithoutExtension(baseName); + if (baseName.EndsWith(".tar", StringComparison.OrdinalIgnoreCase)) + { + baseName = Path.GetFileNameWithoutExtension(baseName); + } + + var candidates = new[] + { + Path.Combine(directory, $"offline-manifest-{baseName}.json"), + Path.Combine(directory, "offline-manifest.json") + }; + + foreach (var candidate in candidates) + { + if (File.Exists(candidate)) + { + return Path.GetFullPath(candidate); + } + } + + return Directory.EnumerateFiles(directory, "offline-manifest*.json").FirstOrDefault(); + } + + private static bool DigestsEqual(string computed, string? expected) + { + if (string.IsNullOrWhiteSpace(expected)) + { + return true; + } + + return string.Equals(NormalizeDigest(computed), NormalizeDigest(expected), StringComparison.OrdinalIgnoreCase); + } + + private static string NormalizeDigest(string digest) + { + var value = digest.Trim(); + if (value.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) + { + value = value.Substring("sha256:".Length); + } + + return value.ToLowerInvariant(); + } + + private static async Task ComputeSha256Async(string path, CancellationToken cancellationToken) + { + await using var stream = File.OpenRead(path); + var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + private static bool TryParseDetachedJws(string value, out string encodedHeader, out string encodedSignature) + { + encodedHeader = string.Empty; + encodedSignature = string.Empty; + + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + var parts = value.Split('.'); + if (parts.Length != 3) + { + return false; + } + + 
encodedHeader = parts[0]; + encodedSignature = parts[2]; + return parts[1].Length == 0; + } + + private static byte[] Base64UrlDecode(string value) + { + var normalized = value.Replace('-', '+').Replace('_', '/'); + var padding = normalized.Length % 4; + if (padding == 2) + { + normalized += "=="; + } + else if (padding == 3) + { + normalized += "="; + } + else if (padding == 1) + { + throw new FormatException("Invalid Base64Url value."); + } + + return Convert.FromBase64String(normalized); + } + + private static CryptoSigningKey CreateVerificationSigningKey( + string keyId, + string algorithm, + string? providerHint, + string keyPem, + string keyPath) + { + if (string.IsNullOrWhiteSpace(keyPem)) + { + throw new InvalidOperationException("Verification key PEM content is empty."); + } + + using var ecdsa = ECDsa.Create(); + ecdsa.ImportFromPem(keyPem); + + var parameters = ecdsa.ExportParameters(includePrivateParameters: false); + if (parameters.D is null || parameters.D.Length == 0) + { + parameters.D = new byte[] { 0x01 }; + } + + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["source"] = Path.GetFullPath(keyPath), + ["verificationOnly"] = "true" + }; + + return new CryptoSigningKey( + new CryptoKeyReference(keyId, providerHint), + algorithm, + in parameters, + DateTimeOffset.UtcNow, + metadata: metadata); + } + + private static string FormatDuration(TimeSpan duration) + { + if (duration <= TimeSpan.Zero) + { + return "expired"; + } + + if (duration.TotalDays >= 1) + { + var days = (int)duration.TotalDays; + var hours = duration.Hours; + return hours > 0 + ? 
FormattableString.Invariant($"{days}d {hours}h") + : FormattableString.Invariant($"{days}d"); + } + + if (duration.TotalHours >= 1) + { + return FormattableString.Invariant($"{(int)duration.TotalHours}h {duration.Minutes}m"); + } + + if (duration.TotalMinutes >= 1) + { + return FormattableString.Invariant($"{(int)duration.TotalMinutes}m {duration.Seconds}s"); + } + + return FormattableString.Invariant($"{duration.Seconds}s"); + } + + private static bool TryExtractJwtClaims( + string accessToken, + out Dictionary claims, + out DateTimeOffset? issuedAt, + out DateTimeOffset? notBefore) + { + claims = new Dictionary(StringComparer.OrdinalIgnoreCase); + issuedAt = null; + notBefore = null; + + if (string.IsNullOrWhiteSpace(accessToken)) + { + return false; + } + + var parts = accessToken.Split('.'); + if (parts.Length < 2) + { + return false; + } + + if (!TryDecodeBase64Url(parts[1], out var payloadBytes)) + { + return false; + } + + try + { + using var document = JsonDocument.Parse(payloadBytes); + foreach (var property in document.RootElement.EnumerateObject()) + { + var value = FormatJsonValue(property.Value); + claims[property.Name] = value; + + if (issuedAt is null && property.NameEquals("iat") && TryParseUnixSeconds(property.Value, out var parsedIat)) + { + issuedAt = parsedIat; + } + + if (notBefore is null && property.NameEquals("nbf") && TryParseUnixSeconds(property.Value, out var parsedNbf)) + { + notBefore = parsedNbf; + } + } + + return true; + } + catch (JsonException) + { + claims.Clear(); + issuedAt = null; + notBefore = null; + return false; + } + } + + private static bool TryDecodeBase64Url(string value, out byte[] bytes) + { + bytes = Array.Empty(); + + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + var normalized = value.Replace('-', '+').Replace('_', '/'); + var padding = normalized.Length % 4; + if (padding is 2 or 3) + { + normalized = normalized.PadRight(normalized.Length + (4 - padding), '='); + } + else if (padding == 1) + { 
+ return false; + } + + try + { + bytes = Convert.FromBase64String(normalized); + return true; + } + catch (FormatException) + { + return false; + } + } + + private static string FormatJsonValue(JsonElement element) + { + return element.ValueKind switch + { + JsonValueKind.String => element.GetString() ?? string.Empty, + JsonValueKind.Number => element.TryGetInt64(out var longValue) + ? longValue.ToString(CultureInfo.InvariantCulture) + : element.GetDouble().ToString(CultureInfo.InvariantCulture), + JsonValueKind.True => "true", + JsonValueKind.False => "false", + JsonValueKind.Null => "null", + JsonValueKind.Array => FormatArray(element), + JsonValueKind.Object => element.GetRawText(), + _ => element.GetRawText() + }; + } + + private static string FormatArray(JsonElement array) + { + var values = new List(); + foreach (var item in array.EnumerateArray()) + { + values.Add(FormatJsonValue(item)); + } + + return string.Join(", ", values); + } + + private static bool TryParseUnixSeconds(JsonElement element, out DateTimeOffset value) + { + value = default; + + if (element.ValueKind == JsonValueKind.Number) + { + if (element.TryGetInt64(out var seconds)) + { + value = DateTimeOffset.FromUnixTimeSeconds(seconds); + return true; + } + + if (element.TryGetDouble(out var doubleValue)) + { + value = DateTimeOffset.FromUnixTimeSeconds((long)doubleValue); + return true; + } + } + + if (element.ValueKind == JsonValueKind.String) + { + var text = element.GetString(); + if (!string.IsNullOrWhiteSpace(text) && long.TryParse(text, NumberStyles.Integer, CultureInfo.InvariantCulture, out var seconds)) + { + value = DateTimeOffset.FromUnixTimeSeconds(seconds); + return true; + } + } + + return false; + } + + private static List CollectAdditionalClaims(Dictionary claims) + { + var result = new List(); + foreach (var pair in claims) + { + if (CommonClaimNames.Contains(pair.Key)) + { + continue; + } + + result.Add(FormattableString.Invariant($"{pair.Key}={pair.Value}")); + } + + 
result.Sort(StringComparer.OrdinalIgnoreCase); + return result; + } + + private static readonly HashSet CommonClaimNames = new(StringComparer.OrdinalIgnoreCase) + { + "aud", + "client_id", + "exp", + "iat", + "iss", + "nbf", + "scope", + "scopes", + "sub", + "token_type", + "jti" + }; + + private static async Task ExecuteExcititorCommandAsync( + IServiceProvider services, + string commandName, + bool verbose, + IDictionary? activityTags, + Func> operation, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger(commandName.Replace(' ', '-')); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity($"cli.{commandName.Replace(' ', '.')}" , ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", commandName); + if (activityTags is not null) + { + foreach (var tag in activityTags) + { + activity?.SetTag(tag.Key, tag.Value); + } + } + using var duration = CliMetrics.MeasureCommandDuration(commandName); + + try + { + var result = await operation(client).ConfigureAwait(false); + if (result.Success) + { + if (!string.IsNullOrWhiteSpace(result.Message)) + { + logger.LogInformation(result.Message); + } + else + { + logger.LogInformation("Operation completed successfully."); + } + + if (!string.IsNullOrWhiteSpace(result.Location)) + { + logger.LogInformation("Location: {Location}", result.Location); + } + + if (result.Payload is JsonElement payload && payload.ValueKind is not JsonValueKind.Undefined and not JsonValueKind.Null) + { + logger.LogDebug("Response payload: {Payload}", payload.ToString()); + } + + Environment.ExitCode = 0; + } + else + { + logger.LogError(string.IsNullOrWhiteSpace(result.Message) ? 
"Operation failed." : result.Message); + Environment.ExitCode = 1; + } + } + catch (Exception ex) + { + logger.LogError(ex, "Excititor operation failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static async Task> GatherImageDigestsAsync( + IReadOnlyList inline, + string? filePath, + CancellationToken cancellationToken) + { + var results = new List(); + var seen = new HashSet(StringComparer.Ordinal); + + void AddCandidates(string? candidate) + { + foreach (var image in SplitImageCandidates(candidate)) + { + if (seen.Add(image)) + { + results.Add(image); + } + } + } + + if (inline is not null) + { + foreach (var entry in inline) + { + AddCandidates(entry); + } + } + + if (!string.IsNullOrWhiteSpace(filePath)) + { + var path = Path.GetFullPath(filePath); + if (!File.Exists(path)) + { + throw new FileNotFoundException("Input file not found.", path); + } + + foreach (var line in File.ReadLines(path)) + { + cancellationToken.ThrowIfCancellationRequested(); + AddCandidates(line); + } + } + + if (Console.IsInputRedirected) + { + while (!cancellationToken.IsCancellationRequested) + { + var line = await Console.In.ReadLineAsync().ConfigureAwait(false); + if (line is null) + { + break; + } + + AddCandidates(line); + } + } + + return new ReadOnlyCollection(results); + } + + private static IEnumerable SplitImageCandidates(string? 
raw) + { + if (string.IsNullOrWhiteSpace(raw)) + { + yield break; + } + + var candidate = raw.Trim(); + var commentIndex = candidate.IndexOf('#'); + if (commentIndex >= 0) + { + candidate = candidate[..commentIndex].Trim(); + } + + if (candidate.Length == 0) + { + yield break; + } + + var tokens = candidate.Split(new[] { ',', ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries); + foreach (var token in tokens) + { + var trimmed = token.Trim(); + if (trimmed.Length > 0) + { + yield return trimmed; + } + } + } + + private static IReadOnlyDictionary ParseLabelSelectors(IReadOnlyList labelArguments) + { + if (labelArguments is null || labelArguments.Count == 0) + { + return EmptyLabelSelectors; + } + + var labels = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var raw in labelArguments) + { + if (string.IsNullOrWhiteSpace(raw)) + { + continue; + } + + var trimmed = raw.Trim(); + var delimiter = trimmed.IndexOf('='); + if (delimiter <= 0 || delimiter == trimmed.Length - 1) + { + throw new ArgumentException($"Invalid label '{raw}'. Expected key=value format."); + } + + var key = trimmed[..delimiter].Trim(); + var value = trimmed[(delimiter + 1)..].Trim(); + if (key.Length == 0) + { + throw new ArgumentException($"Invalid label '{raw}'. Label key cannot be empty."); + } + + labels[key] = value; + } + + return labels.Count == 0 ? EmptyLabelSelectors : new ReadOnlyDictionary(labels); + } + + private sealed record ExcititorExportManifestSummary( + string ExportId, + string? Format, + string? Algorithm, + string? Digest, + long? SizeBytes, + bool? FromCache, + DateTimeOffset? CreatedAt, + string? RekorLocation, + string? RekorIndex, + string? RekorInclusionUrl); + + private static ExcititorExportManifestSummary? TryParseExportManifest(JsonElement? 
payload) + { + if (payload is null || payload.Value.ValueKind is JsonValueKind.Undefined or JsonValueKind.Null) + { + return null; + } + + var element = payload.Value; + var exportId = GetStringProperty(element, "exportId"); + if (string.IsNullOrWhiteSpace(exportId)) + { + return null; + } + + var format = GetStringProperty(element, "format"); + var algorithm = default(string?); + var digest = default(string?); + + if (TryGetPropertyCaseInsensitive(element, "artifact", out var artifact) && artifact.ValueKind == JsonValueKind.Object) + { + algorithm = GetStringProperty(artifact, "algorithm"); + digest = GetStringProperty(artifact, "digest"); + } + + var sizeBytes = GetInt64Property(element, "sizeBytes"); + var fromCache = GetBooleanProperty(element, "fromCache"); + var createdAt = GetDateTimeOffsetProperty(element, "createdAt"); + + string? rekorLocation = null; + string? rekorIndex = null; + string? rekorInclusion = null; + + if (TryGetPropertyCaseInsensitive(element, "attestation", out var attestation) && attestation.ValueKind == JsonValueKind.Object) + { + if (TryGetPropertyCaseInsensitive(attestation, "rekor", out var rekor) && rekor.ValueKind == JsonValueKind.Object) + { + rekorLocation = GetStringProperty(rekor, "location"); + rekorIndex = GetStringProperty(rekor, "logIndex"); + var inclusion = GetStringProperty(rekor, "inclusionProofUri"); + if (!string.IsNullOrWhiteSpace(inclusion)) + { + rekorInclusion = inclusion; + } + } + } + + return new ExcititorExportManifestSummary( + exportId.Trim(), + format, + algorithm, + digest, + sizeBytes, + fromCache, + createdAt, + rekorLocation, + rekorIndex, + rekorInclusion); + } + + private static bool TryGetPropertyCaseInsensitive(JsonElement element, string propertyName, out JsonElement property) + { + if (element.ValueKind == JsonValueKind.Object && element.TryGetProperty(propertyName, out property)) + { + return true; + } + + if (element.ValueKind == JsonValueKind.Object) + { + foreach (var candidate in 
element.EnumerateObject()) + { + if (string.Equals(candidate.Name, propertyName, StringComparison.OrdinalIgnoreCase)) + { + property = candidate.Value; + return true; + } + } + } + + property = default; + return false; + } + + private static string? GetStringProperty(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) + { + return property.ValueKind switch + { + JsonValueKind.String => property.GetString(), + JsonValueKind.Number => property.ToString(), + _ => null + }; + } + + return null; + } + + private static bool? GetBooleanProperty(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) + { + return property.ValueKind switch + { + JsonValueKind.True => true, + JsonValueKind.False => false, + JsonValueKind.String when bool.TryParse(property.GetString(), out var parsed) => parsed, + _ => null + }; + } + + return null; + } + + private static long? GetInt64Property(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) + { + if (property.ValueKind == JsonValueKind.Number && property.TryGetInt64(out var value)) + { + return value; + } + + if (property.ValueKind == JsonValueKind.String + && long.TryParse(property.GetString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) + { + return parsed; + } + } + + return null; + } + + private static DateTimeOffset? GetDateTimeOffsetProperty(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property) + && property.ValueKind == JsonValueKind.String + && DateTimeOffset.TryParse(property.GetString(), CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var value)) + { + return value.ToUniversalTime(); + } + + return null; + } + + private static string BuildDigestDisplay(string? 
algorithm, string digest) + { + if (string.IsNullOrWhiteSpace(digest)) + { + return string.Empty; + } + + if (digest.Contains(':', StringComparison.Ordinal)) + { + return digest; + } + + if (string.IsNullOrWhiteSpace(algorithm) || algorithm.Equals("sha256", StringComparison.OrdinalIgnoreCase)) + { + return $"sha256:{digest}"; + } + + return $"{algorithm}:{digest}"; + } + + private static string FormatSize(long sizeBytes) + { + if (sizeBytes < 0) + { + return $"{sizeBytes} bytes"; + } + + string[] units = { "bytes", "KB", "MB", "GB", "TB" }; + double size = sizeBytes; + var unit = 0; + + while (size >= 1024 && unit < units.Length - 1) + { + size /= 1024; + unit++; + } + + return unit == 0 ? $"{sizeBytes} bytes" : $"{size:0.##} {units[unit]}"; + } + + private static string ResolveExportOutputPath(string outputPath, ExcititorExportManifestSummary manifest) + { + if (string.IsNullOrWhiteSpace(outputPath)) + { + throw new ArgumentException("Output path must be provided.", nameof(outputPath)); + } + + var fullPath = Path.GetFullPath(outputPath); + if (Directory.Exists(fullPath) + || outputPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal) + || outputPath.EndsWith(Path.AltDirectorySeparatorChar.ToString(), StringComparison.Ordinal)) + { + return Path.Combine(fullPath, BuildExportFileName(manifest)); + } + + var directory = Path.GetDirectoryName(fullPath); + if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory)) + { + Directory.CreateDirectory(directory); + } + + return fullPath; + } + + private static string BuildExportFileName(ExcititorExportManifestSummary manifest) + { + var token = !string.IsNullOrWhiteSpace(manifest.Digest) + ? manifest.Digest! 
+ : manifest.ExportId; + + token = SanitizeToken(token); + if (token.Length > 40) + { + token = token[..40]; + } + + var extension = DetermineExportExtension(manifest.Format); + return $"stellaops-excititor-{token}{extension}"; + } + + private static string DetermineExportExtension(string? format) + { + if (string.IsNullOrWhiteSpace(format)) + { + return ".bin"; + } + + return format switch + { + not null when format.Equals("jsonl", StringComparison.OrdinalIgnoreCase) => ".jsonl", + not null when format.Equals("json", StringComparison.OrdinalIgnoreCase) => ".json", + not null when format.Equals("openvex", StringComparison.OrdinalIgnoreCase) => ".json", + not null when format.Equals("csaf", StringComparison.OrdinalIgnoreCase) => ".json", + _ => ".bin" + }; + } + + private static string SanitizeToken(string token) + { + var builder = new StringBuilder(token.Length); + foreach (var ch in token) + { + if (char.IsLetterOrDigit(ch)) + { + builder.Append(char.ToLowerInvariant(ch)); + } + } + + if (builder.Length == 0) + { + builder.Append("export"); + } + + return builder.ToString(); + } + + private static string? 
ResolveLocationUrl(StellaOpsCliOptions options, string location) + { + if (string.IsNullOrWhiteSpace(location)) + { + return null; + } + + if (Uri.TryCreate(location, UriKind.Absolute, out var absolute)) + { + return absolute.ToString(); + } + + if (!string.IsNullOrWhiteSpace(options?.BackendUrl) && Uri.TryCreate(options.BackendUrl, UriKind.Absolute, out var baseUri)) + { + if (!location.StartsWith("/", StringComparison.Ordinal)) + { + location = "/" + location; + } + + return new Uri(baseUri, location).ToString(); + } + + return location; + } + + private static string BuildRuntimePolicyJson(RuntimePolicyEvaluationResult result, IReadOnlyList requestedImages) + { + var orderedImages = BuildImageOrder(requestedImages, result.Decisions.Keys); + var results = new Dictionary(StringComparer.Ordinal); + + foreach (var image in orderedImages) + { + if (result.Decisions.TryGetValue(image, out var decision)) + { + results[image] = BuildDecisionMap(decision); + } + } + + var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + var payload = new Dictionary(StringComparer.Ordinal) + { + ["ttlSeconds"] = result.TtlSeconds, + ["expiresAtUtc"] = result.ExpiresAtUtc?.ToString("O", CultureInfo.InvariantCulture), + ["policyRevision"] = result.PolicyRevision, + ["results"] = results + }; + + return JsonSerializer.Serialize(payload, options); + } + + private static IDictionary BuildDecisionMap(RuntimePolicyImageDecision decision) + { + var map = new Dictionary(StringComparer.Ordinal) + { + ["policyVerdict"] = decision.PolicyVerdict, + ["signed"] = decision.Signed, + ["hasSbomReferrers"] = decision.HasSbomReferrers + }; + + if (decision.Reasons.Count > 0) + { + map["reasons"] = decision.Reasons; + } + + if (decision.Rekor is not null) + { + var rekorMap = new Dictionary(StringComparer.Ordinal); + if (!string.IsNullOrWhiteSpace(decision.Rekor.Uuid)) + { + rekorMap["uuid"] = 
decision.Rekor.Uuid; + } + + if (!string.IsNullOrWhiteSpace(decision.Rekor.Url)) + { + rekorMap["url"] = decision.Rekor.Url; + } + + if (decision.Rekor.Verified.HasValue) + { + rekorMap["verified"] = decision.Rekor.Verified; + } + + if (rekorMap.Count > 0) + { + map["rekor"] = rekorMap; + } + } + + foreach (var kvp in decision.AdditionalProperties) + { + map[kvp.Key] = kvp.Value; + } + + return map; + } + + private static void DisplayRuntimePolicyResults(ILogger logger, RuntimePolicyEvaluationResult result, IReadOnlyList requestedImages) + { + var orderedImages = BuildImageOrder(requestedImages, result.Decisions.Keys); + var summary = new Dictionary(StringComparer.OrdinalIgnoreCase); + + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table().Border(TableBorder.Rounded) + .AddColumns("Image", "Verdict", "Signed", "SBOM Ref", "Quieted", "Confidence", "Reasons", "Attestation"); + + foreach (var image in orderedImages) + { + if (result.Decisions.TryGetValue(image, out var decision)) + { + table.AddRow( + image, + decision.PolicyVerdict, + FormatBoolean(decision.Signed), + FormatBoolean(decision.HasSbomReferrers), + FormatQuietedDisplay(decision.AdditionalProperties), + FormatConfidenceDisplay(decision.AdditionalProperties), + decision.Reasons.Count > 0 ? string.Join(Environment.NewLine, decision.Reasons) : "-", + FormatAttestation(decision.Rekor)); + + summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? 
count + 1 : 1; + + if (decision.AdditionalProperties.Count > 0) + { + var metadata = string.Join(", ", decision.AdditionalProperties.Select(kvp => $"{kvp.Key}={FormatAdditionalValue(kvp.Value)}")); + logger.LogDebug("Metadata for {Image}: {Metadata}", image, metadata); + } + } + else + { + table.AddRow(image, "", "-", "-", "-", "-", "-", "-"); + } + } + + AnsiConsole.Write(table); + } + else + { + foreach (var image in orderedImages) + { + if (result.Decisions.TryGetValue(image, out var decision)) + { + var reasons = decision.Reasons.Count > 0 ? string.Join(", ", decision.Reasons) : "none"; + logger.LogInformation( + "{Image} -> verdict={Verdict} signed={Signed} sbomRef={Sbom} quieted={Quieted} confidence={Confidence} attestation={Attestation} reasons={Reasons}", + image, + decision.PolicyVerdict, + FormatBoolean(decision.Signed), + FormatBoolean(decision.HasSbomReferrers), + FormatQuietedDisplay(decision.AdditionalProperties), + FormatConfidenceDisplay(decision.AdditionalProperties), + FormatAttestation(decision.Rekor), + reasons); + + summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? 
count + 1 : 1; + + if (decision.AdditionalProperties.Count > 0) + { + var metadata = string.Join(", ", decision.AdditionalProperties.Select(kvp => $"{kvp.Key}={FormatAdditionalValue(kvp.Value)}")); + logger.LogDebug("Metadata for {Image}: {Metadata}", image, metadata); + } + } + else + { + logger.LogWarning("{Image} -> no decision returned by backend.", image); + } + } + } + + if (summary.Count > 0) + { + var summaryText = string.Join(", ", summary.Select(kvp => $"{kvp.Key}:{kvp.Value}")); + logger.LogInformation("Verdict summary: {Summary}", summaryText); + } + } + + private static IReadOnlyList BuildImageOrder(IReadOnlyList requestedImages, IEnumerable actual) + { + var order = new List(); + var seen = new HashSet(StringComparer.Ordinal); + + if (requestedImages is not null) + { + foreach (var image in requestedImages) + { + if (!string.IsNullOrWhiteSpace(image)) + { + var trimmed = image.Trim(); + if (seen.Add(trimmed)) + { + order.Add(trimmed); + } + } + } + } + + foreach (var image in actual) + { + if (!string.IsNullOrWhiteSpace(image)) + { + var trimmed = image.Trim(); + if (seen.Add(trimmed)) + { + order.Add(trimmed); + } + } + } + + return new ReadOnlyCollection(order); + } + + private static string FormatBoolean(bool? value) + => value is null ? "unknown" : value.Value ? "yes" : "no"; + + private static string FormatQuietedDisplay(IReadOnlyDictionary metadata) + { + var quieted = GetMetadataBoolean(metadata, "quieted", "quiet"); + var quietedBy = GetMetadataString(metadata, "quietedBy", "quietedReason"); + + if (quieted is true) + { + return string.IsNullOrWhiteSpace(quietedBy) ? "yes" : $"yes ({quietedBy})"; + } + + if (quieted is false) + { + return "no"; + } + + return string.IsNullOrWhiteSpace(quietedBy) ? "-" : $"? 
({quietedBy})"; + } + + private static string FormatConfidenceDisplay(IReadOnlyDictionary metadata) + { + var confidence = GetMetadataDouble(metadata, "confidence"); + var confidenceBand = GetMetadataString(metadata, "confidenceBand", "confidenceTier"); + + if (confidence.HasValue && !string.IsNullOrWhiteSpace(confidenceBand)) + { + return string.Format(CultureInfo.InvariantCulture, "{0:0.###} ({1})", confidence.Value, confidenceBand); + } + + if (confidence.HasValue) + { + return confidence.Value.ToString("0.###", CultureInfo.InvariantCulture); + } + + if (!string.IsNullOrWhiteSpace(confidenceBand)) + { + return confidenceBand!; + } + + return "-"; + } + + private static string FormatAttestation(RuntimePolicyRekorReference? rekor) + { + if (rekor is null) + { + return "-"; + } + + var uuid = string.IsNullOrWhiteSpace(rekor.Uuid) ? null : rekor.Uuid; + var url = string.IsNullOrWhiteSpace(rekor.Url) ? null : rekor.Url; + var verified = rekor.Verified; + + var core = uuid ?? url; + if (!string.IsNullOrEmpty(core)) + { + if (verified.HasValue) + { + var suffix = verified.Value ? " (verified)" : " (unverified)"; + return core + suffix; + } + + return core!; + } + + if (verified.HasValue) + { + return verified.Value ? "verified" : "unverified"; + } + + return "-"; + } + + private static bool? GetMetadataBoolean(IReadOnlyDictionary metadata, params string[] keys) + { + foreach (var key in keys) + { + if (metadata.TryGetValue(key, out var value) && value is not null) + { + switch (value) + { + case bool b: + return b; + case string s when bool.TryParse(s, out var parsed): + return parsed; + } + } + } + + return null; + } + + private static string? GetMetadataString(IReadOnlyDictionary metadata, params string[] keys) + { + foreach (var key in keys) + { + if (metadata.TryGetValue(key, out var value) && value is not null) + { + if (value is string s) + { + return string.IsNullOrWhiteSpace(s) ? null : s; + } + } + } + + return null; + } + + private static double? 
GetMetadataDouble(IReadOnlyDictionary metadata, params string[] keys) + { + foreach (var key in keys) + { + if (metadata.TryGetValue(key, out var value) && value is not null) + { + switch (value) + { + case double d: + return d; + case float f: + return f; + case decimal m: + return (double)m; + case long l: + return l; + case int i: + return i; + case string s when double.TryParse(s, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var parsed): + return parsed; + } + } + } + + return null; + } + + private static TaskRunnerSimulationOutputFormat DetermineTaskRunnerSimulationFormat(string? value, string? outputPath) + { + if (!string.IsNullOrWhiteSpace(value)) + { + return value.Trim().ToLowerInvariant() switch + { + "table" => TaskRunnerSimulationOutputFormat.Table, + "json" => TaskRunnerSimulationOutputFormat.Json, + _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.") + }; + } + if (!string.IsNullOrWhiteSpace(outputPath)) { return TaskRunnerSimulationOutputFormat.Json; } - - return TaskRunnerSimulationOutputFormat.Table; - } - - private static object BuildTaskRunnerSimulationPayload(TaskRunnerSimulationResult result) - => new - { - planHash = result.PlanHash, - failurePolicy = new - { - result.FailurePolicy.MaxAttempts, - result.FailurePolicy.BackoffSeconds, - result.FailurePolicy.ContinueOnError - }, - hasPendingApprovals = result.HasPendingApprovals, - steps = result.Steps, - outputs = result.Outputs - }; - - private static void RenderTaskRunnerSimulationResult(TaskRunnerSimulationResult result) - { + + return TaskRunnerSimulationOutputFormat.Table; + } + + private static object BuildTaskRunnerSimulationPayload(TaskRunnerSimulationResult result) + => new + { + planHash = result.PlanHash, + failurePolicy = new + { + result.FailurePolicy.MaxAttempts, + result.FailurePolicy.BackoffSeconds, + result.FailurePolicy.ContinueOnError + }, + hasPendingApprovals = result.HasPendingApprovals, + steps = result.Steps, 
+ outputs = result.Outputs + }; + + private static void RenderTaskRunnerSimulationResult(TaskRunnerSimulationResult result) + { var console = AnsiConsole.Console; var table = new Table { Border = TableBorder.Rounded }; - table.AddColumn("Step"); - table.AddColumn("Kind"); - table.AddColumn("Status"); - table.AddColumn("Reason"); - table.AddColumn("MaxParallel"); - table.AddColumn("ContinueOnError"); - table.AddColumn("Approval"); - - foreach (var (step, depth) in FlattenTaskRunnerSimulationSteps(result.Steps)) - { - var indent = new string(' ', depth * 2); - table.AddRow( - Markup.Escape($"{indent}{step.Id}"), - Markup.Escape(step.Kind), - Markup.Escape(step.Status), - Markup.Escape(string.IsNullOrWhiteSpace(step.StatusReason) ? "-" : step.StatusReason!), - step.MaxParallel?.ToString(CultureInfo.InvariantCulture) ?? "-", - step.ContinueOnError ? "yes" : "no", - Markup.Escape(string.IsNullOrWhiteSpace(step.ApprovalId) ? "-" : step.ApprovalId!)); - } - + table.AddColumn("Step"); + table.AddColumn("Kind"); + table.AddColumn("Status"); + table.AddColumn("Reason"); + table.AddColumn("MaxParallel"); + table.AddColumn("ContinueOnError"); + table.AddColumn("Approval"); + + foreach (var (step, depth) in FlattenTaskRunnerSimulationSteps(result.Steps)) + { + var indent = new string(' ', depth * 2); + table.AddRow( + Markup.Escape($"{indent}{step.Id}"), + Markup.Escape(step.Kind), + Markup.Escape(step.Status), + Markup.Escape(string.IsNullOrWhiteSpace(step.StatusReason) ? "-" : step.StatusReason!), + step.MaxParallel?.ToString(CultureInfo.InvariantCulture) ?? "-", + step.ContinueOnError ? "yes" : "no", + Markup.Escape(string.IsNullOrWhiteSpace(step.ApprovalId) ? 
"-" : step.ApprovalId!)); + } + console.Write(table); if (result.Outputs.Count > 0) { - var outputsTable = new Table - { - Border = TableBorder.Rounded - }; - outputsTable.AddColumn("Name"); - outputsTable.AddColumn("Type"); - outputsTable.AddColumn("Requires Runtime"); - outputsTable.AddColumn("Path"); - outputsTable.AddColumn("Expression"); - - foreach (var output in result.Outputs) - { - outputsTable.AddRow( - Markup.Escape(output.Name), - Markup.Escape(output.Type), - output.RequiresRuntimeValue ? "yes" : "no", - Markup.Escape(string.IsNullOrWhiteSpace(output.PathExpression) ? "-" : output.PathExpression!), - Markup.Escape(string.IsNullOrWhiteSpace(output.ValueExpression) ? "-" : output.ValueExpression!)); - } - + var outputsTable = new Table + { + Border = TableBorder.Rounded + }; + outputsTable.AddColumn("Name"); + outputsTable.AddColumn("Type"); + outputsTable.AddColumn("Requires Runtime"); + outputsTable.AddColumn("Path"); + outputsTable.AddColumn("Expression"); + + foreach (var output in result.Outputs) + { + outputsTable.AddRow( + Markup.Escape(output.Name), + Markup.Escape(output.Type), + output.RequiresRuntimeValue ? "yes" : "no", + Markup.Escape(string.IsNullOrWhiteSpace(output.PathExpression) ? "-" : output.PathExpression!), + Markup.Escape(string.IsNullOrWhiteSpace(output.ValueExpression) ? "-" : output.ValueExpression!)); + } + console.WriteLine(); console.Write(outputsTable); } @@ -4402,1884 +4408,1884 @@ internal static class CommandHandlers console.MarkupLine($"[grey]Pending Approvals:[/] {(result.HasPendingApprovals ? "yes" : "no")}"); console.Write(new Text($"Plan Hash: {result.PlanHash}{Environment.NewLine}")); console.Write(new Text($"Pending Approvals: {(result.HasPendingApprovals ? 
"yes" : "no")}{Environment.NewLine}")); - } - - private static IEnumerable<(TaskRunnerSimulationStep Step, int Depth)> FlattenTaskRunnerSimulationSteps( - IReadOnlyList steps, - int depth = 0) - { - for (var i = 0; i < steps.Count; i++) - { - var step = steps[i]; - yield return (step, depth); - - foreach (var child in FlattenTaskRunnerSimulationSteps(step.Children, depth + 1)) - { - yield return child; - } - } - } - - private static PolicySimulationOutputFormat DeterminePolicySimulationFormat(string? value, string? outputPath) - { - if (!string.IsNullOrWhiteSpace(value)) - { - return value.Trim().ToLowerInvariant() switch - { - "table" => PolicySimulationOutputFormat.Table, - "json" => PolicySimulationOutputFormat.Json, - _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.") - }; - } - - if (!string.IsNullOrWhiteSpace(outputPath) || Console.IsOutputRedirected) - { - return PolicySimulationOutputFormat.Json; - } - - return PolicySimulationOutputFormat.Table; - } - - private static object BuildPolicySimulationPayload( - string policyId, - int? baseVersion, - int? candidateVersion, - IReadOnlyList sbomSet, - IReadOnlyDictionary environment, - PolicySimulationResult result) - => new - { - policyId, - baseVersion, - candidateVersion, - sbomSet = sbomSet.Count == 0 ? Array.Empty() : sbomSet, - environment = environment.Count == 0 ? 
null : environment, - diff = result.Diff, - explainUri = result.ExplainUri - }; - - private static void RenderPolicySimulationResult( - ILogger logger, - object payload, - PolicySimulationResult result, - PolicySimulationOutputFormat format) - { - if (format == PolicySimulationOutputFormat.Json) - { - var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); - Console.WriteLine(json); - return; - } - - logger.LogInformation( - "Policy diff summary — Added: {Added}, Removed: {Removed}, Unchanged: {Unchanged}.", - result.Diff.Added, - result.Diff.Removed, - result.Diff.Unchanged); - - if (result.Diff.BySeverity.Count > 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table().AddColumns("Severity", "Up", "Down"); - foreach (var entry in result.Diff.BySeverity.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)) - { - table.AddRow( - entry.Key, - FormatDelta(entry.Value.Up), - FormatDelta(entry.Value.Down)); - } - - AnsiConsole.Write(table); - } - else - { - foreach (var entry in result.Diff.BySeverity.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)) - { - logger.LogInformation("Severity {Severity}: up={Up}, down={Down}", entry.Key, entry.Value.Up ?? 0, entry.Value.Down ?? 0); - } - } - } - - if (result.Diff.RuleHits.Count > 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table().AddColumns("Rule", "Up", "Down"); - foreach (var hit in result.Diff.RuleHits) - { - table.AddRow( - string.IsNullOrWhiteSpace(hit.RuleName) ? hit.RuleId : $"{hit.RuleName} ({hit.RuleId})", - FormatDelta(hit.Up), - FormatDelta(hit.Down)); - } - - AnsiConsole.Write(table); - } - else - { - foreach (var hit in result.Diff.RuleHits) - { - logger.LogInformation("Rule {RuleId}: up={Up}, down={Down}", hit.RuleId, hit.Up ?? 0, hit.Down ?? 
0); - } - } - } - - if (!string.IsNullOrWhiteSpace(result.ExplainUri)) - { - logger.LogInformation("Explain trace available at {ExplainUri}.", result.ExplainUri); - } - } - - private static IReadOnlyList NormalizePolicySbomSet(IReadOnlyList arguments) - { - if (arguments is null || arguments.Count == 0) - { - return EmptyPolicySbomSet; - } - - var set = new SortedSet(StringComparer.Ordinal); - foreach (var raw in arguments) - { - if (string.IsNullOrWhiteSpace(raw)) - { - continue; - } - - var trimmed = raw.Trim(); - if (trimmed.Length > 0) - { - set.Add(trimmed); - } - } - - if (set.Count == 0) - { - return EmptyPolicySbomSet; - } - - var list = set.ToList(); - return new ReadOnlyCollection(list); - } - - private static IReadOnlyDictionary ParsePolicyEnvironment(IReadOnlyList arguments) - { - if (arguments is null || arguments.Count == 0) - { - return EmptyPolicyEnvironment; - } - - var env = new SortedDictionary(StringComparer.Ordinal); - foreach (var raw in arguments) - { - if (string.IsNullOrWhiteSpace(raw)) - { - continue; - } - - var trimmed = raw.Trim(); - var separator = trimmed.IndexOf('='); - if (separator <= 0 || separator == trimmed.Length - 1) - { - throw new ArgumentException($"Invalid environment assignment '{raw}'. Expected key=value."); - } - - var key = trimmed[..separator].Trim().ToLowerInvariant(); - if (string.IsNullOrWhiteSpace(key)) - { - throw new ArgumentException($"Invalid environment assignment '{raw}'. Expected key=value."); - } - - var valueToken = trimmed[(separator + 1)..].Trim(); - env[key] = ParsePolicyEnvironmentValue(valueToken); - } - - return env.Count == 0 ? EmptyPolicyEnvironment : new ReadOnlyDictionary(env); - } - - private static object? 
ParsePolicyEnvironmentValue(string token) - { - if (string.IsNullOrWhiteSpace(token)) - { - return string.Empty; - } - - var value = token; - if ((value.Length >= 2 && value.StartsWith("\"", StringComparison.Ordinal) && value.EndsWith("\"", StringComparison.Ordinal)) || - (value.Length >= 2 && value.StartsWith("'", StringComparison.Ordinal) && value.EndsWith("'", StringComparison.Ordinal))) - { - value = value[1..^1]; - } - - if (string.Equals(value, "null", StringComparison.OrdinalIgnoreCase)) - { - return null; - } - - if (bool.TryParse(value, out var boolResult)) - { - return boolResult; - } - - if (long.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var longResult)) - { - return longResult; - } - - if (double.TryParse(value, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var doubleResult)) - { - return doubleResult; - } - - return value; - } - - private static Task WriteSimulationOutputAsync(string outputPath, object payload, CancellationToken cancellationToken) - => WriteJsonPayloadAsync(outputPath, payload, cancellationToken); - - private static async Task WriteJsonPayloadAsync(string outputPath, object payload, CancellationToken cancellationToken) - { - var fullPath = Path.GetFullPath(outputPath); - var directory = Path.GetDirectoryName(fullPath); - if (!string.IsNullOrWhiteSpace(directory)) - { - Directory.CreateDirectory(directory); - } - - var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); - await File.WriteAllTextAsync(fullPath, json + Environment.NewLine, cancellationToken).ConfigureAwait(false); - } - - private static int DetermineSimulationExitCode(PolicySimulationResult result, bool failOnDiff) - { - if (!failOnDiff) - { - return 0; - } - - return (result.Diff.Added + result.Diff.Removed) > 0 ? 
20 : 0; - } - - private static void HandlePolicySimulationFailure(PolicyApiException exception, ILogger logger) - { - var exitCode = exception.ErrorCode switch - { - "ERR_POL_001" => 10, - "ERR_POL_002" or "ERR_POL_005" => 12, - "ERR_POL_003" => 21, - "ERR_POL_004" => 22, - "ERR_POL_006" => 23, - _ when exception.StatusCode == HttpStatusCode.Forbidden || exception.StatusCode == HttpStatusCode.Unauthorized => 12, - _ => 1 - }; - - if (string.IsNullOrWhiteSpace(exception.ErrorCode)) - { - logger.LogError("Policy simulation failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); - } - else - { - logger.LogError("Policy simulation failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); - } - - CliMetrics.RecordPolicySimulation("error"); - Environment.ExitCode = exitCode; - } - - private static void HandlePolicyActivationFailure(PolicyApiException exception, ILogger logger) - { - var exitCode = exception.ErrorCode switch - { - "ERR_POL_002" => 70, - "ERR_POL_003" => 71, - "ERR_POL_004" => 72, - _ when exception.StatusCode == HttpStatusCode.Forbidden || exception.StatusCode == HttpStatusCode.Unauthorized => 12, - _ => 1 - }; - - if (string.IsNullOrWhiteSpace(exception.ErrorCode)) - { - logger.LogError("Policy activation failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); - } - else - { - logger.LogError("Policy activation failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); - } - - CliMetrics.RecordPolicyActivation("error"); - Environment.ExitCode = exitCode; - } - - private static IReadOnlyList NormalizePolicyFilterValues(string[] values, bool toLower = false) - { - if (values is null || values.Length == 0) - { - return Array.Empty(); - } - - var set = new HashSet(StringComparer.OrdinalIgnoreCase); - var list = new List(); - foreach (var raw in values) - { - var candidate = raw?.Trim(); - if 
(string.IsNullOrWhiteSpace(candidate)) - { - continue; - } - - var normalized = toLower ? candidate.ToLowerInvariant() : candidate; - if (set.Add(normalized)) - { - list.Add(normalized); - } - } - - return list.Count == 0 ? Array.Empty() : list; - } - - private static string? NormalizePolicyPriority(string? priority) - { - if (string.IsNullOrWhiteSpace(priority)) - { - return null; - } - - var normalized = priority.Trim(); - return string.IsNullOrWhiteSpace(normalized) ? null : normalized.ToLowerInvariant(); - } - - private static string NormalizePolicyActivationOutcome(string status) - { - if (string.IsNullOrWhiteSpace(status)) - { - return "unknown"; - } - - return status.Trim().ToLowerInvariant(); - } - - private static int DeterminePolicyActivationExitCode(string outcome) - => string.Equals(outcome, "pending_second_approval", StringComparison.Ordinal) ? 75 : 0; - - private static void RenderPolicyActivationResult(PolicyActivationResult result, PolicyActivationRequest request) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var summary = new Table().Expand(); - summary.Border(TableBorder.Rounded); - summary.AddColumn(new TableColumn("[grey]Field[/]").LeftAligned()); - summary.AddColumn(new TableColumn("[grey]Value[/]").LeftAligned()); - summary.AddRow("Policy", Markup.Escape($"{result.Revision.PolicyId} v{result.Revision.Version}")); - summary.AddRow("Status", FormatActivationStatus(result.Status)); - summary.AddRow("Requires 2 approvals", result.Revision.RequiresTwoPersonApproval ? "[yellow]yes[/]" : "[green]no[/]"); - summary.AddRow("Created (UTC)", Markup.Escape(FormatUpdatedAt(result.Revision.CreatedAt))); - summary.AddRow("Activated (UTC)", result.Revision.ActivatedAt.HasValue - ? 
Markup.Escape(FormatUpdatedAt(result.Revision.ActivatedAt.Value)) - : "[grey](not yet active)[/]"); - - if (request.RunNow) - { - summary.AddRow("Run", "[green]immediate[/]"); - } - else if (request.ScheduledAt.HasValue) - { - summary.AddRow("Scheduled at", Markup.Escape(FormatUpdatedAt(request.ScheduledAt.Value))); - } - - if (!string.IsNullOrWhiteSpace(request.Priority)) - { - summary.AddRow("Priority", Markup.Escape(request.Priority!)); - } - - if (request.Rollback) - { - summary.AddRow("Rollback", "[yellow]yes[/]"); - } - - if (!string.IsNullOrWhiteSpace(request.IncidentId)) - { - summary.AddRow("Incident", Markup.Escape(request.IncidentId!)); - } - - if (!string.IsNullOrWhiteSpace(request.Comment)) - { - summary.AddRow("Note", Markup.Escape(request.Comment!)); - } - - AnsiConsole.Write(summary); - - if (result.Revision.Approvals.Count > 0) - { - var approvalTable = new Table().Title("[grey]Approvals[/]"); - approvalTable.Border(TableBorder.Minimal); - approvalTable.AddColumn(new TableColumn("Actor").LeftAligned()); - approvalTable.AddColumn(new TableColumn("Approved (UTC)").LeftAligned()); - approvalTable.AddColumn(new TableColumn("Comment").LeftAligned()); - - foreach (var approval in result.Revision.Approvals) - { - var comment = string.IsNullOrWhiteSpace(approval.Comment) ? "-" : approval.Comment!; - approvalTable.AddRow( - Markup.Escape(approval.ActorId), - Markup.Escape(FormatUpdatedAt(approval.ApprovedAt)), - Markup.Escape(comment)); - } - - AnsiConsole.Write(approvalTable); - } - else - { - AnsiConsole.MarkupLine("[grey]No activation approvals recorded yet.[/]"); - } - } - else - { - Console.WriteLine(FormattableString.Invariant($"Policy: {result.Revision.PolicyId} v{result.Revision.Version}")); - Console.WriteLine(FormattableString.Invariant($"Status: {NormalizePolicyActivationOutcome(result.Status)}")); - Console.WriteLine(FormattableString.Invariant($"Requires 2 approvals: {(result.Revision.RequiresTwoPersonApproval ? 
"yes" : "no")}")); - Console.WriteLine(FormattableString.Invariant($"Created (UTC): {FormatUpdatedAt(result.Revision.CreatedAt)}")); - Console.WriteLine(FormattableString.Invariant($"Activated (UTC): {(result.Revision.ActivatedAt.HasValue ? FormatUpdatedAt(result.Revision.ActivatedAt.Value) : "(not yet active)")}")); - - if (request.RunNow) - { - Console.WriteLine("Run: immediate"); - } - else if (request.ScheduledAt.HasValue) - { - Console.WriteLine(FormattableString.Invariant($"Scheduled at: {FormatUpdatedAt(request.ScheduledAt.Value)}")); - } - - if (!string.IsNullOrWhiteSpace(request.Priority)) - { - Console.WriteLine(FormattableString.Invariant($"Priority: {request.Priority}")); - } - - if (request.Rollback) - { - Console.WriteLine("Rollback: yes"); - } - - if (!string.IsNullOrWhiteSpace(request.IncidentId)) - { - Console.WriteLine(FormattableString.Invariant($"Incident: {request.IncidentId}")); - } - - if (!string.IsNullOrWhiteSpace(request.Comment)) - { - Console.WriteLine(FormattableString.Invariant($"Note: {request.Comment}")); - } - - if (result.Revision.Approvals.Count == 0) - { - Console.WriteLine("Approvals: none"); - } - else - { - foreach (var approval in result.Revision.Approvals) - { - var comment = string.IsNullOrWhiteSpace(approval.Comment) ? "-" : approval.Comment; - Console.WriteLine(FormattableString.Invariant($"Approval: {approval.ActorId} at {FormatUpdatedAt(approval.ApprovedAt)} ({comment})")); - } - } - } - } - - private static string FormatActivationStatus(string status) - { - var normalized = NormalizePolicyActivationOutcome(status); - return normalized switch - { - "activated" => "[green]activated[/]", - "already_active" => "[yellow]already_active[/]", - "pending_second_approval" => "[yellow]pending_second_approval[/]", - _ => "[red]" + Markup.Escape(string.IsNullOrWhiteSpace(status) ? "unknown" : status) + "[/]" - }; - } - - private static DateTimeOffset? ParsePolicySince(string? 
value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return null; - } - - if (DateTimeOffset.TryParse( - value.Trim(), - CultureInfo.InvariantCulture, - DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, - out var parsed)) - { - return parsed.ToUniversalTime(); - } - - throw new ArgumentException("Invalid --since value. Use an ISO-8601 timestamp."); - } - - private static string? NormalizeExplainMode(string? mode) - => string.IsNullOrWhiteSpace(mode) ? null : mode.Trim().ToLowerInvariant(); - - private static PolicyFindingsOutputFormat DeterminePolicyFindingsFormat(string? value, string? outputPath) - { - if (!string.IsNullOrWhiteSpace(value)) - { - return value.Trim().ToLowerInvariant() switch - { - "table" => PolicyFindingsOutputFormat.Table, - "json" => PolicyFindingsOutputFormat.Json, - _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.") - }; - } - - if (!string.IsNullOrWhiteSpace(outputPath) || Console.IsOutputRedirected) - { - return PolicyFindingsOutputFormat.Json; - } - - return PolicyFindingsOutputFormat.Table; - } - - private static object BuildPolicyFindingsPayload( - string policyId, - PolicyFindingsQuery query, - PolicyFindingsPage page) - => new - { - policyId, - filters = new - { - sbom = query.SbomIds, - status = query.Statuses, - severity = query.Severities, - cursor = query.Cursor, - page = query.Page, - pageSize = query.PageSize, - since = query.Since?.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture) - }, - items = page.Items.Select(item => new - { - findingId = item.FindingId, - status = item.Status, - severity = new - { - normalized = item.Severity.Normalized, - score = item.Severity.Score - }, - sbomId = item.SbomId, - advisoryIds = item.AdvisoryIds, - vex = item.Vex is null ? 
null : new - { - winningStatementId = item.Vex.WinningStatementId, - source = item.Vex.Source, - status = item.Vex.Status - }, - policyVersion = item.PolicyVersion, - updatedAt = item.UpdatedAt == DateTimeOffset.MinValue ? null : item.UpdatedAt.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture), - runId = item.RunId - }), - nextCursor = page.NextCursor, - totalCount = page.TotalCount - }; - - private static object BuildPolicyFindingPayload(string policyId, PolicyFindingDocument finding) - => new - { - policyId, - finding = new - { - findingId = finding.FindingId, - status = finding.Status, - severity = new - { - normalized = finding.Severity.Normalized, - score = finding.Severity.Score - }, - sbomId = finding.SbomId, - advisoryIds = finding.AdvisoryIds, - vex = finding.Vex is null ? null : new - { - winningStatementId = finding.Vex.WinningStatementId, - source = finding.Vex.Source, - status = finding.Vex.Status - }, - policyVersion = finding.PolicyVersion, - updatedAt = finding.UpdatedAt == DateTimeOffset.MinValue ? null : finding.UpdatedAt.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture), - runId = finding.RunId - } - }; - - private static object BuildPolicyFindingExplainPayload( - string policyId, - string findingId, - string? 
mode, - PolicyFindingExplainResult explain) - => new - { - policyId, - findingId, - mode, - explain = new - { - policyVersion = explain.PolicyVersion, - steps = explain.Steps.Select(step => new - { - rule = step.Rule, - status = step.Status, - action = step.Action, - score = step.Score, - inputs = step.Inputs, - evidence = step.Evidence - }), - sealedHints = explain.SealedHints.Select(hint => hint.Message) - } - }; - - private static void RenderPolicyFindingsTable(ILogger logger, PolicyFindingsPage page) - { - var items = page.Items; - if (items.Count == 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - AnsiConsole.MarkupLine("[yellow]No findings matched the provided filters.[/]"); - } - else - { - logger.LogWarning("No findings matched the provided filters."); - } - return; - } - - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table() - .Border(TableBorder.Rounded) - .Centered(); - - table.AddColumn("Finding"); - table.AddColumn("Status"); - table.AddColumn("Severity"); - table.AddColumn("Score"); - table.AddColumn("SBOM"); - table.AddColumn("Advisories"); - table.AddColumn("Updated (UTC)"); - - foreach (var item in items) - { - table.AddRow( - Markup.Escape(item.FindingId), - Markup.Escape(item.Status), - Markup.Escape(item.Severity.Normalized), - Markup.Escape(FormatScore(item.Severity.Score)), - Markup.Escape(item.SbomId), - Markup.Escape(FormatListPreview(item.AdvisoryIds)), - Markup.Escape(FormatUpdatedAt(item.UpdatedAt))); - } - - AnsiConsole.Write(table); - } - else - { - foreach (var item in items) - { - logger.LogInformation( - "{Finding} — Status {Status}, Severity {Severity} ({Score}), SBOM {Sbom}, Updated {Updated}", - item.FindingId, - item.Status, - item.Severity.Normalized, - item.Severity.Score?.ToString("0.00", CultureInfo.InvariantCulture) ?? 
"n/a", - item.SbomId, - FormatUpdatedAt(item.UpdatedAt)); - } - } - - logger.LogInformation("{Count} finding(s).", items.Count); - - if (page.TotalCount.HasValue) - { - logger.LogInformation("Total available: {Total}", page.TotalCount.Value); - } - - if (!string.IsNullOrWhiteSpace(page.NextCursor)) - { - logger.LogInformation("Next cursor: {Cursor}", page.NextCursor); - } - } - - private static void RenderPolicyFindingDetails(ILogger logger, PolicyFindingDocument finding) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table() - .Border(TableBorder.Rounded) - .AddColumn("Field") - .AddColumn("Value"); - - table.AddRow("Finding", Markup.Escape(finding.FindingId)); - table.AddRow("Status", Markup.Escape(finding.Status)); - table.AddRow("Severity", Markup.Escape(FormatSeverity(finding.Severity))); - table.AddRow("SBOM", Markup.Escape(finding.SbomId)); - table.AddRow("Policy Version", Markup.Escape(finding.PolicyVersion.ToString(CultureInfo.InvariantCulture))); - table.AddRow("Updated (UTC)", Markup.Escape(FormatUpdatedAt(finding.UpdatedAt))); - table.AddRow("Run Id", Markup.Escape(string.IsNullOrWhiteSpace(finding.RunId) ? 
"(none)" : finding.RunId)); - table.AddRow("Advisories", Markup.Escape(FormatListPreview(finding.AdvisoryIds))); - table.AddRow("VEX", Markup.Escape(FormatVexMetadata(finding.Vex))); - - AnsiConsole.Write(table); - } - else - { - logger.LogInformation("Finding {Finding}", finding.FindingId); - logger.LogInformation(" Status: {Status}", finding.Status); - logger.LogInformation(" Severity: {Severity}", FormatSeverity(finding.Severity)); - logger.LogInformation(" SBOM: {Sbom}", finding.SbomId); - logger.LogInformation(" Policy version: {Version}", finding.PolicyVersion); - logger.LogInformation(" Updated (UTC): {Updated}", FormatUpdatedAt(finding.UpdatedAt)); - if (!string.IsNullOrWhiteSpace(finding.RunId)) - { - logger.LogInformation(" Run Id: {Run}", finding.RunId); - } - if (finding.AdvisoryIds.Count > 0) - { - logger.LogInformation(" Advisories: {Advisories}", string.Join(", ", finding.AdvisoryIds)); - } - if (!string.IsNullOrWhiteSpace(FormatVexMetadata(finding.Vex))) - { - logger.LogInformation(" VEX: {Vex}", FormatVexMetadata(finding.Vex)); - } - } - } - - private static void RenderPolicyFindingExplain(ILogger logger, PolicyFindingExplainResult explain) - { - if (explain.Steps.Count == 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - AnsiConsole.MarkupLine("[yellow]No explain steps were returned.[/]"); - } - else - { - logger.LogWarning("No explain steps were returned."); - } - } - else if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table() - .Border(TableBorder.Rounded) - .AddColumn("Rule") - .AddColumn("Status") - .AddColumn("Action") - .AddColumn("Score") - .AddColumn("Inputs") - .AddColumn("Evidence"); - - foreach (var step in explain.Steps) - { - table.AddRow( - Markup.Escape(step.Rule), - Markup.Escape(step.Status ?? "(n/a)"), - Markup.Escape(step.Action ?? "(n/a)"), - Markup.Escape(step.Score.HasValue ? 
step.Score.Value.ToString("0.00", CultureInfo.InvariantCulture) : "-"), - Markup.Escape(FormatKeyValuePairs(step.Inputs)), - Markup.Escape(FormatKeyValuePairs(step.Evidence))); - } - - AnsiConsole.Write(table); - } - else - { - logger.LogInformation("{Count} explain step(s).", explain.Steps.Count); - foreach (var step in explain.Steps) - { - logger.LogInformation( - "Rule {Rule} — Status {Status}, Action {Action}, Score {Score}, Inputs {Inputs}", - step.Rule, - step.Status ?? "n/a", - step.Action ?? "n/a", - step.Score?.ToString("0.00", CultureInfo.InvariantCulture) ?? "n/a", - FormatKeyValuePairs(step.Inputs)); - - if (step.Evidence is not null && step.Evidence.Count > 0) - { - logger.LogInformation(" Evidence: {Evidence}", FormatKeyValuePairs(step.Evidence)); - } - } - } - - if (explain.SealedHints.Count > 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - AnsiConsole.MarkupLine("[grey]Hints:[/]"); - foreach (var hint in explain.SealedHints) - { - AnsiConsole.MarkupLine($" • {Markup.Escape(hint.Message)}"); - } - } - else - { - foreach (var hint in explain.SealedHints) - { - logger.LogInformation("Hint: {Hint}", hint.Message); - } - } - } - } - - private static string FormatSeverity(PolicyFindingSeverity severity) - { - if (severity.Score.HasValue) - { - return FormattableString.Invariant($"{severity.Normalized} ({severity.Score.Value:0.00})"); - } - - return severity.Normalized; - } - - private static string FormatListPreview(IReadOnlyList values) - { - if (values is null || values.Count == 0) - { - return "(none)"; - } - - const int MaxItems = 3; - if (values.Count <= MaxItems) - { - return string.Join(", ", values); - } - - var preview = string.Join(", ", values.Take(MaxItems)); - return FormattableString.Invariant($"{preview} (+{values.Count - MaxItems})"); - } - - private static string FormatUpdatedAt(DateTimeOffset timestamp) - { - if (timestamp == DateTimeOffset.MinValue) - { - return "(unknown)"; - } - - return 
timestamp.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss'Z'", CultureInfo.InvariantCulture); - } - - private static string FormatScore(double? score) - => score.HasValue ? score.Value.ToString("0.00", CultureInfo.InvariantCulture) : "-"; - - private static string FormatKeyValuePairs(IReadOnlyDictionary? values) - { - if (values is null || values.Count == 0) - { - return "(none)"; - } - - return string.Join(", ", values.Select(pair => $"{pair.Key}={pair.Value}")); - } - - private static string FormatVexMetadata(PolicyFindingVexMetadata? value) - { - if (value is null) - { - return "(none)"; - } - - var parts = new List(3); - if (!string.IsNullOrWhiteSpace(value.WinningStatementId)) - { - parts.Add($"winning={value.WinningStatementId}"); - } - - if (!string.IsNullOrWhiteSpace(value.Source)) - { - parts.Add($"source={value.Source}"); - } - - if (!string.IsNullOrWhiteSpace(value.Status)) - { - parts.Add($"status={value.Status}"); - } - - return parts.Count == 0 ? "(none)" : string.Join(", ", parts); - } - - private static void HandlePolicyFindingsFailure(PolicyApiException exception, ILogger logger, Action recordMetric) - { - var exitCode = exception.StatusCode switch - { - HttpStatusCode.Unauthorized or HttpStatusCode.Forbidden => 12, - HttpStatusCode.NotFound => 1, - _ => 1 - }; - - if (string.IsNullOrWhiteSpace(exception.ErrorCode)) - { - logger.LogError("Policy API request failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); - } - else - { - logger.LogError("Policy API request failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); - } - - recordMetric("error"); - Environment.ExitCode = exitCode; - } - - private static string FormatDelta(int? value) - => value.HasValue ? 
value.Value.ToString("N0", CultureInfo.InvariantCulture) : "-"; - - private static readonly JsonSerializerOptions SimulationJsonOptions = - new(JsonSerializerDefaults.Web) { WriteIndented = true }; - - private static readonly IReadOnlyDictionary EmptyPolicyEnvironment = - new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)); - - private static readonly IReadOnlyList EmptyPolicySbomSet = - new ReadOnlyCollection(Array.Empty()); - - private static readonly IReadOnlyDictionary EmptyLabelSelectors = - new ReadOnlyDictionary(new Dictionary(0, StringComparer.OrdinalIgnoreCase)); - - private enum TaskRunnerSimulationOutputFormat - { - Table, - Json - } - - private enum PolicySimulationOutputFormat - { - Table, - Json - } - - private enum PolicyFindingsOutputFormat - { - Table, - Json - } - - - private static string FormatAdditionalValue(object? value) - { - return value switch - { - null => "null", - bool b => b ? "true" : "false", - double d => d.ToString("G17", CultureInfo.InvariantCulture), - float f => f.ToString("G9", CultureInfo.InvariantCulture), - IFormattable formattable => formattable.ToString(null, CultureInfo.InvariantCulture), - _ => value.ToString() ?? string.Empty - }; - } - - - private static IReadOnlyList NormalizeProviders(IReadOnlyList providers) - { - if (providers is null || providers.Count == 0) - { - return Array.Empty(); - } - - var list = new List(); - foreach (var provider in providers) - { - if (!string.IsNullOrWhiteSpace(provider)) - { - list.Add(provider.Trim()); - } - } - - return list.Count == 0 ? Array.Empty() : list; - } - - private static string ResolveTenant(string? tenantOption) - { - if (!string.IsNullOrWhiteSpace(tenantOption)) - { - return tenantOption.Trim(); - } - - var fromEnvironment = Environment.GetEnvironmentVariable("STELLA_TENANT"); - return string.IsNullOrWhiteSpace(fromEnvironment) ? 
string.Empty : fromEnvironment.Trim(); - } - - private static async Task LoadIngestInputAsync(IServiceProvider services, string input, CancellationToken cancellationToken) - { - if (Uri.TryCreate(input, UriKind.Absolute, out var uri) && - (uri.Scheme.Equals(Uri.UriSchemeHttp, StringComparison.OrdinalIgnoreCase) || - uri.Scheme.Equals(Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase))) - { - return await LoadIngestInputFromHttpAsync(services, uri, cancellationToken).ConfigureAwait(false); - } - - return await LoadIngestInputFromFileAsync(input, cancellationToken).ConfigureAwait(false); - } - - private static async Task LoadIngestInputFromHttpAsync(IServiceProvider services, Uri uri, CancellationToken cancellationToken) - { - var httpClientFactory = services.GetRequiredService(); - var httpClient = httpClientFactory.CreateClient("stellaops-cli.ingest-download"); - using var response = await httpClient.GetAsync(uri, cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - throw new InvalidOperationException($"Failed to download document from {uri} (HTTP {(int)response.StatusCode})."); - } - - var contentType = response.Content.Headers.ContentType?.MediaType ?? "application/json"; - var contentEncoding = response.Content.Headers.ContentEncoding is { Count: > 0 } - ? 
string.Join(",", response.Content.Headers.ContentEncoding) - : null; - - var bytes = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); - var normalized = NormalizeDocument(bytes, contentType, contentEncoding); - - return new IngestInputPayload( - "uri", - uri.ToString(), - normalized.Content, - normalized.ContentType, - normalized.ContentEncoding); - } - - private static async Task LoadIngestInputFromFileAsync(string path, CancellationToken cancellationToken) - { - var fullPath = Path.GetFullPath(path); - if (!File.Exists(fullPath)) - { - throw new FileNotFoundException("Input document not found.", fullPath); - } - - var bytes = await File.ReadAllBytesAsync(fullPath, cancellationToken).ConfigureAwait(false); - var normalized = NormalizeDocument(bytes, GuessContentTypeFromExtension(fullPath), null); - - return new IngestInputPayload( - "file", - Path.GetFileName(fullPath), - normalized.Content, - normalized.ContentType, - normalized.ContentEncoding); - } - - private static DocumentNormalizationResult NormalizeDocument(byte[] bytes, string? contentType, string? 
encodingHint) - { - if (bytes is null || bytes.Length == 0) - { - throw new InvalidOperationException("Input document is empty."); - } - - var working = bytes; - var encodings = new List(); - if (!string.IsNullOrWhiteSpace(encodingHint)) - { - encodings.Add(encodingHint); - } - - if (IsGzip(working)) - { - working = DecompressGzip(working); - encodings.Add("gzip"); - } - - var text = DecodeText(working); - var trimmed = text.TrimStart(); - - if (!string.IsNullOrWhiteSpace(trimmed) && trimmed[0] != '{' && trimmed[0] != '[') - { - if (TryDecodeBase64(text, out var decodedBytes)) - { - working = decodedBytes; - encodings.Add("base64"); - - if (IsGzip(working)) - { - working = DecompressGzip(working); - encodings.Add("gzip"); - } - - text = DecodeText(working); - } - } - - text = text.Trim(); - if (string.IsNullOrWhiteSpace(text)) - { - throw new InvalidOperationException("Input document contained no data after decoding."); - } - - var encodingLabel = encodings.Count == 0 ? null : string.Join("+", encodings); - var finalContentType = string.IsNullOrWhiteSpace(contentType) ? "application/json" : contentType; - - return new DocumentNormalizationResult(text, finalContentType, encodingLabel); - } - - private static string GuessContentTypeFromExtension(string path) - { - var extension = Path.GetExtension(path); - if (string.IsNullOrWhiteSpace(extension)) - { - return "application/json"; - } - - return extension.ToLowerInvariant() switch - { - ".json" or ".csaf" => "application/json", - ".xml" => "application/xml", - _ => "application/json" - }; - } - - private static DateTimeOffset DetermineVerificationSince(string? 
sinceOption) - { - if (string.IsNullOrWhiteSpace(sinceOption)) - { - return DateTimeOffset.UtcNow.AddHours(-24); - } - - var trimmed = sinceOption.Trim(); - - if (DateTimeOffset.TryParse( - trimmed, - CultureInfo.InvariantCulture, - DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, - out var parsedTimestamp)) - { - return parsedTimestamp.ToUniversalTime(); - } - - if (TryParseRelativeDuration(trimmed, out var duration)) - { - return DateTimeOffset.UtcNow.Subtract(duration); - } - - throw new InvalidOperationException("Invalid --since value. Use ISO-8601 timestamp or duration (e.g. 24h, 7d)."); - } - - private static bool TryParseRelativeDuration(string value, out TimeSpan duration) - { - duration = TimeSpan.Zero; - if (string.IsNullOrWhiteSpace(value)) - { - return false; - } - - var normalized = value.Trim().ToLowerInvariant(); - if (normalized.Length < 2) - { - return false; - } - - var suffix = normalized[^1]; - var magnitudeText = normalized[..^1]; - - double multiplier = suffix switch - { - 's' => 1, - 'm' => 60, - 'h' => 3600, - 'd' => 86400, - 'w' => 604800, - _ => 0 - }; - - if (multiplier == 0) - { - return false; - } - - if (!double.TryParse(magnitudeText, NumberStyles.Float, CultureInfo.InvariantCulture, out var magnitude)) - { - return false; - } - - if (double.IsNaN(magnitude) || double.IsInfinity(magnitude) || magnitude <= 0) - { - return false; - } - - var seconds = magnitude * multiplier; - if (double.IsNaN(seconds) || double.IsInfinity(seconds) || seconds <= 0) - { - return false; - } - - duration = TimeSpan.FromSeconds(seconds); - return true; - } - - private static int NormalizeLimit(int? limitOption) - { - if (!limitOption.HasValue) - { - return 20; - } - - if (limitOption.Value < 0) - { - throw new InvalidOperationException("Limit cannot be negative."); - } - - return limitOption.Value; - } - - private static IReadOnlyList ParseCommaSeparatedList(string? 
raw) - { - if (string.IsNullOrWhiteSpace(raw)) - { - return Array.Empty(); - } - - var tokens = raw - .Split(',', StringSplitOptions.RemoveEmptyEntries) - .Select(token => token.Trim()) - .Where(token => token.Length > 0) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToArray(); - - return tokens.Length == 0 ? Array.Empty() : tokens; - } - - private static string FormatWindowRange(AocVerifyWindow? window) - { - if (window is null) - { - return "(unspecified)"; - } - - var fromText = window.From?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture) ?? "(unknown)"; - var toText = window.To?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture) ?? "(unknown)"; - return $"{fromText} -> {toText}"; - } - - private static string FormatCheckedCounts(AocVerifyChecked? checkedCounts) - { - if (checkedCounts is null) - { - return "(unspecified)"; - } - - return $"advisories: {checkedCounts.Advisories.ToString("N0", CultureInfo.InvariantCulture)}, vex: {checkedCounts.Vex.ToString("N0", CultureInfo.InvariantCulture)}"; - } - - private static string DetermineVerifyStatus(AocVerifyResponse? response) - { - if (response is null) - { - return "unknown"; - } - - if (response.Truncated == true && (response.Violations is null || response.Violations.Count == 0)) - { - return "truncated"; - } - - var total = response.Violations?.Sum(violation => Math.Max(0, violation?.Count ?? 0)) ?? 0; - return total > 0 ? "violations" : "ok"; - } - - private static string FormatBoolean(bool value, bool useColor) - { - var text = value ? "yes" : "no"; - if (!useColor) - { - return text; - } - - return value - ? $"[yellow]{text}[/]" - : $"[green]{text}[/]"; - } - - private static string FormatVerifyStatus(string? status, bool useColor) - { - var normalized = string.IsNullOrWhiteSpace(status) ? 
"unknown" : status.Trim(); - var escaped = Markup.Escape(normalized); - if (!useColor) - { - return escaped; - } - - return normalized switch - { - "ok" => $"[green]{escaped}[/]", - "violations" => $"[red]{escaped}[/]", - "truncated" => $"[yellow]{escaped}[/]", - _ => $"[grey]{escaped}[/]" - }; - } - - private static string FormatViolationExample(AocVerifyViolationExample? example) - { - if (example is null) - { - return "(n/a)"; - } - - var parts = new List(); - if (!string.IsNullOrWhiteSpace(example.Source)) - { - parts.Add(example.Source.Trim()); - } - - if (!string.IsNullOrWhiteSpace(example.DocumentId)) - { - parts.Add(example.DocumentId.Trim()); - } - - var label = parts.Count == 0 ? "(n/a)" : string.Join(" | ", parts); - if (!string.IsNullOrWhiteSpace(example.ContentHash)) - { - label = $"{label} [{example.ContentHash.Trim()}]"; - } - - return label; - } - - private static void RenderAocVerifyTable(AocVerifyResponse response, bool useColor, int limit) - { - var summary = new Table().Border(TableBorder.Rounded); - summary.AddColumn("Field"); - summary.AddColumn("Value"); - - summary.AddRow("Tenant", Markup.Escape(string.IsNullOrWhiteSpace(response?.Tenant) ? "(unknown)" : response.Tenant!)); - summary.AddRow("Window", Markup.Escape(FormatWindowRange(response?.Window))); - summary.AddRow("Checked", Markup.Escape(FormatCheckedCounts(response?.Checked))); - - summary.AddRow("Limit", Markup.Escape(limit <= 0 ? 
"unbounded" : limit.ToString(CultureInfo.InvariantCulture))); - summary.AddRow("Status", FormatVerifyStatus(DetermineVerifyStatus(response), useColor)); - - if (response?.Metrics?.IngestionWriteTotal is int writes) - { - summary.AddRow("Ingestion Writes", Markup.Escape(writes.ToString("N0", CultureInfo.InvariantCulture))); - } - - if (response?.Metrics?.AocViolationTotal is int totalViolations) - { - summary.AddRow("Violations (total)", Markup.Escape(totalViolations.ToString("N0", CultureInfo.InvariantCulture))); - } - else - { - var computedViolations = response?.Violations?.Sum(violation => Math.Max(0, violation?.Count ?? 0)) ?? 0; - summary.AddRow("Violations (total)", Markup.Escape(computedViolations.ToString("N0", CultureInfo.InvariantCulture))); - } - - summary.AddRow("Truncated", FormatBoolean(response?.Truncated == true, useColor)); - - AnsiConsole.Write(summary); - - if (response?.Violations is null || response.Violations.Count == 0) - { - var message = response?.Truncated == true - ? "No violations reported, but results were truncated. Increase --limit to review full output." - : "No AOC violations detected in the requested window."; - - if (useColor) - { - var color = response?.Truncated == true ? 
"yellow" : "green"; - AnsiConsole.MarkupLine($"[{color}]{Markup.Escape(message)}[/]"); - } - else - { - Console.WriteLine(message); - } - - return; - } - - var violationTable = new Table().Border(TableBorder.Rounded); - violationTable.AddColumn("Code"); - violationTable.AddColumn("Count"); - violationTable.AddColumn("Sample Document"); - violationTable.AddColumn("Path"); - - foreach (var violation in response.Violations) - { - var codeDisplay = FormatViolationCode(violation.Code, useColor); - var countDisplay = violation.Count.ToString("N0", CultureInfo.InvariantCulture); - var example = violation.Examples?.FirstOrDefault(); - var documentDisplay = Markup.Escape(FormatViolationExample(example)); - var pathDisplay = example is null || string.IsNullOrWhiteSpace(example.Path) - ? "(none)" - : example.Path!; - - violationTable.AddRow(codeDisplay, countDisplay, documentDisplay, Markup.Escape(pathDisplay)); - } - - AnsiConsole.Write(violationTable); -} - - private static int DetermineVerifyExitCode(AocVerifyResponse response) - { - ArgumentNullException.ThrowIfNull(response); - - if (response.Violations is not null && response.Violations.Count > 0) - { - var exitCodes = new List(); - foreach (var violation in response.Violations) - { - if (string.IsNullOrWhiteSpace(violation.Code)) - { - continue; - } - - if (AocViolationExitCodeMap.TryGetValue(violation.Code, out var mapped)) - { - exitCodes.Add(mapped); - } - } - - if (exitCodes.Count > 0) - { - return exitCodes.Min(); - } - - return response.Truncated == true ? 
18 : 17; - } - - if (response.Truncated == true) - { - return 18; - } - - return 0; - } - - private static async Task WriteJsonReportAsync(T payload, string destination, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(payload); - - if (string.IsNullOrWhiteSpace(destination)) - { - throw new InvalidOperationException("Output path must be provided."); - } - - var outputPath = Path.GetFullPath(destination); - var directory = Path.GetDirectoryName(outputPath); - if (!string.IsNullOrWhiteSpace(directory)) - { - Directory.CreateDirectory(directory); - } - - var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions - { - WriteIndented = true - }); - - await File.WriteAllTextAsync(outputPath, json, cancellationToken).ConfigureAwait(false); - return outputPath; - } - - private static void RenderDryRunTable(AocIngestDryRunResponse response, bool useColor) - { - var summary = new Table().Border(TableBorder.Rounded); - summary.AddColumn("Field"); - summary.AddColumn("Value"); - - summary.AddRow("Source", Markup.Escape(response?.Source ?? "(unknown)")); - summary.AddRow("Tenant", Markup.Escape(response?.Tenant ?? "(unknown)")); - summary.AddRow("Guard Version", Markup.Escape(response?.GuardVersion ?? "(unknown)")); - summary.AddRow("Status", FormatStatusMarkup(response?.Status, useColor)); - - var violationCount = response?.Violations?.Count ?? 0; - summary.AddRow("Violations", violationCount.ToString(CultureInfo.InvariantCulture)); - - if (!string.IsNullOrWhiteSpace(response?.Document?.ContentHash)) - { - summary.AddRow("Content Hash", Markup.Escape(response.Document.ContentHash!)); - } - - if (!string.IsNullOrWhiteSpace(response?.Document?.Supersedes)) - { - summary.AddRow("Supersedes", Markup.Escape(response.Document.Supersedes!)); - } - - if (!string.IsNullOrWhiteSpace(response?.Document?.Provenance?.Signature?.Format)) - { - var signature = response.Document.Provenance.Signature; - var summaryText = signature!.Present - ? 
signature.Format ?? "present" - : "missing"; - summary.AddRow("Signature", Markup.Escape(summaryText)); - } - - AnsiConsole.Write(summary); - - if (violationCount == 0) - { - if (useColor) - { - AnsiConsole.MarkupLine("[green]No AOC violations detected.[/]"); - } - else - { - Console.WriteLine("No AOC violations detected."); - } - - return; - } - - var violationTable = new Table().Border(TableBorder.Rounded); - violationTable.AddColumn("Code"); - violationTable.AddColumn("Path"); - violationTable.AddColumn("Message"); - - foreach (var violation in response!.Violations!) - { - var codeDisplay = FormatViolationCode(violation.Code, useColor); - var pathDisplay = string.IsNullOrWhiteSpace(violation.Path) ? "(root)" : violation.Path!; - var messageDisplay = string.IsNullOrWhiteSpace(violation.Message) ? "(unspecified)" : violation.Message!; - violationTable.AddRow(codeDisplay, Markup.Escape(pathDisplay), Markup.Escape(messageDisplay)); - } - - AnsiConsole.Write(violationTable); - } - - private static int DetermineDryRunExitCode(AocIngestDryRunResponse response) - { - if (response?.Violations is null || response.Violations.Count == 0) - { - return 0; - } - - var exitCodes = new List(); - foreach (var violation in response.Violations) - { - if (string.IsNullOrWhiteSpace(violation.Code)) - { - continue; - } - - if (AocViolationExitCodeMap.TryGetValue(violation.Code, out var mapped)) - { - exitCodes.Add(mapped); - } - } - - if (exitCodes.Count == 0) - { - return 17; - } - - return exitCodes.Min(); - } - - private static string FormatStatusMarkup(string? status, bool useColor) - { - var normalized = string.IsNullOrWhiteSpace(status) ? "unknown" : status.Trim(); - if (!useColor) - { - return Markup.Escape(normalized); - } - - return normalized.Equals("ok", StringComparison.OrdinalIgnoreCase) - ? 
$"[green]{Markup.Escape(normalized)}[/]" - : $"[red]{Markup.Escape(normalized)}[/]"; - } - - private static string FormatViolationCode(string code, bool useColor) - { - var sanitized = string.IsNullOrWhiteSpace(code) ? "(unknown)" : code.Trim(); - if (!useColor) - { - return Markup.Escape(sanitized); - } - - return $"[red]{Markup.Escape(sanitized)}[/]"; - } - - private static bool IsGzip(ReadOnlySpan data) - { - return data.Length >= 2 && data[0] == 0x1F && data[1] == 0x8B; - } - - private static byte[] DecompressGzip(byte[] payload) - { - using var input = new MemoryStream(payload); - using var gzip = new GZipStream(input, CompressionMode.Decompress); - using var output = new MemoryStream(); - gzip.CopyTo(output); - return output.ToArray(); - } - - private static string DecodeText(byte[] payload) - { - var encoding = DetectEncoding(payload); - return encoding.GetString(payload); - } - - private static Encoding DetectEncoding(ReadOnlySpan data) - { - if (data.Length >= 4) - { - if (data[0] == 0x00 && data[1] == 0x00 && data[2] == 0xFE && data[3] == 0xFF) - { - return new UTF32Encoding(bigEndian: true, byteOrderMark: true); - } - - if (data[0] == 0xFF && data[1] == 0xFE && data[2] == 0x00 && data[3] == 0x00) - { - return new UTF32Encoding(bigEndian: false, byteOrderMark: true); - } - } - - if (data.Length >= 2) - { - if (data[0] == 0xFE && data[1] == 0xFF) - { - return Encoding.BigEndianUnicode; - } - - if (data[0] == 0xFF && data[1] == 0xFE) - { - return Encoding.Unicode; - } - } - - if (data.Length >= 3 && data[0] == 0xEF && data[1] == 0xBB && data[2] == 0xBF) - { - return Encoding.UTF8; - } - - return Encoding.UTF8; - } - - public static async Task HandleKmsExportAsync( - IServiceProvider services, - string? rootPath, - string keyId, - string? versionId, - string outputPath, - bool overwrite, - string? 
passphrase, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("kms-export"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - - try - { - var resolvedPassphrase = ResolvePassphrase(passphrase, "Enter file KMS passphrase:"); - if (string.IsNullOrEmpty(resolvedPassphrase)) - { - logger.LogError("KMS passphrase must be supplied via --passphrase, {EnvironmentVariable}, or interactive prompt.", KmsPassphraseEnvironmentVariable); - Environment.ExitCode = 1; - return; - } - - var resolvedRoot = ResolveRootDirectory(rootPath); - if (!Directory.Exists(resolvedRoot)) - { - logger.LogError("KMS root directory '{Root}' does not exist.", resolvedRoot); - Environment.ExitCode = 1; - return; - } - - var outputFullPath = Path.GetFullPath(string.IsNullOrWhiteSpace(outputPath) ? "kms-export.json" : outputPath); - if (Directory.Exists(outputFullPath)) - { - logger.LogError("Output path '{Output}' is a directory. Provide a file path.", outputFullPath); - Environment.ExitCode = 1; - return; - } - - if (!overwrite && File.Exists(outputFullPath)) - { - logger.LogError("Output file '{Output}' already exists. Use --force to overwrite.", outputFullPath); - Environment.ExitCode = 1; - return; - } - - var outputDirectory = Path.GetDirectoryName(outputFullPath); - if (!string.IsNullOrEmpty(outputDirectory)) - { - Directory.CreateDirectory(outputDirectory); - } - - using var client = new FileKmsClient(new FileKmsOptions - { - RootPath = resolvedRoot, - Password = resolvedPassphrase! 
- }); - - var material = await client.ExportAsync(keyId, versionId, cancellationToken).ConfigureAwait(false); - var json = JsonSerializer.Serialize(material, KmsJsonOptions); - await File.WriteAllTextAsync(outputFullPath, json, cancellationToken).ConfigureAwait(false); - - logger.LogInformation("Exported key {KeyId} version {VersionId} to {Output}.", material.KeyId, material.VersionId, outputFullPath); - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to export key material."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleKmsImportAsync( - IServiceProvider services, - string? rootPath, - string keyId, - string inputPath, - string? versionOverride, - string? passphrase, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("kms-import"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - - try - { - var resolvedPassphrase = ResolvePassphrase(passphrase, "Enter file KMS passphrase:"); - if (string.IsNullOrEmpty(resolvedPassphrase)) - { - logger.LogError("KMS passphrase must be supplied via --passphrase, {EnvironmentVariable}, or interactive prompt.", KmsPassphraseEnvironmentVariable); - Environment.ExitCode = 1; - return; - } - - var resolvedRoot = ResolveRootDirectory(rootPath); - Directory.CreateDirectory(resolvedRoot); - - var inputFullPath = Path.GetFullPath(inputPath ?? 
string.Empty); - if (!File.Exists(inputFullPath)) - { - logger.LogError("Input file '{Input}' does not exist.", inputFullPath); - Environment.ExitCode = 1; - return; - } - - var json = await File.ReadAllTextAsync(inputFullPath, cancellationToken).ConfigureAwait(false); - var material = JsonSerializer.Deserialize(json, KmsJsonOptions) - ?? throw new InvalidOperationException("Key material payload is empty."); - - if (!string.IsNullOrWhiteSpace(versionOverride)) - { - material = material with { VersionId = versionOverride }; - } - - var sourceKeyId = material.KeyId; - material = material with { KeyId = keyId }; - - using var client = new FileKmsClient(new FileKmsOptions - { - RootPath = resolvedRoot, - Password = resolvedPassphrase! - }); - - var metadata = await client.ImportAsync(keyId, material, cancellationToken).ConfigureAwait(false); - if (!string.IsNullOrWhiteSpace(sourceKeyId) && !string.Equals(sourceKeyId, keyId, StringComparison.Ordinal)) - { - logger.LogWarning("Imported key material originally identified as '{SourceKeyId}' into '{TargetKeyId}'.", sourceKeyId, keyId); - } - - var activeVersion = metadata.Versions.Length > 0 ? metadata.Versions[^1].VersionId : material.VersionId; - logger.LogInformation("Imported key {KeyId} version {VersionId} into {Root}.", metadata.KeyId, activeVersion, resolvedRoot); - Environment.ExitCode = 0; - } - catch (JsonException ex) - { - logger.LogError(ex, "Failed to parse key material JSON from {Input}.", inputPath); - Environment.ExitCode = 1; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to import key material."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - private static string ResolveRootDirectory(string? rootPath) - => Path.GetFullPath(string.IsNullOrWhiteSpace(rootPath) ? "kms" : rootPath); - - private static string? ResolvePassphrase(string? 
passphrase, string promptMessage) - { - if (!string.IsNullOrWhiteSpace(passphrase)) - { - return passphrase; - } - - var fromEnvironment = Environment.GetEnvironmentVariable(KmsPassphraseEnvironmentVariable); - if (!string.IsNullOrWhiteSpace(fromEnvironment)) - { - return fromEnvironment; - } - - return KmsPassphrasePrompt.Prompt(promptMessage); - } - - private static bool TryDecodeBase64(string text, out byte[] decoded) - { - decoded = Array.Empty(); - if (string.IsNullOrWhiteSpace(text)) - { - return false; - } - - var builder = new StringBuilder(text.Length); - foreach (var ch in text) - { - if (!char.IsWhiteSpace(ch)) - { - builder.Append(ch); - } - } - - var candidate = builder.ToString(); - if (candidate.Length < 8 || candidate.Length % 4 != 0) - { - return false; - } - - for (var i = 0; i < candidate.Length; i++) - { - var c = candidate[i]; - if (!(char.IsLetterOrDigit(c) || c is '+' or '/' or '=')) - { - return false; - } - } - - try - { - decoded = Convert.FromBase64String(candidate); - return true; - } - catch (FormatException) - { - return false; - } - } - - private sealed record IngestInputPayload(string Kind, string Name, string Content, string ContentType, string? ContentEncoding); - - private sealed record DocumentNormalizationResult(string Content, string ContentType, string? 
ContentEncoding); - - private static readonly IReadOnlyDictionary AocViolationExitCodeMap = new Dictionary(StringComparer.OrdinalIgnoreCase) - { - ["ERR_AOC_001"] = 11, - ["ERR_AOC_002"] = 12, - ["ERR_AOC_003"] = 13, - ["ERR_AOC_004"] = 14, - ["ERR_AOC_005"] = 15, - ["ERR_AOC_006"] = 16, - ["ERR_AOC_007"] = 17 - }; - + } + + private static IEnumerable<(TaskRunnerSimulationStep Step, int Depth)> FlattenTaskRunnerSimulationSteps( + IReadOnlyList steps, + int depth = 0) + { + for (var i = 0; i < steps.Count; i++) + { + var step = steps[i]; + yield return (step, depth); + + foreach (var child in FlattenTaskRunnerSimulationSteps(step.Children, depth + 1)) + { + yield return child; + } + } + } + + private static PolicySimulationOutputFormat DeterminePolicySimulationFormat(string? value, string? outputPath) + { + if (!string.IsNullOrWhiteSpace(value)) + { + return value.Trim().ToLowerInvariant() switch + { + "table" => PolicySimulationOutputFormat.Table, + "json" => PolicySimulationOutputFormat.Json, + _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.") + }; + } + + if (!string.IsNullOrWhiteSpace(outputPath) || Console.IsOutputRedirected) + { + return PolicySimulationOutputFormat.Json; + } + + return PolicySimulationOutputFormat.Table; + } + + private static object BuildPolicySimulationPayload( + string policyId, + int? baseVersion, + int? candidateVersion, + IReadOnlyList sbomSet, + IReadOnlyDictionary environment, + PolicySimulationResult result) + => new + { + policyId, + baseVersion, + candidateVersion, + sbomSet = sbomSet.Count == 0 ? Array.Empty() : sbomSet, + environment = environment.Count == 0 ? 
null : environment, + diff = result.Diff, + explainUri = result.ExplainUri + }; + + private static void RenderPolicySimulationResult( + ILogger logger, + object payload, + PolicySimulationResult result, + PolicySimulationOutputFormat format) + { + if (format == PolicySimulationOutputFormat.Json) + { + var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); + Console.WriteLine(json); + return; + } + + logger.LogInformation( + "Policy diff summary — Added: {Added}, Removed: {Removed}, Unchanged: {Unchanged}.", + result.Diff.Added, + result.Diff.Removed, + result.Diff.Unchanged); + + if (result.Diff.BySeverity.Count > 0) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table().AddColumns("Severity", "Up", "Down"); + foreach (var entry in result.Diff.BySeverity.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)) + { + table.AddRow( + entry.Key, + FormatDelta(entry.Value.Up), + FormatDelta(entry.Value.Down)); + } + + AnsiConsole.Write(table); + } + else + { + foreach (var entry in result.Diff.BySeverity.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)) + { + logger.LogInformation("Severity {Severity}: up={Up}, down={Down}", entry.Key, entry.Value.Up ?? 0, entry.Value.Down ?? 0); + } + } + } + + if (result.Diff.RuleHits.Count > 0) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table().AddColumns("Rule", "Up", "Down"); + foreach (var hit in result.Diff.RuleHits) + { + table.AddRow( + string.IsNullOrWhiteSpace(hit.RuleName) ? hit.RuleId : $"{hit.RuleName} ({hit.RuleId})", + FormatDelta(hit.Up), + FormatDelta(hit.Down)); + } + + AnsiConsole.Write(table); + } + else + { + foreach (var hit in result.Diff.RuleHits) + { + logger.LogInformation("Rule {RuleId}: up={Up}, down={Down}", hit.RuleId, hit.Up ?? 0, hit.Down ?? 
0); + } + } + } + + if (!string.IsNullOrWhiteSpace(result.ExplainUri)) + { + logger.LogInformation("Explain trace available at {ExplainUri}.", result.ExplainUri); + } + } + + private static IReadOnlyList NormalizePolicySbomSet(IReadOnlyList arguments) + { + if (arguments is null || arguments.Count == 0) + { + return EmptyPolicySbomSet; + } + + var set = new SortedSet(StringComparer.Ordinal); + foreach (var raw in arguments) + { + if (string.IsNullOrWhiteSpace(raw)) + { + continue; + } + + var trimmed = raw.Trim(); + if (trimmed.Length > 0) + { + set.Add(trimmed); + } + } + + if (set.Count == 0) + { + return EmptyPolicySbomSet; + } + + var list = set.ToList(); + return new ReadOnlyCollection(list); + } + + private static IReadOnlyDictionary ParsePolicyEnvironment(IReadOnlyList arguments) + { + if (arguments is null || arguments.Count == 0) + { + return EmptyPolicyEnvironment; + } + + var env = new SortedDictionary(StringComparer.Ordinal); + foreach (var raw in arguments) + { + if (string.IsNullOrWhiteSpace(raw)) + { + continue; + } + + var trimmed = raw.Trim(); + var separator = trimmed.IndexOf('='); + if (separator <= 0 || separator == trimmed.Length - 1) + { + throw new ArgumentException($"Invalid environment assignment '{raw}'. Expected key=value."); + } + + var key = trimmed[..separator].Trim().ToLowerInvariant(); + if (string.IsNullOrWhiteSpace(key)) + { + throw new ArgumentException($"Invalid environment assignment '{raw}'. Expected key=value."); + } + + var valueToken = trimmed[(separator + 1)..].Trim(); + env[key] = ParsePolicyEnvironmentValue(valueToken); + } + + return env.Count == 0 ? EmptyPolicyEnvironment : new ReadOnlyDictionary(env); + } + + private static object? 
ParsePolicyEnvironmentValue(string token) + { + if (string.IsNullOrWhiteSpace(token)) + { + return string.Empty; + } + + var value = token; + if ((value.Length >= 2 && value.StartsWith("\"", StringComparison.Ordinal) && value.EndsWith("\"", StringComparison.Ordinal)) || + (value.Length >= 2 && value.StartsWith("'", StringComparison.Ordinal) && value.EndsWith("'", StringComparison.Ordinal))) + { + value = value[1..^1]; + } + + if (string.Equals(value, "null", StringComparison.OrdinalIgnoreCase)) + { + return null; + } + + if (bool.TryParse(value, out var boolResult)) + { + return boolResult; + } + + if (long.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var longResult)) + { + return longResult; + } + + if (double.TryParse(value, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var doubleResult)) + { + return doubleResult; + } + + return value; + } + + private static Task WriteSimulationOutputAsync(string outputPath, object payload, CancellationToken cancellationToken) + => WriteJsonPayloadAsync(outputPath, payload, cancellationToken); + + private static async Task WriteJsonPayloadAsync(string outputPath, object payload, CancellationToken cancellationToken) + { + var fullPath = Path.GetFullPath(outputPath); + var directory = Path.GetDirectoryName(fullPath); + if (!string.IsNullOrWhiteSpace(directory)) + { + Directory.CreateDirectory(directory); + } + + var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); + await File.WriteAllTextAsync(fullPath, json + Environment.NewLine, cancellationToken).ConfigureAwait(false); + } + + private static int DetermineSimulationExitCode(PolicySimulationResult result, bool failOnDiff) + { + if (!failOnDiff) + { + return 0; + } + + return (result.Diff.Added + result.Diff.Removed) > 0 ? 
20 : 0; + } + + private static void HandlePolicySimulationFailure(PolicyApiException exception, ILogger logger) + { + var exitCode = exception.ErrorCode switch + { + "ERR_POL_001" => 10, + "ERR_POL_002" or "ERR_POL_005" => 12, + "ERR_POL_003" => 21, + "ERR_POL_004" => 22, + "ERR_POL_006" => 23, + _ when exception.StatusCode == HttpStatusCode.Forbidden || exception.StatusCode == HttpStatusCode.Unauthorized => 12, + _ => 1 + }; + + if (string.IsNullOrWhiteSpace(exception.ErrorCode)) + { + logger.LogError("Policy simulation failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); + } + else + { + logger.LogError("Policy simulation failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); + } + + CliMetrics.RecordPolicySimulation("error"); + Environment.ExitCode = exitCode; + } + + private static void HandlePolicyActivationFailure(PolicyApiException exception, ILogger logger) + { + var exitCode = exception.ErrorCode switch + { + "ERR_POL_002" => 70, + "ERR_POL_003" => 71, + "ERR_POL_004" => 72, + _ when exception.StatusCode == HttpStatusCode.Forbidden || exception.StatusCode == HttpStatusCode.Unauthorized => 12, + _ => 1 + }; + + if (string.IsNullOrWhiteSpace(exception.ErrorCode)) + { + logger.LogError("Policy activation failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); + } + else + { + logger.LogError("Policy activation failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); + } + + CliMetrics.RecordPolicyActivation("error"); + Environment.ExitCode = exitCode; + } + + private static IReadOnlyList NormalizePolicyFilterValues(string[] values, bool toLower = false) + { + if (values is null || values.Length == 0) + { + return Array.Empty(); + } + + var set = new HashSet(StringComparer.OrdinalIgnoreCase); + var list = new List(); + foreach (var raw in values) + { + var candidate = raw?.Trim(); + if 
(string.IsNullOrWhiteSpace(candidate)) + { + continue; + } + + var normalized = toLower ? candidate.ToLowerInvariant() : candidate; + if (set.Add(normalized)) + { + list.Add(normalized); + } + } + + return list.Count == 0 ? Array.Empty() : list; + } + + private static string? NormalizePolicyPriority(string? priority) + { + if (string.IsNullOrWhiteSpace(priority)) + { + return null; + } + + var normalized = priority.Trim(); + return string.IsNullOrWhiteSpace(normalized) ? null : normalized.ToLowerInvariant(); + } + + private static string NormalizePolicyActivationOutcome(string status) + { + if (string.IsNullOrWhiteSpace(status)) + { + return "unknown"; + } + + return status.Trim().ToLowerInvariant(); + } + + private static int DeterminePolicyActivationExitCode(string outcome) + => string.Equals(outcome, "pending_second_approval", StringComparison.Ordinal) ? 75 : 0; + + private static void RenderPolicyActivationResult(PolicyActivationResult result, PolicyActivationRequest request) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var summary = new Table().Expand(); + summary.Border(TableBorder.Rounded); + summary.AddColumn(new TableColumn("[grey]Field[/]").LeftAligned()); + summary.AddColumn(new TableColumn("[grey]Value[/]").LeftAligned()); + summary.AddRow("Policy", Markup.Escape($"{result.Revision.PolicyId} v{result.Revision.Version}")); + summary.AddRow("Status", FormatActivationStatus(result.Status)); + summary.AddRow("Requires 2 approvals", result.Revision.RequiresTwoPersonApproval ? "[yellow]yes[/]" : "[green]no[/]"); + summary.AddRow("Created (UTC)", Markup.Escape(FormatUpdatedAt(result.Revision.CreatedAt))); + summary.AddRow("Activated (UTC)", result.Revision.ActivatedAt.HasValue + ? 
Markup.Escape(FormatUpdatedAt(result.Revision.ActivatedAt.Value)) + : "[grey](not yet active)[/]"); + + if (request.RunNow) + { + summary.AddRow("Run", "[green]immediate[/]"); + } + else if (request.ScheduledAt.HasValue) + { + summary.AddRow("Scheduled at", Markup.Escape(FormatUpdatedAt(request.ScheduledAt.Value))); + } + + if (!string.IsNullOrWhiteSpace(request.Priority)) + { + summary.AddRow("Priority", Markup.Escape(request.Priority!)); + } + + if (request.Rollback) + { + summary.AddRow("Rollback", "[yellow]yes[/]"); + } + + if (!string.IsNullOrWhiteSpace(request.IncidentId)) + { + summary.AddRow("Incident", Markup.Escape(request.IncidentId!)); + } + + if (!string.IsNullOrWhiteSpace(request.Comment)) + { + summary.AddRow("Note", Markup.Escape(request.Comment!)); + } + + AnsiConsole.Write(summary); + + if (result.Revision.Approvals.Count > 0) + { + var approvalTable = new Table().Title("[grey]Approvals[/]"); + approvalTable.Border(TableBorder.Minimal); + approvalTable.AddColumn(new TableColumn("Actor").LeftAligned()); + approvalTable.AddColumn(new TableColumn("Approved (UTC)").LeftAligned()); + approvalTable.AddColumn(new TableColumn("Comment").LeftAligned()); + + foreach (var approval in result.Revision.Approvals) + { + var comment = string.IsNullOrWhiteSpace(approval.Comment) ? "-" : approval.Comment!; + approvalTable.AddRow( + Markup.Escape(approval.ActorId), + Markup.Escape(FormatUpdatedAt(approval.ApprovedAt)), + Markup.Escape(comment)); + } + + AnsiConsole.Write(approvalTable); + } + else + { + AnsiConsole.MarkupLine("[grey]No activation approvals recorded yet.[/]"); + } + } + else + { + Console.WriteLine(FormattableString.Invariant($"Policy: {result.Revision.PolicyId} v{result.Revision.Version}")); + Console.WriteLine(FormattableString.Invariant($"Status: {NormalizePolicyActivationOutcome(result.Status)}")); + Console.WriteLine(FormattableString.Invariant($"Requires 2 approvals: {(result.Revision.RequiresTwoPersonApproval ? 
"yes" : "no")}")); + Console.WriteLine(FormattableString.Invariant($"Created (UTC): {FormatUpdatedAt(result.Revision.CreatedAt)}")); + Console.WriteLine(FormattableString.Invariant($"Activated (UTC): {(result.Revision.ActivatedAt.HasValue ? FormatUpdatedAt(result.Revision.ActivatedAt.Value) : "(not yet active)")}")); + + if (request.RunNow) + { + Console.WriteLine("Run: immediate"); + } + else if (request.ScheduledAt.HasValue) + { + Console.WriteLine(FormattableString.Invariant($"Scheduled at: {FormatUpdatedAt(request.ScheduledAt.Value)}")); + } + + if (!string.IsNullOrWhiteSpace(request.Priority)) + { + Console.WriteLine(FormattableString.Invariant($"Priority: {request.Priority}")); + } + + if (request.Rollback) + { + Console.WriteLine("Rollback: yes"); + } + + if (!string.IsNullOrWhiteSpace(request.IncidentId)) + { + Console.WriteLine(FormattableString.Invariant($"Incident: {request.IncidentId}")); + } + + if (!string.IsNullOrWhiteSpace(request.Comment)) + { + Console.WriteLine(FormattableString.Invariant($"Note: {request.Comment}")); + } + + if (result.Revision.Approvals.Count == 0) + { + Console.WriteLine("Approvals: none"); + } + else + { + foreach (var approval in result.Revision.Approvals) + { + var comment = string.IsNullOrWhiteSpace(approval.Comment) ? "-" : approval.Comment; + Console.WriteLine(FormattableString.Invariant($"Approval: {approval.ActorId} at {FormatUpdatedAt(approval.ApprovedAt)} ({comment})")); + } + } + } + } + + private static string FormatActivationStatus(string status) + { + var normalized = NormalizePolicyActivationOutcome(status); + return normalized switch + { + "activated" => "[green]activated[/]", + "already_active" => "[yellow]already_active[/]", + "pending_second_approval" => "[yellow]pending_second_approval[/]", + _ => "[red]" + Markup.Escape(string.IsNullOrWhiteSpace(status) ? "unknown" : status) + "[/]" + }; + } + + private static DateTimeOffset? ParsePolicySince(string? 
value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + if (DateTimeOffset.TryParse( + value.Trim(), + CultureInfo.InvariantCulture, + DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, + out var parsed)) + { + return parsed.ToUniversalTime(); + } + + throw new ArgumentException("Invalid --since value. Use an ISO-8601 timestamp."); + } + + private static string? NormalizeExplainMode(string? mode) + => string.IsNullOrWhiteSpace(mode) ? null : mode.Trim().ToLowerInvariant(); + + private static PolicyFindingsOutputFormat DeterminePolicyFindingsFormat(string? value, string? outputPath) + { + if (!string.IsNullOrWhiteSpace(value)) + { + return value.Trim().ToLowerInvariant() switch + { + "table" => PolicyFindingsOutputFormat.Table, + "json" => PolicyFindingsOutputFormat.Json, + _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.") + }; + } + + if (!string.IsNullOrWhiteSpace(outputPath) || Console.IsOutputRedirected) + { + return PolicyFindingsOutputFormat.Json; + } + + return PolicyFindingsOutputFormat.Table; + } + + private static object BuildPolicyFindingsPayload( + string policyId, + PolicyFindingsQuery query, + PolicyFindingsPage page) + => new + { + policyId, + filters = new + { + sbom = query.SbomIds, + status = query.Statuses, + severity = query.Severities, + cursor = query.Cursor, + page = query.Page, + pageSize = query.PageSize, + since = query.Since?.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture) + }, + items = page.Items.Select(item => new + { + findingId = item.FindingId, + status = item.Status, + severity = new + { + normalized = item.Severity.Normalized, + score = item.Severity.Score + }, + sbomId = item.SbomId, + advisoryIds = item.AdvisoryIds, + vex = item.Vex is null ? 
null : new + { + winningStatementId = item.Vex.WinningStatementId, + source = item.Vex.Source, + status = item.Vex.Status + }, + policyVersion = item.PolicyVersion, + updatedAt = item.UpdatedAt == DateTimeOffset.MinValue ? null : item.UpdatedAt.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture), + runId = item.RunId + }), + nextCursor = page.NextCursor, + totalCount = page.TotalCount + }; + + private static object BuildPolicyFindingPayload(string policyId, PolicyFindingDocument finding) + => new + { + policyId, + finding = new + { + findingId = finding.FindingId, + status = finding.Status, + severity = new + { + normalized = finding.Severity.Normalized, + score = finding.Severity.Score + }, + sbomId = finding.SbomId, + advisoryIds = finding.AdvisoryIds, + vex = finding.Vex is null ? null : new + { + winningStatementId = finding.Vex.WinningStatementId, + source = finding.Vex.Source, + status = finding.Vex.Status + }, + policyVersion = finding.PolicyVersion, + updatedAt = finding.UpdatedAt == DateTimeOffset.MinValue ? null : finding.UpdatedAt.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture), + runId = finding.RunId + } + }; + + private static object BuildPolicyFindingExplainPayload( + string policyId, + string findingId, + string? 
mode, + PolicyFindingExplainResult explain) + => new + { + policyId, + findingId, + mode, + explain = new + { + policyVersion = explain.PolicyVersion, + steps = explain.Steps.Select(step => new + { + rule = step.Rule, + status = step.Status, + action = step.Action, + score = step.Score, + inputs = step.Inputs, + evidence = step.Evidence + }), + sealedHints = explain.SealedHints.Select(hint => hint.Message) + } + }; + + private static void RenderPolicyFindingsTable(ILogger logger, PolicyFindingsPage page) + { + var items = page.Items; + if (items.Count == 0) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + AnsiConsole.MarkupLine("[yellow]No findings matched the provided filters.[/]"); + } + else + { + logger.LogWarning("No findings matched the provided filters."); + } + return; + } + + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table() + .Border(TableBorder.Rounded) + .Centered(); + + table.AddColumn("Finding"); + table.AddColumn("Status"); + table.AddColumn("Severity"); + table.AddColumn("Score"); + table.AddColumn("SBOM"); + table.AddColumn("Advisories"); + table.AddColumn("Updated (UTC)"); + + foreach (var item in items) + { + table.AddRow( + Markup.Escape(item.FindingId), + Markup.Escape(item.Status), + Markup.Escape(item.Severity.Normalized), + Markup.Escape(FormatScore(item.Severity.Score)), + Markup.Escape(item.SbomId), + Markup.Escape(FormatListPreview(item.AdvisoryIds)), + Markup.Escape(FormatUpdatedAt(item.UpdatedAt))); + } + + AnsiConsole.Write(table); + } + else + { + foreach (var item in items) + { + logger.LogInformation( + "{Finding} — Status {Status}, Severity {Severity} ({Score}), SBOM {Sbom}, Updated {Updated}", + item.FindingId, + item.Status, + item.Severity.Normalized, + item.Severity.Score?.ToString("0.00", CultureInfo.InvariantCulture) ?? 
"n/a", + item.SbomId, + FormatUpdatedAt(item.UpdatedAt)); + } + } + + logger.LogInformation("{Count} finding(s).", items.Count); + + if (page.TotalCount.HasValue) + { + logger.LogInformation("Total available: {Total}", page.TotalCount.Value); + } + + if (!string.IsNullOrWhiteSpace(page.NextCursor)) + { + logger.LogInformation("Next cursor: {Cursor}", page.NextCursor); + } + } + + private static void RenderPolicyFindingDetails(ILogger logger, PolicyFindingDocument finding) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table() + .Border(TableBorder.Rounded) + .AddColumn("Field") + .AddColumn("Value"); + + table.AddRow("Finding", Markup.Escape(finding.FindingId)); + table.AddRow("Status", Markup.Escape(finding.Status)); + table.AddRow("Severity", Markup.Escape(FormatSeverity(finding.Severity))); + table.AddRow("SBOM", Markup.Escape(finding.SbomId)); + table.AddRow("Policy Version", Markup.Escape(finding.PolicyVersion.ToString(CultureInfo.InvariantCulture))); + table.AddRow("Updated (UTC)", Markup.Escape(FormatUpdatedAt(finding.UpdatedAt))); + table.AddRow("Run Id", Markup.Escape(string.IsNullOrWhiteSpace(finding.RunId) ? 
"(none)" : finding.RunId)); + table.AddRow("Advisories", Markup.Escape(FormatListPreview(finding.AdvisoryIds))); + table.AddRow("VEX", Markup.Escape(FormatVexMetadata(finding.Vex))); + + AnsiConsole.Write(table); + } + else + { + logger.LogInformation("Finding {Finding}", finding.FindingId); + logger.LogInformation(" Status: {Status}", finding.Status); + logger.LogInformation(" Severity: {Severity}", FormatSeverity(finding.Severity)); + logger.LogInformation(" SBOM: {Sbom}", finding.SbomId); + logger.LogInformation(" Policy version: {Version}", finding.PolicyVersion); + logger.LogInformation(" Updated (UTC): {Updated}", FormatUpdatedAt(finding.UpdatedAt)); + if (!string.IsNullOrWhiteSpace(finding.RunId)) + { + logger.LogInformation(" Run Id: {Run}", finding.RunId); + } + if (finding.AdvisoryIds.Count > 0) + { + logger.LogInformation(" Advisories: {Advisories}", string.Join(", ", finding.AdvisoryIds)); + } + if (!string.IsNullOrWhiteSpace(FormatVexMetadata(finding.Vex))) + { + logger.LogInformation(" VEX: {Vex}", FormatVexMetadata(finding.Vex)); + } + } + } + + private static void RenderPolicyFindingExplain(ILogger logger, PolicyFindingExplainResult explain) + { + if (explain.Steps.Count == 0) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + AnsiConsole.MarkupLine("[yellow]No explain steps were returned.[/]"); + } + else + { + logger.LogWarning("No explain steps were returned."); + } + } + else if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table() + .Border(TableBorder.Rounded) + .AddColumn("Rule") + .AddColumn("Status") + .AddColumn("Action") + .AddColumn("Score") + .AddColumn("Inputs") + .AddColumn("Evidence"); + + foreach (var step in explain.Steps) + { + table.AddRow( + Markup.Escape(step.Rule), + Markup.Escape(step.Status ?? "(n/a)"), + Markup.Escape(step.Action ?? "(n/a)"), + Markup.Escape(step.Score.HasValue ? 
step.Score.Value.ToString("0.00", CultureInfo.InvariantCulture) : "-"), + Markup.Escape(FormatKeyValuePairs(step.Inputs)), + Markup.Escape(FormatKeyValuePairs(step.Evidence))); + } + + AnsiConsole.Write(table); + } + else + { + logger.LogInformation("{Count} explain step(s).", explain.Steps.Count); + foreach (var step in explain.Steps) + { + logger.LogInformation( + "Rule {Rule} — Status {Status}, Action {Action}, Score {Score}, Inputs {Inputs}", + step.Rule, + step.Status ?? "n/a", + step.Action ?? "n/a", + step.Score?.ToString("0.00", CultureInfo.InvariantCulture) ?? "n/a", + FormatKeyValuePairs(step.Inputs)); + + if (step.Evidence is not null && step.Evidence.Count > 0) + { + logger.LogInformation(" Evidence: {Evidence}", FormatKeyValuePairs(step.Evidence)); + } + } + } + + if (explain.SealedHints.Count > 0) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + AnsiConsole.MarkupLine("[grey]Hints:[/]"); + foreach (var hint in explain.SealedHints) + { + AnsiConsole.MarkupLine($" • {Markup.Escape(hint.Message)}"); + } + } + else + { + foreach (var hint in explain.SealedHints) + { + logger.LogInformation("Hint: {Hint}", hint.Message); + } + } + } + } + + private static string FormatSeverity(PolicyFindingSeverity severity) + { + if (severity.Score.HasValue) + { + return FormattableString.Invariant($"{severity.Normalized} ({severity.Score.Value:0.00})"); + } + + return severity.Normalized; + } + + private static string FormatListPreview(IReadOnlyList values) + { + if (values is null || values.Count == 0) + { + return "(none)"; + } + + const int MaxItems = 3; + if (values.Count <= MaxItems) + { + return string.Join(", ", values); + } + + var preview = string.Join(", ", values.Take(MaxItems)); + return FormattableString.Invariant($"{preview} (+{values.Count - MaxItems})"); + } + + private static string FormatUpdatedAt(DateTimeOffset timestamp) + { + if (timestamp == DateTimeOffset.MinValue) + { + return "(unknown)"; + } + + return 
timestamp.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss'Z'", CultureInfo.InvariantCulture); + } + + private static string FormatScore(double? score) + => score.HasValue ? score.Value.ToString("0.00", CultureInfo.InvariantCulture) : "-"; + + private static string FormatKeyValuePairs(IReadOnlyDictionary? values) + { + if (values is null || values.Count == 0) + { + return "(none)"; + } + + return string.Join(", ", values.Select(pair => $"{pair.Key}={pair.Value}")); + } + + private static string FormatVexMetadata(PolicyFindingVexMetadata? value) + { + if (value is null) + { + return "(none)"; + } + + var parts = new List(3); + if (!string.IsNullOrWhiteSpace(value.WinningStatementId)) + { + parts.Add($"winning={value.WinningStatementId}"); + } + + if (!string.IsNullOrWhiteSpace(value.Source)) + { + parts.Add($"source={value.Source}"); + } + + if (!string.IsNullOrWhiteSpace(value.Status)) + { + parts.Add($"status={value.Status}"); + } + + return parts.Count == 0 ? "(none)" : string.Join(", ", parts); + } + + private static void HandlePolicyFindingsFailure(PolicyApiException exception, ILogger logger, Action recordMetric) + { + var exitCode = exception.StatusCode switch + { + HttpStatusCode.Unauthorized or HttpStatusCode.Forbidden => 12, + HttpStatusCode.NotFound => 1, + _ => 1 + }; + + if (string.IsNullOrWhiteSpace(exception.ErrorCode)) + { + logger.LogError("Policy API request failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); + } + else + { + logger.LogError("Policy API request failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); + } + + recordMetric("error"); + Environment.ExitCode = exitCode; + } + + private static string FormatDelta(int? value) + => value.HasValue ? 
value.Value.ToString("N0", CultureInfo.InvariantCulture) : "-"; + + private static readonly JsonSerializerOptions SimulationJsonOptions = + new(JsonSerializerDefaults.Web) { WriteIndented = true }; + + private static readonly IReadOnlyDictionary EmptyPolicyEnvironment = + new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)); + + private static readonly IReadOnlyList EmptyPolicySbomSet = + new ReadOnlyCollection(Array.Empty()); + + private static readonly IReadOnlyDictionary EmptyLabelSelectors = + new ReadOnlyDictionary(new Dictionary(0, StringComparer.OrdinalIgnoreCase)); + + private enum TaskRunnerSimulationOutputFormat + { + Table, + Json + } + + private enum PolicySimulationOutputFormat + { + Table, + Json + } + + private enum PolicyFindingsOutputFormat + { + Table, + Json + } + + + private static string FormatAdditionalValue(object? value) + { + return value switch + { + null => "null", + bool b => b ? "true" : "false", + double d => d.ToString("G17", CultureInfo.InvariantCulture), + float f => f.ToString("G9", CultureInfo.InvariantCulture), + IFormattable formattable => formattable.ToString(null, CultureInfo.InvariantCulture), + _ => value.ToString() ?? string.Empty + }; + } + + + private static IReadOnlyList NormalizeProviders(IReadOnlyList providers) + { + if (providers is null || providers.Count == 0) + { + return Array.Empty(); + } + + var list = new List(); + foreach (var provider in providers) + { + if (!string.IsNullOrWhiteSpace(provider)) + { + list.Add(provider.Trim()); + } + } + + return list.Count == 0 ? Array.Empty() : list; + } + + private static string ResolveTenant(string? tenantOption) + { + if (!string.IsNullOrWhiteSpace(tenantOption)) + { + return tenantOption.Trim(); + } + + var fromEnvironment = Environment.GetEnvironmentVariable("STELLA_TENANT"); + return string.IsNullOrWhiteSpace(fromEnvironment) ? 
string.Empty : fromEnvironment.Trim(); + } + + private static async Task LoadIngestInputAsync(IServiceProvider services, string input, CancellationToken cancellationToken) + { + if (Uri.TryCreate(input, UriKind.Absolute, out var uri) && + (uri.Scheme.Equals(Uri.UriSchemeHttp, StringComparison.OrdinalIgnoreCase) || + uri.Scheme.Equals(Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase))) + { + return await LoadIngestInputFromHttpAsync(services, uri, cancellationToken).ConfigureAwait(false); + } + + return await LoadIngestInputFromFileAsync(input, cancellationToken).ConfigureAwait(false); + } + + private static async Task LoadIngestInputFromHttpAsync(IServiceProvider services, Uri uri, CancellationToken cancellationToken) + { + var httpClientFactory = services.GetRequiredService(); + var httpClient = httpClientFactory.CreateClient("stellaops-cli.ingest-download"); + using var response = await httpClient.GetAsync(uri, cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + throw new InvalidOperationException($"Failed to download document from {uri} (HTTP {(int)response.StatusCode})."); + } + + var contentType = response.Content.Headers.ContentType?.MediaType ?? "application/json"; + var contentEncoding = response.Content.Headers.ContentEncoding is { Count: > 0 } + ? 
string.Join(",", response.Content.Headers.ContentEncoding) + : null; + + var bytes = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); + var normalized = NormalizeDocument(bytes, contentType, contentEncoding); + + return new IngestInputPayload( + "uri", + uri.ToString(), + normalized.Content, + normalized.ContentType, + normalized.ContentEncoding); + } + + private static async Task LoadIngestInputFromFileAsync(string path, CancellationToken cancellationToken) + { + var fullPath = Path.GetFullPath(path); + if (!File.Exists(fullPath)) + { + throw new FileNotFoundException("Input document not found.", fullPath); + } + + var bytes = await File.ReadAllBytesAsync(fullPath, cancellationToken).ConfigureAwait(false); + var normalized = NormalizeDocument(bytes, GuessContentTypeFromExtension(fullPath), null); + + return new IngestInputPayload( + "file", + Path.GetFileName(fullPath), + normalized.Content, + normalized.ContentType, + normalized.ContentEncoding); + } + + private static DocumentNormalizationResult NormalizeDocument(byte[] bytes, string? contentType, string? 
encodingHint) + { + if (bytes is null || bytes.Length == 0) + { + throw new InvalidOperationException("Input document is empty."); + } + + var working = bytes; + var encodings = new List(); + if (!string.IsNullOrWhiteSpace(encodingHint)) + { + encodings.Add(encodingHint); + } + + if (IsGzip(working)) + { + working = DecompressGzip(working); + encodings.Add("gzip"); + } + + var text = DecodeText(working); + var trimmed = text.TrimStart(); + + if (!string.IsNullOrWhiteSpace(trimmed) && trimmed[0] != '{' && trimmed[0] != '[') + { + if (TryDecodeBase64(text, out var decodedBytes)) + { + working = decodedBytes; + encodings.Add("base64"); + + if (IsGzip(working)) + { + working = DecompressGzip(working); + encodings.Add("gzip"); + } + + text = DecodeText(working); + } + } + + text = text.Trim(); + if (string.IsNullOrWhiteSpace(text)) + { + throw new InvalidOperationException("Input document contained no data after decoding."); + } + + var encodingLabel = encodings.Count == 0 ? null : string.Join("+", encodings); + var finalContentType = string.IsNullOrWhiteSpace(contentType) ? "application/json" : contentType; + + return new DocumentNormalizationResult(text, finalContentType, encodingLabel); + } + + private static string GuessContentTypeFromExtension(string path) + { + var extension = Path.GetExtension(path); + if (string.IsNullOrWhiteSpace(extension)) + { + return "application/json"; + } + + return extension.ToLowerInvariant() switch + { + ".json" or ".csaf" => "application/json", + ".xml" => "application/xml", + _ => "application/json" + }; + } + + private static DateTimeOffset DetermineVerificationSince(string? 
sinceOption) + { + if (string.IsNullOrWhiteSpace(sinceOption)) + { + return DateTimeOffset.UtcNow.AddHours(-24); + } + + var trimmed = sinceOption.Trim(); + + if (DateTimeOffset.TryParse( + trimmed, + CultureInfo.InvariantCulture, + DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, + out var parsedTimestamp)) + { + return parsedTimestamp.ToUniversalTime(); + } + + if (TryParseRelativeDuration(trimmed, out var duration)) + { + return DateTimeOffset.UtcNow.Subtract(duration); + } + + throw new InvalidOperationException("Invalid --since value. Use ISO-8601 timestamp or duration (e.g. 24h, 7d)."); + } + + private static bool TryParseRelativeDuration(string value, out TimeSpan duration) + { + duration = TimeSpan.Zero; + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + var normalized = value.Trim().ToLowerInvariant(); + if (normalized.Length < 2) + { + return false; + } + + var suffix = normalized[^1]; + var magnitudeText = normalized[..^1]; + + double multiplier = suffix switch + { + 's' => 1, + 'm' => 60, + 'h' => 3600, + 'd' => 86400, + 'w' => 604800, + _ => 0 + }; + + if (multiplier == 0) + { + return false; + } + + if (!double.TryParse(magnitudeText, NumberStyles.Float, CultureInfo.InvariantCulture, out var magnitude)) + { + return false; + } + + if (double.IsNaN(magnitude) || double.IsInfinity(magnitude) || magnitude <= 0) + { + return false; + } + + var seconds = magnitude * multiplier; + if (double.IsNaN(seconds) || double.IsInfinity(seconds) || seconds <= 0) + { + return false; + } + + duration = TimeSpan.FromSeconds(seconds); + return true; + } + + private static int NormalizeLimit(int? limitOption) + { + if (!limitOption.HasValue) + { + return 20; + } + + if (limitOption.Value < 0) + { + throw new InvalidOperationException("Limit cannot be negative."); + } + + return limitOption.Value; + } + + private static IReadOnlyList ParseCommaSeparatedList(string? 
raw) + { + if (string.IsNullOrWhiteSpace(raw)) + { + return Array.Empty(); + } + + var tokens = raw + .Split(',', StringSplitOptions.RemoveEmptyEntries) + .Select(token => token.Trim()) + .Where(token => token.Length > 0) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + return tokens.Length == 0 ? Array.Empty() : tokens; + } + + private static string FormatWindowRange(AocVerifyWindow? window) + { + if (window is null) + { + return "(unspecified)"; + } + + var fromText = window.From?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture) ?? "(unknown)"; + var toText = window.To?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture) ?? "(unknown)"; + return $"{fromText} -> {toText}"; + } + + private static string FormatCheckedCounts(AocVerifyChecked? checkedCounts) + { + if (checkedCounts is null) + { + return "(unspecified)"; + } + + return $"advisories: {checkedCounts.Advisories.ToString("N0", CultureInfo.InvariantCulture)}, vex: {checkedCounts.Vex.ToString("N0", CultureInfo.InvariantCulture)}"; + } + + private static string DetermineVerifyStatus(AocVerifyResponse? response) + { + if (response is null) + { + return "unknown"; + } + + if (response.Truncated == true && (response.Violations is null || response.Violations.Count == 0)) + { + return "truncated"; + } + + var total = response.Violations?.Sum(violation => Math.Max(0, violation?.Count ?? 0)) ?? 0; + return total > 0 ? "violations" : "ok"; + } + + private static string FormatBoolean(bool value, bool useColor) + { + var text = value ? "yes" : "no"; + if (!useColor) + { + return text; + } + + return value + ? $"[yellow]{text}[/]" + : $"[green]{text}[/]"; + } + + private static string FormatVerifyStatus(string? status, bool useColor) + { + var normalized = string.IsNullOrWhiteSpace(status) ? 
"unknown" : status.Trim(); + var escaped = Markup.Escape(normalized); + if (!useColor) + { + return escaped; + } + + return normalized switch + { + "ok" => $"[green]{escaped}[/]", + "violations" => $"[red]{escaped}[/]", + "truncated" => $"[yellow]{escaped}[/]", + _ => $"[grey]{escaped}[/]" + }; + } + + private static string FormatViolationExample(AocVerifyViolationExample? example) + { + if (example is null) + { + return "(n/a)"; + } + + var parts = new List(); + if (!string.IsNullOrWhiteSpace(example.Source)) + { + parts.Add(example.Source.Trim()); + } + + if (!string.IsNullOrWhiteSpace(example.DocumentId)) + { + parts.Add(example.DocumentId.Trim()); + } + + var label = parts.Count == 0 ? "(n/a)" : string.Join(" | ", parts); + if (!string.IsNullOrWhiteSpace(example.ContentHash)) + { + label = $"{label} [{example.ContentHash.Trim()}]"; + } + + return label; + } + + private static void RenderAocVerifyTable(AocVerifyResponse response, bool useColor, int limit) + { + var summary = new Table().Border(TableBorder.Rounded); + summary.AddColumn("Field"); + summary.AddColumn("Value"); + + summary.AddRow("Tenant", Markup.Escape(string.IsNullOrWhiteSpace(response?.Tenant) ? "(unknown)" : response.Tenant!)); + summary.AddRow("Window", Markup.Escape(FormatWindowRange(response?.Window))); + summary.AddRow("Checked", Markup.Escape(FormatCheckedCounts(response?.Checked))); + + summary.AddRow("Limit", Markup.Escape(limit <= 0 ? 
"unbounded" : limit.ToString(CultureInfo.InvariantCulture))); + summary.AddRow("Status", FormatVerifyStatus(DetermineVerifyStatus(response), useColor)); + + if (response?.Metrics?.IngestionWriteTotal is int writes) + { + summary.AddRow("Ingestion Writes", Markup.Escape(writes.ToString("N0", CultureInfo.InvariantCulture))); + } + + if (response?.Metrics?.AocViolationTotal is int totalViolations) + { + summary.AddRow("Violations (total)", Markup.Escape(totalViolations.ToString("N0", CultureInfo.InvariantCulture))); + } + else + { + var computedViolations = response?.Violations?.Sum(violation => Math.Max(0, violation?.Count ?? 0)) ?? 0; + summary.AddRow("Violations (total)", Markup.Escape(computedViolations.ToString("N0", CultureInfo.InvariantCulture))); + } + + summary.AddRow("Truncated", FormatBoolean(response?.Truncated == true, useColor)); + + AnsiConsole.Write(summary); + + if (response?.Violations is null || response.Violations.Count == 0) + { + var message = response?.Truncated == true + ? "No violations reported, but results were truncated. Increase --limit to review full output." + : "No AOC violations detected in the requested window."; + + if (useColor) + { + var color = response?.Truncated == true ? 
"yellow" : "green"; + AnsiConsole.MarkupLine($"[{color}]{Markup.Escape(message)}[/]"); + } + else + { + Console.WriteLine(message); + } + + return; + } + + var violationTable = new Table().Border(TableBorder.Rounded); + violationTable.AddColumn("Code"); + violationTable.AddColumn("Count"); + violationTable.AddColumn("Sample Document"); + violationTable.AddColumn("Path"); + + foreach (var violation in response.Violations) + { + var codeDisplay = FormatViolationCode(violation.Code, useColor); + var countDisplay = violation.Count.ToString("N0", CultureInfo.InvariantCulture); + var example = violation.Examples?.FirstOrDefault(); + var documentDisplay = Markup.Escape(FormatViolationExample(example)); + var pathDisplay = example is null || string.IsNullOrWhiteSpace(example.Path) + ? "(none)" + : example.Path!; + + violationTable.AddRow(codeDisplay, countDisplay, documentDisplay, Markup.Escape(pathDisplay)); + } + + AnsiConsole.Write(violationTable); +} + + private static int DetermineVerifyExitCode(AocVerifyResponse response) + { + ArgumentNullException.ThrowIfNull(response); + + if (response.Violations is not null && response.Violations.Count > 0) + { + var exitCodes = new List(); + foreach (var violation in response.Violations) + { + if (string.IsNullOrWhiteSpace(violation.Code)) + { + continue; + } + + if (AocViolationExitCodeMap.TryGetValue(violation.Code, out var mapped)) + { + exitCodes.Add(mapped); + } + } + + if (exitCodes.Count > 0) + { + return exitCodes.Min(); + } + + return response.Truncated == true ? 
18 : 17; + } + + if (response.Truncated == true) + { + return 18; + } + + return 0; + } + + private static async Task WriteJsonReportAsync(T payload, string destination, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(payload); + + if (string.IsNullOrWhiteSpace(destination)) + { + throw new InvalidOperationException("Output path must be provided."); + } + + var outputPath = Path.GetFullPath(destination); + var directory = Path.GetDirectoryName(outputPath); + if (!string.IsNullOrWhiteSpace(directory)) + { + Directory.CreateDirectory(directory); + } + + var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions + { + WriteIndented = true + }); + + await File.WriteAllTextAsync(outputPath, json, cancellationToken).ConfigureAwait(false); + return outputPath; + } + + private static void RenderDryRunTable(AocIngestDryRunResponse response, bool useColor) + { + var summary = new Table().Border(TableBorder.Rounded); + summary.AddColumn("Field"); + summary.AddColumn("Value"); + + summary.AddRow("Source", Markup.Escape(response?.Source ?? "(unknown)")); + summary.AddRow("Tenant", Markup.Escape(response?.Tenant ?? "(unknown)")); + summary.AddRow("Guard Version", Markup.Escape(response?.GuardVersion ?? "(unknown)")); + summary.AddRow("Status", FormatStatusMarkup(response?.Status, useColor)); + + var violationCount = response?.Violations?.Count ?? 0; + summary.AddRow("Violations", violationCount.ToString(CultureInfo.InvariantCulture)); + + if (!string.IsNullOrWhiteSpace(response?.Document?.ContentHash)) + { + summary.AddRow("Content Hash", Markup.Escape(response.Document.ContentHash!)); + } + + if (!string.IsNullOrWhiteSpace(response?.Document?.Supersedes)) + { + summary.AddRow("Supersedes", Markup.Escape(response.Document.Supersedes!)); + } + + if (!string.IsNullOrWhiteSpace(response?.Document?.Provenance?.Signature?.Format)) + { + var signature = response.Document.Provenance.Signature; + var summaryText = signature!.Present + ? 
signature.Format ?? "present" + : "missing"; + summary.AddRow("Signature", Markup.Escape(summaryText)); + } + + AnsiConsole.Write(summary); + + if (violationCount == 0) + { + if (useColor) + { + AnsiConsole.MarkupLine("[green]No AOC violations detected.[/]"); + } + else + { + Console.WriteLine("No AOC violations detected."); + } + + return; + } + + var violationTable = new Table().Border(TableBorder.Rounded); + violationTable.AddColumn("Code"); + violationTable.AddColumn("Path"); + violationTable.AddColumn("Message"); + + foreach (var violation in response!.Violations!) + { + var codeDisplay = FormatViolationCode(violation.Code, useColor); + var pathDisplay = string.IsNullOrWhiteSpace(violation.Path) ? "(root)" : violation.Path!; + var messageDisplay = string.IsNullOrWhiteSpace(violation.Message) ? "(unspecified)" : violation.Message!; + violationTable.AddRow(codeDisplay, Markup.Escape(pathDisplay), Markup.Escape(messageDisplay)); + } + + AnsiConsole.Write(violationTable); + } + + private static int DetermineDryRunExitCode(AocIngestDryRunResponse response) + { + if (response?.Violations is null || response.Violations.Count == 0) + { + return 0; + } + + var exitCodes = new List(); + foreach (var violation in response.Violations) + { + if (string.IsNullOrWhiteSpace(violation.Code)) + { + continue; + } + + if (AocViolationExitCodeMap.TryGetValue(violation.Code, out var mapped)) + { + exitCodes.Add(mapped); + } + } + + if (exitCodes.Count == 0) + { + return 17; + } + + return exitCodes.Min(); + } + + private static string FormatStatusMarkup(string? status, bool useColor) + { + var normalized = string.IsNullOrWhiteSpace(status) ? "unknown" : status.Trim(); + if (!useColor) + { + return Markup.Escape(normalized); + } + + return normalized.Equals("ok", StringComparison.OrdinalIgnoreCase) + ? 
$"[green]{Markup.Escape(normalized)}[/]" + : $"[red]{Markup.Escape(normalized)}[/]"; + } + + private static string FormatViolationCode(string code, bool useColor) + { + var sanitized = string.IsNullOrWhiteSpace(code) ? "(unknown)" : code.Trim(); + if (!useColor) + { + return Markup.Escape(sanitized); + } + + return $"[red]{Markup.Escape(sanitized)}[/]"; + } + + private static bool IsGzip(ReadOnlySpan data) + { + return data.Length >= 2 && data[0] == 0x1F && data[1] == 0x8B; + } + + private static byte[] DecompressGzip(byte[] payload) + { + using var input = new MemoryStream(payload); + using var gzip = new GZipStream(input, CompressionMode.Decompress); + using var output = new MemoryStream(); + gzip.CopyTo(output); + return output.ToArray(); + } + + private static string DecodeText(byte[] payload) + { + var encoding = DetectEncoding(payload); + return encoding.GetString(payload); + } + + private static Encoding DetectEncoding(ReadOnlySpan data) + { + if (data.Length >= 4) + { + if (data[0] == 0x00 && data[1] == 0x00 && data[2] == 0xFE && data[3] == 0xFF) + { + return new UTF32Encoding(bigEndian: true, byteOrderMark: true); + } + + if (data[0] == 0xFF && data[1] == 0xFE && data[2] == 0x00 && data[3] == 0x00) + { + return new UTF32Encoding(bigEndian: false, byteOrderMark: true); + } + } + + if (data.Length >= 2) + { + if (data[0] == 0xFE && data[1] == 0xFF) + { + return Encoding.BigEndianUnicode; + } + + if (data[0] == 0xFF && data[1] == 0xFE) + { + return Encoding.Unicode; + } + } + + if (data.Length >= 3 && data[0] == 0xEF && data[1] == 0xBB && data[2] == 0xBF) + { + return Encoding.UTF8; + } + + return Encoding.UTF8; + } + + public static async Task HandleKmsExportAsync( + IServiceProvider services, + string? rootPath, + string keyId, + string? versionId, + string outputPath, + bool overwrite, + string? 
passphrase, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("kms-export"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + + try + { + var resolvedPassphrase = ResolvePassphrase(passphrase, "Enter file KMS passphrase:"); + if (string.IsNullOrEmpty(resolvedPassphrase)) + { + logger.LogError("KMS passphrase must be supplied via --passphrase, {EnvironmentVariable}, or interactive prompt.", KmsPassphraseEnvironmentVariable); + Environment.ExitCode = 1; + return; + } + + var resolvedRoot = ResolveRootDirectory(rootPath); + if (!Directory.Exists(resolvedRoot)) + { + logger.LogError("KMS root directory '{Root}' does not exist.", resolvedRoot); + Environment.ExitCode = 1; + return; + } + + var outputFullPath = Path.GetFullPath(string.IsNullOrWhiteSpace(outputPath) ? "kms-export.json" : outputPath); + if (Directory.Exists(outputFullPath)) + { + logger.LogError("Output path '{Output}' is a directory. Provide a file path.", outputFullPath); + Environment.ExitCode = 1; + return; + } + + if (!overwrite && File.Exists(outputFullPath)) + { + logger.LogError("Output file '{Output}' already exists. Use --force to overwrite.", outputFullPath); + Environment.ExitCode = 1; + return; + } + + var outputDirectory = Path.GetDirectoryName(outputFullPath); + if (!string.IsNullOrEmpty(outputDirectory)) + { + Directory.CreateDirectory(outputDirectory); + } + + using var client = new FileKmsClient(new FileKmsOptions + { + RootPath = resolvedRoot, + Password = resolvedPassphrase! 
+ }); + + var material = await client.ExportAsync(keyId, versionId, cancellationToken).ConfigureAwait(false); + var json = JsonSerializer.Serialize(material, KmsJsonOptions); + await File.WriteAllTextAsync(outputFullPath, json, cancellationToken).ConfigureAwait(false); + + logger.LogInformation("Exported key {KeyId} version {VersionId} to {Output}.", material.KeyId, material.VersionId, outputFullPath); + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to export key material."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleKmsImportAsync( + IServiceProvider services, + string? rootPath, + string keyId, + string inputPath, + string? versionOverride, + string? passphrase, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("kms-import"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + + try + { + var resolvedPassphrase = ResolvePassphrase(passphrase, "Enter file KMS passphrase:"); + if (string.IsNullOrEmpty(resolvedPassphrase)) + { + logger.LogError("KMS passphrase must be supplied via --passphrase, {EnvironmentVariable}, or interactive prompt.", KmsPassphraseEnvironmentVariable); + Environment.ExitCode = 1; + return; + } + + var resolvedRoot = ResolveRootDirectory(rootPath); + Directory.CreateDirectory(resolvedRoot); + + var inputFullPath = Path.GetFullPath(inputPath ?? 
string.Empty); + if (!File.Exists(inputFullPath)) + { + logger.LogError("Input file '{Input}' does not exist.", inputFullPath); + Environment.ExitCode = 1; + return; + } + + var json = await File.ReadAllTextAsync(inputFullPath, cancellationToken).ConfigureAwait(false); + var material = JsonSerializer.Deserialize(json, KmsJsonOptions) + ?? throw new InvalidOperationException("Key material payload is empty."); + + if (!string.IsNullOrWhiteSpace(versionOverride)) + { + material = material with { VersionId = versionOverride }; + } + + var sourceKeyId = material.KeyId; + material = material with { KeyId = keyId }; + + using var client = new FileKmsClient(new FileKmsOptions + { + RootPath = resolvedRoot, + Password = resolvedPassphrase! + }); + + var metadata = await client.ImportAsync(keyId, material, cancellationToken).ConfigureAwait(false); + if (!string.IsNullOrWhiteSpace(sourceKeyId) && !string.Equals(sourceKeyId, keyId, StringComparison.Ordinal)) + { + logger.LogWarning("Imported key material originally identified as '{SourceKeyId}' into '{TargetKeyId}'.", sourceKeyId, keyId); + } + + var activeVersion = metadata.Versions.Length > 0 ? metadata.Versions[^1].VersionId : material.VersionId; + logger.LogInformation("Imported key {KeyId} version {VersionId} into {Root}.", metadata.KeyId, activeVersion, resolvedRoot); + Environment.ExitCode = 0; + } + catch (JsonException ex) + { + logger.LogError(ex, "Failed to parse key material JSON from {Input}.", inputPath); + Environment.ExitCode = 1; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to import key material."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static string ResolveRootDirectory(string? rootPath) + => Path.GetFullPath(string.IsNullOrWhiteSpace(rootPath) ? "kms" : rootPath); + + private static string? ResolvePassphrase(string? 
passphrase, string promptMessage) + { + if (!string.IsNullOrWhiteSpace(passphrase)) + { + return passphrase; + } + + var fromEnvironment = Environment.GetEnvironmentVariable(KmsPassphraseEnvironmentVariable); + if (!string.IsNullOrWhiteSpace(fromEnvironment)) + { + return fromEnvironment; + } + + return KmsPassphrasePrompt.Prompt(promptMessage); + } + + private static bool TryDecodeBase64(string text, out byte[] decoded) + { + decoded = Array.Empty(); + if (string.IsNullOrWhiteSpace(text)) + { + return false; + } + + var builder = new StringBuilder(text.Length); + foreach (var ch in text) + { + if (!char.IsWhiteSpace(ch)) + { + builder.Append(ch); + } + } + + var candidate = builder.ToString(); + if (candidate.Length < 8 || candidate.Length % 4 != 0) + { + return false; + } + + for (var i = 0; i < candidate.Length; i++) + { + var c = candidate[i]; + if (!(char.IsLetterOrDigit(c) || c is '+' or '/' or '=')) + { + return false; + } + } + + try + { + decoded = Convert.FromBase64String(candidate); + return true; + } + catch (FormatException) + { + return false; + } + } + + private sealed record IngestInputPayload(string Kind, string Name, string Content, string ContentType, string? ContentEncoding); + + private sealed record DocumentNormalizationResult(string Content, string ContentType, string? 
ContentEncoding); + + private static readonly IReadOnlyDictionary AocViolationExitCodeMap = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["ERR_AOC_001"] = 11, + ["ERR_AOC_002"] = 12, + ["ERR_AOC_003"] = 13, + ["ERR_AOC_004"] = 14, + ["ERR_AOC_005"] = 15, + ["ERR_AOC_006"] = 16, + ["ERR_AOC_007"] = 17 + }; + private static string[] NormalizeSections(IReadOnlyList sections) { if (sections is null || sections.Count == 0) @@ -6441,15 +6447,15 @@ internal static class CommandHandlers } private static IDictionary RemoveNullValues(Dictionary source) - { - foreach (var key in source.Where(kvp => kvp.Value is null).Select(kvp => kvp.Key).ToList()) - { - source.Remove(key); - } - - return source; - } - + { + foreach (var key in source.Where(kvp => kvp.Value is null).Select(kvp => kvp.Key).ToList()) + { + source.Remove(key); + } + + return source; + } + private static async Task TriggerJobAsync( IBackendOperationsClient client, ILogger logger, @@ -6556,6 +6562,451 @@ internal static class CommandHandlers return Task.CompletedTask; } + public static Task HandleNodeLockValidateAsync( + IServiceProvider services, + string? rootPath, + string format, + bool verbose, + CancellationToken cancellationToken) + => HandleLanguageLockValidateAsync( + services, + loggerCategory: "node-lock-validate", + activityName: "cli.node.lock_validate", + rootTag: "stellaops.cli.node.root", + declaredTag: "stellaops.cli.node.declared_only", + missingTag: "stellaops.cli.node.lock_missing", + commandName: "node lock-validate", + analyzer: new NodeLanguageAnalyzer(), + rootPath: rootPath, + format: format, + verbose: verbose, + cancellationToken: cancellationToken, + telemetryRecorder: CliMetrics.RecordNodeLockValidate); + + public static Task HandlePythonLockValidateAsync( + IServiceProvider services, + string? 
rootPath, + string format, + bool verbose, + CancellationToken cancellationToken) + => HandleLanguageLockValidateAsync( + services, + loggerCategory: "python-lock-validate", + activityName: "cli.python.lock_validate", + rootTag: "stellaops.cli.python.root", + declaredTag: "stellaops.cli.python.declared_only", + missingTag: "stellaops.cli.python.lock_missing", + commandName: "python lock-validate", + analyzer: new PythonLanguageAnalyzer(), + rootPath: rootPath, + format: format, + verbose: verbose, + cancellationToken: cancellationToken, + telemetryRecorder: CliMetrics.RecordPythonLockValidate); + + public static Task HandleJavaLockValidateAsync( + IServiceProvider services, + string? rootPath, + string format, + bool verbose, + CancellationToken cancellationToken) + => HandleLanguageLockValidateAsync( + services, + loggerCategory: "java-lock-validate", + activityName: "cli.java.lock_validate", + rootTag: "stellaops.cli.java.root", + declaredTag: "stellaops.cli.java.declared_only", + missingTag: "stellaops.cli.java.lock_missing", + commandName: "java lock-validate", + analyzer: new JavaLanguageAnalyzer(), + rootPath: rootPath, + format: format, + verbose: verbose, + cancellationToken: cancellationToken, + telemetryRecorder: CliMetrics.RecordJavaLockValidate); + + private static async Task HandleLanguageLockValidateAsync( + IServiceProvider services, + string loggerCategory, + string activityName, + string rootTag, + string declaredTag, + string missingTag, + string commandName, + ILanguageAnalyzer analyzer, + string? rootPath, + string format, + bool verbose, + CancellationToken cancellationToken, + Action telemetryRecorder) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger(loggerCategory); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + + using var activity = CliActivitySource.Instance.StartActivity(activityName, ActivityKind.Internal); + using var duration = CliMetrics.MeasureCommandDuration(commandName); + var outcome = "unknown"; + + try + { + var normalizedFormat = string.IsNullOrWhiteSpace(format) + ? "table" + : format.Trim().ToLowerInvariant(); + + if (normalizedFormat is not ("table" or "json")) + { + throw new InvalidOperationException("Format must be either 'table' or 'json'."); + } + + var targetRoot = string.IsNullOrWhiteSpace(rootPath) + ? Directory.GetCurrentDirectory() + : Path.GetFullPath(rootPath); + + if (!Directory.Exists(targetRoot)) + { + throw new DirectoryNotFoundException($"Directory '{targetRoot}' was not found."); + } + + logger.LogInformation("Validating lockfiles in {Root}.", targetRoot); + activity?.SetTag(rootTag, targetRoot); + + var engine = new LanguageAnalyzerEngine(new[] { analyzer }); + var context = new LanguageAnalyzerContext(targetRoot, TimeProvider.System); + var result = await engine.AnalyzeAsync(context, cancellationToken).ConfigureAwait(false); + var report = LockValidationReport.Create(result.ToSnapshots()); + + activity?.SetTag(declaredTag, report.DeclaredOnly.Count); + activity?.SetTag(missingTag, report.MissingLockMetadata.Count); + + if (string.Equals(normalizedFormat, "json", StringComparison.Ordinal)) + { + var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + WriteIndented = true + }; + Console.WriteLine(JsonSerializer.Serialize(report, options)); + } + else + { + RenderLockValidationReport(report); + } + + outcome = report.HasIssues ? "violations" : "ok"; + Environment.ExitCode = report.HasIssues ? 
1 : 0; + } + catch (DirectoryNotFoundException ex) + { + outcome = "not_found"; + logger.LogError(ex.Message); + Environment.ExitCode = 71; + } + catch (Exception ex) + { + outcome = "error"; + logger.LogError(ex, "Lock validation failed."); + Environment.ExitCode = 70; + } + finally + { + verbosity.MinimumLevel = previousLevel; + telemetryRecorder(outcome); + } + } + + private static void RenderLockValidationReport(LockValidationReport report) + { + if (!report.HasIssues) + { + AnsiConsole.MarkupLine("[green]Lockfiles match installed packages.[/]"); + AnsiConsole.MarkupLine($"[grey]Declared components: {report.TotalDeclared}, Installed: {report.TotalInstalled}[/]"); + return; + } + + var table = new Table().Border(TableBorder.Rounded); + table.AddColumn("Status"); + table.AddColumn("Package"); + table.AddColumn("Version"); + table.AddColumn("Source"); + table.AddColumn("Locator"); + table.AddColumn("Path"); + + foreach (var entry in report.DeclaredOnly) + { + table.AddRow( + "[red]Declared Only[/]", + Markup.Escape(entry.Name), + Markup.Escape(entry.Version ?? "-"), + Markup.Escape(entry.LockSource ?? "-"), + Markup.Escape(entry.LockLocator ?? "-"), + Markup.Escape(entry.Path)); + } + + foreach (var entry in report.MissingLockMetadata) + { + table.AddRow( + "[yellow]Missing Lock[/]", + Markup.Escape(entry.Name), + Markup.Escape(entry.Version ?? "-"), + "-", + "-", + Markup.Escape(entry.Path)); + } + + AnsiConsole.Write(table); + AnsiConsole.MarkupLine($"[grey]Declared components: {report.TotalDeclared}, Installed: {report.TotalInstalled}[/]"); + } + + public static async Task HandleRubyInspectAsync( + IServiceProvider services, + string? 
rootPath, + string format, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("ruby-inspect"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + + using var activity = CliActivitySource.Instance.StartActivity("cli.ruby.inspect", ActivityKind.Internal); + activity?.SetTag("stellaops.cli.command", "ruby inspect"); + using var duration = CliMetrics.MeasureCommandDuration("ruby inspect"); + + var outcome = "unknown"; + try + { + var normalizedFormat = string.IsNullOrWhiteSpace(format) + ? "table" + : format.Trim().ToLowerInvariant(); + if (normalizedFormat is not ("table" or "json")) + { + throw new InvalidOperationException("Format must be either 'table' or 'json'."); + } + + var targetRoot = string.IsNullOrWhiteSpace(rootPath) + ? 
Directory.GetCurrentDirectory() + : Path.GetFullPath(rootPath); + if (!Directory.Exists(targetRoot)) + { + throw new DirectoryNotFoundException($"Directory '{targetRoot}' was not found."); + } + + logger.LogInformation("Inspecting Ruby workspace in {Root}.", targetRoot); + activity?.SetTag("stellaops.cli.ruby.root", targetRoot); + + var engine = new LanguageAnalyzerEngine(new ILanguageAnalyzer[] { new RubyLanguageAnalyzer() }); + var context = new LanguageAnalyzerContext(targetRoot, TimeProvider.System); + var result = await engine.AnalyzeAsync(context, cancellationToken).ConfigureAwait(false); + var report = RubyInspectReport.Create(result.ToSnapshots()); + + activity?.SetTag("stellaops.cli.ruby.package_count", report.Packages.Count); + + if (string.Equals(normalizedFormat, "json", StringComparison.Ordinal)) + { + var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + WriteIndented = true + }; + Console.WriteLine(JsonSerializer.Serialize(report, options)); + } + else + { + RenderRubyInspectReport(report); + } + + outcome = report.Packages.Count == 0 ? "empty" : "ok"; + Environment.ExitCode = 0; + } + catch (DirectoryNotFoundException ex) + { + outcome = "not_found"; + logger.LogError(ex.Message); + Environment.ExitCode = 71; + } + catch (InvalidOperationException ex) + { + outcome = "invalid"; + logger.LogError(ex.Message); + Environment.ExitCode = 64; + } + catch (Exception ex) + { + outcome = "error"; + logger.LogError(ex, "Ruby inspect failed."); + Environment.ExitCode = 70; + } + finally + { + verbosity.MinimumLevel = previousLevel; + CliMetrics.RecordRubyInspect(outcome); + } + } + + public static async Task HandleRubyResolveAsync( + IServiceProvider services, + string? imageReference, + string? 
scanId, + string format, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("ruby-resolve"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + + using var activity = CliActivitySource.Instance.StartActivity("cli.ruby.resolve", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "ruby resolve"); + using var duration = CliMetrics.MeasureCommandDuration("ruby resolve"); + + var outcome = "unknown"; + try + { + var normalizedFormat = string.IsNullOrWhiteSpace(format) + ? "table" + : format.Trim().ToLowerInvariant(); + if (normalizedFormat is not ("table" or "json")) + { + throw new InvalidOperationException("Format must be either 'table' or 'json'."); + } + + var identifier = !string.IsNullOrWhiteSpace(scanId) + ? 
scanId!.Trim() + : imageReference?.Trim(); + + if (string.IsNullOrWhiteSpace(identifier)) + { + throw new InvalidOperationException("An --image or --scan-id value is required."); + } + + logger.LogInformation("Resolving Ruby packages for scan {ScanId}.", identifier); + activity?.SetTag("stellaops.cli.scan_id", identifier); + + var packages = await client.GetRubyPackagesAsync(identifier, cancellationToken).ConfigureAwait(false); + var report = RubyResolveReport.Create(identifier, packages); + + if (!report.HasPackages) + { + outcome = "empty"; + Environment.ExitCode = 0; + AnsiConsole.MarkupLine("[yellow]No Ruby packages found for scan {0}.[/]", Markup.Escape(identifier)); + return; + } + + if (string.Equals(normalizedFormat, "json", StringComparison.Ordinal)) + { + var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + WriteIndented = true + }; + Console.WriteLine(JsonSerializer.Serialize(report, options)); + } + else + { + RenderRubyResolveReport(report); + } + + outcome = "ok"; + Environment.ExitCode = 0; + } + catch (InvalidOperationException ex) + { + outcome = "invalid"; + logger.LogError(ex.Message); + Environment.ExitCode = 64; + } + catch (Exception ex) + { + outcome = "error"; + logger.LogError(ex, "Ruby resolve failed."); + Environment.ExitCode = 70; + } + finally + { + verbosity.MinimumLevel = previousLevel; + CliMetrics.RecordRubyResolve(outcome); + } + } + + private static void RenderRubyInspectReport(RubyInspectReport report) + { + if (!report.Packages.Any()) + { + AnsiConsole.MarkupLine("[yellow]No Ruby packages detected.[/]"); + return; + } + + var table = new Table().Border(TableBorder.Rounded); + table.AddColumn("Package"); + table.AddColumn("Version"); + table.AddColumn("Groups"); + table.AddColumn("Platform"); + table.AddColumn(new TableColumn("Source").NoWrap()); + table.AddColumn(new TableColumn("Lockfile").NoWrap()); + table.AddColumn(new TableColumn("Runtime").NoWrap()); + + foreach (var entry in report.Packages) + { + var 
groups = entry.Groups.Count == 0 ? "-" : string.Join(", ", entry.Groups); + var runtime = entry.UsedByEntrypoint + ? "[green]Entrypoint[/]" + : entry.RuntimeEntrypoints.Count > 0 + ? Markup.Escape(string.Join(", ", entry.RuntimeEntrypoints)) + : "[grey]-[/]"; + + table.AddRow( + Markup.Escape(entry.Name), + Markup.Escape(entry.Version ?? "-"), + Markup.Escape(groups), + Markup.Escape(entry.Platform ?? "-"), + Markup.Escape(entry.Source ?? "-"), + Markup.Escape(entry.Lockfile ?? "-"), + runtime); + } + + AnsiConsole.Write(table); + } + + private static void RenderRubyResolveReport(RubyResolveReport report) + { + var table = new Table().Border(TableBorder.Rounded); + table.AddColumn("Group"); + table.AddColumn("Platform"); + table.AddColumn("Package"); + table.AddColumn("Version"); + table.AddColumn(new TableColumn("Source").NoWrap()); + table.AddColumn(new TableColumn("Lockfile").NoWrap()); + table.AddColumn(new TableColumn("Runtime").NoWrap()); + + foreach (var group in report.Groups) + { + foreach (var package in group.Packages) + { + var runtime = package.RuntimeEntrypoints.Count > 0 + ? Markup.Escape(string.Join(", ", package.RuntimeEntrypoints)) + : package.RuntimeUsed ? "[green]Entrypoint[/]" : "[grey]-[/]"; + + table.AddRow( + Markup.Escape(group.Group), + Markup.Escape(group.Platform ?? "-"), + Markup.Escape(package.Name), + Markup.Escape(package.Version ?? "-"), + Markup.Escape(package.Source ?? "-"), + Markup.Escape(package.Lockfile ?? 
"-"), + runtime); + } + } + + AnsiConsole.Write(table); + AnsiConsole.MarkupLine("[grey]Scan {0} • Total packages: {1}[/]", Markup.Escape(report.ScanId), report.TotalPackages); + } + private static void RenderCryptoProviders( IReadOnlyList preferredOrder, IReadOnlyCollection providers) @@ -6632,6 +7083,380 @@ internal static class CommandHandlers return descriptors; } + private sealed class RubyInspectReport + { + [JsonPropertyName("packages")] + public IReadOnlyList Packages { get; } + + private RubyInspectReport(IReadOnlyList packages) + { + Packages = packages; + } + + public static RubyInspectReport Create(IEnumerable? snapshots) + { + var source = snapshots ?? Array.Empty(); + + var entries = source + .Select(RubyInspectEntry.FromSnapshot) + .OrderBy(static entry => entry.Name, StringComparer.OrdinalIgnoreCase) + .ThenBy(static entry => entry.Version ?? string.Empty, StringComparer.OrdinalIgnoreCase) + .ToArray(); + + return new RubyInspectReport(entries); + } + } + + private sealed record RubyInspectEntry( + [property: JsonPropertyName("name")] string Name, + [property: JsonPropertyName("version")] string? Version, + [property: JsonPropertyName("source")] string? Source, + [property: JsonPropertyName("lockfile")] string? Lockfile, + [property: JsonPropertyName("groups")] IReadOnlyList Groups, + [property: JsonPropertyName("platform")] string? 
Platform, + [property: JsonPropertyName("declaredOnly")] bool DeclaredOnly, + [property: JsonPropertyName("runtimeEntrypoints")] IReadOnlyList RuntimeEntrypoints, + [property: JsonPropertyName("runtimeFiles")] IReadOnlyList RuntimeFiles, + [property: JsonPropertyName("runtimeReasons")] IReadOnlyList RuntimeReasons, + [property: JsonPropertyName("usedByEntrypoint")] bool UsedByEntrypoint) + { + public static RubyInspectEntry FromSnapshot(LanguageComponentSnapshot snapshot) + { + var metadata = RubyMetadataHelpers.Clone(snapshot.Metadata); + var groups = RubyMetadataHelpers.GetList(metadata, "groups"); + var platform = RubyMetadataHelpers.GetString(metadata, "platform"); + var source = RubyMetadataHelpers.GetString(metadata, "source"); + var lockfile = RubyMetadataHelpers.GetString(metadata, "lockfile"); + var declaredOnly = RubyMetadataHelpers.GetBool(metadata, "declaredOnly") ?? false; + var runtimeEntrypoints = RubyMetadataHelpers.GetList(metadata, "runtime.entrypoints"); + var runtimeFiles = RubyMetadataHelpers.GetList(metadata, "runtime.files"); + var runtimeReasons = RubyMetadataHelpers.GetList(metadata, "runtime.reasons"); + var usedByEntrypoint = RubyMetadataHelpers.GetBool(metadata, "runtime.used") ?? snapshot.UsedByEntrypoint; + + return new RubyInspectEntry( + snapshot.Name, + snapshot.Version, + source, + lockfile, + groups, + platform, + declaredOnly, + runtimeEntrypoints, + runtimeFiles, + runtimeReasons, + usedByEntrypoint); + } + } + + private sealed class RubyResolveReport + { + [JsonPropertyName("scanId")] + public string ScanId { get; } + + [JsonPropertyName("groups")] + public IReadOnlyList Groups { get; } + + [JsonIgnore] + public bool HasPackages => TotalPackages > 0; + + [JsonIgnore] + public int TotalPackages => Groups.Sum(static group => group.Packages.Count); + + private RubyResolveReport(string scanId, IReadOnlyList groups) + { + ScanId = scanId; + Groups = groups; + } + + public static RubyResolveReport Create(string scanId, IReadOnlyList? 
packages) + { + var resolved = (packages ?? Array.Empty()) + .Select(RubyResolvePackage.FromModel) + .ToArray(); + + var rows = new List<(string Group, string Platform, RubyResolvePackage Package)>(); + foreach (var package in resolved) + { + var groups = package.Groups.Count == 0 + ? new[] { "(default)" } + : package.Groups; + + foreach (var group in groups) + { + rows.Add((group, package.Platform ?? "-", package)); + } + } + + var grouped = rows + .GroupBy(static row => (row.Group, row.Platform)) + .OrderBy(static g => g.Key.Group, StringComparer.OrdinalIgnoreCase) + .ThenBy(static g => g.Key.Platform, StringComparer.OrdinalIgnoreCase) + .Select(group => new RubyResolveGroup( + group.Key.Group, + group.Key.Platform, + group.Select(row => row.Package) + .OrderBy(static pkg => pkg.Name, StringComparer.OrdinalIgnoreCase) + .ThenBy(static pkg => pkg.Version ?? string.Empty, StringComparer.OrdinalIgnoreCase) + .ToArray())) + .ToArray(); + + return new RubyResolveReport(scanId, grouped); + } + } + + private sealed record RubyResolveGroup( + [property: JsonPropertyName("group")] string Group, + [property: JsonPropertyName("platform")] string Platform, + [property: JsonPropertyName("packages")] IReadOnlyList Packages); + + private sealed record RubyResolvePackage( + [property: JsonPropertyName("name")] string Name, + [property: JsonPropertyName("version")] string? Version, + [property: JsonPropertyName("source")] string? Source, + [property: JsonPropertyName("lockfile")] string? Lockfile, + [property: JsonPropertyName("groups")] IReadOnlyList Groups, + [property: JsonPropertyName("platform")] string? 
Platform, + [property: JsonPropertyName("declaredOnly")] bool DeclaredOnly, + [property: JsonPropertyName("runtimeEntrypoints")] IReadOnlyList RuntimeEntrypoints, + [property: JsonPropertyName("runtimeFiles")] IReadOnlyList RuntimeFiles, + [property: JsonPropertyName("runtimeReasons")] IReadOnlyList RuntimeReasons, + [property: JsonPropertyName("runtimeUsed")] bool RuntimeUsed) + { + public static RubyResolvePackage FromModel(RubyPackageArtifactModel model) + { + var metadata = RubyMetadataHelpers.Clone(model.Metadata); + + IReadOnlyList groups = model.Groups is { Count: > 0 } + ? model.Groups + .Where(static group => !string.IsNullOrWhiteSpace(group)) + .Select(static group => group.Trim()) + .ToArray() + : RubyMetadataHelpers.GetList(metadata, "groups"); + + IReadOnlyList? runtimeEntrypoints = model.Runtime?.Entrypoints?.Where(static e => !string.IsNullOrWhiteSpace(e)).Select(static e => e.Trim()).ToArray(); + if (runtimeEntrypoints is null || runtimeEntrypoints.Count == 0) + { + runtimeEntrypoints = RubyMetadataHelpers.GetList(metadata, "runtime.entrypoints"); + } + + IReadOnlyList? runtimeFiles = model.Runtime?.Files?.Where(static e => !string.IsNullOrWhiteSpace(e)).Select(static e => e.Trim()).ToArray(); + if (runtimeFiles is null || runtimeFiles.Count == 0) + { + runtimeFiles = RubyMetadataHelpers.GetList(metadata, "runtime.files"); + } + + IReadOnlyList? runtimeReasons = model.Runtime?.Reasons?.Where(static e => !string.IsNullOrWhiteSpace(e)).Select(static e => e.Trim()).ToArray(); + if (runtimeReasons is null || runtimeReasons.Count == 0) + { + runtimeReasons = RubyMetadataHelpers.GetList(metadata, "runtime.reasons"); + } + + runtimeEntrypoints ??= Array.Empty(); + runtimeFiles ??= Array.Empty(); + runtimeReasons ??= Array.Empty(); + + var source = model.Provenance?.Source + ?? model.Source + ?? RubyMetadataHelpers.GetString(metadata, "source"); + var lockfile = model.Provenance?.Lockfile ?? 
RubyMetadataHelpers.GetString(metadata, "lockfile"); + var platform = model.Platform ?? RubyMetadataHelpers.GetString(metadata, "platform"); + var declaredOnly = model.DeclaredOnly ?? RubyMetadataHelpers.GetBool(metadata, "declaredOnly") ?? false; + var runtimeUsed = model.RuntimeUsed ?? RubyMetadataHelpers.GetBool(metadata, "runtime.used") ?? false; + + return new RubyResolvePackage( + model.Name, + model.Version, + source, + lockfile, + groups, + platform, + declaredOnly, + runtimeEntrypoints, + runtimeFiles, + runtimeReasons, + runtimeUsed); + } + } + + private static class RubyMetadataHelpers + { + public static IDictionary Clone(IDictionary? metadata) + { + if (metadata is null || metadata.Count == 0) + { + return new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + var clone = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var pair in metadata) + { + clone[pair.Key] = pair.Value; + } + + return clone; + } + + public static string? GetString(IDictionary metadata, string key) + { + if (metadata.TryGetValue(key, out var value)) + { + return value; + } + + foreach (var pair in metadata) + { + if (string.Equals(pair.Key, key, StringComparison.OrdinalIgnoreCase)) + { + return pair.Value; + } + } + + return null; + } + + public static IReadOnlyList GetList(IDictionary metadata, string key) + { + var value = GetString(metadata, key); + if (string.IsNullOrWhiteSpace(value)) + { + return Array.Empty(); + } + + return value + .Split(';', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .ToArray(); + } + + public static bool? GetBool(IDictionary metadata, string key) + { + var value = GetString(metadata, key); + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + if (bool.TryParse(value, out var parsed)) + { + return parsed; + } + + return null; + } + } + + private sealed record LockValidationEntry( + [property: JsonPropertyName("name")] string Name, + [property: JsonPropertyName("version")] string? 
Version, + [property: JsonPropertyName("path")] string Path, + [property: JsonPropertyName("lockSource")] string? LockSource, + [property: JsonPropertyName("lockLocator")] string? LockLocator, + [property: JsonPropertyName("resolved")] string? Resolved, + [property: JsonPropertyName("integrity")] string? Integrity); + + private sealed class LockValidationReport + { + public LockValidationReport( + IReadOnlyList declaredOnly, + IReadOnlyList missingLockMetadata, + int totalDeclared, + int totalInstalled) + { + DeclaredOnly = declaredOnly; + MissingLockMetadata = missingLockMetadata; + TotalDeclared = totalDeclared; + TotalInstalled = totalInstalled; + } + + [JsonPropertyName("declaredOnly")] + public IReadOnlyList DeclaredOnly { get; } + + [JsonPropertyName("missingLockMetadata")] + public IReadOnlyList MissingLockMetadata { get; } + + [JsonPropertyName("totalDeclared")] + public int TotalDeclared { get; } + + [JsonPropertyName("totalInstalled")] + public int TotalInstalled { get; } + + [JsonIgnore] + public bool HasIssues => DeclaredOnly.Count > 0 || MissingLockMetadata.Count > 0; + + public static LockValidationReport Create(IEnumerable snapshots) + { + var declaredOnly = new List(); + var missingLock = new List(); + var declaredCount = 0; + var installedCount = 0; + + foreach (var component in snapshots ?? Array.Empty()) + { + var metadata = component.Metadata ?? 
new Dictionary(StringComparer.Ordinal); + var entry = CreateEntry(component, metadata); + + if (IsDeclaredOnly(metadata)) + { + declaredOnly.Add(entry); + declaredCount++; + continue; + } + + installedCount++; + + if (!metadata.TryGetValue("lockSource", out var lockSource) || string.IsNullOrWhiteSpace(lockSource)) + { + missingLock.Add(entry); + } + } + + declaredOnly.Sort(CompareEntries); + missingLock.Sort(CompareEntries); + + return new LockValidationReport(declaredOnly, missingLock, declaredCount, installedCount); + } + + private static LockValidationEntry CreateEntry( + LanguageComponentSnapshot component, + IDictionary metadata) + { + metadata.TryGetValue("path", out var path); + metadata.TryGetValue("lockSource", out var lockSource); + metadata.TryGetValue("lockLocator", out var lockLocator); + metadata.TryGetValue("resolved", out var resolved); + metadata.TryGetValue("integrity", out var integrity); + + return new LockValidationEntry( + component.Name, + component.Version, + string.IsNullOrWhiteSpace(path) ? "." : path!, + lockSource, + lockLocator, + resolved, + integrity); + } + + private static bool IsDeclaredOnly(IDictionary metadata) + { + if (metadata.TryGetValue("declaredOnly", out var value)) + { + return string.Equals(value, "true", StringComparison.OrdinalIgnoreCase); + } + + return false; + } + + private static int CompareEntries(LockValidationEntry left, LockValidationEntry right) + { + var nameComparison = string.Compare(left.Name, right.Name, StringComparison.OrdinalIgnoreCase); + if (nameComparison != 0) + { + return nameComparison; + } + + return string.Compare(left.Version, right.Version, StringComparison.OrdinalIgnoreCase); + } + } + private static IReadOnlyList DeterminePreferredOrder( CryptoProviderRegistryOptions? options, string? 
overrideProfile) diff --git a/src/Cli/StellaOps.Cli/Services/BackendOperationsClient.cs b/src/Cli/StellaOps.Cli/Services/BackendOperationsClient.cs index d3a1972b9..ff2df6863 100644 --- a/src/Cli/StellaOps.Cli/Services/BackendOperationsClient.cs +++ b/src/Cli/StellaOps.Cli/Services/BackendOperationsClient.cs @@ -20,7 +20,8 @@ using StellaOps.Auth.Client; using StellaOps.Cli.Configuration; using StellaOps.Cli.Services.Models; using StellaOps.Cli.Services.Models.AdvisoryAi; -using StellaOps.Cli.Services.Models.Transport; +using StellaOps.Cli.Services.Models.Ruby; +using StellaOps.Cli.Services.Models.Transport; namespace StellaOps.Cli.Services; @@ -858,9 +859,9 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient return MapPolicyFindingExplain(document); } - public async Task GetEntryTraceAsync(string scanId, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); + public async Task GetEntryTraceAsync(string scanId, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); if (string.IsNullOrWhiteSpace(scanId)) { @@ -882,15 +883,46 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient throw new InvalidOperationException(failure); } - var result = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - if (result is null) - { - throw new InvalidOperationException("EntryTrace response payload was empty."); - } - + var result = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + if (result is null) + { + throw new InvalidOperationException("EntryTrace response payload was empty."); + } + return result; } + public async Task> GetRubyPackagesAsync(string scanId, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (string.IsNullOrWhiteSpace(scanId)) + { + throw new ArgumentException("Scan identifier is required.", nameof(scanId)); + } + + using var request = 
CreateRequest(HttpMethod.Get, $"api/scans/{scanId}/ruby-packages"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (response.StatusCode == HttpStatusCode.NotFound) + { + return Array.Empty(); + } + + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + var packages = await response.Content + .ReadFromJsonAsync>(SerializerOptions, cancellationToken) + .ConfigureAwait(false); + + return packages ?? Array.Empty(); + } + public async Task CreateAdvisoryPipelinePlanAsync( AdvisoryAiTaskType taskType, AdvisoryPipelinePlanRequestModel request, diff --git a/src/Cli/StellaOps.Cli/Services/IBackendOperationsClient.cs b/src/Cli/StellaOps.Cli/Services/IBackendOperationsClient.cs index 86396dc35..b2c1e2e6b 100644 --- a/src/Cli/StellaOps.Cli/Services/IBackendOperationsClient.cs +++ b/src/Cli/StellaOps.Cli/Services/IBackendOperationsClient.cs @@ -5,6 +5,7 @@ using System.Threading.Tasks; using StellaOps.Cli.Configuration; using StellaOps.Cli.Services.Models; using StellaOps.Cli.Services.Models.AdvisoryAi; +using StellaOps.Cli.Services.Models.Ruby; namespace StellaOps.Cli.Services; @@ -48,6 +49,8 @@ internal interface IBackendOperationsClient Task GetEntryTraceAsync(string scanId, CancellationToken cancellationToken); + Task> GetRubyPackagesAsync(string scanId, CancellationToken cancellationToken); + Task CreateAdvisoryPipelinePlanAsync(AdvisoryAiTaskType taskType, AdvisoryPipelinePlanRequestModel request, CancellationToken cancellationToken); Task TryGetAdvisoryPipelineOutputAsync(string cacheKey, AdvisoryAiTaskType taskType, string profile, CancellationToken cancellationToken); diff --git a/src/Cli/StellaOps.Cli/Services/Models/Ruby/RubyPackageArtifactModel.cs 
b/src/Cli/StellaOps.Cli/Services/Models/Ruby/RubyPackageArtifactModel.cs new file mode 100644 index 000000000..6449f7260 --- /dev/null +++ b/src/Cli/StellaOps.Cli/Services/Models/Ruby/RubyPackageArtifactModel.cs @@ -0,0 +1,28 @@ +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Cli.Services.Models.Ruby; + +internal sealed record RubyPackageArtifactModel( + [property: JsonPropertyName("id")] string Id, + [property: JsonPropertyName("name")] string Name, + [property: JsonPropertyName("version")] string? Version, + [property: JsonPropertyName("source")] string? Source, + [property: JsonPropertyName("platform")] string? Platform, + [property: JsonPropertyName("groups")] IReadOnlyList? Groups, + [property: JsonPropertyName("declaredOnly")] bool? DeclaredOnly, + [property: JsonPropertyName("runtimeUsed")] bool? RuntimeUsed, + [property: JsonPropertyName("provenance")] RubyPackageProvenance? Provenance, + [property: JsonPropertyName("runtime")] RubyPackageRuntime? Runtime, + [property: JsonPropertyName("metadata")] IDictionary? Metadata); + +internal sealed record RubyPackageProvenance( + [property: JsonPropertyName("source")] string? Source, + [property: JsonPropertyName("lockfile")] string? Lockfile, + [property: JsonPropertyName("locator")] string? Locator); + +internal sealed record RubyPackageRuntime( + [property: JsonPropertyName("entrypoints")] IReadOnlyList? Entrypoints, + [property: JsonPropertyName("files")] IReadOnlyList? Files, + [property: JsonPropertyName("reasons")] IReadOnlyList? 
Reasons); + diff --git a/src/Cli/StellaOps.Cli/StellaOps.Cli.csproj b/src/Cli/StellaOps.Cli/StellaOps.Cli.csproj index f0aa22450..7f925a9a1 100644 --- a/src/Cli/StellaOps.Cli/StellaOps.Cli.csproj +++ b/src/Cli/StellaOps.Cli/StellaOps.Cli.csproj @@ -47,6 +47,13 @@ + + + + + + + diff --git a/src/Cli/StellaOps.Cli/TASKS.md b/src/Cli/StellaOps.Cli/TASKS.md new file mode 100644 index 000000000..df190d5d1 --- /dev/null +++ b/src/Cli/StellaOps.Cli/TASKS.md @@ -0,0 +1,6 @@ +# CLI Guild — Active Tasks + +| Task ID | State | Notes | +| --- | --- | --- | +| `SCANNER-CLI-0001` | DOING (2025-11-09) | Add Ruby-specific verbs/help, refresh docs & goldens per Sprint 138. | + diff --git a/src/Cli/StellaOps.Cli/Telemetry/CliMetrics.cs b/src/Cli/StellaOps.Cli/Telemetry/CliMetrics.cs index 36fe9ce0c..fbdcaaf59 100644 --- a/src/Cli/StellaOps.Cli/Telemetry/CliMetrics.cs +++ b/src/Cli/StellaOps.Cli/Telemetry/CliMetrics.cs @@ -21,6 +21,11 @@ internal static class CliMetrics private static readonly Counter PolicyFindingsGetCounter = Meter.CreateCounter("stellaops.cli.policy.findings.get.count"); private static readonly Counter PolicyFindingsExplainCounter = Meter.CreateCounter("stellaops.cli.policy.findings.explain.count"); private static readonly Counter AdvisoryRunCounter = Meter.CreateCounter("stellaops.cli.advisory.run.count"); + private static readonly Counter NodeLockValidateCounter = Meter.CreateCounter("stellaops.cli.node.lock_validate.count"); + private static readonly Counter PythonLockValidateCounter = Meter.CreateCounter("stellaops.cli.python.lock_validate.count"); + private static readonly Counter JavaLockValidateCounter = Meter.CreateCounter("stellaops.cli.java.lock_validate.count"); + private static readonly Counter RubyInspectCounter = Meter.CreateCounter("stellaops.cli.ruby.inspect.count"); + private static readonly Counter RubyResolveCounter = Meter.CreateCounter("stellaops.cli.ruby.resolve.count"); private static readonly Histogram CommandDurationHistogram = 
Meter.CreateHistogram("stellaops.cli.command.duration.ms"); public static void RecordScannerDownload(string channel, bool fromCache) @@ -108,6 +113,36 @@ internal static class CliMetrics new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome) }); + public static void RecordNodeLockValidate(string outcome) + => NodeLockValidateCounter.Add(1, new KeyValuePair[] + { + new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome) + }); + + public static void RecordPythonLockValidate(string outcome) + => PythonLockValidateCounter.Add(1, new KeyValuePair[] + { + new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome) + }); + + public static void RecordJavaLockValidate(string outcome) + => JavaLockValidateCounter.Add(1, new KeyValuePair[] + { + new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome) + }); + + public static void RecordRubyInspect(string outcome) + => RubyInspectCounter.Add(1, new KeyValuePair[] + { + new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome) + }); + + public static void RecordRubyResolve(string outcome) + => RubyResolveCounter.Add(1, new KeyValuePair[] + { + new("outcome", string.IsNullOrWhiteSpace(outcome) ? 
"unknown" : outcome) + }); + public static IDisposable MeasureCommandDuration(string command) { var start = DateTime.UtcNow; diff --git a/src/Cli/__Tests/StellaOps.Cli.Tests/Commands/CommandFactoryTests.cs b/src/Cli/__Tests/StellaOps.Cli.Tests/Commands/CommandFactoryTests.cs new file mode 100644 index 000000000..0d0254545 --- /dev/null +++ b/src/Cli/__Tests/StellaOps.Cli.Tests/Commands/CommandFactoryTests.cs @@ -0,0 +1,36 @@ +using System; +using System.CommandLine; +using System.Linq; +using System.Threading; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using StellaOps.Cli.Commands; +using StellaOps.Cli.Configuration; + +namespace StellaOps.Cli.Tests.Commands; + +public sealed class CommandFactoryTests +{ + [Fact] + public void Create_RegistersRubyInspectAndResolveCommands() + { + using var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Critical)); + var services = new ServiceCollection().BuildServiceProvider(); + var root = CommandFactory.Create(services, new StellaOpsCliOptions(), CancellationToken.None, loggerFactory); + + var ruby = Assert.Single(root.Subcommands, command => string.Equals(command.Name, "ruby", StringComparison.Ordinal)); + + var inspect = Assert.Single(ruby.Subcommands, command => string.Equals(command.Name, "inspect", StringComparison.Ordinal)); + var inspectOptions = inspect.Children.OfType