diff --git a/deploy/compose/docker-compose.airgap.yaml b/deploy/compose/docker-compose.airgap.yaml index 89673149e..e30a8c14a 100644 --- a/deploy/compose/docker-compose.airgap.yaml +++ b/deploy/compose/docker-compose.airgap.yaml @@ -7,12 +7,13 @@ networks: stellaops: driver: bridge -volumes: - mongo-data: - minio-data: - rustfs-data: - concelier-jobs: - nats-data: +volumes: + mongo-data: + minio-data: + rustfs-data: + concelier-jobs: + nats-data: + scanner-surface-cache: services: mongo: @@ -28,8 +29,8 @@ services: - stellaops labels: *release-labels - minio: - image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e + minio: + image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e command: ["server", "/data", "--console-address", ":9001"] restart: unless-stopped environment: @@ -41,22 +42,22 @@ services: - "${MINIO_CONSOLE_PORT:-29001}:9001" networks: - stellaops - labels: *release-labels - - rustfs: - image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge - command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"] - restart: unless-stopped - environment: - RUSTFS__LOG__LEVEL: info - RUSTFS__STORAGE__PATH: /data - volumes: - - rustfs-data:/data - ports: - - "${RUSTFS_HTTP_PORT:-8080}:8080" - networks: - - stellaops - labels: *release-labels + labels: *release-labels + + rustfs: + image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge + command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"] + restart: unless-stopped + environment: + RUSTFS__LOG__LEVEL: info + RUSTFS__STORAGE__PATH: /data + volumes: + - rustfs-data:/data + ports: + - "${RUSTFS_HTTP_PORT:-8080}:8080" + networks: + - stellaops + labels: *release-labels nats: image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e @@ -107,43 +108,43 @@ services: - stellaops labels: *release-labels - attestor: - image: registry.stella-ops.org/stellaops/attestor@sha256:1ff0a3124d66d3a2702d8e421df40fbd98cc75cb605d95510598ebbae1433c50 - restart: unless-stopped - depends_on: - - signer - environment: - ATTESTOR__SIGNER__BASEURL: "https://signer:8441" - ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" - ports: - - "${ATTESTOR_PORT:-8442}:8442" - networks: - - stellaops - labels: *release-labels - - issuer-directory: - image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge - restart: unless-stopped - depends_on: - - mongo - - authority - environment: - ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml" - ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}" - ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440" - ISSUERDIRECTORY__MONGO__CONNECTIONSTRING: "${ISSUER_DIRECTORY_MONGO_CONNECTION_STRING}" - ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}" - volumes: - - ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro - ports: - - "${ISSUER_DIRECTORY_PORT:-8447}:8080" - networks: - - stellaops - labels: *release-labels - - concelier: - image: registry.stella-ops.org/stellaops/concelier@sha256:29e2e1a0972707e092cbd3d370701341f9fec2aa9316fb5d8100480f2a1c76b5 - restart: unless-stopped + attestor: + image: registry.stella-ops.org/stellaops/attestor@sha256:1ff0a3124d66d3a2702d8e421df40fbd98cc75cb605d95510598ebbae1433c50 + restart: unless-stopped + depends_on: + - signer + environment: + ATTESTOR__SIGNER__BASEURL: "https://signer:8441" + ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" + ports: + - "${ATTESTOR_PORT:-8442}:8442" + networks: + - stellaops + labels: *release-labels + + issuer-directory: + image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge + restart: unless-stopped + depends_on: + - mongo + - authority + environment: + ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml" + ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}" + ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440" + ISSUERDIRECTORY__MONGO__CONNECTIONSTRING: "${ISSUER_DIRECTORY_MONGO_CONNECTION_STRING}" + ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}" + volumes: + - ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro + ports: + - "${ISSUER_DIRECTORY_PORT:-8447}:8080" + networks: + - stellaops + labels: *release-labels + + concelier: + image: registry.stella-ops.org/stellaops/concelier@sha256:29e2e1a0972707e092cbd3d370701341f9fec2aa9316fb5d8100480f2a1c76b5 + restart: unless-stopped depends_on: - mongo - minio @@ -163,69 +164,95 @@ services: - stellaops labels: *release-labels - scanner-web: + scanner-web: image: registry.stella-ops.org/stellaops/scanner-web@sha256:3df8ca21878126758203c1a0444e39fd97f77ddacf04a69685cda9f1e5e94718 restart: unless-stopped - depends_on: - - concelier - - rustfs - - nats - environment: - SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" - SCANNER__ARTIFACTSTORE__DRIVER: "rustfs" - SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1" - SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts" - SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30" - SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}" - SCANNER__EVENTS__ENABLED: "${SCANNER_EVENTS_ENABLED:-false}" - SCANNER__EVENTS__DRIVER: "${SCANNER_EVENTS_DRIVER:-redis}" - SCANNER__EVENTS__DSN: "${SCANNER_EVENTS_DSN:-}" - SCANNER__EVENTS__STREAM: "${SCANNER_EVENTS_STREAM:-stella.events}" - SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "${SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS:-5}" - SCANNER__EVENTS__MAXSTREAMLENGTH: "${SCANNER_EVENTS_MAX_STREAM_LENGTH:-10000}" + depends_on: + - concelier + - rustfs + - nats + environment: + SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" + SCANNER__ARTIFACTSTORE__DRIVER: "rustfs" + SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1" + SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts" + SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30" + SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}" + SCANNER__EVENTS__ENABLED: "${SCANNER_EVENTS_ENABLED:-false}" + SCANNER__EVENTS__DRIVER: "${SCANNER_EVENTS_DRIVER:-redis}" + SCANNER__EVENTS__DSN: "${SCANNER_EVENTS_DSN:-}" + SCANNER__EVENTS__STREAM: "${SCANNER_EVENTS_STREAM:-stella.events}" + SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "${SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS:-5}" + SCANNER__EVENTS__MAXSTREAMLENGTH: "${SCANNER_EVENTS_MAX_STREAM_LENGTH:-10000}" + # Surface.Env configuration (see docs/modules/scanner/design/surface-env.md) + SCANNER_SURFACE_FS_ENDPOINT: "${SCANNER_SURFACE_FS_ENDPOINT:-http://rustfs:8080}" + SCANNER_SURFACE_FS_BUCKET: "${SCANNER_SURFACE_FS_BUCKET:-surface-cache}" + SCANNER_SURFACE_CACHE_ROOT: "${SCANNER_SURFACE_CACHE_ROOT:-/var/lib/stellaops/surface}" + SCANNER_SURFACE_CACHE_QUOTA_MB: "${SCANNER_SURFACE_CACHE_QUOTA_MB:-4096}" + SCANNER_SURFACE_PREFETCH_ENABLED: "${SCANNER_SURFACE_PREFETCH_ENABLED:-false}" + SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}" + SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}" + SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}" + SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}" + SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}" + volumes: + - scanner-surface-cache:/var/lib/stellaops/surface ports: - "${SCANNER_WEB_PORT:-8444}:8444" networks: - stellaops labels: *release-labels - scanner-worker: - image: registry.stella-ops.org/stellaops/scanner-worker@sha256:eea5d6cfe7835950c5ec7a735a651f2f0d727d3e470cf9027a4a402ea89c4fb5 - restart: unless-stopped - depends_on: - - scanner-web - - rustfs - - nats - environment: - SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" - SCANNER__ARTIFACTSTORE__DRIVER: "rustfs" - SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1" - SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts" - SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30" - SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}" - networks: - - stellaops - labels: *release-labels - - scheduler-worker: - image: registry.stella-ops.org/stellaops/scheduler-worker:2025.10.0-edge - restart: unless-stopped - depends_on: - - mongo - - nats - - scanner-web - command: - - "dotnet" - - "StellaOps.Scheduler.Worker.Host.dll" - environment: - SCHEDULER__QUEUE__KIND: "${SCHEDULER_QUEUE_KIND:-Nats}" - SCHEDULER__QUEUE__NATS__URL: "${SCHEDULER_QUEUE_NATS_URL:-nats://nats:4222}" - SCHEDULER__STORAGE__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" - SCHEDULER__STORAGE__DATABASE: "${SCHEDULER_STORAGE_DATABASE:-stellaops_scheduler}" - SCHEDULER__WORKER__RUNNER__SCANNER__BASEADDRESS: "${SCHEDULER_SCANNER_BASEADDRESS:-http://scanner-web:8444}" - networks: - - stellaops - labels: *release-labels + scanner-worker: + image: registry.stella-ops.org/stellaops/scanner-worker@sha256:eea5d6cfe7835950c5ec7a735a651f2f0d727d3e470cf9027a4a402ea89c4fb5 + restart: unless-stopped + depends_on: + - scanner-web + - rustfs + - nats + environment: + SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" + SCANNER__ARTIFACTSTORE__DRIVER: "rustfs" + SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1" + SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts" + SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30" + SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}" + # Surface.Env configuration (see docs/modules/scanner/design/surface-env.md) + SCANNER_SURFACE_FS_ENDPOINT: "${SCANNER_SURFACE_FS_ENDPOINT:-http://rustfs:8080}" + SCANNER_SURFACE_FS_BUCKET: "${SCANNER_SURFACE_FS_BUCKET:-surface-cache}" + SCANNER_SURFACE_CACHE_ROOT: "${SCANNER_SURFACE_CACHE_ROOT:-/var/lib/stellaops/surface}" + SCANNER_SURFACE_CACHE_QUOTA_MB: "${SCANNER_SURFACE_CACHE_QUOTA_MB:-4096}" + SCANNER_SURFACE_PREFETCH_ENABLED: "${SCANNER_SURFACE_PREFETCH_ENABLED:-false}" + SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}" + SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}" + SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}" + SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}" + SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}" + volumes: + - scanner-surface-cache:/var/lib/stellaops/surface + networks: + - stellaops + labels: *release-labels + + scheduler-worker: + image: registry.stella-ops.org/stellaops/scheduler-worker:2025.10.0-edge + restart: unless-stopped + depends_on: + - mongo + - nats + - scanner-web + command: + - "dotnet" + - "StellaOps.Scheduler.Worker.Host.dll" + environment: + SCHEDULER__QUEUE__KIND: "${SCHEDULER_QUEUE_KIND:-Nats}" + SCHEDULER__QUEUE__NATS__URL: "${SCHEDULER_QUEUE_NATS_URL:-nats://nats:4222}" + SCHEDULER__STORAGE__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" + SCHEDULER__STORAGE__DATABASE: "${SCHEDULER_STORAGE_DATABASE:-stellaops_scheduler}" + SCHEDULER__WORKER__RUNNER__SCANNER__BASEADDRESS: "${SCHEDULER_SCANNER_BASEADDRESS:-http://scanner-web:8444}" + networks: + - stellaops + labels: *release-labels notify-web: image: ${NOTIFY_WEB_IMAGE:-registry.stella-ops.org/stellaops/notify-web:2025.09.2} @@ -235,70 +262,70 @@ services: - authority environment: DOTNET_ENVIRONMENT: Production - volumes: - - ../../etc/notify.airgap.yaml:/app/etc/notify.yaml:ro + volumes: + - ../../etc/notify.airgap.yaml:/app/etc/notify.yaml:ro ports: - "${NOTIFY_WEB_PORT:-9446}:8446" networks: - stellaops labels: *release-labels - excititor: - image: registry.stella-ops.org/stellaops/excititor@sha256:65c0ee13f773efe920d7181512349a09d363ab3f3e177d276136bd2742325a68 - restart: unless-stopped - depends_on: - - concelier - environment: - EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445" - EXCITITOR__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" - networks: - - stellaops - labels: *release-labels - - advisory-ai-web: - image: registry.stella-ops.org/stellaops/advisory-ai-web:2025.09.2-airgap - restart: unless-stopped - depends_on: - - scanner-web - environment: - ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}" - ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue" - ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans" - ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs" - ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}" - ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}" - ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}" - ports: - - "${ADVISORY_AI_WEB_PORT:-8448}:8448" - volumes: - - advisory-ai-queue:/var/lib/advisory-ai/queue - - advisory-ai-plans:/var/lib/advisory-ai/plans - - advisory-ai-outputs:/var/lib/advisory-ai/outputs - networks: - - stellaops - labels: *release-labels - - advisory-ai-worker: - image: registry.stella-ops.org/stellaops/advisory-ai-worker:2025.09.2-airgap - restart: unless-stopped - depends_on: - - advisory-ai-web - environment: - ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}" - ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue" - ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans" - ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs" - ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}" - ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}" - ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}" - volumes: - - advisory-ai-queue:/var/lib/advisory-ai/queue - - advisory-ai-plans:/var/lib/advisory-ai/plans - - advisory-ai-outputs:/var/lib/advisory-ai/outputs - networks: - - stellaops - labels: *release-labels - + excititor: + image: registry.stella-ops.org/stellaops/excititor@sha256:65c0ee13f773efe920d7181512349a09d363ab3f3e177d276136bd2742325a68 + restart: unless-stopped + depends_on: + - concelier + environment: + EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445" + EXCITITOR__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" + networks: + - stellaops + labels: *release-labels + + advisory-ai-web: + image: registry.stella-ops.org/stellaops/advisory-ai-web:2025.09.2-airgap + restart: unless-stopped + depends_on: + - scanner-web + environment: + ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}" + ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue" + ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans" + ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs" + ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}" + ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}" + ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}" + ports: + - "${ADVISORY_AI_WEB_PORT:-8448}:8448" + volumes: + - advisory-ai-queue:/var/lib/advisory-ai/queue + - advisory-ai-plans:/var/lib/advisory-ai/plans + - advisory-ai-outputs:/var/lib/advisory-ai/outputs + networks: + - stellaops + labels: *release-labels + + advisory-ai-worker: + image: registry.stella-ops.org/stellaops/advisory-ai-worker:2025.09.2-airgap + restart: unless-stopped + depends_on: + - advisory-ai-web + environment: + ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}" + ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue" + ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans" + ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs" + ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}" + ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}" + ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}" + volumes: + - advisory-ai-queue:/var/lib/advisory-ai/queue + - advisory-ai-plans:/var/lib/advisory-ai/plans + - advisory-ai-outputs:/var/lib/advisory-ai/outputs + networks: + - stellaops + labels: *release-labels + web-ui: image: registry.stella-ops.org/stellaops/web-ui@sha256:bee9668011ff414572131dc777faab4da24473fe12c230893f161cabee092a1d restart: unless-stopped diff --git a/deploy/helm/stellaops/values.yaml b/deploy/helm/stellaops/values.yaml index 883856d80..6cac2b4fe 100644 --- a/deploy/helm/stellaops/values.yaml +++ b/deploy/helm/stellaops/values.yaml @@ -1,13 +1,57 @@ -global: - release: - version: "" - channel: "" - manifestSha256: "" - profile: "" - image: - pullPolicy: IfNotPresent - labels: {} - +global: + release: + version: "" + channel: "" + manifestSha256: "" + profile: "" + image: + pullPolicy: IfNotPresent + labels: {} + +# Surface.Env configuration for Scanner/Zastava components +# See docs/modules/scanner/design/surface-env.md for details +surface: + # Surface.FS storage configuration + fs: + # Base URI for Surface.FS / RustFS / S3-compatible store (required) + endpoint: "" + # Bucket/container for manifests and artefacts + bucket: "surface-cache" + # Optional region for S3-compatible stores (AWS/GCS) + region: "" + # Local cache configuration + cache: + # Local directory for warm caches + root: "/var/lib/stellaops/surface" + # Soft limit for on-disk cache usage in MB (64-262144) + quotaMb: 4096 + # Enable manifest prefetch threads + prefetchEnabled: false + # Tenant configuration + tenant: "default" + # Comma-separated feature switches + features: "" + # TLS configuration for client authentication + tls: + # Path to PEM/PKCS#12 certificate file + certPath: "" + # Optional private key path when cert/key stored separately + keyPath: "" + # Secret name containing TLS cert/key + secretName: "" + # Secrets provider configuration + secrets: + # Provider ID: kubernetes, file, inline + provider: "kubernetes" + # Kubernetes namespace for secrets provider + namespace: "" + # Path or base for file provider + root: "" + # Optional fallback provider ID + fallbackProvider: "" + # Allow inline secrets (disable in production) + allowInline: false + telemetry: collector: enabled: false @@ -35,6 +79,25 @@ telemetry: resources: {} configMaps: + # Surface.Env environment variables for Scanner/Zastava components + surface-env: + data: + SCANNER_SURFACE_FS_ENDPOINT: "{{ .Values.surface.fs.endpoint }}" + SCANNER_SURFACE_FS_BUCKET: "{{ .Values.surface.fs.bucket }}" + SCANNER_SURFACE_FS_REGION: "{{ .Values.surface.fs.region }}" + SCANNER_SURFACE_CACHE_ROOT: "{{ .Values.surface.cache.root }}" + SCANNER_SURFACE_CACHE_QUOTA_MB: "{{ .Values.surface.cache.quotaMb }}" + SCANNER_SURFACE_PREFETCH_ENABLED: "{{ .Values.surface.cache.prefetchEnabled }}" + SCANNER_SURFACE_TENANT: "{{ .Values.surface.tenant }}" + SCANNER_SURFACE_FEATURES: "{{ .Values.surface.features }}" + SCANNER_SURFACE_TLS_CERT_PATH: "{{ .Values.surface.tls.certPath }}" + SCANNER_SURFACE_TLS_KEY_PATH: "{{ .Values.surface.tls.keyPath }}" + SCANNER_SURFACE_SECRETS_PROVIDER: "{{ .Values.surface.secrets.provider }}" + SCANNER_SURFACE_SECRETS_NAMESPACE: "{{ .Values.surface.secrets.namespace }}" + SCANNER_SURFACE_SECRETS_ROOT: "{{ .Values.surface.secrets.root }}" + SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER: "{{ .Values.surface.secrets.fallbackProvider }}" + SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "{{ .Values.surface.secrets.allowInline }}" + issuer-directory-config: data: issuer-directory.yaml: | diff --git a/docs/24_OFFLINE_KIT.md b/docs/24_OFFLINE_KIT.md index b75584f29..f24d31b99 100755 --- a/docs/24_OFFLINE_KIT.md +++ b/docs/24_OFFLINE_KIT.md @@ -41,6 +41,48 @@ completely isolated network: The PHP analyzer parses `composer.lock` for Composer dependencies and supports optional runtime evidence via the `stella-trace.php` shim; set `STELLA_PHP_OPCACHE=1` to enable opcache statistics collection. +**Python analyzer features:** +- **Wheel/sdist/editable** parsing with dependency edges from `METADATA`, `PKG-INFO`, `requirements.txt`, and `pyproject.toml` +- **Virtual environment** support for virtualenv, venv, and conda prefix layouts +- **PEP 420 namespace packages** with proper `importlib` resolution semantics across `sys.path` +- **Python version detection** via `pyproject.toml`, `runtime.txt`, Dockerfile `FROM python:*`, `.python-version` +- **Native extension detection** for `.so`, `.pyd`, CFFI modules, ctypes loaders, and embedded WASM +- **Framework/config heuristics** for Django, Flask, FastAPI, Celery, AWS Lambda, Gunicorn, Click/Typer CLIs +- **AOC-compliant observations**: entrypoints (module `__main__`, console_scripts, zipapp), components (modules/packages/native), edges (import, namespace, dynamic-hint, native-extension) with resolver traces +- **Optional runtime evidence** via import hook; the bundled `stellaops_trace.py` module captures module load events with SHA-256 path hashing for secure evidence correlation +- **CLI inspection**: run `stella python inspect --root /path/to/app` to analyze a Python workspace locally + +**Surface.Env configuration:** Scanner Worker and WebService components use the Surface.Env library for configuration discovery. In air-gapped deployments, configure the following environment variables (see `docs/modules/scanner/design/surface-env.md` for details): + +| Variable | Description | Air-gap Default | +|----------|-------------|-----------------| +| `SCANNER_SURFACE_FS_ENDPOINT` | Base URI for Surface.FS / RustFS storage | `http://rustfs:8080` | +| `SCANNER_SURFACE_FS_BUCKET` | Bucket for manifests/artefacts | `surface-cache` | +| `SCANNER_SURFACE_CACHE_ROOT` | Local cache directory | `/var/lib/stellaops/surface` | +| `SCANNER_SURFACE_CACHE_QUOTA_MB` | Cache quota in MB (64-262144) | `4096` | +| `SCANNER_SURFACE_PREFETCH_ENABLED` | Enable manifest prefetch | `false` | +| `SCANNER_SURFACE_TENANT` | Tenant namespace | `default` | +| `SCANNER_SURFACE_SECRETS_PROVIDER` | Secrets provider (`file`, `kubernetes`) | `file` | +| `SCANNER_SURFACE_SECRETS_ROOT` | Root path for file provider | `/etc/stellaops/secrets` | +| `SCANNER_SURFACE_SECRETS_ALLOW_INLINE` | Allow inline secrets | `false` | + +For Helm deployments, configure via `values.yaml`: +```yaml +surface: + fs: + endpoint: "http://rustfs:8080" + bucket: "surface-cache" + cache: + root: "/var/lib/stellaops/surface" + quotaMb: 4096 + tenant: "default" + secrets: + provider: "file" + root: "/etc/stellaops/secrets" +``` + +For Docker Compose, these variables are pre-configured in `docker-compose.airgap.yaml` with sensible defaults. + **Advisory AI volume primer:** ship a tarball containing empty `queue/`, `plans/`, and `outputs/` directories plus their ownership metadata. During import, extract it onto the RWX volume used by `advisory-ai-web` and `advisory-ai-worker` so pods start with the expected directory tree even on air-gapped nodes. *Scanner core:* C# 12 on **.NET {{ dotnet }}**. diff --git a/docs/implplan/SPRINT_0113_0001_0002_concelier_ii.md b/docs/implplan/SPRINT_0113_0001_0002_concelier_ii.md index 2b39a5777..5c6647fa0 100644 --- a/docs/implplan/SPRINT_0113_0001_0002_concelier_ii.md +++ b/docs/implplan/SPRINT_0113_0001_0002_concelier_ii.md @@ -34,8 +34,8 @@ | 8 | CONCELIER-LNM-21-004 | DONE (2025-11-27) | Completed: AOC write guards + tests + docs | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Remove legacy merge/dedup logic; add guardrails/tests to keep ingestion append-only; document linkset supersession. | | 9 | CONCELIER-LNM-21-005 | DONE (2025-11-27) | Completed: Event contract + publisher interfaces + tests + docs | Concelier Core Guild · Platform Events Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Emit `advisory.linkset.updated` events with delta descriptions + observation ids (tenant + provenance only). | | 10 | CONCELIER-LNM-21-101-DEV | DONE (2025-11-27) | Completed: Sharding + TTL migration + event collection | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Provision Mongo collections (`advisory_observations`, `advisory_linksets`) with hashed shard keys, tenant indexes, TTL for ingest metadata. | -| 11 | CONCELIER-LNM-21-102-DEV | TODO | Unblocked by 21-101-DEV completion; CI runner available for migrations. | Concelier Storage Guild · DevOps Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Backfill legacy merged advisories; seed tombstones; provide rollback tooling for Offline Kit. | -| 12 | CONCELIER-LNM-21-103-DEV | BLOCKED (awaits 21-102-DEV) | Requires 21-102-DEV completion; CI runner available for object-store bootstrap tests. | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Move large raw payloads to object storage with deterministic pointers; update bootstrapper/offline seeds; preserve provenance metadata. | +| 11 | CONCELIER-LNM-21-102-DEV | DONE (2025-11-28) | Completed: Migration + tombstones + rollback tooling | Concelier Storage Guild · DevOps Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Backfill legacy merged advisories; seed tombstones; provide rollback tooling for Offline Kit. | +| 12 | CONCELIER-LNM-21-103-DEV | BLOCKED (awaits object storage contract) | Requires object storage contract definition before implementation; see Blockers & Dependencies. | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Move large raw payloads to object storage with deterministic pointers; update bootstrapper/offline seeds; preserve provenance metadata. | | 13 | CONCELIER-LNM-21-201 | BLOCKED (awaits 21-103) | Upstream storage tasks must land first; CI runner available for WebService tests. | Concelier WebService Guild · BE-Base Platform Guild (`src/Concelier/StellaOps.Concelier.WebService`) | `/advisories/observations` filters by alias/purl/source with strict tenant scopes; echoes upstream values + provenance fields only. | | 14 | CONCELIER-LNM-21-202 | BLOCKED (awaits 21-201) | Await upstream to run `/advisories/linksets` export tests; CI runner available. | Concelier WebService Guild (`src/Concelier/StellaOps.Concelier.WebService`) | `/advisories/linksets`/`export`/`evidence` endpoints surface correlation + conflict payloads and `ERR_AGG_*` mapping; no synthesis/merge. | | 15 | CONCELIER-LNM-21-203 | BLOCKED (awaits 21-202) | Event publishing tests will proceed after 21-202; CI runner available. | Concelier WebService Guild · Platform Events Guild (`src/Concelier/StellaOps.Concelier.WebService`) | Publish idempotent NATS/Redis events for new observations/linksets with documented schemas; include tenant + provenance references only. | @@ -46,6 +46,8 @@ ## Execution Log | Date (UTC) | Update | Owner | | --- | --- | --- | +| 2025-11-28 | CONCELIER-LNM-21-103-DEV BLOCKED: Object storage contract for raw payloads not yet defined. Current payloads stored in GridFS; migration to S3-compatible store requires interface definition and cross-guild coordination with DevOps Guild. Marked task blocked and documented in Decisions & Risks. | Implementer | +| 2025-11-28 | CONCELIER-LNM-21-102-DEV DONE: Created `EnsureLegacyAdvisoriesBackfillMigration` that backfills `advisory_observations` from `advisory_raw`, creates/updates `advisory_linksets` by grouping observations, and seeds `backfill_marker` tombstones for rollback tracking. Added rollback script at `ops/devops/scripts/rollback-lnm-backfill.js` for Offline Kit. Updated MIGRATIONS.md with migration entry and operator runbook. Build passed. | Implementer | | 2025-11-27 | CONCELIER-LNM-21-101-DEV DONE: Created `EnsureLinkNotMergeShardingAndTtlMigration` adding hashed shard key indexes on `tenantId` for horizontal scaling, optional TTL indexes for `ObservationRetention`/`LinksetRetention`/`EventRetention` options, and `advisory_linkset_events` collection for linkset event outbox. Updated `MongoStorageOptions` with retention properties. Registered both `EnsureLinkNotMergeCollectionsMigration` and new sharding/TTL migration in DI. | Implementer | | 2025-11-27 | CONCELIER-LNM-21-005 DONE: Implemented `advisory.linkset.updated@1` event infrastructure (`AdvisoryLinksetUpdatedEvent`, `IAdvisoryLinksetEventPublisher`, `IAdvisoryLinksetEventOutbox`, `AdvisoryLinksetEventPublisherOptions`). Added 9 unit tests covering delta computation, conflict summaries, and provenance mapping. Documented event contract at `docs/modules/concelier/events/advisory.linkset.updated@1.md`. | Implementer | | 2025-11-27 | CONCELIER-LNM-21-004 DONE: Implemented AOC write guard infrastructure (`IAdvisoryObservationWriteGuard`, `AdvisoryObservationWriteGuard`, `AppendOnlyViolationException`). Added 13 unit tests covering Proceed/SkipIdentical/RejectMutation dispositions. Documented AOC and linkset supersession model in `docs/modules/concelier/link-not-merge-schema.md`. Legacy merge logic already deprecated with `[Obsolete]` and gated by `NoMergeEnabled` flag (defaults true). | Implementer | @@ -115,6 +117,7 @@ - CONCELIER-GRAPH-28-102 implemented: contract lives at `docs/modules/concelier/api/evidence-batch.md`; integration test covers empty-match path. Ensure consumers align on tenant header + limits before rollout. - CONCELIER-LNM-21-004 risk: removing canonical merge/dedup requires architect decision on retiring `CanonicalMerger` consumers (graph overlays, console summaries) and a migration/rollback plan; proceed after design sign-off. - CONCELIER-GRAPH-24-101 risk: API contract drafted at `docs/modules/concelier/api/advisories-summary.md`; implementation pending WebService wiring and consumer alignment. +- CONCELIER-LNM-21-103-DEV blocked: Object storage contract for raw payloads not yet defined. Requires cross-guild coordination between Storage Guild and DevOps Guild. Current payloads stored in GridFS; migration to S3-compatible store (MinIO) requires interface definition, migration strategy, bootstrapper updates, and offline seed support. ## Next Checkpoints - Next LNM schema review: align with CARTO-GRAPH/LNM owners (date TBD); unblock tasks 1–2 and 5–15. diff --git a/docs/implplan/SPRINT_0114_0001_0003_concelier_iii.md b/docs/implplan/SPRINT_0114_0001_0003_concelier_iii.md index 7b6e1fe56..bfaadb24f 100644 --- a/docs/implplan/SPRINT_0114_0001_0003_concelier_iii.md +++ b/docs/implplan/SPRINT_0114_0001_0003_concelier_iii.md @@ -44,7 +44,8 @@ ## Execution Log | Date (UTC) | Update | Owner | | --- | --- | --- | -| 2025-11-25 | Runner disk is full (“No space left on device”); orchestrator WebService tests cannot be re-run. Free bin/obj/TestResults and `ops/devops/artifacts/ci-110` before continuing ORCH-32/33/34. | Concelier Core | +| 2025-11-28 | Disk space issue resolved (56GB available). Fixed `InitializeMongoAsync` to skip in testing mode. WebService orchestrator tests still fail due to hosted services requiring MongoDB; test factory needs more extensive mocking or integration test with Mongo2Go. ORCH tasks remain BLOCKED pending test infrastructure fix. | Implementer | +| 2025-11-25 | Runner disk is full ("No space left on device"); orchestrator WebService tests cannot be re-run. Free bin/obj/TestResults and `ops/devops/artifacts/ci-110` before continuing ORCH-32/33/34. | Concelier Core | | 2025-11-25 | Storage.Mongo job-store slice executed locally: `dotnet test src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/StellaOps.Concelier.Storage.Mongo.Tests.csproj -c Debug --no-restore --no-build --filter FullyQualifiedName~MongoJobStore` (3/3 pass). TRX: `ops/devops/artifacts/ci-110/20251125T034529Z/trx/concelier-storage-jobstore.trx`. Broader suite still pending CI. | Concelier Core | | 2025-11-25 | WebService orchestrator filter run (`dotnet test ...WebService.Tests.csproj --filter FullyQualifiedName~Orchestrator`) produced no matching tests; TRX recorded at `ops/devops/artifacts/ci-110/20251125T040900Z/trx/concelier-web-orch.trx`. Need to add orchestrator WebService tests before closing ORCH-32/33/34. | Concelier Core | | 2025-11-25 | Attempted to add WebService orchestrator tests with Mongo bypass; repo disk is full (`No space left on device`), preventing further builds/tests. Cleanup of bin/obj/TestResults and ops/devops artifacts required before rerunning orchestrator test slice. | Concelier Core | diff --git a/docs/implplan/SPRINT_0115_0001_0004_concelier_iv.md b/docs/implplan/SPRINT_0115_0001_0004_concelier_iv.md index 45c8d26e7..9f0a8ca6d 100644 --- a/docs/implplan/SPRINT_0115_0001_0004_concelier_iv.md +++ b/docs/implplan/SPRINT_0115_0001_0004_concelier_iv.md @@ -26,22 +26,29 @@ | P3 | PREP-CONCELIER-VULN-29-001 | DONE (2025-11-19) | Bridge contract published at `docs/modules/concelier/bridges/vuln-29-001.md`; sample fixture location noted. | Concelier WebService Guild · Vuln Explorer Guild (`src/Concelier/StellaOps.Concelier.WebService`) | Provide Concelier/Vuln bridge contract (advisory keys, search params, sample responses) that VEX Lens + Vuln Explorer rely on; publish OpenAPI excerpt and fixtures. | | 0 | POLICY-AUTH-SIGNALS-LIB-115 | DONE (2025-11-19) | Package `StellaOps.Policy.AuthSignals` 0.1.0-alpha published to `local-nugets/`; schema/fixtures at `docs/policy/*`. | Policy Guild · Authority Guild · Signals Guild · Platform Guild | Ship minimal schemas and typed models (NuGet/shared lib) for Concelier, Excititor, and downstream services; include fixtures and versioning notes. | | 1 | CONCELIER-POLICY-20-002 | DONE (2025-11-20) | Vendor alias + SemVer range normalization landed; tests green. | Concelier Core Guild · Policy Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Expand linkset builders with vendor equivalence, NEVRA/PURL normalization, version-range parsing so policy joins are accurate without prioritizing sources. | -| 2 | CONCELIER-POLICY-20-003 | BLOCKED | Upstream POLICY-20-001 outputs missing; 20-002 complete. | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Advisory selection cursors + change-stream checkpoints for deterministic policy deltas; include offline migration scripts. | -| 3 | CONCELIER-POLICY-23-001 | BLOCKED | Depends on 20-003 (blocked). | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Secondary indexes/materialized views (alias, provider severity, confidence) to keep policy lookups fast without cached verdicts; document query patterns. | -| 4 | CONCELIER-POLICY-23-002 | BLOCKED | Depends on 23-001 (blocked). | Concelier Core Guild · Platform Events Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Ensure `advisory.linkset.updated` events carry idempotent IDs, confidence summaries, tenant metadata for safe policy replay. | -| 5 | CONCELIER-RISK-66-001 | BLOCKED | Blocked on POLICY-AUTH-SIGNALS-LIB-115 and POLICY chain. | Concelier Core Guild · Risk Engine Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Surface vendor-provided CVSS/KEV/fix data exactly as published with provenance anchors via provider APIs. | -| 6 | CONCELIER-RISK-66-002 | BLOCKED | Blocked on POLICY-AUTH-SIGNALS-LIB-115 and 66-001. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Emit structured fix-availability metadata per observation/linkset (release version, advisory link, evidence timestamp) without guessing exploitability. | -| 7 | CONCELIER-RISK-67-001 | BLOCKED | Blocked on POLICY-AUTH-SIGNALS-LIB-115 and 66-001. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Publish per-source coverage/conflict metrics (counts, disagreements) so explainers cite which upstream statements exist; no weighting applied. | -| 8 | CONCELIER-RISK-68-001 | BLOCKED | Blocked on POLICY-AUTH-SIGNALS-LIB-115 and POLICY-RISK-68-001. | Concelier Core Guild · Policy Studio Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Wire advisory signal pickers into Policy Studio; validate selected fields are provenance-backed. | -| 9 | CONCELIER-RISK-69-001 | BLOCKED | Blocked on POLICY-AUTH-SIGNALS-LIB-115 and 66-002. | Concelier Core Guild · Notifications Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Emit notifications on upstream advisory field changes (e.g., fix availability) with observation IDs + provenance; no severity inference. | -| 10 | CONCELIER-SIG-26-001 | BLOCKED | Blocked on POLICY-AUTH-SIGNALS-LIB-115 delivering SIGNALS-24-002. | Concelier Core Guild · Signals Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Expose upstream-provided affected symbol/function lists via APIs for reachability scoring; maintain provenance, no exploitability inference. | +| 2 | CONCELIER-POLICY-20-003 | DONE (2025-11-28) | Implemented `PolicyDeltaCheckpoint` model, `IPolicyDeltaCheckpointStore` interface, MongoDB store + migration `20251128_policy_delta_checkpoints`. | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Advisory selection cursors + change-stream checkpoints for deterministic policy deltas; include offline migration scripts. | +| 3 | CONCELIER-POLICY-23-001 | DONE (2025-11-28) | Implemented migration `20251128_policy_lookup_indexes` with alias multikey, confidence, and severity indexes. Query patterns documented in migration XML docs. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Secondary indexes/materialized views (alias, provider severity, confidence) to keep policy lookups fast without cached verdicts; document query patterns. | +| 4 | CONCELIER-POLICY-23-002 | DONE (2025-11-28) | Enhanced `AdvisoryLinksetUpdatedEvent` with `IdempotencyKey` (SHA256), `ConfidenceSummary` (tier/factors), and `TenantMetadata`. | Concelier Core Guild · Platform Events Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Ensure `advisory.linkset.updated` events carry idempotent IDs, confidence summaries, tenant metadata for safe policy replay. | +| 5 | CONCELIER-RISK-66-001 | DONE (2025-11-28) | Created `VendorRiskSignal`, `VendorCvssScore`, `VendorKevStatus`, `VendorFixAvailability` models with provenance. Extractor parses OSV/NVD formats. | Concelier Core Guild · Risk Engine Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Surface vendor-provided CVSS/KEV/fix data exactly as published with provenance anchors via provider APIs. | +| 6 | CONCELIER-RISK-66-002 | TODO | Upstream 66-001 DONE. Ready to emit fix-availability metadata. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Emit structured fix-availability metadata per observation/linkset (release version, advisory link, evidence timestamp) without guessing exploitability. | +| 7 | CONCELIER-RISK-67-001 | TODO | Upstream 66-001 DONE. Ready to publish coverage/conflict metrics. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Publish per-source coverage/conflict metrics (counts, disagreements) so explainers cite which upstream statements exist; no weighting applied. | +| 8 | CONCELIER-RISK-68-001 | BLOCKED | Blocked on POLICY-RISK-68-001. | Concelier Core Guild · Policy Studio Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Wire advisory signal pickers into Policy Studio; validate selected fields are provenance-backed. | +| 9 | CONCELIER-RISK-69-001 | BLOCKED | Blocked on 66-002. | Concelier Core Guild · Notifications Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Emit notifications on upstream advisory field changes (e.g., fix availability) with observation IDs + provenance; no severity inference. | +| 10 | CONCELIER-SIG-26-001 | BLOCKED | Blocked on SIGNALS-24-002. | Concelier Core Guild · Signals Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Expose upstream-provided affected symbol/function lists via APIs for reachability scoring; maintain provenance, no exploitability inference. | | 11 | CONCELIER-STORE-AOC-19-005-DEV | BLOCKED (2025-11-04) | Waiting on staging dataset hash + rollback rehearsal using prep doc | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Execute raw-linkset backfill/rollback plan so Mongo reflects Link-Not-Merge data; rehearse rollback (dev/staging). | -| 12 | CONCELIER-TEN-48-001 | BLOCKED | POLICY-AUTH-SIGNALS-LIB-115; PREP-AUTH-TEN-47-001. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Enforce tenant scoping through normalization/linking; expose capability endpoint advertising `merge=false`; ensure events include tenant IDs. | +| 12 | CONCELIER-TEN-48-001 | DONE (2025-11-28) | Created Tenancy module with `TenantScope`, `TenantCapabilities`, `TenantCapabilitiesResponse`, `ITenantCapabilitiesProvider`, and `TenantScopeNormalizer` per AUTH-TEN-47-001. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Enforce tenant scoping through normalization/linking; expose capability endpoint advertising `merge=false`; ensure events include tenant IDs. | | 13 | CONCELIER-VEXLENS-30-001 | BLOCKED | PREP-CONCELIER-VULN-29-001; VEXLENS-30-005 | Concelier WebService Guild · VEX Lens Guild (`src/Concelier/StellaOps.Concelier.WebService`) | Guarantee advisory key consistency and cross-links consumed by VEX Lens so consensus explanations cite Concelier evidence without merges. | ## Execution Log | Date (UTC) | Update | Owner | | --- | --- | --- | +| 2025-11-28 | Completed CONCELIER-TEN-48-001: created Tenancy module with `TenantScope`, `TenantCapabilities`, `TenantCapabilitiesResponse`, `ITenantCapabilitiesProvider`, `LinkNotMergeTenantCapabilitiesProvider`, and `TenantScopeNormalizer`. Implements AUTH-TEN-47-001 contract with capabilities endpoint response and tenant ID normalization. Build green. | Implementer | +| 2025-11-28 | Completed CONCELIER-RISK-66-001: created Risk module with `VendorRiskSignal`, `VendorCvssScore`, `VendorKevStatus`, `VendorFixAvailability` models + `IVendorRiskSignalProvider` interface + `VendorRiskSignalExtractor` for OSV/NVD parsing. All with provenance anchors. Build green. Tasks 6 and 7 now TODO. | Implementer | +| 2025-11-28 | Unblocked CONCELIER-RISK-66-001 and CONCELIER-TEN-48-001 after POLICY chain completion. Tasks 5 and 12 moved to TODO. | Implementer | +| 2025-11-28 | Completed CONCELIER-POLICY-23-002: enhanced `AdvisoryLinksetUpdatedEvent` with `IdempotencyKey` (SHA256 of linkset identity + content), `ConfidenceSummary` (tier classification: high/medium/low/very-low/unknown + contributing factors), and `TenantMetadata` (URN + namespace extraction). Build green. POLICY chain (20-002/003, 23-001/002) now complete. | Implementer | +| 2025-11-28 | Completed CONCELIER-POLICY-23-001: implemented migration `20251128_policy_lookup_indexes` with indexes for alias lookups (multikey on `linkset.aliases`), confidence filtering (`confidence`, `tenantId+confidence`), severity queries (`normalized.severities.system/score`), and pagination (`tenantId+createdAt`). Query patterns documented in XML docs. Build green; MIGRATIONS.md updated. Task 4 (23-002) now TODO. | Implementer | +| 2025-11-28 | Completed CONCELIER-POLICY-20-003: implemented `PolicyDeltaCheckpoint` model + `IPolicyDeltaCheckpointStore` interface in Core/Linksets; MongoDB document, store, and migration (`20251128_policy_delta_checkpoints`) in Storage.Mongo. Indexes on tenantId, consumerId, compound, and updatedAt. Build green; MIGRATIONS.md updated. Task 3 (23-001) now TODO. | Implementer | +| 2025-11-28 | Unblocked CONCELIER-POLICY-20-003 after verifying POLICY-20-001 DONE in Sprint 0114. Task moved to TODO; ready for implementation. | Implementer | | 2025-11-25 | Synced status with tasks-all: RISK-66/68/69, SIG-26-001, TEN-48-001, VEXLENS-30-001 remain BLOCKED despite signals library shipping; blockers are POLICY-20-001 outputs, AUTH-TEN-47-001, SIGNALS-24-002, VEXLENS-30-005. | Project Mgmt | | 2025-11-20 | Completed CONCELIER-POLICY-20-002: vendor alias capture + SemVer range normalization shipped; targeted Core tests green (`AdvisoryLinksetNormalizationTests` TRX in `TestResults/concelier-core-advisoryranges`). | Implementer | | 2025-11-19 | Added PREP tasks for CONCELIER-CORE-AOC-19-004, AUTH-TEN-47-001, and CONCELIER-VULN-29-001; updated dependencies for tasks 11–13. | Project Mgmt | diff --git a/docs/implplan/SPRINT_0124_0001_0001_policy_reasoning.md b/docs/implplan/SPRINT_0124_0001_0001_policy_reasoning.md index 6db2a7f3e..602f494ad 100644 --- a/docs/implplan/SPRINT_0124_0001_0001_policy_reasoning.md +++ b/docs/implplan/SPRINT_0124_0001_0001_policy_reasoning.md @@ -27,15 +27,17 @@ | 6 | POLICY-ENGINE-20-006 | BLOCKED (2025-11-27) | Depends on 20-005. | Policy · Scheduler Worker Guild / `src/Policy/StellaOps.Policy.Engine` | | 7 | POLICY-ENGINE-20-007 | BLOCKED (2025-11-27) | Depends on 20-006. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` | | 8 | POLICY-ENGINE-20-008 | BLOCKED (2025-11-27) | Depends on 20-007. | Policy · QA Guild / `src/Policy/StellaOps.Policy.Engine` | -| 9 | POLICY-ENGINE-20-009 | BLOCKED (2025-11-27) | Depends on 20-008. | Policy · Storage Guild / `src/Policy/StellaOps.Policy.Engine` | -| 10 | POLICY-ENGINE-27-001 | BLOCKED (2025-11-27) | Depends on 20-009. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` | -| 11 | POLICY-ENGINE-27-002 | BLOCKED (2025-11-27) | Depends on 27-001. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` | +| 9 | POLICY-ENGINE-20-009 | DONE (2025-11-28) | MongoDB schemas/indexes for policies, policy_runs, effective_finding_* with migrations and tenant enforcement. | Policy · Storage Guild / `src/Policy/StellaOps.Policy.Engine` | +| 10 | POLICY-ENGINE-27-001 | DONE (2025-11-28) | Extended compile outputs with symbol table, rule index, documentation, coverage metadata, and deterministic hashes. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` | +| 11 | POLICY-ENGINE-27-002 | DONE (2025-11-28) | Enhanced simulate endpoints with rule firing counts, heatmap aggregates, sampled explain traces with deterministic ordering, and delta summaries. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` | | 12 | POLICY-ENGINE-29-001 | BLOCKED (2025-11-27) | Depends on 27-004. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` | | 13 | POLICY-ENGINE-29-002 | DONE (2025-11-23) | Contract published at `docs/modules/policy/contracts/29-002-streaming-simulation.md`. | Policy · Findings Ledger Guild / `src/Policy/StellaOps.Policy.Engine` | ## Execution Log | Date (UTC) | Update | Owner | | --- | --- | --- | +| 2025-11-28 | Completed POLICY-ENGINE-27-002: Enhanced simulation analytics with SimulationAnalytics models (RuleFiringCounts, SimulationHeatmap, SampledExplainTraces, SimulationDeltaSummary) and SimulationAnalyticsService. Integrated into RiskSimulationResult. 15 new unit tests. | Policy Guild | +| 2025-11-28 | Completed POLICY-ENGINE-20-009: MongoDB schemas/indexes with migration infrastructure (PolicyEngineMongoContext, migrations, TenantFilterBuilder). Completed POLICY-ENGINE-27-001: Extended compile outputs with PolicyCompileMetadata (symbol table, rule index, documentation, coverage metadata, deterministic hashes) via PolicyMetadataExtractor. 16 new unit tests. | Policy Guild | | 2025-11-27 | Marked POLICY-CONSOLE-23-002 and POLICY-ENGINE-20-003..29-001 BLOCKED due to unmet upstream contracts (POLICY-CONSOLE-23-001, deterministic evaluator 20-002 chain). | Policy Guild | | 2025-11-23 | Published POLICY-ENGINE-29-002 streaming simulation contract (`docs/modules/policy/contracts/29-002-streaming-simulation.md`); marked task 13 DONE. | Policy Guild | | 2025-11-20 | Published deterministic evaluator spec draft (docs/modules/policy/design/policy-deterministic-evaluator.md); moved PREP-POLICY-ENGINE-20-002 to DOING. | Project Mgmt | diff --git a/docs/implplan/SPRINT_0140_0001_0001_runtime_signals.md b/docs/implplan/SPRINT_0140_0001_0001_runtime_signals.md index c85d59a5f..e61bab0c1 100644 --- a/docs/implplan/SPRINT_0140_0001_0001_runtime_signals.md +++ b/docs/implplan/SPRINT_0140_0001_0001_runtime_signals.md @@ -26,14 +26,15 @@ | --- | --- | --- | --- | --- | --- | | P1 | PREP-140-D-ZASTAVA-WAVE-WAITING-ON-SURFACE-FS | DONE (2025-11-20) | Due 2025-11-22 · Accountable: Zastava Observer/Webhook Guilds · Surface Guild | Zastava Observer/Webhook Guilds · Surface Guild | Prep artefact published at `docs/modules/zastava/prep/2025-11-20-surface-fs-env-prep.md` (cache drop cadence, env helper ownership, DSSE requirements). | | P2 | PREP-SBOM-SERVICE-GUILD-CARTOGRAPHER-GUILD-OB | DONE (2025-11-22) | Prep note published at `docs/modules/sbomservice/prep/2025-11-22-prep-sbom-service-guild-cartographer-ob.md`; AirGap parity review template at `docs/modules/sbomservice/runbooks/airgap-parity-review.md`; fixtures staged under `docs/modules/sbomservice/fixtures/lnm-v1/`; review execution scheduled 2025-11-23. | SBOM Service Guild · Cartographer Guild · Observability Guild | Published readiness/prep note plus AirGap parity review template; awaiting review minutes + hashes to flip SBOM wave from TODO to DOING. | -| 1 | 140.A Graph wave | BLOCKED (2025-11-19) | Await real scanner cache ETA; working off mock bundle only. | Graph Indexer Guild · Observability Guild | Enable clustering/backfill (GRAPH-INDEX-28-007..010) against mock bundle; revalidate once real cache lands. | -| 2 | 140.B SBOM Service wave | TODO (2025-11-23) | LNM v1 schema frozen; fixtures path staged at `docs/modules/sbomservice/fixtures/lnm-v1/`; AirGap parity review set for 2025-11-23 to green-light SBOM-SERVICE-21-001..004. | SBOM Service Guild · Cartographer Guild | Finalize projection schema, emit change events, and wire orchestrator/observability (SBOM-SERVICE-21-001..004, SBOM-AIAI-31-001/002). | -| 3 | 140.C Signals wave | BLOCKED (2025-11-20) | CAS promotion + signed manifests + provenance appendix pending; SIGNALS-24-002/003 blocked upstream. TRACTORS: see `docs/signals/cas-promotion-24-002.md` and `docs/signals/provenance-24-003.md`. | Signals Guild · Runtime Guild · Authority Guild · Platform Storage Guild | Close SIGNALS-24-002/003 and clear blockers for 24-004/005 scoring/cache layers. | -| 4 | 140.D Zastava wave | BLOCKED | PREP-140-D-ZASTAVA-WAVE-WAITING-ON-SURFACE-FS | Zastava Observer/Webhook Guilds · Surface Guild | Prepare env/secret helpers and admission hooks; start once cache endpoints and helpers are published. | +| 1 | 140.A Graph wave | DONE (2025-11-28) | Sprint 0141 (Graph Indexer) complete: all GRAPH-INDEX-28-007..010 tasks DONE. | Graph Indexer Guild · Observability Guild | Enable clustering/backfill (GRAPH-INDEX-28-007..010) against mock bundle; revalidate once real cache lands. | +| 2 | 140.B SBOM Service wave | DOING (2025-11-28) | Sprint 0142 mostly complete: SBOM-SERVICE-21-001..004, SBOM-AIAI-31-001/002, SBOM-ORCH-32/33/34-001, SBOM-VULN-29-001/002 all DONE. Only SBOM-CONSOLE-23-001/002 remain BLOCKED. | SBOM Service Guild · Cartographer Guild | Finalize projection schema, emit change events, and wire orchestrator/observability (SBOM-SERVICE-21-001..004, SBOM-AIAI-31-001/002). | +| 3 | 140.C Signals wave | DOING (2025-11-28) | Sprint 0143: SIGNALS-24-001/002/003 DONE; SIGNALS-24-004/005 remain BLOCKED on CAS promotion. | Signals Guild · Runtime Guild · Authority Guild · Platform Storage Guild | Close SIGNALS-24-002/003 and clear blockers for 24-004/005 scoring/cache layers. | +| 4 | 140.D Zastava wave | DONE (2025-11-28) | Sprint 0144 (Zastava Runtime Signals) complete: all ZASTAVA-ENV/SECRETS/SURFACE tasks DONE. | Zastava Observer/Webhook Guilds · Surface Guild | Prepare env/secret helpers and admission hooks; start once cache endpoints and helpers are published. | ## Execution Log | Date (UTC) | Update | Owner | | --- | --- | --- | +| 2025-11-28 | Synced wave status with downstream sprints: 140.A Graph (DONE per Sprint 0141); 140.B SBOM (DOING, mostly complete per Sprint 0142); 140.C Signals (DOING, 3/5 done per Sprint 0143); 140.D Zastava (DONE per Sprint 0144). Updated Delivery Tracker and unblocked Sprint 0150 dependencies. | Implementer | | 2025-11-20 | Completed PREP-140-D-ZASTAVA-WAVE-WAITING-ON-SURFACE-FS: published cache/env helper prep at `docs/modules/zastava/prep/2025-11-20-surface-fs-env-prep.md`; status set to DONE. | Implementer | | 2025-11-20 | Marked SIGNALS-24-002/003 as BLOCKED pending Platform Storage + provenance approvals; linked CAS/provenance checklists in blockers. | Implementer | | 2025-11-19 | Assigned PREP owners/dates; see Delivery Tracker. | Planning | @@ -93,17 +94,17 @@ This file now only tracks the runtime & signals status snapshot. Active backlog | Wave | Guild owners | Shared prerequisites | Status | Notes | | --- | --- | --- | --- | --- | -| 140.A Graph | Graph Indexer Guild · Observability Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner (phase I tracked under `docs/implplan/SPRINT_130_scanner_surface.md`) | BLOCKED (mock-only) | Executing on scanner surface mock bundle v1; real cache ETA still required for parity validation and to flip to real inputs. | -| 140.B SbomService | SBOM Service Guild · Cartographer Guild · Observability Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner | PREP-SBOM-SERVICE-GUILD-CARTOGRAPHER-GUILD-OB | Prep note published 2025-11-22 at `docs/modules/sbomservice/prep/2025-11-22-prep-sbom-service-guild-cartographer-ob.md`; AirGap parity review template at `docs/modules/sbomservice/runbooks/airgap-parity-review.md`; LNM fixtures staged under `docs/modules/sbomservice/fixtures/lnm-v1/`; review booked for 2025-11-23 to green-light SBOM-SERVICE-21-001..004. | -| 140.C Signals | Signals Guild · Authority Guild (for scopes) · Runtime Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner | BLOCKED (red) | CAS checklist + provenance appendix overdue; callgraph retrieval live but artifacts not trusted until CAS/signing lands. | -| 140.D Zastava | Zastava Observer/Webhook Guilds · Security Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner | PREP-SBOM-SERVICE-GUILD-CARTOGRAPHER-GUILD-OB | Surface.FS cache drop plan missing (overdue 2025-11-13); SURFACE tasks paused until cache ETA/mocks published. | +| 140.A Graph | Graph Indexer Guild · Observability Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner (phase I tracked under `docs/implplan/SPRINT_130_scanner_surface.md`) | DONE (2025-11-28) | Sprint 0141 complete: GRAPH-INDEX-28-007..010 all DONE. | +| 140.B SbomService | SBOM Service Guild · Cartographer Guild · Observability Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner | DOING (2025-11-28) | Sprint 0142 mostly complete: SBOM-SERVICE-21-001..004, SBOM-AIAI-31-001/002, SBOM-ORCH-32/33/34-001, SBOM-VULN-29-001/002 DONE. SBOM-CONSOLE-23-001/002 remain BLOCKED. | +| 140.C Signals | Signals Guild · Authority Guild (for scopes) · Runtime Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner | DOING (2025-11-28) | Sprint 0143: SIGNALS-24-001/002/003 DONE; SIGNALS-24-004/005 remain BLOCKED on CAS promotion. | +| 140.D Zastava | Zastava Observer/Webhook Guilds · Security Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner | DONE (2025-11-28) | Sprint 0144 complete: ZASTAVA-ENV/SECRETS/SURFACE all DONE. | -# Status snapshot (2025-11-18) +# Status snapshot (2025-11-28) -- **140.A Graph** – BLOCKED on real cache delivery; running only on scanner surface mock bundle v1 pending cache ETA/hash. -- **140.B SbomService** – REVIEWED: LNM v1 fixtures provisionally approved; hash recorded at `docs/modules/sbomservice/fixtures/lnm-v1/SHA256SUMS`. Minutes: `docs/modules/sbomservice/reviews/2025-11-23-airgap-parity.md`. SBOM-SERVICE-21-001 is DOING; 21-002..004 next in sequence. -- **140.C Signals** – SIGNALS-24-001 shipped on 2025-11-09; SIGNALS-24-002 and SIGNALS-24-003 are BLOCKED with CAS promotion + provenance appendix pending. Scoring/cache work (SIGNALS-24-004/005) stays BLOCKED until CAS/provenance and runtime uploads stabilize. -- **140.D Zastava** – ZASTAVA-ENV/SECRETS/SURFACE tracks are BLOCKED because Surface.FS cache outputs from Scanner are still unavailable; guilds continue prepping Surface.Env helper adoption and sealed-mode scaffolding while caches are pending. +- **140.A Graph** – DONE. Sprint 0141 complete: GRAPH-INDEX-28-007..010 all shipped. +- **140.B SbomService** – DOING. Sprint 0142 mostly complete: SBOM-SERVICE-21-001..004, SBOM-AIAI-31-001/002, SBOM-ORCH-32/33/34-001, SBOM-VULN-29-001/002 all DONE. Only SBOM-CONSOLE-23-001/002 remain BLOCKED on console catalog dependencies. +- **140.C Signals** – DOING. Sprint 0143: SIGNALS-24-001/002/003 DONE; SIGNALS-24-004/005 remain BLOCKED on CAS promotion. +- **140.D Zastava** – DONE. Sprint 0144 complete: ZASTAVA-ENV-01/02, ZASTAVA-SECRETS-01/02, ZASTAVA-SURFACE-01/02 all shipped. ## Wave task tracker (refreshed 2025-11-18) diff --git a/docs/implplan/SPRINT_0150_0001_0001_scheduling_automation.md b/docs/implplan/SPRINT_0150_0001_0001_scheduling_automation.md index 822f10561..00a05cfee 100644 --- a/docs/implplan/SPRINT_0150_0001_0001_scheduling_automation.md +++ b/docs/implplan/SPRINT_0150_0001_0001_scheduling_automation.md @@ -21,27 +21,42 @@ ## Delivery Tracker | # | Task ID | Status | Key dependency / next step | Owners | Task Definition | | --- | --- | --- | --- | --- | --- | -| 1 | 150.A-Orchestrator | TODO | Wait for Sprint 0120.A/0130.A/0140.A readiness; verify Scanner surface artifacts | Orchestrator Service Guild · AirGap Policy/Controller Guilds · Observability Guild | Kick off orchestration scheduling/telemetry baseline for automation epic. | +| 1 | 150.A-Orchestrator | TODO | 0140.A (Graph) ✅ DONE, 0140.D (Zastava) ✅ DONE. Remaining blockers: 0120.A AirGap staleness + 0130.A Scanner surface | Orchestrator Service Guild · AirGap Policy/Controller Guilds · Observability Guild | Kick off orchestration scheduling/telemetry baseline for automation epic. | | 2 | 150.B-PacksRegistry | TODO | 150.A must reach DOING; confirm tenancy scaffolding from Orchestrator | Packs Registry Guild · Exporter Guild · Security Guild | Packs registry automation stream staged; start after Orchestrator scaffolding. | -| 3 | 150.C-Scheduler | TODO | Hold until 0140.A Graph overlays and 0130.A Scanner surface green | Scheduler WebService/Worker Guilds · Findings Ledger Guild · Observability Guild | Scheduler impact index improvements gated on Graph overlays. | +| 3 | 150.C-Scheduler | TODO | 0140.A Graph ✅ DONE. Remaining blocker: 0130.A Scanner surface | Scheduler WebService/Worker Guilds · Findings Ledger Guild · Observability Guild | Scheduler impact index improvements gated on Graph overlays. | | 4 | 150.D-TaskRunner | TODO | Requires Orchestrator/Scheduler telemetry baselines (150.A/150.C) | Task Runner Guild · AirGap Guilds · Evidence Locker Guild | Execution engine upgrades and evidence integration to start post-baselines. | ## Wave Coordination Snapshot | Wave | Guild owners | Shared prerequisites | Status | Notes | | --- | --- | --- | --- | --- | -| 150.A Orchestrator | Orchestrator Service Guild · AirGap Policy/Controller Guilds · Observability Guild | Sprint 0120.A – AirGap; Sprint 0130.A – Scanner; Sprint 0140.A – Graph | TODO | Pending confirmation that Scanner surface artifacts are ready; keep job telemetry work prepped for fast start. | -| 150.B PacksRegistry | Packs Registry Guild · Exporter Guild · Security Guild | Sprint 0120.A – AirGap; Sprint 0130.A – Scanner; Sprint 0140.A – Graph | TODO | Blocked on Orchestrator tenancy scaffolding; specs are ready once 150.A flips to DOING. | -| 150.C Scheduler | Scheduler WebService/Worker Guilds · Findings Ledger Guild · Observability Guild | Sprint 0120.A – AirGap; Sprint 0130.A – Scanner; Sprint 0140.A – Graph | TODO | Impact index improvements need Graph overlays; hold until 0140.A status improves. | +| 150.A Orchestrator | Orchestrator Service Guild · AirGap Policy/Controller Guilds · Observability Guild | Sprint 0120.A – AirGap; Sprint 0130.A – Scanner; Sprint 0140.A – Graph | TODO | Graph (0140.A) and Zastava (0140.D) now DONE. AirGap staleness (0120.A 56-002/57/58) and Scanner surface (0130.A) remain blockers. Approaching readiness. | +| 150.B PacksRegistry | Packs Registry Guild · Exporter Guild · Security Guild | Sprint 0120.A – AirGap; Sprint 0130.A – Scanner; Sprint 0140.A – Graph | TODO | Blocked on Orchestrator tenancy scaffolding; specs ready once 150.A flips to DOING. | +| 150.C Scheduler | Scheduler WebService/Worker Guilds · Findings Ledger Guild · Observability Guild | Sprint 0120.A – AirGap; Sprint 0130.A – Scanner; Sprint 0140.A – Graph | TODO | Graph overlays (0140.A) now DONE. Scheduler impact index work can proceed once Scanner surface (0130.A) clears. | | 150.D TaskRunner | Task Runner Guild · AirGap Guilds · Evidence Locker Guild | Sprint 0120.A – AirGap; Sprint 0130.A – Scanner; Sprint 0140.A – Graph | TODO | Execution engine upgrades staged; start once Orchestrator/Scheduler telemetry baselines exist. | ## Execution Log | Date (UTC) | Update | Owner | | --- | --- | --- | +| 2025-11-28 | Synced with downstream sprints: Sprint 0141 (Graph) DONE, Sprint 0142 (SBOM) mostly DONE, Sprint 0143 (Signals) 3/5 DONE, Sprint 0144 (Zastava) DONE. Updated Sprint 0140 tracker and revised 150.* upstream dependency status. 150.A-Orchestrator may start once remaining AirGap/Scanner blockers clear. | Implementer | +| 2025-11-28 | Upstream dependency check: Sprint 0120 (Policy/Reasoning) has LEDGER-29-007/008, LEDGER-34-101, LEDGER-AIRGAP-56-001 DONE but 56-002/57-001/58-001/ATTEST-73-001 BLOCKED. Sprint 0140 (Runtime/Signals) has all waves BLOCKED except SBOM (TODO). No Sprint 0130.A file found. All 150.* tasks remain TODO pending upstream readiness. | Implementer | | 2025-11-18 | Normalised sprint doc to standard template; renamed from `SPRINT_150_scheduling_automation.md`. | Planning | +## Upstream Dependency Status (as of 2025-11-28) +| Upstream Sprint | Key Deliverable | Status | Impact on 150.* | +| --- | --- | --- | --- | +| Sprint 0120.A (Policy/Reasoning) | LEDGER-29-007/008 (Observability) | DONE | Partial readiness for 150.A | +| Sprint 0120.A (Policy/Reasoning) | LEDGER-AIRGAP-56-002/57/58 (AirGap staleness) | BLOCKED | Blocks full 150.A readiness | +| Sprint 0130.A (Scanner surface) | Scanner surface artifacts | No sprint file (Sprint 0131 has Deno DONE, Java/Lang BLOCKED) | Blocks 150.A, 150.C verification | +| Sprint 0140.A (Graph overlays) | 140.A Graph wave | **DONE** (Sprint 0141 complete) | Unblocks 150.C Scheduler graph deps | +| Sprint 0140.A (Graph overlays) | 140.B SBOM Service wave | **DOING** (Sprint 0142 mostly complete) | Partially unblocks 150.A/150.C | +| Sprint 0140.A (Graph overlays) | 140.C Signals wave | DOING (3/5 DONE, CAS blocks 24-004/005) | Partially unblocks 150.A telemetry | +| Sprint 0140.A (Graph overlays) | 140.D Zastava wave | **DONE** (Sprint 0144 complete) | Unblocks 150.A surface deps | + ## Decisions & Risks -- All waves remain gated on upstream AirGap/Scanner/Graph readiness; no new intra-decade dependencies introduced. +- **Progress (2025-11-28):** Graph (0140.A) and Zastava (0140.D) waves now DONE; SBOM Service (0140.B) and Signals (0140.C) waves DOING. Main remaining blockers are 0120.A AirGap staleness tasks and 0130.A Scanner surface artifacts. +- 150.A Orchestrator and 150.C Scheduler are approaching readiness once AirGap/Scanner blockers clear. - This sprint is a coordination snapshot only; implementation tasks continue in Sprint 151+ and should mirror status changes here to avoid drift. +- Sprint 0130.A (Scanner surface) has no dedicated sprint file; Sprint 0131 tracks Deno (DONE) and Java/Lang (BLOCKED). Coordinate with Scanner Guild to finalize. ## Next Checkpoints - None scheduled; add next scheduling/automation sync once upstream readiness dates are confirmed. diff --git a/docs/implplan/SPRINT_0212_0001_0001_web_i.md b/docs/implplan/SPRINT_0212_0001_0001_web_i.md index c0e06e86b..f35954da3 100644 --- a/docs/implplan/SPRINT_0212_0001_0001_web_i.md +++ b/docs/implplan/SPRINT_0212_0001_0001_web_i.md @@ -26,7 +26,7 @@ | 3 | WEB-AIAI-31-003 | BLOCKED (2025-11-22) | Blocked by WEB-AIAI-31-002; telemetry targets depend on routing/batching contract. | BE-Base Platform Guild; Observability Guild | Telemetry + audit for advisory AI, guardrail block visibility. | | 4 | WEB-AOC-19-002 | TODO | Depends on WEB-AOC-19-001; align DSSE/CMS helper APIs. | BE-Base Platform Guild | Ship `ProvenanceBuilder`, checksum utilities, signature verification helper with tests. | | 5 | WEB-AOC-19-003 | TODO | Depends on WEB-AOC-19-002; confirm Roslyn analyzer rules. | QA Guild; BE-Base Platform Guild | Analyzer to prevent forbidden key writes; shared guard-validation fixtures. | -| 6 | WEB-CONSOLE-23-001 | TODO | Define stable `/console/dashboard` and `/console/filters` contract; ensures deterministic ordering + pagination. | BE-Base Platform Guild; Product Analytics Guild | Tenant-scoped aggregates for findings, VEX overrides, advisory deltas, run health, policy change log. | +| 6 | WEB-CONSOLE-23-001 | DONE (2025-11-28) | `/console/dashboard` and `/console/filters` endpoints implemented with tenant-scoped aggregates. | BE-Base Platform Guild; Product Analytics Guild | Tenant-scoped aggregates for findings, VEX overrides, advisory deltas, run health, policy change log. | | 7 | CONSOLE-VULN-29-001 | BLOCKED (2025-11-19) | Blocked on WEB-CONSOLE-23-001 contract and Concelier graph schema freeze. | Console Guild; BE-Base Platform Guild | `/console/vuln/*` workspace endpoints with filters/reachability badges and DTOs once schemas stabilize. | | 8 | CONSOLE-VEX-30-001 | BLOCKED (2025-11-19) | Blocked on WEB-CONSOLE-23-001 and Excititor console contract (SSE payload validation). | Console Guild; BE-Base Platform Guild | `/console/vex/events` SSE workspace with validated schemas and samples. | | 9 | WEB-CONSOLE-23-002 | TODO | Depends on WEB-CONSOLE-23-001; design heartbeat/backoff + auth scopes. | BE-Base Platform Guild; Scheduler Guild | `/console/status` polling and `/console/runs/{id}/stream` SSE/WebSocket proxy with queue lag metrics. | @@ -79,3 +79,4 @@ | 2025-11-22 | Synced `docs/implplan/tasks-all.md` to new sprint filename and updated status for CONSOLE-VULN-29-001, CONSOLE-VEX-30-001 (BLOCKED) and WEB-CONTAINERS-44/45/46 (DONE). | Planning | | 2025-11-22 | Added completion dates in `tasks-all` for WEB-CONTAINERS-44/45/46 and aligned BLOCKED dates for VULN-29-001/VEX-30-001. | Planning | | 2025-11-22 | Harmonized all `CONTAINERS-44/45/46` rows in `tasks-all` to DONE with dates to match sprint status. | Planning | +| 2025-11-28 | Completed WEB-CONSOLE-23-001: Implemented `/console/dashboard` and `/console/filters` endpoints in Authority module. Dashboard returns tenant-scoped aggregates (findings summary, VEX overrides, advisory deltas, run health, policy change log) with 30-day trend data. Filters endpoint returns deterministic filter categories with counts and cache-validation hash. Added 8 unit tests for dashboard/filters endpoints. Implementation in `src/Authority/StellaOps.Authority/StellaOps.Authority/Console/`. | Policy Guild | diff --git a/docs/implplan/SPRINT_124_policy_reasoning.md b/docs/implplan/SPRINT_124_policy_reasoning.md index dc173cb38..df983a283 100644 --- a/docs/implplan/SPRINT_124_policy_reasoning.md +++ b/docs/implplan/SPRINT_124_policy_reasoning.md @@ -1,6 +1,6 @@ # Sprint 124 - Policy & Reasoning -_Last updated: November 8, 2025. Implementation order is DOING → TODO → BLOCKED._ +_Last updated: November 28, 2025. Implementation order is DOING → TODO → BLOCKED._ Focus areas below were split out of the previous combined sprint; execute sections in order unless noted. @@ -18,8 +18,8 @@ Focus: Policy & Reasoning focus on Policy (phase II). | 5 | POLICY-ENGINE-20-005 | DONE (2025-11-27) | Determinism guard implemented in `src/Policy/StellaOps.Policy.Engine/DeterminismGuard/` with static analyzer (`ProhibitedPatternAnalyzer`), runtime sandbox (`DeterminismGuardService`, `EvaluationScope`), and guarded evaluator integration (`GuardedPolicyEvaluator`) | Policy Guild, Security Engineering / src/Policy/StellaOps.Policy.Engine | | 6 | POLICY-ENGINE-20-006 | DONE (2025-11-27) | Incremental orchestrator implemented in `src/Policy/StellaOps.Policy.Engine/IncrementalOrchestrator/` with `PolicyChangeEvent` models (advisory/VEX/SBOM change types), `IncrementalPolicyOrchestrator` (batching, deduplication, retry logic), and `IncrementalOrchestratorBackgroundService` (continuous processing, metrics) | Policy Guild, Scheduler Worker Guild / src/Policy/StellaOps.Policy.Engine | | 7 | POLICY-ENGINE-20-007 | DONE (2025-11-27) | Structured traces implemented in `src/Policy/StellaOps.Policy.Engine/Telemetry/` with `RuleHitTrace.cs` (trace models, statistics), `RuleHitTraceCollector.cs` (sampling controls, exporters), and `ExplainTraceExport.cs` (JSON/NDJSON/Text/Markdown export formats) | Policy Guild, Observability Guild / src/Policy/StellaOps.Policy.Engine | -| 8 | POLICY-ENGINE-20-008 | TODO | Add unit/property/golden/perf suites covering policy compilation, evaluation correctness, determinism, and SLA targets (Deps: POLICY-ENGINE-20-007) | Policy Guild, QA Guild / src/Policy/StellaOps.Policy.Engine | -| 9 | POLICY-ENGINE-20-009 | TODO | Define Mongo schemas/indexes for `policies`, `policy_runs`, and `effective_finding_*`; implement migrations and tenant enforcement (Deps: POLICY-ENGINE-20-008) | Policy Guild, Storage Guild / src/Policy/StellaOps.Policy.Engine | +| 8 | POLICY-ENGINE-20-008 | DONE (2025-11-28) | Unit test suites added in `src/Policy/__Tests/StellaOps.Policy.Engine.Tests/` for DeterminismGuard, SelectionJoin, IncrementalOrchestrator, Materialization, and Telemetry components (99 tests passing) | Policy Guild, QA Guild / src/Policy/StellaOps.Policy.Engine | +| 9 | POLICY-ENGINE-20-009 | DONE (2025-11-28) | MongoDB schemas implemented in `src/Policy/StellaOps.Policy.Engine/Storage/Mongo/` with document classes (`PolicyDocuments.cs`, `PolicyRunDocument.cs`, `EffectiveFindingDocument.cs`, `PolicyAuditDocument.cs`), options (`PolicyEngineMongoOptions.cs`), context (`PolicyEngineMongoContext.cs`), migrations (`EnsurePolicyCollectionsMigration.cs`, `EnsurePolicyIndexesMigration.cs`, `EffectiveFindingCollectionInitializer.cs`), migration runner, and tenant enforcement (`TenantFilterBuilder.cs`) | Policy Guild, Storage Guild / src/Policy/StellaOps.Policy.Engine | | 10 | POLICY-ENGINE-27-001 | TODO | Extend compile outputs to include rule coverage metadata, symbol table, inline documentation, and rule index for editor autocomplete; persist deterministic hashes (Deps: POLICY-ENGINE-20-009) | Policy Guild / src/Policy/StellaOps.Policy.Engine | | 11 | POLICY-ENGINE-27-002 | TODO | Enhance simulate endpoints to emit rule firing counts, heatmap aggregates, sampled explain traces with deterministic ordering, and delta summaries for quick/batch sims (Deps: POLICY-ENGINE-27-001) | Policy Guild, Observability Guild / src/Policy/StellaOps.Policy.Engine | | 12 | POLICY-ENGINE-29-001 | TODO | Implement batch evaluation endpoint (`POST /policy/eval/batch`) returning determinations + rationale chain for sets of `(artifact,purl,version,advisory)` tuples; support pagination and cost budgets (Deps: POLICY-ENGINE-27-004) | Policy Guild / src/Policy/StellaOps.Policy.Engine | @@ -29,6 +29,9 @@ Focus: Policy & Reasoning focus on Policy (phase II). ## Execution Log | Date (UTC) | Update | Owner | | --- | --- | --- | +| 2025-11-28 | POLICY-ENGINE-20-009: Completed MongoDB storage layer - document schemas for policies, policy_revisions, policy_bundles, policy_runs, effective_finding_*, effective_finding_history_*, and policy_audit collections. Created `PolicyEngineMongoOptions.cs` (connection/collection configuration with TTL settings), `PolicyEngineMongoContext.cs` (database access with read/write concerns), migration infrastructure (`IPolicyEngineMongoMigration`, `PolicyEngineMigrationRunner`, `PolicyEngineMongoInitializer`), `EnsurePolicyCollectionsMigration.cs` (creates base collections), `EnsurePolicyIndexesMigration.cs` (indexes for policies, revisions, bundles, runs, audit), `EffectiveFindingCollectionInitializer.cs` (dynamic per-policy collection creation with indexes), `TenantFilterBuilder.cs` (tenant enforcement utilities), and `ServiceCollectionExtensions.cs` (DI registration). Status → DONE. | Implementer | +| 2025-11-28 | POLICY-ENGINE-20-008: Completed unit test suites - `DeterminismGuardTests.cs` (static analyzer, runtime sandbox, guarded evaluator), `SelectionJoinTests.cs` (PURL equivalence, tuple resolution, VEX overlay), `IncrementalOrchestratorTests.cs` (event processing, deduplication, priority batching), `MaterializationTests.cs` (deterministic IDs, content hashing), `TelemetryTests.cs` (trace factory, statistics, sampling). 99 tests passing. Status → DONE. | Implementer | +| 2025-11-27 | POLICY-ENGINE-20-007: Completed structured traces - `RuleHitTrace.cs` (trace models, factory, statistics aggregation), `RuleHitTraceCollector.cs` (sampling controls with VEX/severity-aware rates, incident mode, exporters), `ExplainTraceExport.cs` (JSON/NDJSON/Text/Markdown formats, builder pattern). Status → DONE. | Implementer | | 2025-11-27 | POLICY-ENGINE-20-006: Completed incremental orchestrator - `PolicyChangeEvent.cs` (change event models with factory for advisory/VEX/SBOM changes, deterministic content hashing, batching), `IncrementalPolicyOrchestrator.cs` (event processing with idempotency, retry logic, priority-based batching), `IncrementalOrchestratorBackgroundService.cs` (continuous processing with metrics). Status → DONE. | Implementer | | 2025-11-27 | POLICY-ENGINE-20-005: Completed determinism guard - `DeterminismViolation.cs` (violation models/options), `ProhibitedPatternAnalyzer.cs` (static analysis with regex patterns for DateTime.Now, Random, Guid.NewGuid, HttpClient, File.Read, etc.), `DeterminismGuardService.cs` (runtime sandbox with EvaluationScope, DeterministicTimeProvider), `GuardedPolicyEvaluator.cs` (integration layer). Status → DONE. | Implementer | | 2025-11-27 | POLICY-ENGINE-20-004: Completed materialization writer - `EffectiveFindingModels.cs` (document schema), `EffectiveFindingWriter.cs` (upsert + append-only history). Tenant-scoped collections, trace references, content hash deduplication. Status → DONE. | Implementer | diff --git a/docs/implplan/SPRINT_126_policy_reasoning.md b/docs/implplan/SPRINT_126_policy_reasoning.md index 18b213b75..8fc758b1e 100644 --- a/docs/implplan/SPRINT_126_policy_reasoning.md +++ b/docs/implplan/SPRINT_126_policy_reasoning.md @@ -12,16 +12,16 @@ Focus: Policy & Reasoning focus on Policy (phase IV). | --- | --- | --- | --- | --- | | 1 | POLICY-ENGINE-40-003 | DONE | Provide API/SDK utilities for consumers (Web Scanner, Graph Explorer) to request policy decisions with source evidence summaries (top severity sources, conflict counts) (Deps: POLICY-ENGINE-40-002) | Policy Guild, Web Scanner Guild / src/Policy/StellaOps.Policy.Engine | | 2 | POLICY-ENGINE-50-001 | DONE | Implement SPL compiler: validate YAML, canonicalize, produce signed bundle, store artifact in object storage, write `policy_revisions` with AOC metadata (Deps: POLICY-ENGINE-40-003) | Policy Guild, Platform Security / src/Policy/StellaOps.Policy.Engine | -| 3 | POLICY-ENGINE-50-002 | TODO | Build runtime evaluator executing compiled plans over advisory/vex linksets + SBOM asset metadata with deterministic caching (Redis) and fallback path (Deps: POLICY-ENGINE-50-001) | Policy Guild, Runtime Guild / src/Policy/StellaOps.Policy.Engine | -| 4 | POLICY-ENGINE-50-003 | TODO | Implement evaluation/compilation metrics, tracing, and structured logs (`policy_eval_seconds`, `policy_compiles_total`, explanation sampling) (Deps: POLICY-ENGINE-50-002) | Policy Guild, Observability Guild / src/Policy/StellaOps.Policy.Engine | -| 5 | POLICY-ENGINE-50-004 | TODO | Build event pipeline: subscribe to linkset/SBOM updates, schedule re-eval jobs, emit `policy.effective.updated` events with diff metadata (Deps: POLICY-ENGINE-50-003) | Policy Guild, Platform Events Guild / src/Policy/StellaOps.Policy.Engine | -| 6 | POLICY-ENGINE-50-005 | TODO | Design and implement `policy_packs`, `policy_revisions`, `policy_runs`, `policy_artifacts` collections with indexes, TTL, and tenant scoping (Deps: POLICY-ENGINE-50-004) | Policy Guild, Storage Guild / src/Policy/StellaOps.Policy.Engine | -| 7 | POLICY-ENGINE-50-006 | TODO | Implement explainer persistence + retrieval APIs linking decisions to explanation tree and AOC chain (Deps: POLICY-ENGINE-50-005) | Policy Guild, QA Guild / src/Policy/StellaOps.Policy.Engine | -| 8 | POLICY-ENGINE-50-007 | TODO | Provide evaluation worker host/DI wiring and job orchestration hooks for batch re-evaluations after policy activation (Deps: POLICY-ENGINE-50-006) | Policy Guild, Scheduler Worker Guild / src/Policy/StellaOps.Policy.Engine | -| 9 | POLICY-ENGINE-60-001 | TODO | Maintain Redis effective decision maps per asset/snapshot for Graph overlays; implement versioning and eviction strategy (Deps: POLICY-ENGINE-50-007) | Policy Guild, SBOM Service Guild / src/Policy/StellaOps.Policy.Engine | -| 10 | POLICY-ENGINE-60-002 | TODO | Expose simulation bridge for Graph What-if APIs, supporting hypothetical SBOM diffs and draft policies without persisting results (Deps: POLICY-ENGINE-60-001) | Policy Guild, BE-Base Platform Guild / src/Policy/StellaOps.Policy.Engine | -| 11 | POLICY-ENGINE-70-002 | TODO | Design and create Mongo collections (`exceptions`, `exception_reviews`, `exception_bindings`) with indexes and migrations; expose repository APIs (Deps: POLICY-ENGINE-60-002) | Policy Guild, Storage Guild / src/Policy/StellaOps.Policy.Engine | -| 12 | POLICY-ENGINE-70-003 | TODO | Build Redis exception decision cache (`exceptions_effective_map`) with warm/invalidation logic reacting to `exception.*` events (Deps: POLICY-ENGINE-70-002) | Policy Guild, Runtime Guild / src/Policy/StellaOps.Policy.Engine | +| 3 | POLICY-ENGINE-50-002 | DONE | Build runtime evaluator executing compiled plans over advisory/vex linksets + SBOM asset metadata with deterministic caching (Redis) and fallback path (Deps: POLICY-ENGINE-50-001) | Policy Guild, Runtime Guild / src/Policy/StellaOps.Policy.Engine | +| 4 | POLICY-ENGINE-50-003 | DONE | Implement evaluation/compilation metrics, tracing, and structured logs (`policy_eval_seconds`, `policy_compiles_total`, explanation sampling) (Deps: POLICY-ENGINE-50-002) | Policy Guild, Observability Guild / src/Policy/StellaOps.Policy.Engine | +| 5 | POLICY-ENGINE-50-004 | DONE | Build event pipeline: subscribe to linkset/SBOM updates, schedule re-eval jobs, emit `policy.effective.updated` events with diff metadata (Deps: POLICY-ENGINE-50-003) | Policy Guild, Platform Events Guild / src/Policy/StellaOps.Policy.Engine | +| 6 | POLICY-ENGINE-50-005 | DONE | Design and implement `policy_packs`, `policy_revisions`, `policy_runs`, `policy_artifacts` collections with indexes, TTL, and tenant scoping (Deps: POLICY-ENGINE-50-004) | Policy Guild, Storage Guild / src/Policy/StellaOps.Policy.Engine | +| 7 | POLICY-ENGINE-50-006 | DONE | Implement explainer persistence + retrieval APIs linking decisions to explanation tree and AOC chain (Deps: POLICY-ENGINE-50-005) | Policy Guild, QA Guild / src/Policy/StellaOps.Policy.Engine | +| 8 | POLICY-ENGINE-50-007 | DONE | Provide evaluation worker host/DI wiring and job orchestration hooks for batch re-evaluations after policy activation (Deps: POLICY-ENGINE-50-006) | Policy Guild, Scheduler Worker Guild / src/Policy/StellaOps.Policy.Engine | +| 9 | POLICY-ENGINE-60-001 | DONE | Maintain Redis effective decision maps per asset/snapshot for Graph overlays; implement versioning and eviction strategy (Deps: POLICY-ENGINE-50-007) | Policy Guild, SBOM Service Guild / src/Policy/StellaOps.Policy.Engine | +| 10 | POLICY-ENGINE-60-002 | DONE | Expose simulation bridge for Graph What-if APIs, supporting hypothetical SBOM diffs and draft policies without persisting results (Deps: POLICY-ENGINE-60-001) | Policy Guild, BE-Base Platform Guild / src/Policy/StellaOps.Policy.Engine | +| 11 | POLICY-ENGINE-70-002 | DONE | Design and create Mongo collections (`exceptions`, `exception_reviews`, `exception_bindings`) with indexes and migrations; expose repository APIs (Deps: POLICY-ENGINE-60-002) | Policy Guild, Storage Guild / src/Policy/StellaOps.Policy.Engine | +| 12 | POLICY-ENGINE-70-003 | DONE | Build Redis exception decision cache (`exceptions_effective_map`) with warm/invalidation logic reacting to `exception.*` events (Deps: POLICY-ENGINE-70-002) | Policy Guild, Runtime Guild / src/Policy/StellaOps.Policy.Engine | | 13 | POLICY-ENGINE-70-004 | TODO | Extend metrics/tracing/logging for exception application (latency, counts, expiring events) and include AOC references in logs (Deps: POLICY-ENGINE-70-003) | Policy Guild, Observability Guild / src/Policy/StellaOps.Policy.Engine | | 14 | POLICY-ENGINE-70-005 | TODO | Provide APIs/workers hook for exception activation/expiry (auto start/end) and event emission (`exception.activated/expired`) (Deps: POLICY-ENGINE-70-004) | Policy Guild, Scheduler Worker Guild / src/Policy/StellaOps.Policy.Engine | | 15 | POLICY-ENGINE-80-001 | TODO | Integrate reachability/exploitability inputs into evaluation pipeline (state/score/confidence) with caching and explain support (Deps: POLICY-ENGINE-70-005) | Policy Guild, Signals Guild / src/Policy/StellaOps.Policy.Engine | @@ -48,3 +48,13 @@ Focus: Policy & Reasoning focus on Policy (phase IV). | 2025-11-27 | Started POLICY-ENGINE-40-003; implemented PolicyDecisionService, PolicyDecisionEndpoint, PolicyDecisionModels, tests. Blocked by pre-existing build issues in Telemetry.Core and RiskProfile projects. | Implementer | | 2025-11-27 | Fixed pre-existing build issues (TelemetryContext API mismatch, JsonSchema.Net v5 API changes, OpenTelemetry Meter API changes, test project missing packages/namespaces). All 9 PolicyDecisionServiceTests pass. POLICY-ENGINE-40-003 marked DONE. | Implementer | | 2025-11-27 | Implemented POLICY-ENGINE-50-001: Extended SPL compiler with AOC metadata support. Added PolicyAocMetadata, PolicyProvenance, PolicyAttestationRef models. Updated PolicyBundleService to capture compilation metadata, source/artifact digests, complexity metrics, provenance info. Added 4 new tests (all pass). POLICY-ENGINE-50-001 marked DONE. | Implementer | +| 2025-11-28 | Implemented POLICY-ENGINE-50-002: Built runtime evaluator with deterministic caching. Created `IPolicyEvaluationCache` interface, `InMemoryPolicyEvaluationCache` implementation with TTL/eviction, `PolicyRuntimeEvaluationService` with batch evaluation support, cache key generation using SHA256 digests (policy, subject, context). Extended `PolicyBundleRecord` to store compiled `PolicyIrDocument`. Added 8 tests (all pass). POLICY-ENGINE-50-002 marked DONE. | Implementer | +| 2025-11-28 | Implemented POLICY-ENGINE-50-003: Integrated telemetry into PolicyCompilationService and PolicyRuntimeEvaluationService. Added OpenTelemetry Activity tracing for `policy.compile`, `policy.evaluate`, and `policy.evaluate_batch` operations. Integrated existing metrics (RecordCompilation, RecordEvaluation, RecordEvaluationLatency, RecordRuleFired, RecordError, RecordEvaluationFailure). Added structured logging with context (duration, rule counts, complexity, cache hits). All 23 core tests pass. POLICY-ENGINE-50-003 marked DONE. | Implementer | +| 2025-11-28 | Implemented POLICY-ENGINE-50-004: Built event pipeline for linkset/SBOM updates. Created `PolicyEffectiveEventModels.cs` with event types (`policy.effective.updated`, `policy.effective.added`, `policy.effective.removed`, `policy.effective.batch_completed`), `PolicyDecisionDiff` for diff metadata. Created `PolicyEventProcessor.cs` with `IPolicyEffectiveEventPublisher`, `IReEvaluationJobScheduler` interfaces. Processor handles PolicyChangeEvents, schedules re-evaluation jobs, and emits effective events with diffs. Added 3 new telemetry counters. Build succeeds. POLICY-ENGINE-50-004 marked DONE. | Implementer | +| 2025-11-28 | Implemented POLICY-ENGINE-50-005: MongoDB collections with tenant scoping and indexes. Pre-existing infrastructure includes: `PolicyDocument`, `PolicyRevisionDocument`, `PolicyBundleDocument`, `PolicyRunDocument` classes in Documents folder; `EnsurePolicyIndexesMigration` with TTL indexes for policy_runs collection; `PolicyEngineMongoOptions` for configuration. Created `MongoPolicyPackRepository.cs` implementing `IPolicyPackRepository` with tenant-scoped CRUD operations for policy packs, revisions, bundles; approval workflow; activation tracking. Fixed pre-existing bug in `PolicyMetadataExtractor.cs` (string comparisons for enum operators). All 11 core tests pass. POLICY-ENGINE-50-005 marked DONE. | Implementer | +| 2025-11-28 | Implemented POLICY-ENGINE-50-006: Explainer persistence and retrieval APIs with AOC chain linkage. Created `PolicyExplainDocument.cs` with MongoDB documents for explain traces including `ExplainInputContextDocument`, `ExplainRuleStepDocument`, `ExplainVexEvidenceDocument`, `ExplainStatisticsDocument`, `ExplainAocChainDocument`. Created `PolicyExplainerService.cs` with `IExplainTraceRepository` interface, `StoredExplainTrace`/`ExplainAocChain` records, `ExplainQueryOptions` for filtering/pagination, `AocChainValidationResult` for verifying attestation chain integrity. Service links explain traces to policy bundle AOC metadata (compilation ID, source/artifact digests, attestation references). Added `policy_explain_traces_stored_total` telemetry counter. Added `PolicyExplainsCollection` and `ExplainTraceRetention` to options. Added indexes for `policy_explains` collection (tenant_runId, tenant_policy_evaluatedAt_desc, tenant_subjectHash, aocChain_compilationId, expiresAt_ttl). All 11 core tests pass. POLICY-ENGINE-50-006 marked DONE. | Implementer | +| 2025-11-28 | Implemented POLICY-ENGINE-50-007: Evaluation worker host and DI wiring for job orchestration. Created `PolicyEvaluationWorkerService.cs` integrating with existing `PolicyEventProcessor.DequeueJob()` for job scheduling, with `EvaluationJobResult` record for tracking job outcomes. Created `PolicyEvaluationWorkerHost.cs` as BackgroundService with configurable concurrency from `PolicyEngineWorkerOptions`. Created `PolicyEngineServiceCollectionExtensions.cs` with `AddPolicyEngineCore()`, `AddPolicyEngineEventPipeline()`, `AddPolicyEngineWorker()`, `AddPolicyEngineExplainer()`, and combined `AddPolicyEngine()` extension methods. Worker integrates with existing `IPolicyEffectiveEventPublisher` and `IReEvaluationJobScheduler` interfaces. Added `ScheduleActivationReEvalAsync()` hook for triggering re-evaluations after policy activation. All 182 tests pass. POLICY-ENGINE-50-007 marked DONE. | Implementer | +| 2025-11-28 | Implemented POLICY-ENGINE-60-001: Redis effective decision maps for Graph overlays. Added StackExchange.Redis package. Created `EffectiveDecisionMap/EffectiveDecisionModels.cs` with `EffectiveDecisionEntry`, `EffectiveDecisionQueryResult`, `EffectiveDecisionSummary`, `EffectiveDecisionFilter` records for storing/querying policy decisions per asset/snapshot. Created `EffectiveDecisionMap/IEffectiveDecisionMap.cs` interface with Set/Get/Query/Invalidate operations plus versioning (`GetVersionAsync`, `IncrementVersionAsync`). Created `EffectiveDecisionMap/RedisEffectiveDecisionMap.cs` with TTL-based eviction using Redis key structure `stellaops:edm:{tenant}:{snapshot}:e:{asset}` for entries, `:idx` sorted sets for indexing, `:v` for version counters. Added `EffectiveDecisionMapOptions` to `PolicyEngineOptions`. Added `policy_effective_decision_map_operations_total` telemetry counter. Added `AddEffectiveDecisionMap()` and `AddPolicyEngineRedis()` DI extensions. All 182 tests pass. POLICY-ENGINE-60-001 marked DONE. | Implementer | +| 2025-11-28 | Implemented POLICY-ENGINE-60-002: What-If simulation bridge for Graph APIs. Created `WhatIfSimulation/WhatIfSimulationModels.cs` with comprehensive request/response models (`WhatIfSimulationRequest`, `WhatIfSimulationResponse`, `WhatIfDraftPolicy`, `WhatIfSbomDiff`, `WhatIfDecisionChange`, `WhatIfDecision`, `WhatIfExplanation`, `WhatIfSummary`, `WhatIfImpact`, `WhatIfPolicyRef`). Created `WhatIfSimulation/WhatIfSimulationService.cs` supporting: hypothetical SBOM diffs (add/remove/upgrade/downgrade operations), draft policy comparison, baseline decision lookup from effective decision map, simulated decision computation considering VEX status and reachability, change detection and diff computation, impact assessment with risk delta recommendations. Service integrates with `IEffectiveDecisionMap` for baseline lookups, `IPolicyPackRepository` for policy retrieval, `PolicyCompilationService` for potential on-the-fly compilation. Added `AddWhatIfSimulation()` DI extension. Telemetry via existing `RecordSimulation()` counter. All 181 core tests pass. POLICY-ENGINE-60-002 marked DONE. | Implementer | +| 2025-11-28 | Implemented POLICY-ENGINE-70-002: MongoDB collections for policy exceptions with indexes and repository APIs. Created `Storage/Mongo/Documents/PolicyExceptionDocuments.cs` with `PolicyExceptionDocument` (exceptions with scope, risk assessment, compensating controls, workflow states), `ExceptionScopeDocument` (advisory/CVE/PURL/asset targeting), `ExceptionRiskAssessmentDocument` (risk levels, justification), `ExceptionReviewDocument` (multi-reviewer approval workflow), `ReviewDecisionDocument` (individual decisions with conditions), `ExceptionBindingDocument` (asset-specific bindings with time ranges). Created `Storage/Mongo/Repositories/IExceptionRepository.cs` interface with CRUD operations for exceptions, reviews, and bindings; query options for filtering/pagination; methods for finding applicable exceptions, pending activations, expiring exceptions. Created `Storage/Mongo/Repositories/MongoExceptionRepository.cs` MongoDB implementation with tenant scoping. Added collection names to `PolicyEngineMongoOptions` (exceptions, exception_reviews, exception_bindings). Created `Storage/Mongo/Migrations/EnsureExceptionIndexesMigration.cs` with comprehensive indexes: tenant+status, tenant+type+status, tenant+created, tenant+tags, scope.advisoryIds, scope.assetIds, scope.cveIds, expiry tracking, reviewer queues, binding lookups. Added `policy_exception_operations_total` telemetry counter with `RecordExceptionOperation()` method. Registered migration and repository in `ServiceCollectionExtensions`. All 196 core tests pass. POLICY-ENGINE-70-002 marked DONE. | Implementer | +| 2025-11-28 | Implemented POLICY-ENGINE-70-003: Redis exception decision cache with warm/invalidation logic. Created `ExceptionCache/ExceptionCacheModels.cs` with `ExceptionCacheEntry` (cached exception for fast lookup with priority, decision override, expiry), `ExceptionCacheQueryResult` (query results with cache metadata), `ExceptionCacheSummary` (tenant summary with counts by type/decision), `ExceptionCacheOptions` (TTL, auto-warm, max entries), `ExceptionCacheStats` (hit/miss counts, memory usage). Created `ExceptionCache/IExceptionEffectiveCache.cs` interface with `GetForAssetAsync`, `GetBatchAsync`, `SetAsync`, `SetBatchAsync`, `InvalidateExceptionAsync`, `InvalidateAssetAsync`, `InvalidateTenantAsync`, `WarmAsync`, `HandleExceptionEventAsync` for event-driven invalidation; `ExceptionEvent` record for exception lifecycle events (activated, expired, revoked, updated, created, deleted). Created `ExceptionCache/RedisExceptionEffectiveCache.cs` Redis implementation with key structure: `stellaops:exc:{tenant}:a:{asset}:{advisory}` for asset entries, `stellaops:exc:{tenant}:idx:e:{exceptionId}` for exception-to-asset index, `stellaops:exc:{tenant}:v` for version counter. Warm logic loads from `IExceptionRepository` for active/pending exceptions. Invalidation reacts to exception events. Added `ExceptionCacheOptions` to `PolicyEngineOptions`. Added `policy_exception_cache_operations_total` telemetry counter with `RecordExceptionCacheOperation()` method. Added `AddExceptionEffectiveCache()` DI extension. All 197 core tests pass. POLICY-ENGINE-70-003 marked DONE. | Implementer | diff --git a/docs/implplan/SPRINT_134_scanner_surface.md b/docs/implplan/SPRINT_134_scanner_surface.md index 0cf1e3f27..c4ba1da8d 100644 --- a/docs/implplan/SPRINT_134_scanner_surface.md +++ b/docs/implplan/SPRINT_134_scanner_surface.md @@ -17,9 +17,9 @@ Dependency: Sprint 133 - 4. Scanner.IV — Scanner & Surface focus on Scanner (p | `SCANNER-ANALYZERS-PYTHON-23-003` | DONE | Static import graph builder using AST and bytecode fallback. Support `import`, `from ... import`, relative imports, `importlib.import_module`, `__import__` with literal args, `pkgutil.extend_path`. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-002 | | `SCANNER-ANALYZERS-PYTHON-23-004` | DONE | Python resolver engine (importlib semantics) handling namespace packages (PEP 420), package discovery order, `.pth` files, `sys.path` composition, zipimport, and site-packages precedence across virtualenv/container roots. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-003 | | `SCANNER-ANALYZERS-PYTHON-23-005` | DONE | Packaging adapters: pip editable (`.egg-link`), Poetry/Flit layout, Conda prefix, `.dist-info/RECORD` cross-check, container layer overlays. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-004 | -| `SCANNER-ANALYZERS-PYTHON-23-006` | TODO | Detect native extensions (`*.so`, `*.pyd`), CFFI modules, ctypes loaders, embedded WASM, and runtime capability signals (subprocess, multiprocessing, ctypes, eval). | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-005 | -| `SCANNER-ANALYZERS-PYTHON-23-007` | TODO | Framework/config heuristics: Django, Flask, FastAPI, Celery, AWS Lambda handlers, Gunicorn, Click/Typer CLIs, logging configs, pyproject optional dependencies. Tagged as hints only. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-006 | -| `SCANNER-ANALYZERS-PYTHON-23-008` | TODO | Produce AOC-compliant observations: entrypoints, components (modules/packages/native), edges (import, namespace, dynamic-hint, native-extension) with reason codes/confidence and resolver traces. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-007 | -| `SCANNER-ANALYZERS-PYTHON-23-009` | TODO | Fixture suite + perf benchmarks covering virtualenv, namespace packages, zipapp, editable installs, containers, lambda handler. | Python Analyzer Guild, QA Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-008 | -| `SCANNER-ANALYZERS-PYTHON-23-010` | TODO | Optional runtime evidence: import hook capturing module load events with path scrubbing, optional bytecode instrumentation for `importlib` hooks, multiprocessing tracer. | Python Analyzer Guild, Signals Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-009 | -| `SCANNER-ANALYZERS-PYTHON-23-011` | TODO | Package analyzer plug-in, add CLI commands (`stella python inspect`), refresh Offline Kit documentation. | Python Analyzer Guild, DevOps Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-010 | +| `SCANNER-ANALYZERS-PYTHON-23-006` | DONE | Detect native extensions (`*.so`, `*.pyd`), CFFI modules, ctypes loaders, embedded WASM, and runtime capability signals (subprocess, multiprocessing, ctypes, eval). | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-005 | +| `SCANNER-ANALYZERS-PYTHON-23-007` | DONE | Framework/config heuristics: Django, Flask, FastAPI, Celery, AWS Lambda handlers, Gunicorn, Click/Typer CLIs, logging configs, pyproject optional dependencies. Tagged as hints only. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-006 | +| `SCANNER-ANALYZERS-PYTHON-23-008` | DONE | Produce AOC-compliant observations: entrypoints, components (modules/packages/native), edges (import, namespace, dynamic-hint, native-extension) with reason codes/confidence and resolver traces. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-007 | +| `SCANNER-ANALYZERS-PYTHON-23-009` | DONE | Fixture suite + perf benchmarks covering virtualenv, namespace packages, zipapp, editable installs, containers, lambda handler. | Python Analyzer Guild, QA Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-008 | +| `SCANNER-ANALYZERS-PYTHON-23-010` | DONE | Optional runtime evidence: import hook capturing module load events with path scrubbing, optional bytecode instrumentation for `importlib` hooks, multiprocessing tracer. | Python Analyzer Guild, Signals Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-009 | +| `SCANNER-ANALYZERS-PYTHON-23-011` | DONE | Package analyzer plug-in, add CLI commands (`stella python inspect`), refresh Offline Kit documentation. | Python Analyzer Guild, DevOps Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-010 | diff --git a/docs/implplan/SPRINT_135_scanner_surface.md b/docs/implplan/SPRINT_135_scanner_surface.md index 33eb4806d..ee7e3d32c 100644 --- a/docs/implplan/SPRINT_135_scanner_surface.md +++ b/docs/implplan/SPRINT_135_scanner_surface.md @@ -7,7 +7,7 @@ Dependency: Sprint 134 - 5. Scanner.V — Scanner & Surface focus on Scanner (ph | Task ID | State | Summary | Owner / Source | Depends On | | --- | --- | --- | --- | --- | -| `SCANNER-ANALYZERS-PYTHON-23-012` | TODO | Container/zipapp adapter enhancements: parse OCI layers for Python runtime, detect `PYTHONPATH`/`PYTHONHOME` env, record warnings for sitecustomize/startup hooks. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-011 | +| `SCANNER-ANALYZERS-PYTHON-23-012` | DONE | Container/zipapp adapter enhancements: parse OCI layers for Python runtime, detect `PYTHONPATH`/`PYTHONHOME` env, record warnings for sitecustomize/startup hooks. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-011 | | `SCANNER-ANALYZERS-RUBY-28-001` | DONE | Build input normalizer & VFS for Ruby projects: merge source trees, Gemfile/Gemfile.lock, vendor/bundle, .gem archives, `.bundle/config`, Rack configs, containers. Detect framework/job fingerprints deterministically. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby) | — | | `SCANNER-ANALYZERS-RUBY-28-002` | DONE | Gem & Bundler analyzer: parse Gemfile/Gemfile.lock, vendor specs, .gem archives, produce package nodes (PURLs), dependency edges, bin scripts, Bundler group metadata. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby) | SCANNER-ANALYZERS-RUBY-28-001 | | `SCANNER-ANALYZERS-RUBY-28-003` | DONE | Require/autoload graph builder: resolve static/dynamic require, require_relative, load; infer Zeitwerk autoload paths and Rack boot chain. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby) | SCANNER-ANALYZERS-RUBY-28-002 | diff --git a/docs/implplan/SPRINT_136_scanner_surface.md b/docs/implplan/SPRINT_136_scanner_surface.md index db9078750..4a2c9c7ae 100644 --- a/docs/implplan/SPRINT_136_scanner_surface.md +++ b/docs/implplan/SPRINT_136_scanner_surface.md @@ -17,7 +17,7 @@ Dependency: Sprint 135 - 6. Scanner.VI — Scanner & Surface focus on Scanner (p | `SURFACE-ENV-02` | DONE (2025-11-18) | Strongly-typed env accessors implemented; validation covers required endpoint, bounds, TLS cert path; regression tests passing. | Scanner Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env) | SURFACE-ENV-01 | | `SURFACE-ENV-03` | DONE (2025-11-27) | Adopt the env helper across Scanner Worker/WebService/BuildX plug-ins. | Scanner Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env) | SURFACE-ENV-02 | | `SURFACE-ENV-04` | DONE (2025-11-27) | Wire env helper into Zastava Observer/Webhook containers. | Zastava Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env) | SURFACE-ENV-02 | -| `SURFACE-ENV-05` | TODO | Update Helm/Compose/offline kit templates with new env knobs and documentation. | Ops Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env) | SURFACE-ENV-03, SURFACE-ENV-04 | +| `SURFACE-ENV-05` | DONE | Update Helm/Compose/offline kit templates with new env knobs and documentation. | Ops Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env) | SURFACE-ENV-03, SURFACE-ENV-04 | | `SCANNER-EVENTS-16-301` | BLOCKED (2025-10-26) | Emit orchestrator-compatible envelopes (`scanner.event.*`) and update integration tests to verify Notifier ingestion (no Redis queue coupling). | Scanner WebService Guild (src/Scanner/StellaOps.Scanner.WebService) | — | | `SCANNER-GRAPH-21-001` | DONE (2025-11-27) | Provide webhook/REST endpoint for Cartographer to request policy overlays and runtime evidence for graph nodes, ensuring determinism and tenant scoping. | Scanner WebService Guild, Cartographer Guild (src/Scanner/StellaOps.Scanner.WebService) | — | | `SCANNER-LNM-21-001` | BLOCKED (2025-11-27) | Update `/reports` and `/policy/runtime` payloads to consume advisory/vex linksets, exposing source severity arrays and conflict summaries alongside effective verdicts. Blocked: requires Concelier HTTP client integration or shared library; no existing Concelier dependency in Scanner WebService. | Scanner WebService Guild, Policy Guild (src/Scanner/StellaOps.Scanner.WebService) | — | @@ -29,8 +29,8 @@ Dependency: Sprint 135 - 6. Scanner.VI — Scanner & Surface focus on Scanner (p | `SURFACE-SECRETS-04` | DONE (2025-11-27) | Integrate Surface.Secrets into Scanner Worker/WebService/BuildX for registry + CAS creds. | Scanner Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets) | SURFACE-SECRETS-02 | | `SURFACE-SECRETS-05` | DONE (2025-11-27) | Invoke Surface.Secrets from Zastava Observer/Webhook for CAS & attestation secrets. | Zastava Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets) | SURFACE-SECRETS-02 | | `SURFACE-SECRETS-06` | BLOCKED (2025-11-27) | Update deployment manifests/offline kit bundles to provision secret references instead of raw values. Requires Ops Guild input on Helm/Compose patterns for Surface.Secrets provider configuration. | Ops Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets) | SURFACE-SECRETS-03 | -| `SCANNER-ENG-0020` | TODO | Implement Homebrew collector & fragment mapper per `design/macos-analyzer.md` §3.1. | Scanner Guild (docs/modules/scanner) | — | -| `SCANNER-ENG-0021` | TODO | Implement pkgutil receipt collector per `design/macos-analyzer.md` §3.2. | Scanner Guild (docs/modules/scanner) | — | +| `SCANNER-ENG-0020` | DONE (2025-11-28) | Implement Homebrew collector & fragment mapper per `design/macos-analyzer.md` §3.1. | Scanner Guild (docs/modules/scanner) | — | +| `SCANNER-ENG-0021` | DONE (2025-11-28) | Implement pkgutil receipt collector per `design/macos-analyzer.md` §3.2. | Scanner Guild (docs/modules/scanner) | — | | `SCANNER-ENG-0022` | TODO | Implement macOS bundle inspector & capability overlays per `design/macos-analyzer.md` §3.3. | Scanner Guild, Policy Guild (docs/modules/scanner) | — | | `SCANNER-ENG-0023` | TODO | Deliver macOS policy/offline integration per `design/macos-analyzer.md` §5–6. | Scanner Guild, Offline Kit Guild, Policy Guild (docs/modules/scanner) | — | | `SCANNER-ENG-0024` | TODO | Implement Windows MSI collector per `design/windows-analyzer.md` §3.1. | Scanner Guild (docs/modules/scanner) | — | @@ -42,7 +42,7 @@ Dependency: Sprint 135 - 6. Scanner.VI — Scanner & Surface focus on Scanner (p | `SURFACE-FS-03` | DONE (2025-11-27) | Integrate Surface.FS writer into Scanner Worker analyzer pipeline to persist layer + entry-trace fragments. | Scanner Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS) | SURFACE-FS-02 | | `SURFACE-FS-04` | DONE (2025-11-27) | Integrate Surface.FS reader into Zastava Observer runtime drift loop. | Zastava Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS) | SURFACE-FS-02 | | `SURFACE-FS-05` | DONE (2025-11-27) | Expose Surface.FS pointers via Scanner WebService reports and coordinate rescan planning with Scheduler. | Scanner Guild, Scheduler Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS) | SURFACE-FS-03 | -| `SURFACE-FS-06` | TODO | Update scanner-engine guide and offline kit docs with Surface.FS workflow. | Docs Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS) | SURFACE-FS-02..05 | +| `SURFACE-FS-06` | DONE (2025-11-28) | Update scanner-engine guide and offline kit docs with Surface.FS workflow. | Docs Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS) | SURFACE-FS-02..05 | | `SCANNER-SURFACE-04` | TODO | DSSE-sign every `layer.fragments` payload, emit `_composition.json`, and persist DSSE envelopes so offline kits can replay deterministically (see `docs/modules/scanner/deterministic-sbom-compose.md` §2.1). | Scanner Worker Guild (src/Scanner/StellaOps.Scanner.Worker) | SCANNER-SURFACE-01, SURFACE-FS-03 | | `SURFACE-FS-07` | TODO | Extend Surface.FS manifest schema with `composition.recipe`, fragment attestation metadata, and verification helpers per deterministic SBOM spec. | Scanner Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS) | SCANNER-SURFACE-04 | | `SCANNER-EMIT-15-001` | TODO | Enforce canonical JSON (`stella.contentHash`, Merkle root metadata, zero timestamps) for fragments and composed CycloneDX inventory/usage BOMs. Documented in `docs/modules/scanner/deterministic-sbom-compose.md` §2.2. | Scanner Emit Guild (src/Scanner/__Libraries/StellaOps.Scanner.Emit) | SCANNER-SURFACE-04 | @@ -51,7 +51,7 @@ Dependency: Sprint 135 - 6. Scanner.VI — Scanner & Surface focus on Scanner (p | `SURFACE-VAL-02` | DONE (2025-11-23) | Validation library now enforces secrets schema, fallback/provider checks, and inline/file guardrails; tests added. | Scanner Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation) | SURFACE-VAL-01, SURFACE-ENV-02, SURFACE-FS-02 | | `SURFACE-VAL-03` | DONE (2025-11-23) | Validation runner wired into Worker/WebService startup and pre-analyzer paths (OS, language, EntryTrace). | Scanner Guild, Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation) | SURFACE-VAL-02 | | `SURFACE-VAL-04` | DONE (2025-11-27) | Expose validation helpers to Zastava and other runtime consumers for preflight checks. | Scanner Guild, Zastava Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation) | SURFACE-VAL-02 | -| `SURFACE-VAL-05` | TODO | Document validation extensibility, registration, and customization in scanner-engine guides. | Docs Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation) | SURFACE-VAL-02 | +| `SURFACE-VAL-05` | DONE | Document validation extensibility, registration, and customization in scanner-engine guides. | Docs Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation) | SURFACE-VAL-02 | ## Execution Log | Date (UTC) | Update | Owner | @@ -86,3 +86,7 @@ Dependency: Sprint 135 - 6. Scanner.VI — Scanner & Surface focus on Scanner (p | 2025-11-18 | Attempted `dotnet test` for Worker Surface manifest configurator; restore failed fetching StackExchange.Redis from nuget.org (network timeout); tests still pending CI. | Implementer | | 2025-11-18 | SCANNER-ENV-03 started: BuildX plugin now loads Surface.Env defaults (SCANNER/SURFACE prefixes) for cache root/bucket/tenant when args/env missing; tests not yet added. | Implementer | | 2025-11-19 | Marked SCANNER-ENV-03, SURFACE-SECRETS-01/02, and SURFACE-VAL-01 BLOCKED pending Security/Surface schema approvals and published env/secrets artifacts; move back to TODO once upstream contracts land. | Implementer | +| 2025-11-28 | Created `docs/modules/scanner/guides/surface-validation-extensibility.md` covering custom validators, reporters, configuration, and testing; SURFACE-VAL-05 DONE. | Implementer | +| 2025-11-28 | Created `docs/modules/scanner/guides/surface-fs-workflow.md` with end-to-end workflow including artefact generation, storage layout, consumption, and offline kit handling; SURFACE-FS-06 DONE. | Implementer | +| 2025-11-28 | Created `StellaOps.Scanner.Analyzers.OS.Homebrew` library with `HomebrewReceiptParser` (INSTALL_RECEIPT.json parsing), `HomebrewPackageAnalyzer` (Cellar discovery for Intel/Apple Silicon), and `HomebrewAnalyzerPlugin`; added `BuildHomebrew` PURL builder, `HomebrewCellar` evidence source; 23 tests passing. SCANNER-ENG-0020 DONE. | Implementer | +| 2025-11-28 | Created `StellaOps.Scanner.Analyzers.OS.Pkgutil` library with `PkgutilReceiptParser` (plist parsing), `BomParser` (BOM file enumeration), `PkgutilPackageAnalyzer` (receipt discovery from /var/db/receipts), and `PkgutilAnalyzerPlugin`; added `BuildPkgutil` PURL builder, `PkgutilReceipt` evidence source; 9 tests passing. SCANNER-ENG-0021 DONE. | Implementer | diff --git a/docs/implplan/SPRINT_205_cli_v.md b/docs/implplan/SPRINT_205_cli_v.md index 77f4c5297..25dd9748e 100644 --- a/docs/implplan/SPRINT_205_cli_v.md +++ b/docs/implplan/SPRINT_205_cli_v.md @@ -7,15 +7,15 @@ Depends on: Sprint 180.A - Cli.IV Summary: Experience & SDKs focus on Cli (phase V). Task ID | State | Task description | Owners (Source) --- | --- | --- | --- -CLI-TEN-47-001 | DOING | Implement `stella login`, `whoami`, `tenants list`, persistent profiles, secure token storage, and `--tenant` override with validation. Partial: `auth login`, `auth whoami` already exist; `tenants list` implemented. Remaining: persistent profiles, secure token storage enhancements, `--tenant` override validation. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) -CLI-TEN-49-001 | TODO | Add service account token minting, delegation (`stella token delegate`), impersonation banner, and audit-friendly logging. Dependencies: CLI-TEN-47-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) -CLI-VEX-30-001 | TODO | Implement `stella vex consensus list` with filters, paging, policy selection, `--json/--csv`. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) -CLI-VEX-30-002 | TODO | Implement `stella vex consensus show` displaying quorum, evidence, rationale, signature status. Dependencies: CLI-VEX-30-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) -CLI-VEX-30-003 | TODO | Implement `stella vex simulate` for trust/threshold overrides with JSON diff output. Dependencies: CLI-VEX-30-002. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) -CLI-VEX-30-004 | TODO | Implement `stella vex export` for consensus NDJSON bundles with signature verification helper. Dependencies: CLI-VEX-30-003. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) -CLI-VULN-29-001 | TODO | Implement `stella vuln list` with grouping, paging, filters, `--json/--csv`, and policy selection. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) -CLI-VULN-29-002 | TODO | Implement `stella vuln show` displaying evidence, policy rationale, paths, ledger summary; support `--json` for automation. Dependencies: CLI-VULN-29-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) -CLI-VULN-29-003 | TODO | Add workflow commands (`assign`, `comment`, `accept-risk`, `verify-fix`, `target-fix`, `reopen`) with filter selection (`--filter`) and idempotent retries. Dependencies: CLI-VULN-29-002. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) -CLI-VULN-29-004 | TODO | Implement `stella vuln simulate` producing delta summaries and optional Markdown report for CI. Dependencies: CLI-VULN-29-003. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) -CLI-VULN-29-005 | TODO | Add `stella vuln export` and `stella vuln bundle verify` commands to trigger/download evidence bundles and verify signatures. Dependencies: CLI-VULN-29-004. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) -CLI-VULN-29-006 | TODO | Update CLI docs/examples for Vulnerability Explorer with compliance checklist and CI snippets. Dependencies: CLI-VULN-29-005. | DevEx/CLI Guild, Docs Guild (src/Cli/StellaOps.Cli) \ No newline at end of file +CLI-TEN-47-001 | DONE | Implement `stella login`, `whoami`, `tenants list`, persistent profiles, secure token storage, and `--tenant` override with validation. Completed: `auth login`/`auth whoami` existed; `tenants list`/`use`/`current`/`clear` commands added; TenantProfileStore for persistent profiles at ~/.stellaops/profile.json; global `--tenant` option with profile fallback; tenant validation against Authority when available. Token storage uses existing file cache at ~/.stellaops/tokens/. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) +CLI-TEN-49-001 | DONE | Add service account token minting, delegation (`stella token delegate`), impersonation banner, and audit-friendly logging. Completed: `auth token mint` and `auth token delegate` commands; TokenMint/DelegateRequest/Response models; AuthorityConsoleClient extended with MintTokenAsync, DelegateTokenAsync, IntrospectTokenAsync; CheckAndDisplayImpersonationBannerAsync helper for audit-aware impersonation notices. Note: Authority service endpoints (POST /console/token/mint, /delegate, /introspect) need backend implementation. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) +CLI-VEX-30-001 | DONE | Implement `stella vex consensus list` with filters, paging, policy selection, `--json/--csv`. Completed: VexModels.cs with request/response records; IBackendOperationsClient.ListVexConsensusAsync; BackendOperationsClient implementation calling GET /api/vex/consensus; BuildVexCommand in CommandFactory.cs with `vex consensus list` subcommand; HandleVexConsensusListAsync handler with table/JSON/CSV output, tenant resolution via TenantProfileStore, pagination support. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) +CLI-VEX-30-002 | DONE | Implement `stella vex consensus show` displaying quorum, evidence, rationale, signature status. Dependencies: CLI-VEX-30-001. Completed: VexConsensusDetailResponse with quorum/rationale/signature/evidence models; IBackendOperationsClient.GetVexConsensusAsync; BackendOperationsClient implementation calling GET /api/vex/consensus/{vulnId}/{productKey}; `vex consensus show` subcommand in CommandFactory.cs; HandleVexConsensusShowAsync handler with rich Spectre.Console formatted output including panels and tables for all sections. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) +CLI-VEX-30-003 | DONE | Implement `stella vex simulate` for trust/threshold overrides with JSON diff output. Dependencies: CLI-VEX-30-002. Completed: VexSimulationRequest/Response models with TrustOverrides, ThresholdOverride, QuorumOverride, ExcludeProviders; SimulateVexConsensusAsync interface and implementation calling POST /api/vex/consensus/simulate; `vex simulate` command with --trust provider=weight, --threshold, --quorum, --exclude, --include-only, --changed-only options; HandleVexSimulateAsync handler with before/after diff table and summary panel. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) +CLI-VEX-30-004 | DONE | Implement `stella vex export` for consensus NDJSON bundles with signature verification helper. Dependencies: CLI-VEX-30-003. Completed: VexExportRequest/Response models with format, signed, filter options; VexExportVerifyRequest/Result for local verification; IBackendOperationsClient.ExportVexConsensusAsync (POST /api/vex/consensus/export) and DownloadVexExportAsync (GET /api/vex/consensus/export/{exportId}); `vex export` command with --vuln-id, --product-key, --purl, --status, --output, --unsigned filters; `vex export verify` subcommand with --expected-digest and --public-key for local digest/signature verification; HandleVexExportAsync handler with download and progress display; HandleVexVerifyAsync for offline verification with SHA-256 digest calculation. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) +CLI-VULN-29-001 | DONE | Implement `stella vuln list` with grouping, paging, filters, `--json/--csv`, and policy selection. Completed: VulnModels.cs with VulnListRequest/Response, VulnItem, VulnSeverityInfo, VulnAffectedPackage, VulnGroupingInfo, VulnGroup and all models for CLI-VULN-29-002 through CLI-VULN-29-005; IBackendOperationsClient extended with ListVulnerabilitiesAsync, GetVulnerabilityAsync, ExecuteVulnWorkflowAsync, SimulateVulnerabilitiesAsync, ExportVulnerabilitiesAsync, DownloadVulnExportAsync; BackendOperationsClient HTTP implementations calling GET/POST /api/vuln/*; `vuln list` command with --vuln-id, --severity, --status, --purl, --cpe, --sbom-id, --policy-id, --policy-version, --group-by, --limit, --offset, --cursor, --tenant, --json, --csv options; HandleVulnListAsync handler with grouped and individual table output, CSV output, color-coded severity/status display. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) +CLI-VULN-29-002 | DONE | Implement `stella vuln show` displaying evidence, policy rationale, paths, ledger summary; support `--json` for automation. Dependencies: CLI-VULN-29-001. Completed: `vuln show` subcommand with vulnerability-id argument, --tenant, --json, --verbose options; HandleVulnShowAsync handler; RenderVulnDetail helper with Spectre.Console panels and tables for: header (ID, status, severity, VEX, aliases, assignee, dates), description, affected packages table, policy rationale panel with rules, evidence table, dependency paths, workflow ledger history table, references list. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) +CLI-VULN-29-003 | DONE | Add workflow commands (`assign`, `comment`, `accept-risk`, `verify-fix`, `target-fix`, `reopen`) with filter selection (`--filter`) and idempotent retries. Dependencies: CLI-VULN-29-002. Completed: Six workflow subcommands under `vuln` command: `assign `, `comment `, `accept-risk [--due-date]`, `verify-fix `, `target-fix [--due-date]`, `reopen `. All commands share common options: --vuln-id (multi-value), --filter-severity, --filter-status, --filter-purl, --filter-sbom for bulk operations; --tenant, --idempotency-key for retries, --json for automation. HandleVulnWorkflowAsync handler builds VulnWorkflowRequest with action-specific fields, calls ExecuteVulnWorkflowAsync (POST /api/vuln/workflow), renders success/error table with affected counts. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) +CLI-VULN-29-004 | DONE | Implement `stella vuln simulate` producing delta summaries and optional Markdown report for CI. Dependencies: CLI-VULN-29-003. Completed: `vuln simulate` subcommand with --policy-id, --policy-version, --vex-override vulnId=status (multi), --severity-threshold, --sbom-id (multi), --markdown, --changed-only, --output (file), --tenant, --json options; HandleVulnSimulateAsync handler parsing VEX overrides into Dictionary, building VulnSimulationRequest, calling SimulateVulnerabilitiesAsync (POST /api/vuln/simulate); output includes simulation summary panel (total/changed/upgrades/downgrades/nochange), delta table with before/after status and change indicator (UPGRADE/DOWNGRADE), optional Markdown report to file or console for CI integration. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) +CLI-VULN-29-005 | DONE | Add `stella vuln export` and `stella vuln bundle verify` commands to trigger/download evidence bundles and verify signatures. Dependencies: CLI-VULN-29-004. Completed: `vuln export` command with --vuln-id (multi), --sbom-id (multi), --policy-id, --format (ndjson/json), --include-evidence, --include-ledger, --signed (defaults true), --output (required), --tenant options; HandleVulnExportAsync handler calling ExportVulnerabilitiesAsync (POST /api/vuln/export) and DownloadVulnExportAsync to stream bundle to file; output displays item count, format, signature info, digest; `vuln export verify` subcommand with file argument, --expected-digest, --public-key options; HandleVulnExportVerifyAsync performs SHA-256 digest calculation, optional signature file detection (.sig), renders verification panel with pass/fail status. | DevEx/CLI Guild (src/Cli/StellaOps.Cli) +CLI-VULN-29-006 | DONE | Update CLI docs/examples for Vulnerability Explorer with compliance checklist and CI snippets. Dependencies: CLI-VULN-29-005. Completed: Created docs/modules/cli/guides/vuln-explorer-cli.md with comprehensive documentation covering: Prerequisites (scopes, connectivity); vuln list with filters, grouping, pagination, --json/--csv; vuln show with all output sections; Workflow commands (assign, comment, accept-risk, verify-fix, target-fix, reopen) with idempotency support; vuln simulate for policy/VEX delta analysis with CI Markdown output; vuln export and export verify for compliance bundles; Exit codes table; Compliance checklist (inventory, SLA, risk acceptance audit, evidence bundles); CI pipeline snippets for GitHub Actions, GitLab CI, Jenkins; Offline operation guidance. | DevEx/CLI Guild, Docs Guild (src/Cli/StellaOps.Cli) \ No newline at end of file diff --git a/docs/modules/cli/guides/vuln-explorer-cli.md b/docs/modules/cli/guides/vuln-explorer-cli.md new file mode 100644 index 000000000..667585339 --- /dev/null +++ b/docs/modules/cli/guides/vuln-explorer-cli.md @@ -0,0 +1,502 @@ +# CLI Vulnerability Explorer Commands Reference + +> **Audience:** DevEx engineers, security operators, and CI authors managing vulnerabilities through the `stella` CLI. +> **Scope:** Command synopsis, options, exit codes, and CI integration patterns for `stella vuln` commands as introduced in Sprint 205. + +The Vulnerability Explorer CLI enables comprehensive vulnerability management including listing, inspection, workflow operations, policy simulation, and export. All commands support multi-tenant environments and integrate with StellaOps Authority for authentication. + +--- + +## 1. Prerequisites + +- CLI version: `stella` >= 0.21.0 (Vulnerability Explorer feature gate enabled). +- Required scopes (DPoP-bound): + - `vuln:view` for listing and viewing vulnerabilities. + - `vuln:workflow` for workflow operations (assign, comment, accept-risk, etc.). + - `vuln:simulate` for policy simulation. + - `vuln:export` for exporting evidence bundles. + - `tenant:select` if using tenant switching. +- Connectivity: direct access to Backend APIs or configured backend URL. +- Environment: set `STELLAOPS_BACKEND_URL`, `STELLA_TENANT`, and authenticate via `stella auth login`. + +--- + +## 2. `stella vuln list` + +### 2.1 Synopsis + +```bash +stella vuln list \ + [--vuln-id ] \ + [--severity critical|high|medium|low] \ + [--status open|triaged|accepted|fixed|risk_accepted] \ + [--purl ] \ + [--cpe ] \ + [--sbom-id ] \ + [--policy-id ] \ + [--policy-version ] \ + [--group-by severity|status|sbom|policy] \ + [--limit ] [--offset ] [--cursor ] \ + [--tenant ] \ + [--json] [--csv] [--verbose] +``` + +### 2.2 Description + +Lists vulnerabilities matching the specified filters with pagination support. Supports grouped summaries for reporting and machine-readable output for automation. + +### 2.3 Options + +| Option | Description | +|--------|-------------| +| `--vuln-id ` | Filter by vulnerability ID (e.g., CVE-2024-1234). | +| `--severity ` | Filter by severity (critical, high, medium, low). | +| `--status ` | Filter by workflow status. | +| `--purl ` | Filter by Package URL pattern. | +| `--cpe ` | Filter by CPE pattern. | +| `--sbom-id ` | Filter by SBOM identifier. | +| `--policy-id ` | Filter by policy ID. | +| `--policy-version ` | Filter by policy version. | +| `--group-by ` | Group results by field (shows summary counts). | +| `--limit ` | Maximum results to return (default 50). | +| `--offset ` | Number of results to skip. | +| `--cursor ` | Pagination cursor from previous response. | +| `--tenant ` | Override tenant for multi-tenant deployments. | +| `--json` | Output as JSON for automation. | +| `--csv` | Output as CSV for spreadsheet import. | +| `--verbose` | Enable debug logging. | + +### 2.4 Examples + +List critical vulnerabilities: + +```bash +stella vuln list --severity critical +``` + +Group by status for reporting: + +```bash +stella vuln list --group-by status --json > status-summary.json +``` + +Export CSV for compliance audit: + +```bash +stella vuln list --severity critical --severity high --csv > critical-vulns.csv +``` + +--- + +## 3. `stella vuln show` + +### 3.1 Synopsis + +```bash +stella vuln show \ + [--tenant ] \ + [--json] [--verbose] +``` + +### 3.2 Description + +Displays detailed information about a specific vulnerability including severity, affected packages, policy rationale, evidence, dependency paths, and workflow history. + +### 3.3 Output Sections + +- **Header:** Vulnerability ID, status, severity, VEX status, aliases, assignee, dates. +- **Description:** Full vulnerability description. +- **Affected Packages:** Table of affected packages with versions and fix status. +- **Policy Rationale:** Active policy rules and their evaluation results. +- **Evidence:** Timeline of evidence collected. +- **Dependency Paths:** Transitive dependency chains leading to vulnerability. +- **Workflow History:** Audit ledger of all workflow actions. +- **References:** Links to advisories, patches, and documentation. + +### 3.4 Example + +```bash +stella vuln show CVE-2024-1234 --json +``` + +--- + +## 4. Workflow Commands + +All workflow commands support bulk operations via `--vuln-id` (repeatable) or filter options. + +### 4.1 `stella vuln assign` + +Assign vulnerabilities to a team member. + +```bash +stella vuln assign \ + [--vuln-id ]... \ + [--filter-severity ] \ + [--filter-status ] \ + [--filter-purl ] \ + [--filter-sbom ] \ + [--tenant ] \ + [--idempotency-key ] \ + [--json] [--verbose] +``` + +Example: + +```bash +stella vuln assign security-team \ + --filter-severity critical \ + --filter-status open +``` + +### 4.2 `stella vuln comment` + +Add a comment to vulnerabilities. + +```bash +stella vuln comment "" \ + --vuln-id CVE-2024-1234 \ + [--json] +``` + +### 4.3 `stella vuln accept-risk` + +Accept risk for vulnerabilities with documented justification. + +```bash +stella vuln accept-risk "" \ + --vuln-id CVE-2024-1234 \ + [--due-date 2025-12-31] \ + [--json] +``` + +### 4.4 `stella vuln verify-fix` + +Mark vulnerabilities as fixed and verified. + +```bash +stella vuln verify-fix \ + --vuln-id CVE-2024-1234 \ + [--json] +``` + +### 4.5 `stella vuln target-fix` + +Set target fix version and due date. + +```bash +stella vuln target-fix \ + --vuln-id CVE-2024-1234 \ + [--due-date 2025-06-30] \ + [--json] +``` + +### 4.6 `stella vuln reopen` + +Reopen previously closed vulnerabilities. + +```bash +stella vuln reopen "" \ + --vuln-id CVE-2024-1234 \ + [--json] +``` + +### 4.7 Idempotency + +All workflow commands support `--idempotency-key` for safe retries in CI pipelines: + +```bash +stella vuln assign security-team \ + --vuln-id CVE-2024-1234 \ + --idempotency-key "assign-cve-2024-1234-$(date +%Y%m%d)" +``` + +--- + +## 5. `stella vuln simulate` + +### 5.1 Synopsis + +```bash +stella vuln simulate \ + [--policy-id ] \ + [--policy-version ] \ + [--vex-override =]... \ + [--severity-threshold ] \ + [--sbom-id ]... \ + [--markdown] \ + [--changed-only] \ + [--output ] \ + [--tenant ] \ + [--json] [--verbose] +``` + +### 5.2 Description + +Simulates the impact of policy or VEX changes without modifying data. Produces delta summaries showing which vulnerabilities would change status, useful for policy review and CI gates. + +### 5.3 Options + +| Option | Description | +|--------|-------------| +| `--policy-id ` | Policy ID to simulate. | +| `--policy-version ` | Policy version to simulate against. | +| `--vex-override =` | Override VEX status for simulation (repeatable). | +| `--severity-threshold ` | Minimum severity to include. | +| `--sbom-id ` | Limit simulation to specific SBOMs (repeatable). | +| `--markdown` | Include Markdown report for CI. | +| `--changed-only` | Only show items that would change. | +| `--output ` | Write Markdown report to file. | +| `--json` | Output full simulation results as JSON. | + +### 5.4 Output + +The command displays: +- **Summary Panel:** Total evaluated, changed, upgrades, downgrades. +- **Delta Table:** Before/after status comparison with UPGRADE/DOWNGRADE indicators. +- **Markdown Report:** Optional CI-friendly report. + +### 5.5 CI Integration Example + +```bash +# Run simulation and fail if any downgrades +stella vuln simulate \ + --policy-id prod-policy \ + --changed-only \ + --markdown \ + --output simulation-report.md + +# Check exit code +if [ $? -ne 0 ]; then + echo "Simulation found issues - see simulation-report.md" + exit 1 +fi +``` + +--- + +## 6. `stella vuln export` + +### 6.1 Synopsis + +```bash +stella vuln export \ + --output \ + [--vuln-id ]... \ + [--sbom-id ]... \ + [--policy-id ] \ + [--format ndjson|json] \ + [--include-evidence] \ + [--include-ledger] \ + [--signed] \ + [--tenant ] \ + [--verbose] +``` + +### 6.2 Description + +Exports vulnerability evidence bundles for compliance documentation, audits, or offline analysis. Bundles can be cryptographically signed for integrity verification. + +### 6.3 Options + +| Option | Description | +|--------|-------------| +| `--output ` | Output file path (required). | +| `--vuln-id ` | Vulnerability IDs to include (repeatable). | +| `--sbom-id ` | SBOM IDs to scope export (repeatable). | +| `--policy-id ` | Policy ID for filtering. | +| `--format ` | Output format: `ndjson` (default) or `json`. | +| `--include-evidence` | Include evidence data (default: true). | +| `--include-ledger` | Include workflow ledger (default: true). | +| `--signed` | Request signed bundle (default: true). | + +### 6.4 Example + +```bash +stella vuln export \ + --output compliance-bundle.ndjson \ + --sbom-id prod-app-sbom \ + --signed +``` + +--- + +## 7. `stella vuln export verify` + +### 7.1 Synopsis + +```bash +stella vuln export verify \ + [--expected-digest ] \ + [--public-key ] \ + [--verbose] +``` + +### 7.2 Description + +Verifies the integrity and optional signature of an exported vulnerability bundle. Use this to validate bundles received from external sources or stored archives. + +### 7.3 Example + +```bash +stella vuln export verify compliance-bundle.ndjson \ + --expected-digest sha256:abc123... \ + --public-key /path/to/public.pem +``` + +--- + +## 8. Exit Codes + +| Exit Code | Meaning | +|-----------|---------| +| `0` | Command completed successfully. | +| `1` | General error (see error message). | +| `130` | Operation cancelled by user (Ctrl+C). | + +--- + +## 9. Compliance Checklist + +Use these commands to demonstrate vulnerability management compliance: + +### 9.1 Vulnerability Inventory + +```bash +# Generate complete vulnerability inventory +stella vuln list --json > inventory.json + +# Summary by severity +stella vuln list --group-by severity --json > severity-summary.json +``` + +### 9.2 SLA Compliance + +```bash +# Find critical vulns older than 30 days +stella vuln list \ + --severity critical \ + --status open \ + --json | jq '.items[] | select(.updatedAt < (now - 2592000 | todate))' +``` + +### 9.3 Risk Acceptance Audit + +```bash +# Export all risk-accepted vulnerabilities with justifications +stella vuln list --status risk_accepted --json > risk-accepted.json +``` + +### 9.4 Evidence Bundle for Audit + +```bash +# Export signed evidence bundle +stella vuln export \ + --output audit-$(date +%Y%m%d).ndjson \ + --signed + +# Verify bundle integrity +stella vuln export verify audit-$(date +%Y%m%d).ndjson +``` + +--- + +## 10. CI Pipeline Snippets + +### 10.1 GitHub Actions + +```yaml +- name: Check Critical Vulnerabilities + run: | + count=$(stella vuln list --severity critical --status open --json | jq '.total') + if [ "$count" -gt 0 ]; then + echo "::error::Found $count critical open vulnerabilities" + stella vuln list --severity critical --status open + exit 1 + fi + +- name: Policy Simulation Gate + run: | + stella vuln simulate \ + --policy-id ${{ env.POLICY_ID }} \ + --changed-only \ + --markdown \ + --output ${{ github.workspace }}/simulation.md + + cat ${{ github.workspace }}/simulation.md >> $GITHUB_STEP_SUMMARY +``` + +### 10.2 GitLab CI + +```yaml +vuln-check: + script: + - stella auth login --token $STELLA_TOKEN + - | + if stella vuln list --severity critical --status open --json | jq -e '.total > 0'; then + echo "Critical vulnerabilities found!" + stella vuln list --severity critical --status open + exit 1 + fi + artifacts: + reports: + dotenv: vuln-status.env +``` + +### 10.3 Jenkins Pipeline + +```groovy +stage('Vulnerability Check') { + steps { + sh ''' + stella vuln list \ + --severity critical \ + --severity high \ + --status open \ + --csv > vulns.csv + ''' + archiveArtifacts artifacts: 'vulns.csv' + + script { + def count = sh( + script: "stella vuln list --severity critical --status open --json | jq '.total'", + returnStdout: true + ).trim().toInteger() + + if (count > 0) { + error("Found ${count} critical vulnerabilities") + } + } + } +} +``` + +--- + +## 11. Offline Operation + +When operating in air-gapped environments: + +1. Export vulnerability data before going offline: + ```bash + stella vuln export --output vuln-bundle.ndjson --signed + ``` + +2. Transfer bundle to air-gapped system. + +3. Verify bundle integrity: + ```bash + stella vuln export verify vuln-bundle.ndjson \ + --expected-digest sha256:... + ``` + +For full offline kit support, see the [Offline Kit documentation](../../../24_OFFLINE_KIT.md). + +--- + +## 12. Related Documentation + +- [VEX Consensus CLI](./vex-cli.md) - VEX status management +- [Policy Simulation](../../policy/guides/simulation.md) - Policy testing +- [Authentication Guide](./auth-cli.md) - Token management +- [API Reference](../../../09_API_CLI_REFERENCE.md) - Full API documentation diff --git a/docs/modules/scanner/guides/surface-fs-workflow.md b/docs/modules/scanner/guides/surface-fs-workflow.md new file mode 100644 index 000000000..445b4b27c --- /dev/null +++ b/docs/modules/scanner/guides/surface-fs-workflow.md @@ -0,0 +1,414 @@ +# Surface.FS Workflow Guide + +> **Version:** 1.0 (2025-11-28) +> +> **Audience:** Scanner Worker/WebService integrators, Zastava operators, Offline Kit builders + +## Overview + +Surface.FS provides a content-addressable storage layer for Scanner-derived artefacts. This guide covers the end-to-end workflow from artefact generation to consumption, including offline bundle handling. + +## Workflow Stages + +``` +┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ +│ Scanner Worker │───▶│ Surface.FS │───▶│ Consumers │ +│ - Scan image │ │ - Store manifest│ │ - WebService │ +│ - Generate │ │ - Store payload │ │ - Zastava │ +│ artefacts │ │ - Local cache │ │ - CLI │ +└─────────────────┘ └─────────────────┘ └─────────────────┘ + │ │ │ + ▼ ▼ ▼ +┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ +│ Generate: │ │ Store: │ │ Consume: │ +│ - Layer frags │ │ - RustFS/S3 │ │ - Report API │ +│ - EntryTrace │ │ - Local disk │ │ - Drift detect │ +│ - SBOM frags │ │ - Offline kit │ │ - Rescan plan │ +└─────────────────┘ └─────────────────┘ └─────────────────┘ +``` + +## Stage 1: Artefact Generation (Scanner Worker) + +### 1.1 Configure Surface.FS + +```csharp +// In Scanner Worker startup +builder.Services.AddSurfaceFileCache(); +builder.Services.AddSurfaceManifestStore(); +``` + +Environment variables (see [Surface.Env guide](../design/surface-env.md)): +```bash +SCANNER_SURFACE_FS_ENDPOINT=http://rustfs:8080 +SCANNER_SURFACE_FS_BUCKET=surface-cache +SCANNER_SURFACE_CACHE_ROOT=/var/lib/stellaops/surface +SCANNER_SURFACE_TENANT=default +``` + +### 1.2 Generate and Publish Artefacts + +```csharp +public async Task ExecuteScanAsync(ScanJob job, CancellationToken ct) +{ + // 1. Run analyzers to generate artefacts + var layerFragments = await AnalyzeLayersAsync(job.Image, ct); + var entryTrace = await AnalyzeEntryPointsAsync(job.Image, ct); + var sbomFragments = await GenerateSbomAsync(job.Image, ct); + + // 2. Create manifest document + var manifest = new SurfaceManifestDocument + { + Schema = "stellaops.surface.manifest@1", + Tenant = _environment.Settings.Tenant, + ImageDigest = job.Image.Digest, + ScanId = job.Id, + GeneratedAt = DateTimeOffset.UtcNow, + Source = new SurfaceManifestSource + { + Component = "scanner.worker", + Version = _version, + WorkerInstance = Environment.MachineName, + Attempt = job.Attempt + }, + Artifacts = new List() + }; + + // 3. Add artefacts to manifest + foreach (var fragment in layerFragments) + { + var payloadUri = await _manifestWriter.StorePayloadAsync( + fragment.Content, + "layer.fragments", + ct); + + manifest.Artifacts.Add(new SurfaceManifestArtifact + { + Kind = "layer.fragments", + Uri = payloadUri, + Digest = fragment.Digest, + MediaType = "application/vnd.stellaops.layer-fragments+json", + Format = "json", + SizeBytes = fragment.Content.Length + }); + } + + // 4. Publish manifest + var result = await _manifestWriter.PublishAsync(manifest, ct); + + _logger.LogInformation( + "Published manifest {Digest} with {Count} artefacts", + result.Digest, + manifest.Artifacts.Count); + + return new ScanResult + { + ManifestUri = result.Uri, + ManifestDigest = result.Digest + }; +} +``` + +### 1.3 Cache EntryTrace Results + +```csharp +public async Task GetOrComputeEntryTraceAsync( + ImageReference image, + EntryTraceOptions options, + CancellationToken ct) +{ + // Create deterministic cache key + var cacheKey = new SurfaceCacheKey( + @namespace: "entrytrace.graph", + tenant: _environment.Settings.Tenant, + digest: ComputeOptionsHash(options, image.Digest)); + + // Try cache first + var cached = await _cache.TryGetAsync(cacheKey, ct); + if (cached is not null) + { + _logger.LogDebug("EntryTrace cache hit for {Key}", cacheKey); + return cached; + } + + // Compute and cache + var graph = await ComputeEntryTraceAsync(image, options, ct); + await _cache.SetAsync(cacheKey, graph, ct); + + return graph; +} +``` + +## Stage 2: Storage (Surface.FS) + +### 2.1 Manifest Storage Layout + +``` +/ +├── manifests/ +│ └── / +│ └── / +│ └── / +│ └── .json +└── payloads/ + └── / + └── / + └── sha256/ + └── / + └── / + └── .json.zst +``` + +### 2.2 Local Cache Layout + +``` +/ +├── manifests/ # Manifest JSON files +│ └── /... +├── cache/ # Hot artefacts +│ └── / +│ └── / +│ └── +└── temp/ # In-progress writes +``` + +### 2.3 Manifest URI Format + +``` +cas:///////.json +``` + +Example: +``` +cas://surface-cache/manifests/acme/ab/cd/abcdef0123456789...json +``` + +## Stage 3: Consumption + +### 3.1 WebService API + +```http +GET /api/v1/scans/{id} +``` + +Response includes Surface manifest pointer: +```json +{ + "id": "scan-1234", + "status": "completed", + "surface": { + "manifestUri": "cas://surface-cache/manifests/acme/ab/cd/...", + "manifestDigest": "sha256:abcdef...", + "artifacts": [ + { + "kind": "layer.fragments", + "uri": "cas://surface-cache/payloads/acme/layer.fragments/...", + "digest": "sha256:123456...", + "mediaType": "application/vnd.stellaops.layer-fragments+json" + } + ] + } +} +``` + +### 3.2 Zastava Drift Detection + +```csharp +public async Task DetectDriftAsync( + string imageDigest, + CancellationToken ct) +{ + // 1. Fetch baseline manifest + var manifestUri = await _surfacePointerService.GetManifestUriAsync(imageDigest, ct); + var manifest = await _manifestReader.TryGetByUriAsync(manifestUri, ct); + + if (manifest is null) + { + return DriftResult.NoBaseline(); + } + + // 2. Get EntryTrace artefact + var entryTraceArtifact = manifest.Artifacts + .FirstOrDefault(a => a.Kind == "entrytrace.graph"); + + if (entryTraceArtifact is null) + { + return DriftResult.NoEntryTrace(); + } + + // 3. Compare with runtime + var baseline = await _payloadStore.GetAsync( + entryTraceArtifact.Uri, ct); + + var runtime = await _runtimeCollector.CollectAsync(ct); + + return CompareGraphs(baseline, runtime); +} +``` + +### 3.3 Scheduler Rescan Planning + +```csharp +public async Task CreateRescanPlanAsync( + string imageDigest, + CancellationToken ct) +{ + // 1. Read manifest to understand what was scanned + var manifest = await _manifestReader.TryGetByDigestAsync(imageDigest, ct); + + if (manifest is null || IsExpired(manifest)) + { + return RescanPlan.FullRescan(); + } + + // 2. Check for layer changes + var layerArtifact = manifest.Artifacts + .FirstOrDefault(a => a.Kind == "layer.fragments"); + + if (layerArtifact is not null) + { + var layers = await _payloadStore.GetAsync( + layerArtifact.Uri, ct); + + var changedLayers = await DetectChangedLayersAsync(layers, ct); + + if (changedLayers.Any()) + { + return RescanPlan.IncrementalRescan(changedLayers); + } + } + + return RescanPlan.NoRescanNeeded(); +} +``` + +## Offline Kit Workflow + +### Export (Online Environment) + +```bash +# 1. Build offline kit with Surface manifests +python ops/offline-kit/build_offline_kit.py \ + --version 2025.10.0 \ + --include-surface-manifests \ + --output-dir out/offline-kit + +# 2. Kit structure includes: +# offline/ +# surface/ +# manifests/ +# ///.json +# payloads/ +# //sha256///.json.zst +# manifest-index.json +``` + +### Import (Air-Gapped Environment) + +```csharp +public async Task ImportOfflineKitAsync( + string kitPath, + CancellationToken ct) +{ + var surfacePath = Path.Combine(kitPath, "surface"); + var indexPath = Path.Combine(surfacePath, "manifest-index.json"); + + var index = await LoadIndexAsync(indexPath, ct); + + foreach (var entry in index.Manifests) + { + // 1. Load and verify manifest + var manifestPath = Path.Combine(surfacePath, entry.RelativePath); + var manifest = await LoadManifestAsync(manifestPath, ct); + + // 2. Verify digest + var computedDigest = ComputeDigest(manifest); + if (computedDigest != entry.Digest) + { + throw new InvalidOperationException( + $"Manifest digest mismatch: expected {entry.Digest}, got {computedDigest}"); + } + + // 3. Import via Surface.FS API + await _manifestWriter.PublishAsync(manifest, ct); + + _logger.LogInformation( + "Imported manifest {Digest} for image {Image}", + entry.Digest, + manifest.ImageDigest); + } + + // 4. Import payloads + foreach (var payload in index.Payloads) + { + var payloadPath = Path.Combine(surfacePath, payload.RelativePath); + await _payloadStore.ImportAsync(payloadPath, payload.Uri, ct); + } +} +``` + +### Offline Operation + +Once imported, Surface.FS consumers operate normally: + +```csharp +// Same code works online and offline +var manifest = await _manifestReader.TryGetByUriAsync(manifestUri, ct); +var payload = await _payloadStore.GetAsync(artifact.Uri, ct); +``` + +## Configuration Reference + +### SurfaceManifestStoreOptions + +| Option | Default | Description | +|--------|---------|-------------| +| `Bucket` | `surface-cache` | Object store bucket | +| `ManifestPrefix` | `manifests` | Prefix for manifest objects | +| `PayloadPrefix` | `payloads` | Prefix for payload objects | +| `LocalManifestRoot` | `/manifests` | Local manifest directory | + +### SurfaceCacheOptions + +| Option | Default | Description | +|--------|---------|-------------| +| `Root` | `/stellaops/surface` | Cache root directory | +| `QuotaMegabytes` | `4096` | Cache size limit | +| `EvictionThreshold` | `0.9` | Trigger eviction at 90% quota | + +## Metrics + +| Metric | Labels | Description | +|--------|--------|-------------| +| `surface_manifest_published_total` | `tenant`, `kind` | Manifests published | +| `surface_manifest_cache_hit_total` | `namespace`, `tenant` | Cache hits | +| `surface_manifest_publish_duration_ms` | `tenant` | Publish latency | +| `surface_payload_persisted_total` | `kind` | Payloads stored | + +## Troubleshooting + +### Manifest Not Found + +1. Check tenant matches between writer and reader +2. Verify Surface.FS endpoint is reachable +3. Check bucket permissions +4. Review `surface_manifest_published_total` metric + +### Cache Miss Despite Expected Hit + +1. Verify cache key components match (namespace, tenant, digest) +2. Check cache quota - eviction may have occurred +3. Review `surface_manifest_cache_hit_total` metric + +### Offline Import Failures + +1. Verify manifest digest matches index +2. Check file permissions on import path +3. Ensure Surface.FS endpoint is writable +4. Review import logs for specific errors + +## References + +- [Surface.FS Design](../design/surface-fs.md) +- [Surface.Env Design](../design/surface-env.md) +- [Surface.Validation Guide](./surface-validation-extensibility.md) +- [Offline Kit Documentation](../../../../24_OFFLINE_KIT.md) diff --git a/docs/modules/scanner/guides/surface-validation-extensibility.md b/docs/modules/scanner/guides/surface-validation-extensibility.md new file mode 100644 index 000000000..65a84505e --- /dev/null +++ b/docs/modules/scanner/guides/surface-validation-extensibility.md @@ -0,0 +1,455 @@ +# Surface.Validation Extensibility Guide + +> **Version:** 1.0 (2025-11-28) +> +> **Audience:** Scanner Worker/WebService integrators, custom analyzer developers, Zastava contributors + +## Overview + +Surface.Validation provides a pluggable validator framework for ensuring configuration and data preconditions before performing scanner work. This guide covers how to extend the validation system with custom validators, customize reporting, and integrate validation into your components. + +## Quick Start + +### Basic Registration + +```csharp +// In Program.cs or your DI configuration +builder.Services.AddSurfaceValidation(); +``` + +This registers the default validators: +- `SurfaceEndpointValidator` - Validates Surface.FS endpoint and bucket +- `SurfaceCacheValidator` - Validates cache directory writability and quota +- `SurfaceSecretsValidator` - Validates secrets provider configuration + +### Adding Custom Validators + +```csharp +builder.Services.AddSurfaceValidation(builder => +{ + builder.AddValidator(); + builder.AddValidator(); +}); +``` + +## Writing Custom Validators + +### Validator Interface + +Implement `ISurfaceValidator` to create a custom validator: + +```csharp +public interface ISurfaceValidator +{ + ValueTask ValidateAsync( + SurfaceValidationContext context, + CancellationToken cancellationToken = default); +} +``` + +### Example: Registry Credentials Validator + +```csharp +public sealed class RegistryCredentialsValidator : ISurfaceValidator +{ + private readonly IHttpClientFactory _httpClientFactory; + + public RegistryCredentialsValidator(IHttpClientFactory httpClientFactory) + { + _httpClientFactory = httpClientFactory; + } + + public async ValueTask ValidateAsync( + SurfaceValidationContext context, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(context); + + var issues = new List(); + + // Access secrets configuration from context + var secrets = context.Environment.Secrets; + if (secrets.Provider == "file" && string.IsNullOrEmpty(secrets.Root)) + { + issues.Add(SurfaceValidationIssue.Error( + "REGISTRY_SECRETS_ROOT_MISSING", + "Registry secrets root path is not configured.", + "Set SCANNER_SURFACE_SECRETS_ROOT to the secrets directory.")); + } + + // Access custom properties passed during validation + if (context.Properties.TryGetValue("registryEndpoint", out var endpoint)) + { + var reachable = await CheckEndpointAsync(endpoint?.ToString(), cancellationToken); + if (!reachable) + { + issues.Add(SurfaceValidationIssue.Warning( + "REGISTRY_ENDPOINT_UNREACHABLE", + $"Registry endpoint {endpoint} is not reachable.", + "Verify network connectivity to the container registry.")); + } + } + + return issues.Count == 0 + ? SurfaceValidationResult.Success() + : SurfaceValidationResult.FromIssues(issues); + } + + private async Task CheckEndpointAsync(string? endpoint, CancellationToken ct) + { + if (string.IsNullOrEmpty(endpoint)) return true; + + try + { + var client = _httpClientFactory.CreateClient(); + client.Timeout = TimeSpan.FromMilliseconds(500); // Keep validations fast + var response = await client.GetAsync(endpoint, ct); + return response.IsSuccessStatusCode; + } + catch + { + return false; + } + } +} +``` + +### Best Practices for Validators + +1. **Keep validations fast** - Target < 500ms per validator to avoid blocking startup +2. **Use appropriate severity levels**: + - `Error` - Fatal misconfiguration that prevents operation + - `Warning` - Suboptimal configuration that may cause issues + - `Info` - Informational notices +3. **Provide actionable hints** - Include remediation steps in the hint parameter +4. **Access services via context** - Use `context.Services.GetService()` for DI +5. **Check cancellation tokens** - Honor cancellation for async operations + +## Validation Context + +### Creating Context with Properties + +```csharp +var context = SurfaceValidationContext.Create( + serviceProvider, + componentName: "Scanner.Worker", + environment: surfaceEnvironment, + properties: new Dictionary + { + ["jobId"] = currentJob.Id, + ["imageDigest"] = image.Digest, + ["configPath"] = "/etc/scanner/config.yaml" + }); +``` + +### Accessing Context in Validators + +```csharp +public ValueTask ValidateAsync( + SurfaceValidationContext context, + CancellationToken cancellationToken = default) +{ + // Access environment settings + var endpoint = context.Environment.SurfaceFsEndpoint; + var bucket = context.Environment.SurfaceFsBucket; + var tenant = context.Environment.Tenant; + + // Access custom properties + if (context.Properties.TryGetValue("imageDigest", out var digest)) + { + // Validate specific to this image + } + + // Access DI services + var logger = context.Services.GetService>(); +} +``` + +## Running Validators + +### Using the Validator Runner + +```csharp +public class MyService +{ + private readonly ISurfaceValidatorRunner _runner; + private readonly ISurfaceEnvironment _environment; + + public MyService(ISurfaceValidatorRunner runner, ISurfaceEnvironment environment) + { + _runner = runner; + _environment = environment; + } + + public async Task ExecuteAsync(CancellationToken ct) + { + var context = SurfaceValidationContext.Create( + _serviceProvider, + "MyService", + _environment.Settings); + + // Option 1: Get results and handle manually + var result = await _runner.RunAllAsync(context, ct); + if (!result.IsSuccess) + { + foreach (var issue in result.Issues.Where(i => i.Severity == SurfaceValidationSeverity.Error)) + { + _logger.LogError("Validation failed: {Code} - {Message}", issue.Code, issue.Message); + } + return; + } + + // Option 2: Throw on failure (respects options) + await _runner.EnsureAsync(context, ct); + + // Continue with work... + } +} +``` + +## Custom Reporting + +### Implementing a Reporter + +```csharp +public sealed class MetricsSurfaceValidationReporter : ISurfaceValidationReporter +{ + private readonly IMetricsFactory _metrics; + + public MetricsSurfaceValidationReporter(IMetricsFactory metrics) + { + _metrics = metrics; + } + + public void Report(SurfaceValidationContext context, SurfaceValidationResult result) + { + var counter = _metrics.CreateCounter("surface_validation_issues_total"); + + foreach (var issue in result.Issues) + { + counter.Add(1, new KeyValuePair[] + { + new("code", issue.Code), + new("severity", issue.Severity.ToString().ToLowerInvariant()), + new("component", context.ComponentName) + }); + } + } +} +``` + +### Registering Custom Reporters + +```csharp +// Replace default reporter +builder.Services.AddSingleton(); + +// Or add alongside default (using composite pattern) +builder.Services.Decorate((inner, sp) => + new CompositeSurfaceValidationReporter( + inner, + sp.GetRequiredService())); +``` + +## Configuration Options + +### SurfaceValidationOptions + +| Option | Default | Description | +|--------|---------|-------------| +| `ThrowOnFailure` | `true` | Whether `EnsureAsync()` throws on validation failure | +| `ContinueOnError` | `false` | Whether to continue running validators after first error | + +Configure via `IConfiguration`: + +```json +{ + "Surface": { + "Validation": { + "ThrowOnFailure": true, + "ContinueOnError": false + } + } +} +``` + +Or programmatically: + +```csharp +builder.Services.Configure(options => +{ + options.ThrowOnFailure = true; + options.ContinueOnError = true; // Useful for diagnostics +}); +``` + +## Issue Codes + +### Standard Codes + +| Code | Severity | Validator | +|------|----------|-----------| +| `SURFACE_ENV_MISSING_ENDPOINT` | Error | SurfaceEndpointValidator | +| `SURFACE_FS_BUCKET_MISSING` | Error | SurfaceEndpointValidator | +| `SURFACE_ENV_CACHE_DIR_UNWRITABLE` | Error | SurfaceCacheValidator | +| `SURFACE_ENV_CACHE_QUOTA_INVALID` | Error | SurfaceCacheValidator | +| `SURFACE_SECRET_PROVIDER_UNKNOWN` | Error | SurfaceSecretsValidator | +| `SURFACE_SECRET_CONFIGURATION_MISSING` | Error | SurfaceSecretsValidator | +| `SURFACE_ENV_TENANT_MISSING` | Error | SurfaceSecretsValidator | + +### Custom Issue Codes + +Follow the naming convention: `__` + +```csharp +public static class MyValidationCodes +{ + public const string RegistrySecretsRootMissing = "REGISTRY_SECRETS_ROOT_MISSING"; + public const string RegistryEndpointUnreachable = "REGISTRY_ENDPOINT_UNREACHABLE"; + public const string CacheWarmupFailed = "CACHE_WARMUP_FAILED"; +} +``` + +## Integration Examples + +### Scanner Worker Startup + +```csharp +// In hosted service +public async Task StartAsync(CancellationToken ct) +{ + var context = SurfaceValidationContext.Create( + _services, + "Scanner.Worker", + _surfaceEnv.Settings); + + try + { + await _validatorRunner.EnsureAsync(context, ct); + _logger.LogInformation("Surface validation passed"); + } + catch (SurfaceValidationException ex) + { + _logger.LogCritical(ex, "Surface validation failed; worker cannot start"); + throw; + } +} +``` + +### Per-Scan Validation + +```csharp +public async Task ScanImageAsync(ImageReference image, CancellationToken ct) +{ + var context = SurfaceValidationContext.Create( + _services, + "Scanner.Analyzer", + _surfaceEnv.Settings, + new Dictionary + { + ["imageDigest"] = image.Digest, + ["imageReference"] = image.Reference + }); + + var result = await _validatorRunner.RunAllAsync(context, ct); + + if (result.HasErrors) + { + return ScanResult.Failed(result.Issues.Select(i => i.Message)); + } + + // Proceed with scan... +} +``` + +### Zastava Webhook Readiness + +```csharp +app.MapGet("/readyz", async (ISurfaceValidatorRunner runner, ISurfaceEnvironment env) => +{ + var context = SurfaceValidationContext.Create( + app.Services, + "Zastava.Webhook", + env.Settings); + + var result = await runner.RunAllAsync(context); + + if (!result.IsSuccess) + { + return Results.Json(new + { + status = "unhealthy", + issues = result.Issues.Select(i => new { i.Code, i.Message, i.Hint }) + }, statusCode: 503); + } + + return Results.Ok(new { status = "healthy" }); +}); +``` + +## Testing Validators + +### Unit Testing + +```csharp +[Fact] +public async Task Validator_MissingEndpoint_ReturnsError() +{ + // Arrange + var settings = new SurfaceEnvironmentSettings( + SurfaceFsEndpoint: new Uri("https://surface.invalid"), + SurfaceFsBucket: "", + // ... other settings + ); + + var context = SurfaceValidationContext.Create( + new ServiceCollection().BuildServiceProvider(), + "Test", + settings); + + var validator = new SurfaceEndpointValidator(); + + // Act + var result = await validator.ValidateAsync(context); + + // Assert + Assert.False(result.IsSuccess); + Assert.Contains(result.Issues, i => i.Code == SurfaceValidationIssueCodes.SurfaceEndpointMissing); +} +``` + +### Integration Testing + +```csharp +[Fact] +public async Task ValidationRunner_AllValidatorsExecute() +{ + // Arrange + var services = new ServiceCollection(); + services.AddSurfaceValidation(builder => + { + builder.AddValidator(); + builder.AddValidator(); + }); + + var provider = services.BuildServiceProvider(); + var runner = provider.GetRequiredService(); + + var context = SurfaceValidationContext.Create( + provider, + "IntegrationTest", + CreateValidSettings()); + + // Act + var result = await runner.RunAllAsync(context); + + // Assert + Assert.True(result.IsSuccess); +} +``` + +## References + +- [Surface.Validation Design](../design/surface-validation.md) +- [Surface.Env Design](../design/surface-env.md) +- [Surface.Secrets Schema](../design/surface-secrets-schema.md) diff --git a/ops/devops/scripts/rollback-lnm-backfill.js b/ops/devops/scripts/rollback-lnm-backfill.js new file mode 100644 index 000000000..f28a13dbe --- /dev/null +++ b/ops/devops/scripts/rollback-lnm-backfill.js @@ -0,0 +1,100 @@ +/** + * Rollback script for LNM-21-102-DEV legacy advisory backfill migration. + * Removes backfilled observations and linksets by querying the backfill_marker field, + * then clears the tombstone markers from advisory_raw. + * + * Usage: + * mongo concelier ops/devops/scripts/rollback-lnm-backfill.js + * + * Environment variables: + * DRY_RUN - if set to "1", only reports what would be deleted without making changes. + * BATCH_SIZE - optional batch size for deletions (default 500). + * + * After running this script, delete the migration record: + * db.schema_migrations.deleteOne({ _id: "20251127_lnm_legacy_backfill" }) + * + * Then restart the Concelier service. + */ +(function () { + var BACKFILL_MARKER = "lnm_21_102_dev"; + + function toInt(value, fallback) { + var parsed = parseInt(value, 10); + return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback; + } + + function toBool(value) { + return value === "1" || value === "true" || value === true; + } + + var dryRun = typeof DRY_RUN !== "undefined" ? toBool(DRY_RUN) : false; + var batchSize = typeof BATCH_SIZE !== "undefined" ? toInt(BATCH_SIZE, 500) : 500; + var database = db.getName ? db.getSiblingDB(db.getName()) : db; + if (!database) { + throw new Error("Unable to resolve database handle"); + } + + print(""); + print("== LNM-21-102-DEV Backfill Rollback =="); + print("Database : " + database.getName()); + print("Dry Run : " + dryRun); + print("Batch Size: " + batchSize); + print(""); + + // Step 1: Count and delete backfilled observations + var observationsCollection = database.getCollection("advisory_observations"); + var observationsFilter = { backfill_marker: BACKFILL_MARKER }; + var observationsCount = observationsCollection.countDocuments(observationsFilter); + + print("Found " + observationsCount + " backfilled observations to remove."); + + if (!dryRun && observationsCount > 0) { + var obsResult = observationsCollection.deleteMany(observationsFilter); + print("Deleted " + obsResult.deletedCount + " observations."); + } + + // Step 2: Count and delete backfilled linksets + var linksetsCollection = database.getCollection("advisory_linksets"); + var linksetsFilter = { backfill_marker: BACKFILL_MARKER }; + var linksetsCount = linksetsCollection.countDocuments(linksetsFilter); + + print("Found " + linksetsCount + " backfilled linksets to remove."); + + if (!dryRun && linksetsCount > 0) { + var linkResult = linksetsCollection.deleteMany(linksetsFilter); + print("Deleted " + linkResult.deletedCount + " linksets."); + } + + // Step 3: Clear tombstone markers from advisory_raw + var rawCollection = database.getCollection("advisory_raw"); + var rawFilter = { backfill_marker: BACKFILL_MARKER }; + var rawCount = rawCollection.countDocuments(rawFilter); + + print("Found " + rawCount + " advisory_raw documents with tombstone markers to clear."); + + if (!dryRun && rawCount > 0) { + var rawResult = rawCollection.updateMany(rawFilter, { $unset: { backfill_marker: "" } }); + print("Cleared tombstone markers from " + rawResult.modifiedCount + " advisory_raw documents."); + } + + // Step 4: Summary + print(""); + print("== Rollback Summary =="); + if (dryRun) { + print("DRY RUN - No changes were made."); + print("Would delete " + observationsCount + " observations."); + print("Would delete " + linksetsCount + " linksets."); + print("Would clear " + rawCount + " tombstone markers."); + } else { + print("Observations deleted: " + observationsCount); + print("Linksets deleted : " + linksetsCount); + print("Tombstones cleared : " + rawCount); + } + + print(""); + print("Next steps:"); + print("1. Delete the migration record:"); + print(' db.schema_migrations.deleteOne({ _id: "20251127_lnm_legacy_backfill" })'); + print("2. Restart the Concelier service."); + print(""); +})(); diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs index a52849f18..82396dd3d 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs @@ -1,12 +1,12 @@ -using System.Collections.Generic; -using System.Net; -using System.Net.Http.Headers; -using System.Security.Claims; -using System.Text.Encodings.Web; -using System.Text.Json; -using System.Linq; -using System.Net.Http.Json; -using Microsoft.AspNetCore.Authentication; +using System.Collections.Generic; +using System.Net; +using System.Net.Http.Headers; +using System.Security.Claims; +using System.Text.Encodings.Web; +using System.Text.Json; +using System.Linq; +using System.Net.Http.Json; +using Microsoft.AspNetCore.Authentication; using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.TestHost; @@ -54,11 +54,11 @@ public sealed class ConsoleEndpointsTests Assert.Equal(1, tenants.GetArrayLength()); Assert.Equal("tenant-default", tenants[0].GetProperty("id").GetString()); - var events = sink.Events; - var authorizeEvent = Assert.Single(events, evt => evt.EventType == "authority.resource.authorize"); - Assert.Equal(AuthEventOutcome.Success, authorizeEvent.Outcome); - - var consoleEvent = Assert.Single(events, evt => evt.EventType == "authority.console.tenants.read"); + var events = sink.Events; + var authorizeEvent = Assert.Single(events, evt => evt.EventType == "authority.resource.authorize"); + Assert.Equal(AuthEventOutcome.Success, authorizeEvent.Outcome); + + var consoleEvent = Assert.Single(events, evt => evt.EventType == "authority.console.tenants.read"); Assert.Equal(AuthEventOutcome.Success, consoleEvent.Outcome); Assert.Contains("tenant.resolved", consoleEvent.Properties.Select(property => property.Name)); Assert.Equal(2, events.Count); @@ -148,17 +148,17 @@ public sealed class ConsoleEndpointsTests Assert.Equal("tenant-default", json.RootElement.GetProperty("tenant").GetString()); var events = sink.Events; - var authorizeEvent = Assert.Single(events, evt => evt.EventType == "authority.resource.authorize"); + var authorizeEvent = Assert.Single(events, evt => evt.EventType == "authority.resource.authorize"); Assert.Equal(AuthEventOutcome.Success, authorizeEvent.Outcome); - var consoleEvent = Assert.Single(events, evt => evt.EventType == "authority.console.profile.read"); + var consoleEvent = Assert.Single(events, evt => evt.EventType == "authority.console.profile.read"); Assert.Equal(AuthEventOutcome.Success, consoleEvent.Outcome); Assert.Equal(2, events.Count); } [Fact] - public async Task TokenIntrospect_FlagsInactive_WhenExpired() - { + public async Task TokenIntrospect_FlagsInactive_WhenExpired() + { var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-10-31T12:00:00Z")); var sink = new RecordingAuthEventSink(); await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); @@ -186,123 +186,340 @@ public sealed class ConsoleEndpointsTests Assert.Equal("token-abc", json.RootElement.GetProperty("tokenId").GetString()); var events = sink.Events; - var authorizeEvent = Assert.Single(events, evt => evt.EventType == "authority.resource.authorize"); + var authorizeEvent = Assert.Single(events, evt => evt.EventType == "authority.resource.authorize"); Assert.Equal(AuthEventOutcome.Success, authorizeEvent.Outcome); - var consoleEvent = Assert.Single(events, evt => evt.EventType == "authority.console.token.introspect"); + var consoleEvent = Assert.Single(events, evt => evt.EventType == "authority.console.token.introspect"); Assert.Equal(AuthEventOutcome.Success, consoleEvent.Outcome); - Assert.Equal(2, events.Count); - } - - [Fact] - public async Task VulnerabilityFindings_ReturnsSamplePayload() - { - var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); - var sink = new RecordingAuthEventSink(); - await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); - - var accessor = app.Services.GetRequiredService(); - accessor.Principal = CreatePrincipal( - tenant: "tenant-default", - scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AdvisoryRead, StellaOpsScopes.VexRead }, - expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); - - var client = app.CreateTestClient(); - client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); - client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); - - var response = await client.GetAsync("/console/vuln/findings?severity=high"); - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - - using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); - var items = json.RootElement.GetProperty("items"); - Assert.True(items.GetArrayLength() >= 1); - Assert.Equal("CVE-2024-12345", items[0].GetProperty("coordinates").GetProperty("advisoryId").GetString()); - } - - [Fact] - public async Task VulnerabilityFindingDetail_ReturnsExpandedDocument() - { - var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); - var sink = new RecordingAuthEventSink(); - await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); - - var accessor = app.Services.GetRequiredService(); - accessor.Principal = CreatePrincipal( - tenant: "tenant-default", - scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AdvisoryRead, StellaOpsScopes.VexRead }, - expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); - - var client = app.CreateTestClient(); - client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); - client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); - - var response = await client.GetAsync("/console/vuln/tenant-default:advisory-ai:sha256:5d1a"); - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - - using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); - var summary = json.RootElement.GetProperty("summary"); - Assert.Equal("tenant-default:advisory-ai:sha256:5d1a", summary.GetProperty("findingId").GetString()); - Assert.Equal("reachable", summary.GetProperty("reachability").GetProperty("status").GetString()); - var detailReachability = json.RootElement.GetProperty("reachability"); - Assert.Equal("reachable", detailReachability.GetProperty("status").GetString()); - } - - [Fact] - public async Task VulnerabilityTicket_ReturnsDeterministicPayload() - { - var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); - var sink = new RecordingAuthEventSink(); - await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); - - var accessor = app.Services.GetRequiredService(); - accessor.Principal = CreatePrincipal( - tenant: "tenant-default", - scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AdvisoryRead, StellaOpsScopes.VexRead }, - expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); - - var client = app.CreateTestClient(); - client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); - client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); - - var payload = new ConsoleVulnerabilityTicketRequest( - Selection: new[] { "tenant-default:advisory-ai:sha256:5d1a" }, - TargetSystem: "servicenow", - Metadata: new Dictionary { ["assignmentGroup"] = "runtime-security" }); - - var response = await client.PostAsJsonAsync("/console/vuln/tickets", payload); - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - - using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); - Assert.StartsWith("console-ticket::tenant-default::", json.RootElement.GetProperty("ticketId").GetString()); - Assert.Equal("servicenow", payload.TargetSystem); - } - - [Fact] - public async Task VexStatements_ReturnsSampleRows() - { - var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); - var sink = new RecordingAuthEventSink(); - await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); - - var accessor = app.Services.GetRequiredService(); - accessor.Principal = CreatePrincipal( - tenant: "tenant-default", - scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.VexRead }, - expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); - - var client = app.CreateTestClient(); - client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); - client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); - - var response = await client.GetAsync("/console/vex/statements?advisoryId=CVE-2024-12345"); - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - - using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); - var items = json.RootElement.GetProperty("items"); - Assert.True(items.GetArrayLength() >= 1); - Assert.Equal("CVE-2024-12345", items[0].GetProperty("advisoryId").GetString()); - } + Assert.Equal(2, events.Count); + } + + [Fact] + public async Task VulnerabilityFindings_ReturnsSamplePayload() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AdvisoryRead, StellaOpsScopes.VexRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var response = await client.GetAsync("/console/vuln/findings?severity=high"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); + var items = json.RootElement.GetProperty("items"); + Assert.True(items.GetArrayLength() >= 1); + Assert.Equal("CVE-2024-12345", items[0].GetProperty("coordinates").GetProperty("advisoryId").GetString()); + } + + [Fact] + public async Task VulnerabilityFindingDetail_ReturnsExpandedDocument() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AdvisoryRead, StellaOpsScopes.VexRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var response = await client.GetAsync("/console/vuln/tenant-default:advisory-ai:sha256:5d1a"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); + var summary = json.RootElement.GetProperty("summary"); + Assert.Equal("tenant-default:advisory-ai:sha256:5d1a", summary.GetProperty("findingId").GetString()); + Assert.Equal("reachable", summary.GetProperty("reachability").GetProperty("status").GetString()); + var detailReachability = json.RootElement.GetProperty("reachability"); + Assert.Equal("reachable", detailReachability.GetProperty("status").GetString()); + } + + [Fact] + public async Task VulnerabilityTicket_ReturnsDeterministicPayload() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AdvisoryRead, StellaOpsScopes.VexRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var payload = new ConsoleVulnerabilityTicketRequest( + Selection: new[] { "tenant-default:advisory-ai:sha256:5d1a" }, + TargetSystem: "servicenow", + Metadata: new Dictionary { ["assignmentGroup"] = "runtime-security" }); + + var response = await client.PostAsJsonAsync("/console/vuln/tickets", payload); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); + Assert.StartsWith("console-ticket::tenant-default::", json.RootElement.GetProperty("ticketId").GetString()); + Assert.Equal("servicenow", payload.TargetSystem); + } + + [Fact] + public async Task VexStatements_ReturnsSampleRows() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.VexRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var response = await client.GetAsync("/console/vex/statements?advisoryId=CVE-2024-12345"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); + var items = json.RootElement.GetProperty("items"); + Assert.True(items.GetArrayLength() >= 1); + Assert.Equal("CVE-2024-12345", items[0].GetProperty("advisoryId").GetString()); + } + + [Fact] + public async Task Dashboard_ReturnsTenantScopedAggregates() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var response = await client.GetAsync("/console/dashboard"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); + Assert.Equal("tenant-default", json.RootElement.GetProperty("tenant").GetString()); + Assert.True(json.RootElement.TryGetProperty("generatedAt", out _)); + Assert.True(json.RootElement.TryGetProperty("findings", out var findings)); + Assert.True(findings.TryGetProperty("totalFindings", out _)); + Assert.True(json.RootElement.TryGetProperty("vexOverrides", out _)); + Assert.True(json.RootElement.TryGetProperty("advisoryDeltas", out _)); + Assert.True(json.RootElement.TryGetProperty("runHealth", out _)); + Assert.True(json.RootElement.TryGetProperty("policyChanges", out _)); + + var events = sink.Events; + var consoleEvent = Assert.Single(events, evt => evt.EventType == "authority.console.dashboard"); + Assert.Equal(AuthEventOutcome.Success, consoleEvent.Outcome); + } + + [Fact] + public async Task Dashboard_ReturnsBadRequest_WhenTenantHeaderMissing() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + + var response = await client.GetAsync("/console/dashboard"); + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + } + + [Fact] + public async Task Dashboard_ContainsFindingsTrendData() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var response = await client.GetAsync("/console/dashboard"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); + var findings = json.RootElement.GetProperty("findings"); + var trend = findings.GetProperty("trendLast30Days"); + Assert.True(trend.GetArrayLength() > 0); + } + + [Fact] + public async Task Filters_ReturnsFilterCategories() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var response = await client.GetAsync("/console/filters"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); + Assert.Equal("tenant-default", json.RootElement.GetProperty("tenant").GetString()); + Assert.True(json.RootElement.TryGetProperty("generatedAt", out _)); + Assert.True(json.RootElement.TryGetProperty("filtersHash", out _)); + var categories = json.RootElement.GetProperty("categories"); + Assert.True(categories.GetArrayLength() >= 5); + + var events = sink.Events; + var consoleEvent = Assert.Single(events, evt => evt.EventType == "authority.console.filters"); + Assert.Equal(AuthEventOutcome.Success, consoleEvent.Outcome); + } + + [Fact] + public async Task Filters_ReturnsExpectedCategoryIds() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var response = await client.GetAsync("/console/filters"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); + var categories = json.RootElement.GetProperty("categories"); + var categoryIds = categories.EnumerateArray() + .Select(c => c.GetProperty("categoryId").GetString()) + .ToList(); + + Assert.Contains("severity", categoryIds); + Assert.Contains("policyBadge", categoryIds); + Assert.Contains("reachability", categoryIds); + Assert.Contains("vexState", categoryIds); + Assert.Contains("kev", categoryIds); + } + + [Fact] + public async Task Filters_FiltersByScopeParameter() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var response = await client.GetAsync("/console/filters?scope=severity"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); + var categories = json.RootElement.GetProperty("categories"); + Assert.Equal(1, categories.GetArrayLength()); + Assert.Equal("severity", categories[0].GetProperty("categoryId").GetString()); + } + + [Fact] + public async Task Filters_ReturnsBadRequest_WhenTenantHeaderMissing() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + + var response = await client.GetAsync("/console/filters"); + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + } + + [Fact] + public async Task Filters_ReturnsHashForCacheValidation() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(30)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var response = await client.GetAsync("/console/filters"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); + var filtersHash = json.RootElement.GetProperty("filtersHash").GetString(); + Assert.StartsWith("sha256:", filtersHash); + } private static ClaimsPrincipal CreatePrincipal( string tenant, @@ -371,10 +588,10 @@ public sealed class ConsoleEndpointsTests builder.Services.AddSingleton(timeProvider); builder.Services.AddSingleton(sink); builder.Services.AddSingleton(new FakeTenantCatalog(tenants)); - builder.Services.AddSingleton(); - builder.Services.AddHttpContextAccessor(); - builder.Services.AddSingleton(); - builder.Services.AddSingleton(); + builder.Services.AddSingleton(); + builder.Services.AddHttpContextAccessor(); + builder.Services.AddSingleton(); + builder.Services.AddSingleton(); var authBuilder = builder.Services.AddAuthentication(options => { @@ -400,7 +617,7 @@ public sealed class ConsoleEndpointsTests app.UseAuthorization(); app.MapConsoleEndpoints(); - await app.StartAsync(); + await app.StartAsync(); return app; } @@ -434,11 +651,11 @@ public sealed class ConsoleEndpointsTests private sealed class TestAuthenticationHandler : AuthenticationHandler { - public TestAuthenticationHandler( - IOptionsMonitor options, - ILoggerFactory logger, - UrlEncoder encoder) - : base(options, logger, encoder) + public TestAuthenticationHandler( + IOptionsMonitor options, + ILoggerFactory logger, + UrlEncoder encoder) + : base(options, logger, encoder) { } @@ -468,4 +685,4 @@ internal static class HostTestClientExtensions internal static class TestAuthenticationDefaults { public const string AuthenticationScheme = "AuthorityConsoleTests"; -} +} diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/Airgap/AuthoritySealedModeEvidenceValidator.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Airgap/AuthoritySealedModeEvidenceValidator.cs index e7e7a4015..b11583e1c 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority/Airgap/AuthoritySealedModeEvidenceValidator.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Airgap/AuthoritySealedModeEvidenceValidator.cs @@ -61,7 +61,7 @@ internal sealed class AuthoritySealedModeEvidenceValidator : IAuthoritySealedMod } var cacheKey = $"authority:sealed-mode:{sealedOptions.EvidencePath}"; - if (memoryCache.TryGetValue(cacheKey, out AuthoritySealedModeValidationResult cached)) + if (memoryCache.TryGetValue(cacheKey, out AuthoritySealedModeValidationResult? cached) && cached is not null) { return cached; } diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleEndpointExtensions.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleEndpointExtensions.cs index 76812775d..0ac8d9ce6 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleEndpointExtensions.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleEndpointExtensions.cs @@ -37,41 +37,54 @@ internal static class ConsoleEndpointExtensions .WithName("ConsoleProfile") .WithSummary("Return the authenticated principal profile metadata."); - group.MapPost("/token/introspect", IntrospectToken) - .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.UiRead)) - .WithName("ConsoleTokenIntrospect") - .WithSummary("Introspect the current access token and return expiry, scope, and tenant metadata."); - - var vulnGroup = group.MapGroup("/vuln") - .RequireAuthorization(policy => policy.RequireStellaOpsScopes( - StellaOpsScopes.UiRead, - StellaOpsScopes.AdvisoryRead, - StellaOpsScopes.VexRead)); - - vulnGroup.MapGet("/findings", GetVulnerabilityFindings) - .WithName("ConsoleVulnerabilityFindings") - .WithSummary("List tenant-scoped vulnerability findings with policy/VEX metadata."); - - vulnGroup.MapGet("/{findingId}", GetVulnerabilityFindingById) - .WithName("ConsoleVulnerabilityFindingDetail") - .WithSummary("Return the full finding document, including evidence and policy overlays."); - - vulnGroup.MapPost("/tickets", CreateVulnerabilityTicket) - .WithName("ConsoleVulnerabilityTickets") - .WithSummary("Generate a signed payload payload for external ticketing workflows."); - - var vexGroup = group.MapGroup("/vex") - .RequireAuthorization(policy => policy.RequireStellaOpsScopes( - StellaOpsScopes.UiRead, - StellaOpsScopes.VexRead)); - - vexGroup.MapGet("/statements", GetVexStatements) - .WithName("ConsoleVexStatements") - .WithSummary("List VEX statements impacting the tenant."); - - vexGroup.MapGet("/events", StreamVexEvents) - .WithName("ConsoleVexEvents") - .WithSummary("Server-sent events feed for live VEX updates (placeholder)."); + group.MapPost("/token/introspect", IntrospectToken) + .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.UiRead)) + .WithName("ConsoleTokenIntrospect") + .WithSummary("Introspect the current access token and return expiry, scope, and tenant metadata."); + + var vulnGroup = group.MapGroup("/vuln") + .RequireAuthorization(policy => policy.RequireStellaOpsScopes( + StellaOpsScopes.UiRead, + StellaOpsScopes.AdvisoryRead, + StellaOpsScopes.VexRead)); + + vulnGroup.MapGet("/findings", GetVulnerabilityFindings) + .WithName("ConsoleVulnerabilityFindings") + .WithSummary("List tenant-scoped vulnerability findings with policy/VEX metadata."); + + vulnGroup.MapGet("/{findingId}", GetVulnerabilityFindingById) + .WithName("ConsoleVulnerabilityFindingDetail") + .WithSummary("Return the full finding document, including evidence and policy overlays."); + + vulnGroup.MapPost("/tickets", CreateVulnerabilityTicket) + .WithName("ConsoleVulnerabilityTickets") + .WithSummary("Generate a signed payload payload for external ticketing workflows."); + + var vexGroup = group.MapGroup("/vex") + .RequireAuthorization(policy => policy.RequireStellaOpsScopes( + StellaOpsScopes.UiRead, + StellaOpsScopes.VexRead)); + + vexGroup.MapGet("/statements", GetVexStatements) + .WithName("ConsoleVexStatements") + .WithSummary("List VEX statements impacting the tenant."); + + vexGroup.MapGet("/events", StreamVexEvents) + .WithName("ConsoleVexEvents") + .WithSummary("Server-sent events feed for live VEX updates (placeholder)."); + + // Dashboard and filters endpoints (WEB-CONSOLE-23-001) + group.MapGet("/dashboard", GetDashboard) + .RequireAuthorization(policy => policy.RequireStellaOpsScopes( + StellaOpsScopes.UiRead)) + .WithName("ConsoleDashboard") + .WithSummary("Tenant-scoped aggregates for findings, VEX overrides, advisory deltas, run health, and policy change log."); + + group.MapGet("/filters", GetFilters) + .RequireAuthorization(policy => policy.RequireStellaOpsScopes( + StellaOpsScopes.UiRead)) + .WithName("ConsoleFilters") + .WithSummary("Available filter categories with options and counts for deterministic console queries."); } private static async Task GetTenants( @@ -165,11 +178,11 @@ internal static class ConsoleEndpointExtensions return Results.Ok(profile); } - private static async Task IntrospectToken( - HttpContext httpContext, - TimeProvider timeProvider, - IAuthEventSink auditSink, - CancellationToken cancellationToken) + private static async Task IntrospectToken( + HttpContext httpContext, + TimeProvider timeProvider, + IAuthEventSink auditSink, + CancellationToken cancellationToken) { ArgumentNullException.ThrowIfNull(httpContext); ArgumentNullException.ThrowIfNull(timeProvider); @@ -183,214 +196,311 @@ internal static class ConsoleEndpointExtensions var introspection = BuildTokenIntrospection(principal, timeProvider); - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.token.introspect", - AuthEventOutcome.Success, - null, - BuildProperties( - ("token.active", introspection.Active ? "true" : "false"), - ("token.expires_at", FormatInstant(introspection.ExpiresAt)), - ("tenant.resolved", introspection.Tenant)), - cancellationToken).ConfigureAwait(false); - - return Results.Ok(introspection); - } - - private static async Task GetVulnerabilityFindings( - HttpContext httpContext, - IConsoleWorkspaceService workspaceService, - TimeProvider timeProvider, - IAuthEventSink auditSink, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(httpContext); - ArgumentNullException.ThrowIfNull(workspaceService); - - var tenant = TenantHeaderFilter.GetTenant(httpContext); - if (string.IsNullOrWhiteSpace(tenant)) - { - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.vuln.findings", - AuthEventOutcome.Failure, - "tenant_header_missing", - BuildProperties(("tenant.header", null)), - cancellationToken).ConfigureAwait(false); - - return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." }); - } - - var query = BuildVulnerabilityQuery(httpContext.Request); - var response = await workspaceService.SearchFindingsAsync(tenant, query, cancellationToken).ConfigureAwait(false); - - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.vuln.findings", - AuthEventOutcome.Success, - null, - BuildProperties(("tenant.resolved", tenant), ("pagination.next_token", response.NextPageToken)), - cancellationToken).ConfigureAwait(false); - - return Results.Ok(response); - } - - private static async Task GetVulnerabilityFindingById( - HttpContext httpContext, - string findingId, - IConsoleWorkspaceService workspaceService, - TimeProvider timeProvider, - IAuthEventSink auditSink, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(httpContext); - ArgumentNullException.ThrowIfNull(workspaceService); - - var tenant = TenantHeaderFilter.GetTenant(httpContext); - if (string.IsNullOrWhiteSpace(tenant)) - { - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.vuln.finding", - AuthEventOutcome.Failure, - "tenant_header_missing", - BuildProperties(("tenant.header", null)), - cancellationToken).ConfigureAwait(false); - - return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." }); - } - - var detail = await workspaceService.GetFindingAsync(tenant, findingId, cancellationToken).ConfigureAwait(false); - if (detail is null) - { - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.vuln.finding", - AuthEventOutcome.Failure, - "finding_not_found", - BuildProperties(("tenant.resolved", tenant), ("finding.id", findingId)), - cancellationToken).ConfigureAwait(false); - - return Results.NotFound(new { error = "finding_not_found", message = $"Finding '{findingId}' not found." }); - } - - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.vuln.finding", - AuthEventOutcome.Success, - null, - BuildProperties(("tenant.resolved", tenant), ("finding.id", findingId)), - cancellationToken).ConfigureAwait(false); - - return Results.Ok(detail); - } - - private static async Task CreateVulnerabilityTicket( - HttpContext httpContext, - ConsoleVulnerabilityTicketRequest request, - IConsoleWorkspaceService workspaceService, - TimeProvider timeProvider, - IAuthEventSink auditSink, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(httpContext); - ArgumentNullException.ThrowIfNull(workspaceService); - - if (request is null || request.Selection.Count == 0) - { - return Results.BadRequest(new { error = "invalid_request", message = "At least one finding must be selected." }); - } - - var tenant = TenantHeaderFilter.GetTenant(httpContext); - if (string.IsNullOrWhiteSpace(tenant)) - { - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.vuln.ticket", - AuthEventOutcome.Failure, - "tenant_header_missing", - BuildProperties(("tenant.header", null)), - cancellationToken).ConfigureAwait(false); - - return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." }); - } - - var ticket = await workspaceService.CreateTicketAsync(tenant, request, cancellationToken).ConfigureAwait(false); - - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.vuln.ticket", - AuthEventOutcome.Success, - null, - BuildProperties( - ("tenant.resolved", tenant), - ("ticket.id", ticket.TicketId), - ("ticket.selection.count", request.Selection.Count.ToString(CultureInfo.InvariantCulture))), - cancellationToken).ConfigureAwait(false); - - return Results.Ok(ticket); - } - - private static async Task GetVexStatements( - HttpContext httpContext, - IConsoleWorkspaceService workspaceService, - TimeProvider timeProvider, - IAuthEventSink auditSink, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(httpContext); - ArgumentNullException.ThrowIfNull(workspaceService); - - var tenant = TenantHeaderFilter.GetTenant(httpContext); - if (string.IsNullOrWhiteSpace(tenant)) - { - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.vex.statements", - AuthEventOutcome.Failure, - "tenant_header_missing", - BuildProperties(("tenant.header", null)), - cancellationToken).ConfigureAwait(false); - - return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." }); - } - - var query = BuildVexQuery(httpContext.Request); - var response = await workspaceService.GetVexStatementsAsync(tenant, query, cancellationToken).ConfigureAwait(false); - - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.vex.statements", - AuthEventOutcome.Success, - null, - BuildProperties(("tenant.resolved", tenant), ("pagination.next_token", response.NextPageToken)), - cancellationToken).ConfigureAwait(false); - - return Results.Ok(response); - } - - private static IResult StreamVexEvents() => - Results.StatusCode(StatusCodes.Status501NotImplemented); + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.token.introspect", + AuthEventOutcome.Success, + null, + BuildProperties( + ("token.active", introspection.Active ? "true" : "false"), + ("token.expires_at", FormatInstant(introspection.ExpiresAt)), + ("tenant.resolved", introspection.Tenant)), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(introspection); + } + + private static async Task GetVulnerabilityFindings( + HttpContext httpContext, + IConsoleWorkspaceService workspaceService, + TimeProvider timeProvider, + IAuthEventSink auditSink, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(httpContext); + ArgumentNullException.ThrowIfNull(workspaceService); + + var tenant = TenantHeaderFilter.GetTenant(httpContext); + if (string.IsNullOrWhiteSpace(tenant)) + { + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.vuln.findings", + AuthEventOutcome.Failure, + "tenant_header_missing", + BuildProperties(("tenant.header", null)), + cancellationToken).ConfigureAwait(false); + + return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." }); + } + + var query = BuildVulnerabilityQuery(httpContext.Request); + var response = await workspaceService.SearchFindingsAsync(tenant, query, cancellationToken).ConfigureAwait(false); + + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.vuln.findings", + AuthEventOutcome.Success, + null, + BuildProperties(("tenant.resolved", tenant), ("pagination.next_token", response.NextPageToken)), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(response); + } + + private static async Task GetVulnerabilityFindingById( + HttpContext httpContext, + string findingId, + IConsoleWorkspaceService workspaceService, + TimeProvider timeProvider, + IAuthEventSink auditSink, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(httpContext); + ArgumentNullException.ThrowIfNull(workspaceService); + + var tenant = TenantHeaderFilter.GetTenant(httpContext); + if (string.IsNullOrWhiteSpace(tenant)) + { + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.vuln.finding", + AuthEventOutcome.Failure, + "tenant_header_missing", + BuildProperties(("tenant.header", null)), + cancellationToken).ConfigureAwait(false); + + return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." }); + } + + var detail = await workspaceService.GetFindingAsync(tenant, findingId, cancellationToken).ConfigureAwait(false); + if (detail is null) + { + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.vuln.finding", + AuthEventOutcome.Failure, + "finding_not_found", + BuildProperties(("tenant.resolved", tenant), ("finding.id", findingId)), + cancellationToken).ConfigureAwait(false); + + return Results.NotFound(new { error = "finding_not_found", message = $"Finding '{findingId}' not found." }); + } + + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.vuln.finding", + AuthEventOutcome.Success, + null, + BuildProperties(("tenant.resolved", tenant), ("finding.id", findingId)), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(detail); + } + + private static async Task CreateVulnerabilityTicket( + HttpContext httpContext, + ConsoleVulnerabilityTicketRequest request, + IConsoleWorkspaceService workspaceService, + TimeProvider timeProvider, + IAuthEventSink auditSink, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(httpContext); + ArgumentNullException.ThrowIfNull(workspaceService); + + if (request is null || request.Selection.Count == 0) + { + return Results.BadRequest(new { error = "invalid_request", message = "At least one finding must be selected." }); + } + + var tenant = TenantHeaderFilter.GetTenant(httpContext); + if (string.IsNullOrWhiteSpace(tenant)) + { + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.vuln.ticket", + AuthEventOutcome.Failure, + "tenant_header_missing", + BuildProperties(("tenant.header", null)), + cancellationToken).ConfigureAwait(false); + + return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." }); + } + + var ticket = await workspaceService.CreateTicketAsync(tenant, request, cancellationToken).ConfigureAwait(false); + + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.vuln.ticket", + AuthEventOutcome.Success, + null, + BuildProperties( + ("tenant.resolved", tenant), + ("ticket.id", ticket.TicketId), + ("ticket.selection.count", request.Selection.Count.ToString(CultureInfo.InvariantCulture))), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(ticket); + } + + private static async Task GetVexStatements( + HttpContext httpContext, + IConsoleWorkspaceService workspaceService, + TimeProvider timeProvider, + IAuthEventSink auditSink, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(httpContext); + ArgumentNullException.ThrowIfNull(workspaceService); + + var tenant = TenantHeaderFilter.GetTenant(httpContext); + if (string.IsNullOrWhiteSpace(tenant)) + { + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.vex.statements", + AuthEventOutcome.Failure, + "tenant_header_missing", + BuildProperties(("tenant.header", null)), + cancellationToken).ConfigureAwait(false); + + return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." }); + } + + var query = BuildVexQuery(httpContext.Request); + var response = await workspaceService.GetVexStatementsAsync(tenant, query, cancellationToken).ConfigureAwait(false); + + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.vex.statements", + AuthEventOutcome.Success, + null, + BuildProperties(("tenant.resolved", tenant), ("pagination.next_token", response.NextPageToken)), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(response); + } + + private static IResult StreamVexEvents() => + Results.StatusCode(StatusCodes.Status501NotImplemented); + + private static async Task GetDashboard( + HttpContext httpContext, + IConsoleWorkspaceService workspaceService, + TimeProvider timeProvider, + IAuthEventSink auditSink, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(httpContext); + ArgumentNullException.ThrowIfNull(workspaceService); + + var tenant = TenantHeaderFilter.GetTenant(httpContext); + if (string.IsNullOrWhiteSpace(tenant)) + { + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.dashboard", + AuthEventOutcome.Failure, + "tenant_header_missing", + BuildProperties(("tenant.header", null)), + cancellationToken).ConfigureAwait(false); + + return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." }); + } + + var dashboard = await workspaceService.GetDashboardAsync(tenant, cancellationToken).ConfigureAwait(false); + + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.dashboard", + AuthEventOutcome.Success, + null, + BuildProperties( + ("tenant.resolved", tenant), + ("dashboard.findings_count", dashboard.Findings.TotalFindings.ToString(CultureInfo.InvariantCulture))), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(dashboard); + } + + private static async Task GetFilters( + HttpContext httpContext, + IConsoleWorkspaceService workspaceService, + TimeProvider timeProvider, + IAuthEventSink auditSink, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(httpContext); + ArgumentNullException.ThrowIfNull(workspaceService); + + var tenant = TenantHeaderFilter.GetTenant(httpContext); + if (string.IsNullOrWhiteSpace(tenant)) + { + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.filters", + AuthEventOutcome.Failure, + "tenant_header_missing", + BuildProperties(("tenant.header", null)), + cancellationToken).ConfigureAwait(false); + + return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." }); + } + + var query = BuildFiltersQuery(httpContext.Request); + var filters = await workspaceService.GetFiltersAsync(tenant, query, cancellationToken).ConfigureAwait(false); + + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.filters", + AuthEventOutcome.Success, + null, + BuildProperties( + ("tenant.resolved", tenant), + ("filters.hash", filters.FiltersHash), + ("filters.categories_count", filters.Categories.Count.ToString(CultureInfo.InvariantCulture))), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(filters); + } + + private static ConsoleFiltersQuery BuildFiltersQuery(HttpRequest request) + { + var scope = request.Query.TryGetValue("scope", out var scopeValues) ? scopeValues.FirstOrDefault() : null; + var includeEmpty = request.Query.TryGetValue("includeEmpty", out var includeValues) && + bool.TryParse(includeValues.FirstOrDefault(), out var include) && include; + + return new ConsoleFiltersQuery(scope, includeEmpty); + } private static ConsoleProfileResponse BuildProfile(ClaimsPrincipal principal, TimeProvider timeProvider) { @@ -455,9 +565,9 @@ internal static class ConsoleEndpointExtensions FreshAuth: freshAuth); } - private static bool DetermineFreshAuth(ClaimsPrincipal principal, DateTimeOffset now) - { - var flag = principal.FindFirst("stellaops:fresh_auth") ?? principal.FindFirst("fresh_auth"); + private static bool DetermineFreshAuth(ClaimsPrincipal principal, DateTimeOffset now) + { + var flag = principal.FindFirst("stellaops:fresh_auth") ?? principal.FindFirst("fresh_auth"); if (flag is not null && bool.TryParse(flag.Value, out var freshFlag)) { if (freshFlag) @@ -478,67 +588,67 @@ internal static class ConsoleEndpointExtensions return authTime.Value.Add(ttl) > now; } - const int defaultFreshAuthWindowSeconds = 300; - return authTime.Value.AddSeconds(defaultFreshAuthWindowSeconds) > now; - } - - private static ConsoleVulnerabilityQuery BuildVulnerabilityQuery(HttpRequest request) - { - var builder = new ConsoleVulnerabilityQueryBuilder() - .SetPageSize(ParseInt(request.Query["pageSize"], 50)) - .SetPageToken(request.Query.TryGetValue("pageToken", out var tokenValues) ? tokenValues.FirstOrDefault() : null) - .AddSeverity(ReadMulti(request, "severity")) - .AddPolicyBadges(ReadMulti(request, "policyBadge")) - .AddReachability(ReadMulti(request, "reachability")) - .AddProducts(ReadMulti(request, "product")) - .AddVexStates(ReadMulti(request, "vexState")); - - var search = request.Query.TryGetValue("search", out var searchValues) - ? searchValues - .Where(value => !string.IsNullOrWhiteSpace(value)) - .SelectMany(value => value!.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) - : Array.Empty(); - - builder.AddSearchTerms(search); - return builder.Build(); - } - - private static ConsoleVexQuery BuildVexQuery(HttpRequest request) - { - var builder = new ConsoleVexQueryBuilder() - .SetPageSize(ParseInt(request.Query["pageSize"], 50)) - .SetPageToken(request.Query.TryGetValue("pageToken", out var pageValues) ? pageValues.FirstOrDefault() : null) - .AddAdvisories(ReadMulti(request, "advisoryId")) - .AddTypes(ReadMulti(request, "statementType")) - .AddStates(ReadMulti(request, "state")); - - return builder.Build(); - } - - private static IEnumerable ReadMulti(HttpRequest request, string key) - { - if (!request.Query.TryGetValue(key, out var values)) - { - return Array.Empty(); - } - - return values - .Where(value => !string.IsNullOrWhiteSpace(value)) - .SelectMany(value => value!.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) - .Where(value => value.Length > 0); - } - - private static int ParseInt(StringValues values, int fallback) - { - if (values.Count == 0) - { - return fallback; - } - - return int.TryParse(values[0], NumberStyles.Integer, CultureInfo.InvariantCulture, out var number) - ? number - : fallback; - } + const int defaultFreshAuthWindowSeconds = 300; + return authTime.Value.AddSeconds(defaultFreshAuthWindowSeconds) > now; + } + + private static ConsoleVulnerabilityQuery BuildVulnerabilityQuery(HttpRequest request) + { + var builder = new ConsoleVulnerabilityQueryBuilder() + .SetPageSize(ParseInt(request.Query["pageSize"], 50)) + .SetPageToken(request.Query.TryGetValue("pageToken", out var tokenValues) ? tokenValues.FirstOrDefault() : null) + .AddSeverity(ReadMulti(request, "severity")) + .AddPolicyBadges(ReadMulti(request, "policyBadge")) + .AddReachability(ReadMulti(request, "reachability")) + .AddProducts(ReadMulti(request, "product")) + .AddVexStates(ReadMulti(request, "vexState")); + + var search = request.Query.TryGetValue("search", out var searchValues) + ? searchValues + .Where(value => !string.IsNullOrWhiteSpace(value)) + .SelectMany(value => value!.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) + : Array.Empty(); + + builder.AddSearchTerms(search); + return builder.Build(); + } + + private static ConsoleVexQuery BuildVexQuery(HttpRequest request) + { + var builder = new ConsoleVexQueryBuilder() + .SetPageSize(ParseInt(request.Query["pageSize"], 50)) + .SetPageToken(request.Query.TryGetValue("pageToken", out var pageValues) ? pageValues.FirstOrDefault() : null) + .AddAdvisories(ReadMulti(request, "advisoryId")) + .AddTypes(ReadMulti(request, "statementType")) + .AddStates(ReadMulti(request, "state")); + + return builder.Build(); + } + + private static IEnumerable ReadMulti(HttpRequest request, string key) + { + if (!request.Query.TryGetValue(key, out var values)) + { + return Array.Empty(); + } + + return values + .Where(value => !string.IsNullOrWhiteSpace(value)) + .SelectMany(value => value!.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) + .Where(value => value.Length > 0); + } + + private static int ParseInt(StringValues values, int fallback) + { + if (values.Count == 0) + { + return fallback; + } + + return int.TryParse(values[0], NumberStyles.Integer, CultureInfo.InvariantCulture, out var number) + ? number + : fallback; + } private static IReadOnlyList ExtractRoles(ClaimsPrincipal principal) { diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleWorkspaceModels.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleWorkspaceModels.cs index 9029e9648..0ab9b6de6 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleWorkspaceModels.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleWorkspaceModels.cs @@ -183,6 +183,22 @@ internal interface IConsoleWorkspaceService string tenant, ConsoleVexQuery query, CancellationToken cancellationToken); + + /// + /// Get tenant-scoped dashboard aggregates including findings, VEX overrides, + /// advisory deltas, run health, and policy change log. + /// + Task GetDashboardAsync( + string tenant, + CancellationToken cancellationToken); + + /// + /// Get available filter categories with options and counts for console queries. + /// + Task GetFiltersAsync( + string tenant, + ConsoleFiltersQuery query, + CancellationToken cancellationToken); } internal sealed class ConsoleVulnerabilityQueryBuilder @@ -302,3 +318,167 @@ internal sealed class ConsoleVexQueryBuilder _pageSize, _pageToken); } + +// ============================================================================ +// Dashboard Models (WEB-CONSOLE-23-001) +// ============================================================================ + +/// +/// Dashboard response containing tenant-scoped aggregates for findings, VEX overrides, +/// advisory deltas, run health, and policy change log. +/// +internal sealed record ConsoleDashboardResponse( + string Tenant, + DateTimeOffset GeneratedAt, + ConsoleDashboardFindingsSummary Findings, + ConsoleDashboardVexSummary VexOverrides, + ConsoleDashboardAdvisorySummary AdvisoryDeltas, + ConsoleDashboardRunHealth RunHealth, + ConsoleDashboardPolicyChangeLog PolicyChanges); + +/// +/// Aggregated findings summary for dashboard. +/// +internal sealed record ConsoleDashboardFindingsSummary( + int TotalFindings, + int CriticalCount, + int HighCount, + int MediumCount, + int LowCount, + int InformationalCount, + int NewLastDay, + int NewLastWeek, + int ResolvedLastWeek, + IReadOnlyList TrendLast30Days); + +/// +/// A single trend data point. +/// +internal sealed record ConsoleDashboardTrendPoint( + DateTimeOffset Date, + int Open, + int Resolved, + int New); + +/// +/// VEX overrides summary for dashboard. +/// +internal sealed record ConsoleDashboardVexSummary( + int TotalStatements, + int NotAffectedCount, + int FixedCount, + int UnderInvestigationCount, + int AffectedCount, + int AutomatedCount, + int ManualCount, + DateTimeOffset? LastStatementUpdated); + +/// +/// Advisory delta summary for dashboard. +/// +internal sealed record ConsoleDashboardAdvisorySummary( + int TotalAdvisories, + int NewLastDay, + int NewLastWeek, + int UpdatedLastWeek, + int KevCount, + IReadOnlyList RecentAdvisories); + +/// +/// A recent advisory item for dashboard display. +/// +internal sealed record ConsoleDashboardAdvisoryItem( + string AdvisoryId, + string Severity, + string Summary, + bool Kev, + int AffectedFindings, + DateTimeOffset PublishedAt); + +/// +/// Run health summary for dashboard. +/// +internal sealed record ConsoleDashboardRunHealth( + int TotalRuns, + int SuccessfulRuns, + int FailedRuns, + int RunningRuns, + int PendingRuns, + double SuccessRatePercent, + TimeSpan? AverageRunDuration, + DateTimeOffset? LastRunCompletedAt, + IReadOnlyList RecentRuns); + +/// +/// A recent run item for dashboard display. +/// +internal sealed record ConsoleDashboardRecentRun( + string RunId, + string RunType, + string Status, + DateTimeOffset StartedAt, + DateTimeOffset? CompletedAt, + TimeSpan? Duration, + int FindingsProcessed); + +/// +/// Policy change log summary for dashboard. +/// +internal sealed record ConsoleDashboardPolicyChangeLog( + int TotalPolicies, + int ActivePolicies, + int ChangesLastWeek, + DateTimeOffset? LastPolicyUpdated, + IReadOnlyList RecentChanges); + +/// +/// A recent policy change for dashboard display. +/// +internal sealed record ConsoleDashboardPolicyChange( + string PolicyId, + string PolicyName, + string ChangeType, + string ChangedBy, + DateTimeOffset ChangedAt, + string? Description); + +// ============================================================================ +// Filters Models (WEB-CONSOLE-23-001) +// ============================================================================ + +/// +/// Available filters for console queries with counts and deterministic ordering. +/// +internal sealed record ConsoleFiltersResponse( + string Tenant, + DateTimeOffset GeneratedAt, + string FiltersHash, + IReadOnlyList Categories); + +/// +/// A filter category with available options. +/// +internal sealed record ConsoleFilterCategory( + string CategoryId, + string DisplayName, + string FilterType, + bool MultiSelect, + IReadOnlyList Options); + +/// +/// A single filter option with count and metadata. +/// +internal sealed record ConsoleFilterOption( + string Value, + string DisplayName, + int Count, + bool IsDefault, + string? Description, + string? IconHint); + +/// +/// Query for filters endpoint. +/// +internal sealed record ConsoleFiltersQuery( + string? Scope, + bool IncludeEmptyCategories); diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleWorkspaceSampleService.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleWorkspaceSampleService.cs index d51d6d7dc..48263b873 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleWorkspaceSampleService.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleWorkspaceSampleService.cs @@ -277,6 +277,265 @@ internal sealed class ConsoleWorkspaceSampleService : IConsoleWorkspaceService return Task.FromResult(page); } + public Task GetDashboardAsync( + string tenant, + CancellationToken cancellationToken) + { + var findings = SampleFindings.Where(detail => IsTenantMatch(tenant, detail.Summary)).ToList(); + var statements = SampleStatements.Where(s => string.Equals(s.Tenant, tenant, StringComparison.OrdinalIgnoreCase)).ToList(); + + // Build findings summary + var findingsSummary = new ConsoleDashboardFindingsSummary( + TotalFindings: findings.Count, + CriticalCount: findings.Count(f => string.Equals(f.Summary.Severity, "critical", StringComparison.OrdinalIgnoreCase)), + HighCount: findings.Count(f => string.Equals(f.Summary.Severity, "high", StringComparison.OrdinalIgnoreCase)), + MediumCount: findings.Count(f => string.Equals(f.Summary.Severity, "medium", StringComparison.OrdinalIgnoreCase)), + LowCount: findings.Count(f => string.Equals(f.Summary.Severity, "low", StringComparison.OrdinalIgnoreCase)), + InformationalCount: findings.Count(f => string.Equals(f.Summary.Severity, "info", StringComparison.OrdinalIgnoreCase)), + NewLastDay: 1, + NewLastWeek: 2, + ResolvedLastWeek: 0, + TrendLast30Days: GenerateSampleTrend()); + + // Build VEX summary + var vexSummary = new ConsoleDashboardVexSummary( + TotalStatements: statements.Count, + NotAffectedCount: statements.Count(s => string.Equals(s.State, "not_affected", StringComparison.OrdinalIgnoreCase)), + FixedCount: statements.Count(s => string.Equals(s.State, "fixed", StringComparison.OrdinalIgnoreCase)), + UnderInvestigationCount: statements.Count(s => string.Equals(s.State, "under_investigation", StringComparison.OrdinalIgnoreCase)), + AffectedCount: statements.Count(s => string.Equals(s.State, "affected", StringComparison.OrdinalIgnoreCase)), + AutomatedCount: statements.Count(s => string.Equals(s.Source.Type, "advisory_ai", StringComparison.OrdinalIgnoreCase)), + ManualCount: statements.Count(s => !string.Equals(s.Source.Type, "advisory_ai", StringComparison.OrdinalIgnoreCase)), + LastStatementUpdated: statements.OrderByDescending(s => s.LastUpdated).FirstOrDefault()?.LastUpdated); + + // Build advisory summary + var advisorySummary = new ConsoleDashboardAdvisorySummary( + TotalAdvisories: findings.Select(f => f.Summary.Coordinates.AdvisoryId).Distinct().Count(), + NewLastDay: 1, + NewLastWeek: 2, + UpdatedLastWeek: 1, + KevCount: findings.Count(f => f.Summary.Kev), + RecentAdvisories: findings + .Select(f => new ConsoleDashboardAdvisoryItem( + AdvisoryId: f.Summary.Coordinates.AdvisoryId, + Severity: f.Summary.Severity, + Summary: f.Summary.Summary, + Kev: f.Summary.Kev, + AffectedFindings: 1, + PublishedAt: f.Summary.Timestamps.FirstSeen)) + .DistinctBy(a => a.AdvisoryId) + .OrderByDescending(a => a.PublishedAt) + .Take(5) + .ToImmutableArray()); + + // Build run health + var runHealth = new ConsoleDashboardRunHealth( + TotalRuns: 10, + SuccessfulRuns: 8, + FailedRuns: 1, + RunningRuns: 1, + PendingRuns: 0, + SuccessRatePercent: 80.0, + AverageRunDuration: TimeSpan.FromMinutes(5), + LastRunCompletedAt: DateTimeOffset.Parse("2025-11-08T12:00:00Z"), + RecentRuns: ImmutableArray.Create( + new ConsoleDashboardRecentRun( + RunId: "run::2025-11-08::001", + RunType: "scan", + Status: "completed", + StartedAt: DateTimeOffset.Parse("2025-11-08T11:55:00Z"), + CompletedAt: DateTimeOffset.Parse("2025-11-08T12:00:00Z"), + Duration: TimeSpan.FromMinutes(5), + FindingsProcessed: 150), + new ConsoleDashboardRecentRun( + RunId: "run::2025-11-08::002", + RunType: "policy_eval", + Status: "running", + StartedAt: DateTimeOffset.Parse("2025-11-08T12:05:00Z"), + CompletedAt: null, + Duration: null, + FindingsProcessed: 75))); + + // Build policy change log + var policyChangeLog = new ConsoleDashboardPolicyChangeLog( + TotalPolicies: 5, + ActivePolicies: 4, + ChangesLastWeek: 2, + LastPolicyUpdated: DateTimeOffset.Parse("2025-11-07T15:30:00Z"), + RecentChanges: ImmutableArray.Create( + new ConsoleDashboardPolicyChange( + PolicyId: "policy://tenant-default/runtime-hardening", + PolicyName: "Runtime Hardening", + ChangeType: "updated", + ChangedBy: "admin@stella-ops.org", + ChangedAt: DateTimeOffset.Parse("2025-11-07T15:30:00Z"), + Description: "Added KEV check rule"), + new ConsoleDashboardPolicyChange( + PolicyId: "policy://tenant-default/network-hardening", + PolicyName: "Network Hardening", + ChangeType: "activated", + ChangedBy: "admin@stella-ops.org", + ChangedAt: DateTimeOffset.Parse("2025-11-06T10:00:00Z"), + Description: null))); + + var dashboard = new ConsoleDashboardResponse( + Tenant: tenant, + GeneratedAt: DateTimeOffset.UtcNow, + Findings: findingsSummary, + VexOverrides: vexSummary, + AdvisoryDeltas: advisorySummary, + RunHealth: runHealth, + PolicyChanges: policyChangeLog); + + return Task.FromResult(dashboard); + } + + public Task GetFiltersAsync( + string tenant, + ConsoleFiltersQuery query, + CancellationToken cancellationToken) + { + var findings = SampleFindings.Where(detail => IsTenantMatch(tenant, detail.Summary)).ToList(); + + var categories = new List + { + new ConsoleFilterCategory( + CategoryId: "severity", + DisplayName: "Severity", + FilterType: "enum", + MultiSelect: true, + Options: BuildFilterOptions(findings, f => f.Summary.Severity, new (string, string, string?)[] + { + ("critical", "Critical", "critical_icon"), + ("high", "High", "high_icon"), + ("medium", "Medium", "medium_icon"), + ("low", "Low", "low_icon"), + ("info", "Informational", "info_icon") + }, query.IncludeEmptyCategories)), + + new ConsoleFilterCategory( + CategoryId: "policyBadge", + DisplayName: "Policy Status", + FilterType: "enum", + MultiSelect: true, + Options: BuildFilterOptions(findings, f => f.Summary.PolicyBadge, new (string, string, string?)[] + { + ("fail", "Fail", "fail_icon"), + ("warn", "Warning", "warn_icon"), + ("pass", "Pass", "pass_icon"), + ("waived", "Waived", "waived_icon") + }, query.IncludeEmptyCategories)), + + new ConsoleFilterCategory( + CategoryId: "reachability", + DisplayName: "Reachability", + FilterType: "enum", + MultiSelect: true, + Options: BuildFilterOptions(findings, f => f.Summary.Reachability?.Status ?? "unknown", new (string, string, string?)[] + { + ("reachable", "Reachable", "reachable_icon"), + ("unreachable", "Unreachable", "unreachable_icon"), + ("unknown", "Unknown", "unknown_icon") + }, query.IncludeEmptyCategories)), + + new ConsoleFilterCategory( + CategoryId: "vexState", + DisplayName: "VEX State", + FilterType: "enum", + MultiSelect: true, + Options: BuildFilterOptions(findings, f => f.Summary.Vex?.State ?? "none", new (string, string, string?)[] + { + ("not_affected", "Not Affected", "not_affected_icon"), + ("fixed", "Fixed", "fixed_icon"), + ("under_investigation", "Under Investigation", "investigating_icon"), + ("affected", "Affected", "affected_icon"), + ("none", "No VEX", null) + }, query.IncludeEmptyCategories)), + + new ConsoleFilterCategory( + CategoryId: "kev", + DisplayName: "Known Exploited", + FilterType: "boolean", + MultiSelect: false, + Options: ImmutableArray.Create( + new ConsoleFilterOption("true", "KEV Listed", findings.Count(f => f.Summary.Kev), false, "Known Exploited Vulnerability", "kev_icon"), + new ConsoleFilterOption("false", "Not KEV", findings.Count(f => !f.Summary.Kev), true, null, null))) + }; + + // Filter by scope if specified + if (!string.IsNullOrWhiteSpace(query.Scope)) + { + categories = categories + .Where(c => string.Equals(c.CategoryId, query.Scope, StringComparison.OrdinalIgnoreCase)) + .ToList(); + } + + var filtersHash = ComputeFiltersHash(categories); + + var response = new ConsoleFiltersResponse( + Tenant: tenant, + GeneratedAt: DateTimeOffset.UtcNow, + FiltersHash: filtersHash, + Categories: categories.ToImmutableArray()); + + return Task.FromResult(response); + } + + private static ImmutableArray GenerateSampleTrend() + { + var points = new List(); + var baseDate = DateTimeOffset.Parse("2025-10-09T00:00:00Z"); + + for (int i = 0; i < 30; i++) + { + points.Add(new ConsoleDashboardTrendPoint( + Date: baseDate.AddDays(i), + Open: 2 + (i % 3), + Resolved: i % 5 == 0 ? 1 : 0, + New: i % 7 == 0 ? 1 : 0)); + } + + return points.ToImmutableArray(); + } + + private static ImmutableArray BuildFilterOptions( + List findings, + Func selector, + (string value, string displayName, string? icon)[] definitions, + bool includeEmpty) + { + var counts = findings + .GroupBy(selector, StringComparer.OrdinalIgnoreCase) + .ToDictionary(g => g.Key, g => g.Count(), StringComparer.OrdinalIgnoreCase); + + var options = new List(); + foreach (var (value, displayName, icon) in definitions) + { + var count = counts.TryGetValue(value, out var c) ? c : 0; + if (count > 0 || includeEmpty) + { + options.Add(new ConsoleFilterOption( + Value: value, + DisplayName: displayName, + Count: count, + IsDefault: false, + Description: null, + IconHint: icon)); + } + } + + return options.ToImmutableArray(); + } + + private static string ComputeFiltersHash(List categories) + { + using var sha256 = SHA256.Create(); + var joined = string.Join("|", categories.SelectMany(c => + c.Options.Select(o => $"{c.CategoryId}:{o.Value}:{o.Count}"))); + var hash = sha256.ComputeHash(Encoding.UTF8.GetBytes(joined)); + return $"sha256:{Convert.ToHexString(hash[..8]).ToLowerInvariant()}"; + } + private static bool MatchesSeverity(ConsoleVulnerabilityFindingDetail detail, ConsoleVulnerabilityQuery query) => query.Severity.Count == 0 || query.Severity.Any(sev => string.Equals(sev, detail.Summary.Severity, StringComparison.OrdinalIgnoreCase)); diff --git a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs index eceff365f..de651ae13 100644 --- a/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs @@ -1,10 +1,10 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Diagnostics; -using System.Globalization; -using System.Linq; -using System.Security.Claims; -using Microsoft.AspNetCore.Http; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using System.Security.Claims; +using Microsoft.AspNetCore.Http; using Microsoft.Extensions.Logging; using OpenIddict.Abstractions; using OpenIddict.Extensions; @@ -18,14 +18,14 @@ using StellaOps.Authority.RateLimiting; using StellaOps.Authority.Storage.Mongo.Documents; using StellaOps.Authority.Storage.Mongo.Stores; using StellaOps.Cryptography.Audit; - -namespace StellaOps.Authority.OpenIddict.Handlers; - -internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler -{ - private readonly IAuthorityIdentityProviderRegistry registry; - private readonly ActivitySource activitySource; - private readonly IAuthEventSink auditSink; + +namespace StellaOps.Authority.OpenIddict.Handlers; + +internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler +{ + private readonly IAuthorityIdentityProviderRegistry registry; + private readonly ActivitySource activitySource; + private readonly IAuthEventSink auditSink; private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor; private readonly IAuthorityClientStore clientStore; private readonly TimeProvider timeProvider; @@ -54,81 +54,81 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler() : requestedScopesInput.ToArray(); - context.Transaction.Properties[AuthorityOpenIddictConstants.AuditRequestedScopesProperty] = requestedScopes; - - if (string.IsNullOrWhiteSpace(clientId)) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - "Client identifier is required for password grant.", - clientId: null, - providerName: null, - tenant: null, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.InvalidClient, "Client identifier is required."); - logger.LogWarning("Password grant validation failed: missing client_id for {Username}.", context.Request.Username); - return; - } - + + var metadata = metadataAccessor.GetMetadata(); + var clientId = context.ClientId ?? context.Request.ClientId; + if (!string.IsNullOrWhiteSpace(clientId)) + { + metadataAccessor.SetClientId(clientId); + } + + var requestedScopesInput = context.Request.GetScopes(); + var requestedScopes = requestedScopesInput.IsDefaultOrEmpty ? Array.Empty() : requestedScopesInput.ToArray(); + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditRequestedScopesProperty] = requestedScopes; + + if (string.IsNullOrWhiteSpace(clientId)) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + "Client identifier is required for password grant.", + clientId: null, + providerName: null, + tenant: null, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidClient, "Client identifier is required."); + logger.LogWarning("Password grant validation failed: missing client_id for {Username}.", context.Request.Username); + return; + } + var clientDocument = await clientStore.FindByClientIdAsync(clientId, context.CancellationToken).ConfigureAwait(false); if (clientDocument is null || clientDocument.Disabled) { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - "Client is not permitted for password grant.", - clientId, - providerName: null, - tenant: null, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.InvalidClient, "The specified client is not permitted."); + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + "Client is not permitted for password grant.", + clientId, + providerName: null, + tenant: null, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidClient, "The specified client is not permitted."); logger.LogWarning("Password grant validation failed: client {ClientId} disabled or missing.", clientId); return; } @@ -184,72 +184,72 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler 0 && - !allowedGrantTypes.Any(static grant => string.Equals(grant, OpenIddictConstants.GrantTypes.Password, StringComparison.Ordinal))) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - "Password grant is not permitted for this client.", - clientId, - providerName: null, - tenant, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.UnauthorizedClient, "Password grant is not permitted for this client."); - logger.LogWarning("Password grant validation failed for client {ClientId}: grant type not allowed.", clientId); - return; - } - - var allowedScopes = ClientCredentialHandlerHelpers.Split(clientDocument.Properties, AuthorityClientMetadataKeys.AllowedScopes); - var resolvedScopes = ClientCredentialHandlerHelpers.ResolveGrantedScopes(allowedScopes, requestedScopes); - - if (resolvedScopes.InvalidScope is not null) - { - context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = resolvedScopes.InvalidScope; - - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - $"Scope '{resolvedScopes.InvalidScope}' is not permitted for this client.", - clientId, - providerName: null, - tenant, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.InvalidScope, $"Scope '{resolvedScopes.InvalidScope}' is not allowed for this client."); - logger.LogWarning("Password grant validation failed for client {ClientId}: scope {Scope} not permitted.", clientId, resolvedScopes.InvalidScope); - return; - } - + + var tenant = PasswordGrantAuditHelper.NormalizeTenant(clientDocument.Properties.TryGetValue(AuthorityClientMetadataKeys.Tenant, out var tenantValue) ? tenantValue : null); + if (!string.IsNullOrWhiteSpace(tenant)) + { + context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty] = tenant; + metadataAccessor.SetTenant(tenant); + activity?.SetTag("authority.tenant", tenant); + } + + var allowedGrantTypes = ClientCredentialHandlerHelpers.Split(clientDocument.Properties, AuthorityClientMetadataKeys.AllowedGrantTypes); + if (allowedGrantTypes.Count > 0 && + !allowedGrantTypes.Any(static grant => string.Equals(grant, OpenIddictConstants.GrantTypes.Password, StringComparison.Ordinal))) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + "Password grant is not permitted for this client.", + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.UnauthorizedClient, "Password grant is not permitted for this client."); + logger.LogWarning("Password grant validation failed for client {ClientId}: grant type not allowed.", clientId); + return; + } + + var allowedScopes = ClientCredentialHandlerHelpers.Split(clientDocument.Properties, AuthorityClientMetadataKeys.AllowedScopes); + var resolvedScopes = ClientCredentialHandlerHelpers.ResolveGrantedScopes(allowedScopes, requestedScopes); + + if (resolvedScopes.InvalidScope is not null) + { + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = resolvedScopes.InvalidScope; + + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + $"Scope '{resolvedScopes.InvalidScope}' is not permitted for this client.", + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidScope, $"Scope '{resolvedScopes.InvalidScope}' is not allowed for this client."); + logger.LogWarning("Password grant validation failed for client {ClientId}: scope {Scope} not permitted.", clientId, resolvedScopes.InvalidScope); + return; + } + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditGrantedScopesProperty] = resolvedScopes.Scopes; context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty] = resolvedScopes.Scopes; @@ -644,35 +644,6 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler(); + async ValueTask RejectPackApprovalAsync(string message) + { + activity?.SetTag("authority.pack_approval_denied", message); + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = StellaOpsScopes.PacksApprove; + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + message, + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: grantedScopesArray, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: packApprovalAuditProperties); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidRequest, message); + logger.LogWarning( + "Password grant validation failed for {Username}: {Message}.", + context.Request.Username, + message); + } + var packRunIdRaw = Normalize(context.Request.GetParameter(AuthorityOpenIddictConstants.PackRunIdParameterName)?.Value?.ToString()); if (string.IsNullOrWhiteSpace(packRunIdRaw)) { @@ -817,68 +817,68 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler -{ - private readonly IAuthorityIdentityProviderRegistry registry; - private readonly IAuthorityClientStore clientStore; - private readonly ActivitySource activitySource; - private readonly IAuthEventSink auditSink; - private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor; - private readonly TimeProvider timeProvider; + } +} + +internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler +{ + private readonly IAuthorityIdentityProviderRegistry registry; + private readonly IAuthorityClientStore clientStore; + private readonly ActivitySource activitySource; + private readonly IAuthEventSink auditSink; + private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor; + private readonly TimeProvider timeProvider; private readonly ILogger logger; + private readonly IAuthorityCredentialAuditContextAccessor? auditContextAccessor; public HandlePasswordGrantHandler( IAuthorityIdentityProviderRegistry registry, @@ -979,7 +980,8 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler logger) + ILogger logger, + IAuthorityCredentialAuditContextAccessor? auditContextAccessor = null) { this.registry = registry ?? throw new ArgumentNullException(nameof(registry)); this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore)); @@ -988,193 +990,194 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler() : requestedScopesInput.ToArray(); - var grantedScopes = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ClientGrantedScopesProperty, out var grantedValue) && - grantedValue is string[] grantedArray - ? (IReadOnlyList)grantedArray - : requestedScopes; - - AuthorityClientDocument? clientDocument = null; - if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ClientTransactionProperty, out var clientValue) && - clientValue is AuthorityClientDocument storedClient) - { - clientDocument = storedClient; - } - else if (!string.IsNullOrWhiteSpace(clientId)) - { - clientDocument = await clientStore.FindByClientIdAsync(clientId, context.CancellationToken).ConfigureAwait(false); - } - - if (clientDocument is null || clientDocument.Disabled) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - "Client is not permitted for password grant.", - clientId, - providerName: null, - tenant: null, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.InvalidClient, "The specified client is not permitted."); - logger.LogWarning("Password grant handling failed: client {ClientId} disabled or missing.", clientId); - return; - } - - context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty] = clientDocument; - - if (grantedScopes.Count == 0) - { - var allowedScopes = ClientCredentialHandlerHelpers.Split(clientDocument.Properties, AuthorityClientMetadataKeys.AllowedScopes); - var resolvedScopes = ClientCredentialHandlerHelpers.ResolveGrantedScopes(allowedScopes, requestedScopes); - grantedScopes = resolvedScopes.InvalidScope is null ? resolvedScopes.Scopes : Array.Empty(); - context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty] = grantedScopes; - } - - var tenant = PasswordGrantAuditHelper.NormalizeTenant( - clientDocument.Properties.TryGetValue(AuthorityClientMetadataKeys.Tenant, out var tenantValue) ? tenantValue : null); - if (!string.IsNullOrWhiteSpace(tenant)) - { - context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty] = tenant; - metadataAccessor.SetTenant(tenant); - activity?.SetTag("authority.tenant", tenant); - } - - var providerName = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ProviderTransactionProperty, out var value) - ? value as string - : null; - - AuthorityIdentityProviderMetadata? providerMetadata = null; - if (!string.IsNullOrWhiteSpace(providerName)) - { - if (!registry.TryGet(providerName!, out providerMetadata)) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - "Unable to resolve the requested identity provider.", - clientId, - providerName, - tenant, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.UnknownError, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.ServerError, "Unable to resolve the requested identity provider."); - logger.LogError("Password grant handling failed: provider {Provider} not found for user {Username}.", providerName, context.Request.Username); - return; - } - } - else - { - var selection = AuthorityIdentityProviderSelector.ResolvePasswordProvider(context.Request, registry); - if (!selection.Succeeded) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - selection.Description, - clientId, - providerName: null, - tenant, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(selection.Error!, selection.Description); - logger.LogWarning("Password grant handling rejected {Username}: {Reason}.", context.Request.Username, selection.Description); - return; - } - - providerMetadata = selection.Provider; - providerName = providerMetadata?.Name; - } - - if (providerMetadata is null) - { - throw new InvalidOperationException("No identity provider metadata resolved for password grant."); - } - - await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, context.CancellationToken).ConfigureAwait(false); - var provider = providerHandle.Provider; - - var username = context.Request.Username; - var password = context.Request.Password; - if (string.IsNullOrWhiteSpace(username) || string.IsNullOrEmpty(password)) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - "Both username and password must be provided.", - clientId, - providerMetadata.Name, - tenant, - user: null, - username: username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.InvalidRequest, "Both username and password must be provided."); - logger.LogWarning("Password grant handling rejected: missing credentials for {Username}.", username); - return; - } - - using var credentialAuditScope = auditContextAccessor.BeginScope(new AuthorityCredentialAuditContext( + + public async ValueTask HandleAsync(OpenIddictServerEvents.HandleTokenRequestContext context) + { + ArgumentNullException.ThrowIfNull(context); + + if (!context.Request.IsPasswordGrantType()) + { + return; + } + + using var activity = activitySource.StartActivity("authority.token.handle_password_grant", ActivityKind.Internal); + activity?.SetTag("authority.endpoint", "/token"); + activity?.SetTag("authority.grant_type", OpenIddictConstants.GrantTypes.Password); + activity?.SetTag("authority.username", context.Request.Username ?? string.Empty); + + var correlationId = PasswordGrantAuditHelper.EnsureCorrelationId(context.Transaction); + + var metadata = metadataAccessor.GetMetadata(); + var clientId = context.ClientId ?? context.Request.ClientId; + if (!string.IsNullOrWhiteSpace(clientId)) + { + metadataAccessor.SetClientId(clientId); + } + + var requestedScopesInput = context.Request.GetScopes(); + var requestedScopes = requestedScopesInput.IsDefaultOrEmpty ? Array.Empty() : requestedScopesInput.ToArray(); + var grantedScopes = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ClientGrantedScopesProperty, out var grantedValue) && + grantedValue is string[] grantedArray + ? (IReadOnlyList)grantedArray + : requestedScopes; + + AuthorityClientDocument? clientDocument = null; + if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ClientTransactionProperty, out var clientValue) && + clientValue is AuthorityClientDocument storedClient) + { + clientDocument = storedClient; + } + else if (!string.IsNullOrWhiteSpace(clientId)) + { + clientDocument = await clientStore.FindByClientIdAsync(clientId, context.CancellationToken).ConfigureAwait(false); + } + + if (clientDocument is null || clientDocument.Disabled) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + "Client is not permitted for password grant.", + clientId, + providerName: null, + tenant: null, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidClient, "The specified client is not permitted."); + logger.LogWarning("Password grant handling failed: client {ClientId} disabled or missing.", clientId); + return; + } + + context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty] = clientDocument; + + if (grantedScopes.Count == 0) + { + var allowedScopes = ClientCredentialHandlerHelpers.Split(clientDocument.Properties, AuthorityClientMetadataKeys.AllowedScopes); + var resolvedScopes = ClientCredentialHandlerHelpers.ResolveGrantedScopes(allowedScopes, requestedScopes); + grantedScopes = resolvedScopes.InvalidScope is null ? resolvedScopes.Scopes : Array.Empty(); + context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty] = grantedScopes; + } + + var tenant = PasswordGrantAuditHelper.NormalizeTenant( + clientDocument.Properties.TryGetValue(AuthorityClientMetadataKeys.Tenant, out var tenantValue) ? tenantValue : null); + if (!string.IsNullOrWhiteSpace(tenant)) + { + context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty] = tenant; + metadataAccessor.SetTenant(tenant); + activity?.SetTag("authority.tenant", tenant); + } + + var providerName = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ProviderTransactionProperty, out var value) + ? value as string + : null; + + AuthorityIdentityProviderMetadata? providerMetadata = null; + if (!string.IsNullOrWhiteSpace(providerName)) + { + if (!registry.TryGet(providerName!, out providerMetadata)) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + "Unable to resolve the requested identity provider.", + clientId, + providerName, + tenant, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.UnknownError, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.ServerError, "Unable to resolve the requested identity provider."); + logger.LogError("Password grant handling failed: provider {Provider} not found for user {Username}.", providerName, context.Request.Username); + return; + } + } + else + { + var selection = AuthorityIdentityProviderSelector.ResolvePasswordProvider(context.Request, registry); + if (!selection.Succeeded) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + selection.Description, + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(selection.Error!, selection.Description); + logger.LogWarning("Password grant handling rejected {Username}: {Reason}.", context.Request.Username, selection.Description); + return; + } + + providerMetadata = selection.Provider; + providerName = providerMetadata?.Name; + } + + if (providerMetadata is null) + { + throw new InvalidOperationException("No identity provider metadata resolved for password grant."); + } + + await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, context.CancellationToken).ConfigureAwait(false); + var provider = providerHandle.Provider; + + var username = context.Request.Username; + var password = context.Request.Password; + if (string.IsNullOrWhiteSpace(username) || string.IsNullOrEmpty(password)) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + "Both username and password must be provided.", + clientId, + providerMetadata.Name, + tenant, + user: null, + username: username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidRequest, "Both username and password must be provided."); + logger.LogWarning("Password grant handling rejected: missing credentials for {Username}.", username); + return; + } + + var credentialAuditScope = auditContextAccessor?.BeginScope(new AuthorityCredentialAuditContext( correlationId, clientId, tenant, @@ -1182,62 +1185,64 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler new[] { OpenIddictConstants.Destinations.AccessToken, OpenIddictConstants.Destinations.IdentityToken }, OpenIddictConstants.Claims.Name => new[] { OpenIddictConstants.Destinations.AccessToken, OpenIddictConstants.Destinations.IdentityToken }, OpenIddictConstants.Claims.PreferredUsername => new[] { OpenIddictConstants.Destinations.AccessToken }, - OpenIddictConstants.Claims.Role => new[] { OpenIddictConstants.Destinations.AccessToken }, - _ => new[] { OpenIddictConstants.Destinations.AccessToken } - }); - - var principal = new ClaimsPrincipal(identity); - principal.SetScopes(grantedScopes); - - var enrichmentContext = new AuthorityClaimsEnrichmentContext(provider.Context, verification.User, null); - await provider.ClaimsEnricher.EnrichAsync(identity, enrichmentContext, context.CancellationToken).ConfigureAwait(false); - + OpenIddictConstants.Claims.Role => new[] { OpenIddictConstants.Destinations.AccessToken }, + _ => new[] { OpenIddictConstants.Destinations.AccessToken } + }); + + var principal = new ClaimsPrincipal(identity); + principal.SetScopes(grantedScopes); + + var enrichmentContext = new AuthorityClaimsEnrichmentContext(provider.Context, verification.User, null); + await provider.ClaimsEnricher.EnrichAsync(identity, enrichmentContext, context.CancellationToken).ConfigureAwait(false); + var successAlreadyLogged = context.Transaction.Properties.TryGetValue( AuthorityOpenIddictConstants.AuditSuccessRecordedProperty, out var successValue) && successValue is true; @@ -1430,54 +1435,59 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler? scopes = null, - TimeSpan? retryAfter = null, - AuthorityCredentialFailureCode? failureCode = null, - IEnumerable? extraProperties = null, - string? eventType = null) - { - ArgumentNullException.ThrowIfNull(timeProvider); - ArgumentNullException.ThrowIfNull(transaction); - - var correlationId = EnsureCorrelationId(transaction); - var normalizedScopes = NormalizeScopes(scopes); - var normalizedTenant = NormalizeTenant(tenant); - var subject = BuildSubject(user, username, providerName); - var client = BuildClient(clientId, providerName); - var network = BuildNetwork(metadata); + } + finally + { + credentialAuditScope?.Dispose(); + } + } +} + +internal static class PasswordGrantAuditHelper +{ + internal static string EnsureCorrelationId(OpenIddictServerTransaction transaction) + { + ArgumentNullException.ThrowIfNull(transaction); + + if (transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.AuditCorrelationProperty, out var value) && + value is string existing && !string.IsNullOrWhiteSpace(existing)) + { + return existing; + } + + var correlation = Activity.Current?.TraceId.ToString() ?? + Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture); + + transaction.Properties[AuthorityOpenIddictConstants.AuditCorrelationProperty] = correlation; + return correlation; + } + + internal static AuthEventRecord CreatePasswordGrantRecord( + TimeProvider timeProvider, + OpenIddictServerTransaction transaction, + AuthorityRateLimiterMetadata? metadata, + AuthEventOutcome outcome, + string? reason = null, + string? clientId = null, + string? providerName = null, + string? tenant = null, + AuthorityUserDescriptor? user = null, + string? username = null, + IEnumerable? scopes = null, + TimeSpan? retryAfter = null, + AuthorityCredentialFailureCode? failureCode = null, + IEnumerable? extraProperties = null, + string? eventType = null) + { + ArgumentNullException.ThrowIfNull(timeProvider); + ArgumentNullException.ThrowIfNull(transaction); + + var correlationId = EnsureCorrelationId(transaction); + var normalizedScopes = NormalizeScopes(scopes); + var normalizedTenant = NormalizeTenant(tenant); + var subject = BuildSubject(user, username, providerName); + var client = BuildClient(clientId, providerName); + var network = BuildNetwork(metadata); var properties = BuildProperties(user, retryAfter, failureCode, extraProperties); var mutableProperties = properties.Count == 0 ? new List() @@ -1488,9 +1498,9 @@ internal static class PasswordGrantAuditHelper { EventType = string.IsNullOrWhiteSpace(eventType) ? "authority.password.grant" : eventType, OccurredAt = timeProvider.GetUtcNow(), - CorrelationId = correlationId, - Outcome = outcome, - Reason = Normalize(reason), + CorrelationId = correlationId, + Outcome = outcome, + Reason = Normalize(reason), Subject = subject, Client = client, Scopes = normalizedScopes, @@ -1499,143 +1509,143 @@ internal static class PasswordGrantAuditHelper Properties = mutableProperties.Count == 0 ? Array.Empty() : mutableProperties }; } - - private static AuthEventSubject? BuildSubject(AuthorityUserDescriptor? user, string? username, string? providerName) - { - var attributes = user?.Attributes; - var normalizedUsername = Normalize(username) ?? Normalize(user?.Username); - var subjectId = Normalize(user?.SubjectId); - var displayName = Normalize(user?.DisplayName); - var attributeProperties = BuildSubjectAttributes(attributes); - - if (string.IsNullOrWhiteSpace(subjectId) && - string.IsNullOrWhiteSpace(normalizedUsername) && - string.IsNullOrWhiteSpace(displayName) && - attributeProperties.Count == 0 && - string.IsNullOrWhiteSpace(providerName)) - { - return null; - } - - return new AuthEventSubject - { - SubjectId = ClassifiedString.Personal(subjectId), - Username = ClassifiedString.Personal(normalizedUsername), - DisplayName = ClassifiedString.Personal(displayName), - Realm = ClassifiedString.Public(Normalize(providerName)), - Attributes = attributeProperties - }; - } - - private static IReadOnlyList BuildSubjectAttributes(IReadOnlyDictionary? attributes) - { - if (attributes is null || attributes.Count == 0) - { - return Array.Empty(); - } - - var items = new List(attributes.Count); - foreach (var pair in attributes) - { - if (string.IsNullOrWhiteSpace(pair.Key)) - { - continue; - } - - items.Add(new AuthEventProperty - { - Name = pair.Key, - Value = ClassifiedString.Personal(Normalize(pair.Value)) - }); - } - - return items.Count == 0 ? Array.Empty() : items; - } - - private static AuthEventClient? BuildClient(string? clientId, string? providerName) - { - var normalizedClientId = Normalize(clientId); - var provider = Normalize(providerName); - - if (string.IsNullOrWhiteSpace(normalizedClientId) && string.IsNullOrWhiteSpace(provider)) - { - return null; - } - - return new AuthEventClient - { - ClientId = ClassifiedString.Personal(normalizedClientId), - Name = ClassifiedString.Empty, - Provider = ClassifiedString.Public(provider) - }; - } - - private static AuthEventNetwork? BuildNetwork(AuthorityRateLimiterMetadata? metadata) - { - var remote = Normalize(metadata?.RemoteIp); - var forwarded = Normalize(metadata?.ForwardedFor); - var userAgent = Normalize(metadata?.UserAgent); - - if (string.IsNullOrWhiteSpace(remote) && string.IsNullOrWhiteSpace(forwarded) && string.IsNullOrWhiteSpace(userAgent)) - { - return null; - } - - return new AuthEventNetwork - { - RemoteAddress = ClassifiedString.Personal(remote), - ForwardedFor = ClassifiedString.Personal(forwarded), - UserAgent = ClassifiedString.Personal(userAgent) - }; - } - - private static IReadOnlyList BuildProperties( - AuthorityUserDescriptor? user, - TimeSpan? retryAfter, - AuthorityCredentialFailureCode? failureCode, - IEnumerable? extraProperties) - { - var properties = new List(); - - if (failureCode is { } code) - { - properties.Add(new AuthEventProperty - { - Name = "failure.code", - Value = ClassifiedString.Public(code.ToString()) - }); - } - - if (retryAfter is { } retry && retry > TimeSpan.Zero) - { - var seconds = Math.Ceiling(retry.TotalSeconds).ToString(CultureInfo.InvariantCulture); - properties.Add(new AuthEventProperty - { - Name = "policy.retry_after_seconds", - Value = ClassifiedString.Public(seconds) - }); - } - - if (user is not null) - { - properties.Add(new AuthEventProperty - { - Name = "subject.requires_password_reset", - Value = ClassifiedString.Public(user.RequiresPasswordReset ? "true" : "false") - }); - } - - if (extraProperties is not null) - { - foreach (var property in extraProperties) - { - if (property is null || string.IsNullOrWhiteSpace(property.Name)) - { - continue; - } - - properties.Add(property); - } + + private static AuthEventSubject? BuildSubject(AuthorityUserDescriptor? user, string? username, string? providerName) + { + var attributes = user?.Attributes; + var normalizedUsername = Normalize(username) ?? Normalize(user?.Username); + var subjectId = Normalize(user?.SubjectId); + var displayName = Normalize(user?.DisplayName); + var attributeProperties = BuildSubjectAttributes(attributes); + + if (string.IsNullOrWhiteSpace(subjectId) && + string.IsNullOrWhiteSpace(normalizedUsername) && + string.IsNullOrWhiteSpace(displayName) && + attributeProperties.Count == 0 && + string.IsNullOrWhiteSpace(providerName)) + { + return null; + } + + return new AuthEventSubject + { + SubjectId = ClassifiedString.Personal(subjectId), + Username = ClassifiedString.Personal(normalizedUsername), + DisplayName = ClassifiedString.Personal(displayName), + Realm = ClassifiedString.Public(Normalize(providerName)), + Attributes = attributeProperties + }; + } + + private static IReadOnlyList BuildSubjectAttributes(IReadOnlyDictionary? attributes) + { + if (attributes is null || attributes.Count == 0) + { + return Array.Empty(); + } + + var items = new List(attributes.Count); + foreach (var pair in attributes) + { + if (string.IsNullOrWhiteSpace(pair.Key)) + { + continue; + } + + items.Add(new AuthEventProperty + { + Name = pair.Key, + Value = ClassifiedString.Personal(Normalize(pair.Value)) + }); + } + + return items.Count == 0 ? Array.Empty() : items; + } + + private static AuthEventClient? BuildClient(string? clientId, string? providerName) + { + var normalizedClientId = Normalize(clientId); + var provider = Normalize(providerName); + + if (string.IsNullOrWhiteSpace(normalizedClientId) && string.IsNullOrWhiteSpace(provider)) + { + return null; + } + + return new AuthEventClient + { + ClientId = ClassifiedString.Personal(normalizedClientId), + Name = ClassifiedString.Empty, + Provider = ClassifiedString.Public(provider) + }; + } + + private static AuthEventNetwork? BuildNetwork(AuthorityRateLimiterMetadata? metadata) + { + var remote = Normalize(metadata?.RemoteIp); + var forwarded = Normalize(metadata?.ForwardedFor); + var userAgent = Normalize(metadata?.UserAgent); + + if (string.IsNullOrWhiteSpace(remote) && string.IsNullOrWhiteSpace(forwarded) && string.IsNullOrWhiteSpace(userAgent)) + { + return null; + } + + return new AuthEventNetwork + { + RemoteAddress = ClassifiedString.Personal(remote), + ForwardedFor = ClassifiedString.Personal(forwarded), + UserAgent = ClassifiedString.Personal(userAgent) + }; + } + + private static IReadOnlyList BuildProperties( + AuthorityUserDescriptor? user, + TimeSpan? retryAfter, + AuthorityCredentialFailureCode? failureCode, + IEnumerable? extraProperties) + { + var properties = new List(); + + if (failureCode is { } code) + { + properties.Add(new AuthEventProperty + { + Name = "failure.code", + Value = ClassifiedString.Public(code.ToString()) + }); + } + + if (retryAfter is { } retry && retry > TimeSpan.Zero) + { + var seconds = Math.Ceiling(retry.TotalSeconds).ToString(CultureInfo.InvariantCulture); + properties.Add(new AuthEventProperty + { + Name = "policy.retry_after_seconds", + Value = ClassifiedString.Public(seconds) + }); + } + + if (user is not null) + { + properties.Add(new AuthEventProperty + { + Name = "subject.requires_password_reset", + Value = ClassifiedString.Public(user.RequiresPasswordReset ? "true" : "false") + }); + } + + if (extraProperties is not null) + { + foreach (var property in extraProperties) + { + if (property is null || string.IsNullOrWhiteSpace(property.Name)) + { + continue; + } + + properties.Add(property); + } } return properties.Count == 0 ? Array.Empty() : properties; @@ -1693,81 +1703,81 @@ internal static class PasswordGrantAuditHelper if (scopes is null) { return Array.Empty(); - } - - var normalized = scopes - .Where(static scope => !string.IsNullOrWhiteSpace(scope)) - .Select(static scope => scope.Trim()) - .Where(static scope => scope.Length > 0) - .Distinct(StringComparer.Ordinal) - .OrderBy(static scope => scope, StringComparer.Ordinal) - .ToArray(); - - return normalized.Length == 0 ? Array.Empty() : normalized; - } - + } + + var normalized = scopes + .Where(static scope => !string.IsNullOrWhiteSpace(scope)) + .Select(static scope => scope.Trim()) + .Where(static scope => scope.Length > 0) + .Distinct(StringComparer.Ordinal) + .OrderBy(static scope => scope, StringComparer.Ordinal) + .ToArray(); + + return normalized.Length == 0 ? Array.Empty() : normalized; + } + private static string? Normalize(string? value) => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); internal static string? NormalizeTenant(string? value) => string.IsNullOrWhiteSpace(value) ? null : value.Trim().ToLowerInvariant(); - - internal static AuthEventRecord CreateTamperRecord( - TimeProvider timeProvider, - OpenIddictServerTransaction transaction, - AuthorityRateLimiterMetadata? metadata, - string? clientId, - string? providerName, - string? tenant, - string? username, - IEnumerable? scopes, - IEnumerable unexpectedParameters) - { - var properties = new List - { - new() - { - Name = "request.tampered", - Value = ClassifiedString.Public("true") - } - }; - - if (unexpectedParameters is not null) - { - foreach (var parameter in unexpectedParameters) - { - if (string.IsNullOrWhiteSpace(parameter)) - { - continue; - } - - properties.Add(new AuthEventProperty - { - Name = "request.unexpected_parameter", - Value = ClassifiedString.Public(parameter) - }); - } - } - - var reason = unexpectedParameters is null - ? "Unexpected parameters supplied to password grant request." - : $"Unexpected parameters supplied to password grant request: {string.Join(", ", unexpectedParameters)}."; - - return CreatePasswordGrantRecord( - timeProvider, - transaction, - metadata, - AuthEventOutcome.Failure, - reason, - clientId, - providerName, - tenant, - user: null, - username, - scopes, - retryAfter: null, - failureCode: null, - extraProperties: properties, - eventType: "authority.token.tamper"); - } -} + + internal static AuthEventRecord CreateTamperRecord( + TimeProvider timeProvider, + OpenIddictServerTransaction transaction, + AuthorityRateLimiterMetadata? metadata, + string? clientId, + string? providerName, + string? tenant, + string? username, + IEnumerable? scopes, + IEnumerable unexpectedParameters) + { + var properties = new List + { + new() + { + Name = "request.tampered", + Value = ClassifiedString.Public("true") + } + }; + + if (unexpectedParameters is not null) + { + foreach (var parameter in unexpectedParameters) + { + if (string.IsNullOrWhiteSpace(parameter)) + { + continue; + } + + properties.Add(new AuthEventProperty + { + Name = "request.unexpected_parameter", + Value = ClassifiedString.Public(parameter) + }); + } + } + + var reason = unexpectedParameters is null + ? "Unexpected parameters supplied to password grant request." + : $"Unexpected parameters supplied to password grant request: {string.Join(", ", unexpectedParameters)}."; + + return CreatePasswordGrantRecord( + timeProvider, + transaction, + metadata, + AuthEventOutcome.Failure, + reason, + clientId, + providerName, + tenant, + user: null, + username, + scopes, + retryAfter: null, + failureCode: null, + extraProperties: properties, + eventType: "authority.token.tamper"); + } +} diff --git a/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs b/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs index abd877f32..51ca2ce91 100644 --- a/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs +++ b/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs @@ -40,6 +40,7 @@ internal static class CommandFactory root.Add(BuildScanCommand(services, options, verboseOption, cancellationToken)); root.Add(BuildRubyCommand(services, verboseOption, cancellationToken)); root.Add(BuildPhpCommand(services, verboseOption, cancellationToken)); + root.Add(BuildPythonCommand(services, verboseOption, cancellationToken)); root.Add(BuildDatabaseCommand(services, verboseOption, cancellationToken)); root.Add(BuildSourcesCommand(services, verboseOption, cancellationToken)); root.Add(BuildAocCommand(services, verboseOption, cancellationToken)); @@ -52,6 +53,7 @@ internal static class CommandFactory root.Add(BuildConfigCommand(options)); root.Add(BuildKmsCommand(services, verboseOption, cancellationToken)); root.Add(BuildVulnCommand(services, verboseOption, cancellationToken)); + root.Add(BuildVexCommand(services, options, verboseOption, cancellationToken)); root.Add(BuildCryptoCommand(services, verboseOption, cancellationToken)); root.Add(BuildAttestCommand(services, verboseOption, cancellationToken)); root.Add(BuildRiskProfileCommand(verboseOption, cancellationToken)); @@ -294,6 +296,61 @@ internal static class CommandFactory return php; } + private static Command BuildPythonCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) + { + var python = new Command("python", "Work with Python analyzer outputs."); + + var inspect = new Command("inspect", "Inspect a local Python workspace or virtual environment."); + var inspectRootOption = new Option("--root") + { + Description = "Path to the Python workspace (defaults to current directory)." + }; + var inspectFormatOption = new Option("--format") + { + Description = "Output format (table, json, or aoc)." + }; + var inspectSitePackagesOption = new Option("--site-packages") + { + Description = "Additional site-packages directories to scan." + }; + var inspectIncludeFrameworksOption = new Option("--include-frameworks") + { + Description = "Include detected framework hints in output." + }; + var inspectIncludeCapabilitiesOption = new Option("--include-capabilities") + { + Description = "Include detected capability signals in output." + }; + + inspect.Add(inspectRootOption); + inspect.Add(inspectFormatOption); + inspect.Add(inspectSitePackagesOption); + inspect.Add(inspectIncludeFrameworksOption); + inspect.Add(inspectIncludeCapabilitiesOption); + inspect.SetAction((parseResult, _) => + { + var root = parseResult.GetValue(inspectRootOption); + var format = parseResult.GetValue(inspectFormatOption) ?? "table"; + var sitePackages = parseResult.GetValue(inspectSitePackagesOption); + var includeFrameworks = parseResult.GetValue(inspectIncludeFrameworksOption); + var includeCapabilities = parseResult.GetValue(inspectIncludeCapabilitiesOption); + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandlePythonInspectAsync( + services, + root, + format, + sitePackages, + includeFrameworks, + includeCapabilities, + verbose, + cancellationToken); + }); + + python.Add(inspect); + return python; + } + private static Command BuildKmsCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) { var kms = new Command("kms", "Manage file-backed signing keys."); @@ -771,11 +828,109 @@ internal static class CommandFactory }); revoke.Add(verify); + // CLI-TEN-49-001: Token minting and delegation commands + var token = new Command("token", "Service account token operations (CLI-TEN-49-001)."); + + var mint = new Command("mint", "Mint a service account token."); + var serviceAccountOption = new Option("--service-account", new[] { "-s" }) + { + Description = "Service account identifier to mint token for.", + Required = true + }; + var mintScopesOption = new Option("--scope") + { + Description = "Scopes to include in the minted token (can be specified multiple times).", + AllowMultipleArgumentsPerToken = true + }; + var mintExpiresOption = new Option("--expires-in") + { + Description = "Token expiry in seconds (defaults to server default)." + }; + var mintTenantOption = new Option("--tenant") + { + Description = "Tenant context for the token." + }; + var mintReasonOption = new Option("--reason") + { + Description = "Audit reason for minting the token." + }; + var mintOutputOption = new Option("--raw") + { + Description = "Output only the raw token value (for automation)." + }; + mint.Add(serviceAccountOption); + mint.Add(mintScopesOption); + mint.Add(mintExpiresOption); + mint.Add(mintTenantOption); + mint.Add(mintReasonOption); + mint.Add(mintOutputOption); + mint.SetAction((parseResult, _) => + { + var serviceAccount = parseResult.GetValue(serviceAccountOption) ?? string.Empty; + var scopes = parseResult.GetValue(mintScopesOption) ?? Array.Empty(); + var expiresIn = parseResult.GetValue(mintExpiresOption); + var tenant = parseResult.GetValue(mintTenantOption); + var reason = parseResult.GetValue(mintReasonOption); + var raw = parseResult.GetValue(mintOutputOption); + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleTokenMintAsync(services, options, serviceAccount, scopes, expiresIn, tenant, reason, raw, verbose, cancellationToken); + }); + + var delegateCmd = new Command("delegate", "Delegate your token to another principal."); + var delegateToOption = new Option("--to") + { + Description = "Principal identifier to delegate to.", + Required = true + }; + var delegateScopesOption = new Option("--scope") + { + Description = "Scopes to include in the delegation (must be subset of current token).", + AllowMultipleArgumentsPerToken = true + }; + var delegateExpiresOption = new Option("--expires-in") + { + Description = "Delegation expiry in seconds (defaults to remaining token lifetime)." + }; + var delegateTenantOption = new Option("--tenant") + { + Description = "Tenant context for the delegation." + }; + var delegateReasonOption = new Option("--reason") + { + Description = "Audit reason for the delegation.", + Required = true + }; + var delegateRawOption = new Option("--raw") + { + Description = "Output only the raw token value (for automation)." + }; + delegateCmd.Add(delegateToOption); + delegateCmd.Add(delegateScopesOption); + delegateCmd.Add(delegateExpiresOption); + delegateCmd.Add(delegateTenantOption); + delegateCmd.Add(delegateReasonOption); + delegateCmd.Add(delegateRawOption); + delegateCmd.SetAction((parseResult, _) => + { + var delegateTo = parseResult.GetValue(delegateToOption) ?? string.Empty; + var scopes = parseResult.GetValue(delegateScopesOption) ?? Array.Empty(); + var expiresIn = parseResult.GetValue(delegateExpiresOption); + var tenant = parseResult.GetValue(delegateTenantOption); + var reason = parseResult.GetValue(delegateReasonOption); + var raw = parseResult.GetValue(delegateRawOption); + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleTokenDelegateAsync(services, options, delegateTo, scopes, expiresIn, tenant, reason, raw, verbose, cancellationToken); + }); + + token.Add(mint); + token.Add(delegateCmd); + auth.Add(login); auth.Add(logout); auth.Add(status); auth.Add(whoami); auth.Add(revoke); + auth.Add(token); return auth; } @@ -1787,9 +1942,866 @@ internal static class CommandFactory }); vuln.Add(observations); + + // CLI-VULN-29-001: Vulnerability explorer list command + var list = new Command("list", "List vulnerabilities with grouping, filters, and pagination."); + + var listVulnIdOption = new Option("--vuln-id") + { + Description = "Filter by vulnerability identifier (e.g., CVE-2024-1234)." + }; + var listSeverityOption = new Option("--severity") + { + Description = "Filter by severity level (critical, high, medium, low)." + }; + var listStatusOption = new Option("--status") + { + Description = "Filter by status (open, triaged, accepted, fixed, etc.)." + }; + var listPurlOption = new Option("--purl") + { + Description = "Filter by Package URL." + }; + var listCpeOption = new Option("--cpe") + { + Description = "Filter by CPE value." + }; + var listSbomIdOption = new Option("--sbom-id") + { + Description = "Filter by SBOM identifier." + }; + var listPolicyIdOption = new Option("--policy-id") + { + Description = "Filter by policy identifier." + }; + var listPolicyVersionOption = new Option("--policy-version") + { + Description = "Filter by policy version." + }; + var listGroupByOption = new Option("--group-by") + { + Description = "Group results by field (vuln, package, severity, status)." + }; + var listLimitOption = new Option("--limit") + { + Description = "Maximum number of items to return (default 50, max 500)." + }; + var listOffsetOption = new Option("--offset") + { + Description = "Number of items to skip for pagination." + }; + var listCursorOption = new Option("--cursor") + { + Description = "Opaque cursor token returned by a previous page." + }; + var listTenantOption = new Option("--tenant") + { + Description = "Tenant identifier (overrides profile/environment)." + }; + var listJsonOption = new Option("--json") + { + Description = "Emit raw JSON payload instead of a table." + }; + var listCsvOption = new Option("--csv") + { + Description = "Emit CSV format instead of a table." + }; + + list.Add(listVulnIdOption); + list.Add(listSeverityOption); + list.Add(listStatusOption); + list.Add(listPurlOption); + list.Add(listCpeOption); + list.Add(listSbomIdOption); + list.Add(listPolicyIdOption); + list.Add(listPolicyVersionOption); + list.Add(listGroupByOption); + list.Add(listLimitOption); + list.Add(listOffsetOption); + list.Add(listCursorOption); + list.Add(listTenantOption); + list.Add(listJsonOption); + list.Add(listCsvOption); + list.Add(verboseOption); + + list.SetAction((parseResult, _) => + { + var vulnId = parseResult.GetValue(listVulnIdOption); + var severity = parseResult.GetValue(listSeverityOption); + var status = parseResult.GetValue(listStatusOption); + var purl = parseResult.GetValue(listPurlOption); + var cpe = parseResult.GetValue(listCpeOption); + var sbomId = parseResult.GetValue(listSbomIdOption); + var policyId = parseResult.GetValue(listPolicyIdOption); + var policyVersion = parseResult.GetValue(listPolicyVersionOption); + var groupBy = parseResult.GetValue(listGroupByOption); + var limit = parseResult.GetValue(listLimitOption); + var offset = parseResult.GetValue(listOffsetOption); + var cursor = parseResult.GetValue(listCursorOption); + var tenant = parseResult.GetValue(listTenantOption); + var emitJson = parseResult.GetValue(listJsonOption); + var emitCsv = parseResult.GetValue(listCsvOption); + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleVulnListAsync( + services, + vulnId, + severity, + status, + purl, + cpe, + sbomId, + policyId, + policyVersion, + groupBy, + limit, + offset, + cursor, + tenant, + emitJson, + emitCsv, + verbose, + cancellationToken); + }); + + vuln.Add(list); + + // CLI-VULN-29-002: Vulnerability show command + var show = new Command("show", "Display detailed vulnerability information including evidence, rationale, paths, and ledger."); + + var showVulnIdArg = new Argument("vulnerability-id") + { + Description = "Vulnerability identifier (e.g., CVE-2024-1234)." + }; + var showTenantOption = new Option("--tenant") + { + Description = "Tenant identifier (overrides profile/environment)." + }; + var showJsonOption = new Option("--json") + { + Description = "Emit raw JSON payload instead of formatted output." + }; + + show.Add(showVulnIdArg); + show.Add(showTenantOption); + show.Add(showJsonOption); + show.Add(verboseOption); + + show.SetAction((parseResult, _) => + { + var vulnIdVal = parseResult.GetValue(showVulnIdArg) ?? string.Empty; + var tenantVal = parseResult.GetValue(showTenantOption); + var emitJson = parseResult.GetValue(showJsonOption); + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleVulnShowAsync( + services, + vulnIdVal, + tenantVal, + emitJson, + verbose, + cancellationToken); + }); + + vuln.Add(show); + + // CLI-VULN-29-003: Workflow commands + // Common options for workflow commands + var wfVulnIdsOption = new Option("--vuln-id") + { + Description = "Vulnerability IDs to operate on (repeatable).", + Arity = ArgumentArity.ZeroOrMore + }; + var wfFilterSeverityOption = new Option("--filter-severity") + { + Description = "Filter vulnerabilities by severity (critical, high, medium, low)." + }; + var wfFilterStatusOption = new Option("--filter-status") + { + Description = "Filter vulnerabilities by current status." + }; + var wfFilterPurlOption = new Option("--filter-purl") + { + Description = "Filter vulnerabilities by Package URL." + }; + var wfFilterSbomOption = new Option("--filter-sbom") + { + Description = "Filter vulnerabilities by SBOM ID." + }; + var wfTenantOption = new Option("--tenant") + { + Description = "Tenant identifier (overrides profile/environment)." + }; + var wfIdempotencyKeyOption = new Option("--idempotency-key") + { + Description = "Idempotency key for retry-safe operations." + }; + var wfJsonOption = new Option("--json") + { + Description = "Emit raw JSON response." + }; + + // assign command + var assign = new Command("assign", "Assign vulnerabilities to a user."); + var assignAssigneeArg = new Argument("assignee") { Description = "Username or email to assign to." }; + assign.Add(assignAssigneeArg); + assign.Add(wfVulnIdsOption); + assign.Add(wfFilterSeverityOption); + assign.Add(wfFilterStatusOption); + assign.Add(wfFilterPurlOption); + assign.Add(wfFilterSbomOption); + assign.Add(wfTenantOption); + assign.Add(wfIdempotencyKeyOption); + assign.Add(wfJsonOption); + assign.Add(verboseOption); + assign.SetAction((parseResult, _) => CommandHandlers.HandleVulnWorkflowAsync( + services, "assign", parseResult.GetValue(wfVulnIdsOption) ?? Array.Empty(), + parseResult.GetValue(wfFilterSeverityOption), parseResult.GetValue(wfFilterStatusOption), + parseResult.GetValue(wfFilterPurlOption), parseResult.GetValue(wfFilterSbomOption), + parseResult.GetValue(wfTenantOption), parseResult.GetValue(wfIdempotencyKeyOption), + parseResult.GetValue(wfJsonOption), parseResult.GetValue(verboseOption), + parseResult.GetValue(assignAssigneeArg), null, null, null, null, cancellationToken)); + vuln.Add(assign); + + // comment command + var comment = new Command("comment", "Add a comment to vulnerabilities."); + var commentTextArg = new Argument("text") { Description = "Comment text to add." }; + comment.Add(commentTextArg); + comment.Add(wfVulnIdsOption); + comment.Add(wfFilterSeverityOption); + comment.Add(wfFilterStatusOption); + comment.Add(wfFilterPurlOption); + comment.Add(wfFilterSbomOption); + comment.Add(wfTenantOption); + comment.Add(wfIdempotencyKeyOption); + comment.Add(wfJsonOption); + comment.Add(verboseOption); + comment.SetAction((parseResult, _) => CommandHandlers.HandleVulnWorkflowAsync( + services, "comment", parseResult.GetValue(wfVulnIdsOption) ?? Array.Empty(), + parseResult.GetValue(wfFilterSeverityOption), parseResult.GetValue(wfFilterStatusOption), + parseResult.GetValue(wfFilterPurlOption), parseResult.GetValue(wfFilterSbomOption), + parseResult.GetValue(wfTenantOption), parseResult.GetValue(wfIdempotencyKeyOption), + parseResult.GetValue(wfJsonOption), parseResult.GetValue(verboseOption), + null, parseResult.GetValue(commentTextArg), null, null, null, cancellationToken)); + vuln.Add(comment); + + // accept-risk command + var acceptRisk = new Command("accept-risk", "Accept risk for vulnerabilities with justification."); + var acceptJustificationArg = new Argument("justification") { Description = "Justification for accepting the risk." }; + var acceptDueDateOption = new Option("--due-date") { Description = "Due date for risk review (ISO-8601)." }; + acceptRisk.Add(acceptJustificationArg); + acceptRisk.Add(acceptDueDateOption); + acceptRisk.Add(wfVulnIdsOption); + acceptRisk.Add(wfFilterSeverityOption); + acceptRisk.Add(wfFilterStatusOption); + acceptRisk.Add(wfFilterPurlOption); + acceptRisk.Add(wfFilterSbomOption); + acceptRisk.Add(wfTenantOption); + acceptRisk.Add(wfIdempotencyKeyOption); + acceptRisk.Add(wfJsonOption); + acceptRisk.Add(verboseOption); + acceptRisk.SetAction((parseResult, _) => CommandHandlers.HandleVulnWorkflowAsync( + services, "accept_risk", parseResult.GetValue(wfVulnIdsOption) ?? Array.Empty(), + parseResult.GetValue(wfFilterSeverityOption), parseResult.GetValue(wfFilterStatusOption), + parseResult.GetValue(wfFilterPurlOption), parseResult.GetValue(wfFilterSbomOption), + parseResult.GetValue(wfTenantOption), parseResult.GetValue(wfIdempotencyKeyOption), + parseResult.GetValue(wfJsonOption), parseResult.GetValue(verboseOption), + null, null, parseResult.GetValue(acceptJustificationArg), parseResult.GetValue(acceptDueDateOption), null, cancellationToken)); + vuln.Add(acceptRisk); + + // verify-fix command + var verifyFix = new Command("verify-fix", "Mark vulnerabilities as fixed and verified."); + var fixVersionOption = new Option("--fix-version") { Description = "Version where the fix was applied." }; + var fixCommentOption = new Option("--comment") { Description = "Optional comment about the fix." }; + verifyFix.Add(fixVersionOption); + verifyFix.Add(fixCommentOption); + verifyFix.Add(wfVulnIdsOption); + verifyFix.Add(wfFilterSeverityOption); + verifyFix.Add(wfFilterStatusOption); + verifyFix.Add(wfFilterPurlOption); + verifyFix.Add(wfFilterSbomOption); + verifyFix.Add(wfTenantOption); + verifyFix.Add(wfIdempotencyKeyOption); + verifyFix.Add(wfJsonOption); + verifyFix.Add(verboseOption); + verifyFix.SetAction((parseResult, _) => CommandHandlers.HandleVulnWorkflowAsync( + services, "verify_fix", parseResult.GetValue(wfVulnIdsOption) ?? Array.Empty(), + parseResult.GetValue(wfFilterSeverityOption), parseResult.GetValue(wfFilterStatusOption), + parseResult.GetValue(wfFilterPurlOption), parseResult.GetValue(wfFilterSbomOption), + parseResult.GetValue(wfTenantOption), parseResult.GetValue(wfIdempotencyKeyOption), + parseResult.GetValue(wfJsonOption), parseResult.GetValue(verboseOption), + null, parseResult.GetValue(fixCommentOption), null, null, parseResult.GetValue(fixVersionOption), cancellationToken)); + vuln.Add(verifyFix); + + // target-fix command + var targetFix = new Command("target-fix", "Set a target fix date for vulnerabilities."); + var targetDueDateArg = new Argument("due-date") { Description = "Target fix date (ISO-8601 format, e.g., 2024-12-31)." }; + var targetCommentOption = new Option("--comment") { Description = "Optional comment about the target." }; + targetFix.Add(targetDueDateArg); + targetFix.Add(targetCommentOption); + targetFix.Add(wfVulnIdsOption); + targetFix.Add(wfFilterSeverityOption); + targetFix.Add(wfFilterStatusOption); + targetFix.Add(wfFilterPurlOption); + targetFix.Add(wfFilterSbomOption); + targetFix.Add(wfTenantOption); + targetFix.Add(wfIdempotencyKeyOption); + targetFix.Add(wfJsonOption); + targetFix.Add(verboseOption); + targetFix.SetAction((parseResult, _) => CommandHandlers.HandleVulnWorkflowAsync( + services, "target_fix", parseResult.GetValue(wfVulnIdsOption) ?? Array.Empty(), + parseResult.GetValue(wfFilterSeverityOption), parseResult.GetValue(wfFilterStatusOption), + parseResult.GetValue(wfFilterPurlOption), parseResult.GetValue(wfFilterSbomOption), + parseResult.GetValue(wfTenantOption), parseResult.GetValue(wfIdempotencyKeyOption), + parseResult.GetValue(wfJsonOption), parseResult.GetValue(verboseOption), + null, parseResult.GetValue(targetCommentOption), null, parseResult.GetValue(targetDueDateArg), null, cancellationToken)); + vuln.Add(targetFix); + + // reopen command + var reopen = new Command("reopen", "Reopen closed or accepted vulnerabilities."); + var reopenCommentOption = new Option("--comment") { Description = "Reason for reopening." }; + reopen.Add(reopenCommentOption); + reopen.Add(wfVulnIdsOption); + reopen.Add(wfFilterSeverityOption); + reopen.Add(wfFilterStatusOption); + reopen.Add(wfFilterPurlOption); + reopen.Add(wfFilterSbomOption); + reopen.Add(wfTenantOption); + reopen.Add(wfIdempotencyKeyOption); + reopen.Add(wfJsonOption); + reopen.Add(verboseOption); + reopen.SetAction((parseResult, _) => CommandHandlers.HandleVulnWorkflowAsync( + services, "reopen", parseResult.GetValue(wfVulnIdsOption) ?? Array.Empty(), + parseResult.GetValue(wfFilterSeverityOption), parseResult.GetValue(wfFilterStatusOption), + parseResult.GetValue(wfFilterPurlOption), parseResult.GetValue(wfFilterSbomOption), + parseResult.GetValue(wfTenantOption), parseResult.GetValue(wfIdempotencyKeyOption), + parseResult.GetValue(wfJsonOption), parseResult.GetValue(verboseOption), + null, parseResult.GetValue(reopenCommentOption), null, null, null, cancellationToken)); + vuln.Add(reopen); + + // CLI-VULN-29-004: simulate command + var simulate = new Command("simulate", "Simulate policy/VEX changes and show delta summaries."); + var simPolicyIdOption = new Option("--policy-id") + { + Description = "Policy ID to simulate (uses different version or a new policy)." + }; + var simPolicyVersionOption = new Option("--policy-version") + { + Description = "Policy version to simulate against." + }; + var simVexOverrideOption = new Option("--vex-override") + { + Description = "VEX status overrides in format vulnId=status (e.g., CVE-2024-1234=not_affected).", + AllowMultipleArgumentsPerToken = true + }; + var simSeverityThresholdOption = new Option("--severity-threshold") + { + Description = "Severity threshold for simulation (critical, high, medium, low)." + }; + var simSbomIdsOption = new Option("--sbom-id") + { + Description = "SBOM IDs to include in simulation scope.", + AllowMultipleArgumentsPerToken = true + }; + var simOutputMarkdownOption = new Option("--markdown") + { + Description = "Include Markdown report suitable for CI pipelines." + }; + var simChangedOnlyOption = new Option("--changed-only") + { + Description = "Only show items that changed." + }; + var simTenantOption = new Option("--tenant") + { + Description = "Tenant identifier for multi-tenant environments." + }; + var simJsonOption = new Option("--json") + { + Description = "Output as JSON for automation." + }; + var simOutputFileOption = new Option("--output") + { + Description = "Write Markdown report to file instead of console." + }; + simulate.Add(simPolicyIdOption); + simulate.Add(simPolicyVersionOption); + simulate.Add(simVexOverrideOption); + simulate.Add(simSeverityThresholdOption); + simulate.Add(simSbomIdsOption); + simulate.Add(simOutputMarkdownOption); + simulate.Add(simChangedOnlyOption); + simulate.Add(simTenantOption); + simulate.Add(simJsonOption); + simulate.Add(simOutputFileOption); + simulate.Add(verboseOption); + simulate.SetAction((parseResult, _) => CommandHandlers.HandleVulnSimulateAsync( + services, + parseResult.GetValue(simPolicyIdOption), + parseResult.GetValue(simPolicyVersionOption), + parseResult.GetValue(simVexOverrideOption) ?? Array.Empty(), + parseResult.GetValue(simSeverityThresholdOption), + parseResult.GetValue(simSbomIdsOption) ?? Array.Empty(), + parseResult.GetValue(simOutputMarkdownOption), + parseResult.GetValue(simChangedOnlyOption), + parseResult.GetValue(simTenantOption), + parseResult.GetValue(simJsonOption), + parseResult.GetValue(simOutputFileOption), + parseResult.GetValue(verboseOption), + cancellationToken)); + vuln.Add(simulate); + + // CLI-VULN-29-005: export command with verify subcommand + var export = new Command("export", "Export vulnerability evidence bundles."); + var expVulnIdsOption = new Option("--vuln-id") + { + Description = "Vulnerability IDs to include in export.", + AllowMultipleArgumentsPerToken = true + }; + var expSbomIdsOption = new Option("--sbom-id") + { + Description = "SBOM IDs to include in export scope.", + AllowMultipleArgumentsPerToken = true + }; + var expPolicyIdOption = new Option("--policy-id") + { + Description = "Policy ID for export filtering." + }; + var expFormatOption = new Option("--format") + { + Description = "Export format (ndjson, json).", + DefaultValueFactory = _ => "ndjson" + }; + var expIncludeEvidenceOption = new Option("--include-evidence") + { + Description = "Include evidence data in export (default: true).", + DefaultValueFactory = _ => true + }; + var expIncludeLedgerOption = new Option("--include-ledger") + { + Description = "Include workflow ledger in export (default: true).", + DefaultValueFactory = _ => true + }; + var expSignedOption = new Option("--signed") + { + Description = "Request signed export bundle (default: true).", + DefaultValueFactory = _ => true + }; + var expOutputOption = new Option("--output") + { + Description = "Output file path for the export bundle.", + Required = true + }; + var expTenantOption = new Option("--tenant") + { + Description = "Tenant identifier for multi-tenant environments." + }; + export.Add(expVulnIdsOption); + export.Add(expSbomIdsOption); + export.Add(expPolicyIdOption); + export.Add(expFormatOption); + export.Add(expIncludeEvidenceOption); + export.Add(expIncludeLedgerOption); + export.Add(expSignedOption); + export.Add(expOutputOption); + export.Add(expTenantOption); + export.Add(verboseOption); + export.SetAction((parseResult, _) => CommandHandlers.HandleVulnExportAsync( + services, + parseResult.GetValue(expVulnIdsOption) ?? Array.Empty(), + parseResult.GetValue(expSbomIdsOption) ?? Array.Empty(), + parseResult.GetValue(expPolicyIdOption), + parseResult.GetValue(expFormatOption) ?? "ndjson", + parseResult.GetValue(expIncludeEvidenceOption), + parseResult.GetValue(expIncludeLedgerOption), + parseResult.GetValue(expSignedOption), + parseResult.GetValue(expOutputOption) ?? "", + parseResult.GetValue(expTenantOption), + parseResult.GetValue(verboseOption), + cancellationToken)); + + // verify subcommand + var verify = new Command("verify", "Verify signature and digest of an exported vulnerability bundle."); + var verifyFileArg = new Argument("file") + { + Description = "Path to the export bundle file to verify." + }; + var verifyExpectedDigestOption = new Option("--expected-digest") + { + Description = "Expected digest to verify (sha256:hex format)." + }; + var verifyPublicKeyOption = new Option("--public-key") + { + Description = "Path to public key file for signature verification." + }; + verify.Add(verifyFileArg); + verify.Add(verifyExpectedDigestOption); + verify.Add(verifyPublicKeyOption); + verify.Add(verboseOption); + verify.SetAction((parseResult, _) => CommandHandlers.HandleVulnExportVerifyAsync( + services, + parseResult.GetValue(verifyFileArg) ?? "", + parseResult.GetValue(verifyExpectedDigestOption), + parseResult.GetValue(verifyPublicKeyOption), + parseResult.GetValue(verboseOption), + cancellationToken)); + export.Add(verify); + + vuln.Add(export); + return vuln; } + // CLI-VEX-30-001: VEX consensus commands + private static Command BuildVexCommand(IServiceProvider services, StellaOpsCliOptions options, Option verboseOption, CancellationToken cancellationToken) + { + var vex = new Command("vex", "Manage VEX (Vulnerability Exploitability eXchange) consensus data."); + + var consensus = new Command("consensus", "Explore VEX consensus decisions."); + var list = new Command("list", "List VEX consensus decisions with filters and pagination."); + + var vulnIdOption = new Option("--vuln-id") + { + Description = "Filter by vulnerability identifier (e.g., CVE-2024-1234)." + }; + var productKeyOption = new Option("--product-key") + { + Description = "Filter by product key." + }; + var purlOption = new Option("--purl") + { + Description = "Filter by Package URL." + }; + var statusOption = new Option("--status") + { + Description = "Filter by VEX status (affected, not_affected, fixed, under_investigation)." + }; + var policyVersionOption = new Option("--policy-version") + { + Description = "Filter by policy version." + }; + var limitOption = new Option("--limit") + { + Description = "Maximum number of results (default 50)." + }; + var offsetOption = new Option("--offset") + { + Description = "Number of results to skip for pagination." + }; + var tenantOption = new Option("--tenant", new[] { "-t" }) + { + Description = "Tenant identifier. Overrides profile and STELLAOPS_TENANT environment variable." + }; + var jsonOption = new Option("--json") + { + Description = "Emit raw JSON payload instead of a table." + }; + var csvOption = new Option("--csv") + { + Description = "Emit CSV format instead of a table." + }; + + list.Add(vulnIdOption); + list.Add(productKeyOption); + list.Add(purlOption); + list.Add(statusOption); + list.Add(policyVersionOption); + list.Add(limitOption); + list.Add(offsetOption); + list.Add(tenantOption); + list.Add(jsonOption); + list.Add(csvOption); + + list.SetAction((parseResult, _) => + { + var vulnId = parseResult.GetValue(vulnIdOption); + var productKey = parseResult.GetValue(productKeyOption); + var purl = parseResult.GetValue(purlOption); + var status = parseResult.GetValue(statusOption); + var policyVersion = parseResult.GetValue(policyVersionOption); + var limit = parseResult.GetValue(limitOption); + var offset = parseResult.GetValue(offsetOption); + var tenant = parseResult.GetValue(tenantOption); + var emitJson = parseResult.GetValue(jsonOption); + var emitCsv = parseResult.GetValue(csvOption); + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleVexConsensusListAsync( + services, + vulnId, + productKey, + purl, + status, + policyVersion, + limit, + offset, + tenant, + emitJson, + emitCsv, + verbose, + cancellationToken); + }); + + // CLI-VEX-30-002: show subcommand + var show = new Command("show", "Display detailed VEX consensus including quorum, evidence, rationale, and signature status."); + + var showVulnIdArg = new Argument("vulnerability-id") + { + Description = "Vulnerability identifier (e.g., CVE-2024-1234)." + }; + var showProductKeyArg = new Argument("product-key") + { + Description = "Product key identifying the affected component." + }; + var showTenantOption = new Option("--tenant", new[] { "-t" }) + { + Description = "Tenant identifier. Overrides profile and STELLAOPS_TENANT environment variable." + }; + var showJsonOption = new Option("--json") + { + Description = "Emit raw JSON payload instead of formatted output." + }; + + show.Add(showVulnIdArg); + show.Add(showProductKeyArg); + show.Add(showTenantOption); + show.Add(showJsonOption); + + show.SetAction((parseResult, _) => + { + var vulnId = parseResult.GetValue(showVulnIdArg) ?? string.Empty; + var productKey = parseResult.GetValue(showProductKeyArg) ?? string.Empty; + var tenant = parseResult.GetValue(showTenantOption); + var emitJson = parseResult.GetValue(showJsonOption); + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleVexConsensusShowAsync( + services, + vulnId, + productKey, + tenant, + emitJson, + verbose, + cancellationToken); + }); + + consensus.Add(list); + consensus.Add(show); + vex.Add(consensus); + + // CLI-VEX-30-003: simulate command + var simulate = new Command("simulate", "Simulate VEX consensus with trust/threshold overrides to preview changes."); + + var simVulnIdOption = new Option("--vuln-id") + { + Description = "Filter by vulnerability identifier." + }; + var simProductKeyOption = new Option("--product-key") + { + Description = "Filter by product key." + }; + var simPurlOption = new Option("--purl") + { + Description = "Filter by Package URL." + }; + var simThresholdOption = new Option("--threshold") + { + Description = "Override the weight threshold for consensus (0.0-1.0)." + }; + var simQuorumOption = new Option("--quorum") + { + Description = "Override the minimum quorum requirement." + }; + var simTrustOption = new Option("--trust", new[] { "-w" }) + { + Description = "Trust weight override in format provider=weight (repeatable). Example: --trust nvd=1.5 --trust vendor=2.0", + Arity = ArgumentArity.ZeroOrMore + }; + var simExcludeOption = new Option("--exclude") + { + Description = "Exclude provider from simulation (repeatable).", + Arity = ArgumentArity.ZeroOrMore + }; + var simIncludeOnlyOption = new Option("--include-only") + { + Description = "Include only these providers (repeatable).", + Arity = ArgumentArity.ZeroOrMore + }; + var simTenantOption = new Option("--tenant", new[] { "-t" }) + { + Description = "Tenant identifier." + }; + var simJsonOption = new Option("--json") + { + Description = "Emit raw JSON output with full diff details." + }; + var simChangedOnlyOption = new Option("--changed-only") + { + Description = "Show only items where the status changed." + }; + + simulate.Add(simVulnIdOption); + simulate.Add(simProductKeyOption); + simulate.Add(simPurlOption); + simulate.Add(simThresholdOption); + simulate.Add(simQuorumOption); + simulate.Add(simTrustOption); + simulate.Add(simExcludeOption); + simulate.Add(simIncludeOnlyOption); + simulate.Add(simTenantOption); + simulate.Add(simJsonOption); + simulate.Add(simChangedOnlyOption); + + simulate.SetAction((parseResult, _) => + { + var vulnId = parseResult.GetValue(simVulnIdOption); + var productKey = parseResult.GetValue(simProductKeyOption); + var purl = parseResult.GetValue(simPurlOption); + var threshold = parseResult.GetValue(simThresholdOption); + var quorum = parseResult.GetValue(simQuorumOption); + var trustOverrides = parseResult.GetValue(simTrustOption) ?? Array.Empty(); + var exclude = parseResult.GetValue(simExcludeOption) ?? Array.Empty(); + var includeOnly = parseResult.GetValue(simIncludeOnlyOption) ?? Array.Empty(); + var tenant = parseResult.GetValue(simTenantOption); + var emitJson = parseResult.GetValue(simJsonOption); + var changedOnly = parseResult.GetValue(simChangedOnlyOption); + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleVexSimulateAsync( + services, + vulnId, + productKey, + purl, + threshold, + quorum, + trustOverrides, + exclude, + includeOnly, + tenant, + emitJson, + changedOnly, + verbose, + cancellationToken); + }); + + vex.Add(simulate); + + // CLI-VEX-30-004: export command + var export = new Command("export", "Export VEX consensus data as NDJSON bundle with optional signature."); + + var expVulnIdsOption = new Option("--vuln-id") + { + Description = "Filter by vulnerability identifiers (repeatable).", + Arity = ArgumentArity.ZeroOrMore + }; + var expProductKeysOption = new Option("--product-key") + { + Description = "Filter by product keys (repeatable).", + Arity = ArgumentArity.ZeroOrMore + }; + var expPurlsOption = new Option("--purl") + { + Description = "Filter by Package URLs (repeatable).", + Arity = ArgumentArity.ZeroOrMore + }; + var expStatusesOption = new Option("--status") + { + Description = "Filter by VEX statuses (repeatable).", + Arity = ArgumentArity.ZeroOrMore + }; + var expPolicyVersionOption = new Option("--policy-version") + { + Description = "Filter by policy version." + }; + var expOutputOption = new Option("--output", new[] { "-o" }) + { + Description = "Output file path for the NDJSON bundle.", + Required = true + }; + var expUnsignedOption = new Option("--unsigned") + { + Description = "Generate unsigned export (default is signed)." + }; + var expTenantOption = new Option("--tenant", new[] { "-t" }) + { + Description = "Tenant identifier." + }; + + export.Add(expVulnIdsOption); + export.Add(expProductKeysOption); + export.Add(expPurlsOption); + export.Add(expStatusesOption); + export.Add(expPolicyVersionOption); + export.Add(expOutputOption); + export.Add(expUnsignedOption); + export.Add(expTenantOption); + + export.SetAction((parseResult, _) => + { + var vulnIds = parseResult.GetValue(expVulnIdsOption) ?? Array.Empty(); + var productKeys = parseResult.GetValue(expProductKeysOption) ?? Array.Empty(); + var purls = parseResult.GetValue(expPurlsOption) ?? Array.Empty(); + var statuses = parseResult.GetValue(expStatusesOption) ?? Array.Empty(); + var policyVersion = parseResult.GetValue(expPolicyVersionOption); + var output = parseResult.GetValue(expOutputOption) ?? string.Empty; + var unsigned = parseResult.GetValue(expUnsignedOption); + var tenant = parseResult.GetValue(expTenantOption); + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleVexExportAsync( + services, + vulnIds, + productKeys, + purls, + statuses, + policyVersion, + output, + !unsigned, + tenant, + verbose, + cancellationToken); + }); + + // verify subcommand for signature verification + var verify = new Command("verify", "Verify signature and digest of a VEX export bundle."); + + var verifyFileArg = new Argument("file") + { + Description = "Path to the NDJSON export file to verify." + }; + var verifyDigestOption = new Option("--digest") + { + Description = "Expected SHA-256 digest to verify." + }; + var verifyKeyOption = new Option("--public-key") + { + Description = "Path to public key file for signature verification." + }; + + verify.Add(verifyFileArg); + verify.Add(verifyDigestOption); + verify.Add(verifyKeyOption); + + verify.SetAction((parseResult, _) => + { + var file = parseResult.GetValue(verifyFileArg) ?? string.Empty; + var digest = parseResult.GetValue(verifyDigestOption); + var publicKey = parseResult.GetValue(verifyKeyOption); + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleVexVerifyAsync( + services, + file, + digest, + publicKey, + verbose, + cancellationToken); + }); + + export.Add(verify); + vex.Add(export); + return vex; + } + private static Command BuildConfigCommand(StellaOpsCliOptions options) { var config = new Command("config", "Inspect CLI configuration state."); diff --git a/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs b/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs index 37a314817..7e20d6411 100644 --- a/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs +++ b/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs @@ -2436,6 +2436,232 @@ internal static class CommandHandlers } } + // CLI-TEN-49-001: Token minting and delegation handlers + + public static async Task HandleTokenMintAsync( + IServiceProvider services, + StellaOpsCliOptions options, + string serviceAccount, + string[] scopes, + int? expiresIn, + string? tenant, + string? reason, + bool raw, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("token-mint"); + Environment.ExitCode = 0; + + if (string.IsNullOrWhiteSpace(options.Authority?.Url)) + { + logger.LogError("Authority URL is not configured. Set STELLAOPS_AUTHORITY_URL or update your configuration."); + Environment.ExitCode = 1; + return; + } + + var client = scope.ServiceProvider.GetService(); + if (client is null) + { + logger.LogError("Authority console client is not available."); + Environment.ExitCode = 1; + return; + } + + var effectiveTenant = TenantProfileStore.GetEffectiveTenant(tenant); + + try + { + var request = new TokenMintRequest( + serviceAccount, + scopes.Length > 0 ? scopes : new[] { "stellaops:read" }, + expiresIn, + effectiveTenant, + reason); + + if (verbose) + { + logger.LogDebug("Minting token for service account '{ServiceAccount}' with scopes: {Scopes}", serviceAccount, string.Join(", ", request.Scopes)); + } + + var response = await client.MintTokenAsync(request, cancellationToken).ConfigureAwait(false); + + if (raw) + { + Console.WriteLine(response.AccessToken); + } + else + { + logger.LogInformation("Token minted successfully."); + logger.LogInformation("Service Account: {ServiceAccount}", serviceAccount); + logger.LogInformation("Token Type: {TokenType}", response.TokenType); + logger.LogInformation("Expires At: {ExpiresAt:u}", response.ExpiresAt); + logger.LogInformation("Scopes: {Scopes}", string.Join(", ", response.Scopes)); + + if (!string.IsNullOrWhiteSpace(response.TokenId)) + { + logger.LogInformation("Token ID: {TokenId}", response.TokenId); + } + + if (verbose) + { + logger.LogInformation("Access Token: {Token}", response.AccessToken); + } + } + } + catch (HttpRequestException ex) when (ex.StatusCode == System.Net.HttpStatusCode.Unauthorized) + { + logger.LogError("Authentication required. Run 'stella auth login' first."); + Environment.ExitCode = 1; + } + catch (HttpRequestException ex) when (ex.StatusCode == System.Net.HttpStatusCode.Forbidden) + { + logger.LogError("Access denied. Insufficient permissions to mint tokens."); + Environment.ExitCode = 1; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to mint token: {Message}", ex.Message); + Environment.ExitCode = 1; + } + } + + public static async Task HandleTokenDelegateAsync( + IServiceProvider services, + StellaOpsCliOptions options, + string delegateTo, + string[] scopes, + int? expiresIn, + string? tenant, + string? reason, + bool raw, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("token-delegate"); + Environment.ExitCode = 0; + + if (string.IsNullOrWhiteSpace(options.Authority?.Url)) + { + logger.LogError("Authority URL is not configured. Set STELLAOPS_AUTHORITY_URL or update your configuration."); + Environment.ExitCode = 1; + return; + } + + var client = scope.ServiceProvider.GetService(); + if (client is null) + { + logger.LogError("Authority console client is not available."); + Environment.ExitCode = 1; + return; + } + + if (string.IsNullOrWhiteSpace(reason)) + { + logger.LogError("Delegation reason is required (--reason). This is recorded in audit logs."); + Environment.ExitCode = 1; + return; + } + + var effectiveTenant = TenantProfileStore.GetEffectiveTenant(tenant); + + try + { + var request = new TokenDelegateRequest( + delegateTo, + scopes.Length > 0 ? scopes : Array.Empty(), + expiresIn, + effectiveTenant, + reason); + + if (verbose) + { + logger.LogDebug("Delegating token to '{DelegateTo}' with reason: {Reason}", delegateTo, reason); + } + + var response = await client.DelegateTokenAsync(request, cancellationToken).ConfigureAwait(false); + + if (raw) + { + Console.WriteLine(response.AccessToken); + } + else + { + logger.LogInformation("Token delegated successfully."); + logger.LogInformation("Delegation ID: {DelegationId}", response.DelegationId); + logger.LogInformation("Original Subject: {OriginalSubject}", response.OriginalSubject); + logger.LogInformation("Delegated To: {DelegatedSubject}", response.DelegatedSubject); + logger.LogInformation("Token Type: {TokenType}", response.TokenType); + logger.LogInformation("Expires At: {ExpiresAt:u}", response.ExpiresAt); + logger.LogInformation("Scopes: {Scopes}", string.Join(", ", response.Scopes)); + + logger.LogWarning("Delegation tokens should be treated with care. All actions performed with this token will be attributed to '{DelegatedSubject}' acting on behalf of '{OriginalSubject}'.", + response.DelegatedSubject, response.OriginalSubject); + + if (verbose) + { + logger.LogInformation("Access Token: {Token}", response.AccessToken); + } + } + } + catch (HttpRequestException ex) when (ex.StatusCode == System.Net.HttpStatusCode.Unauthorized) + { + logger.LogError("Authentication required. Run 'stella auth login' first."); + Environment.ExitCode = 1; + } + catch (HttpRequestException ex) when (ex.StatusCode == System.Net.HttpStatusCode.Forbidden) + { + logger.LogError("Access denied. Insufficient permissions to delegate tokens."); + Environment.ExitCode = 1; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to delegate token: {Message}", ex.Message); + Environment.ExitCode = 1; + } + } + + /// + /// Checks and displays impersonation banner if operating under a delegated token. + /// Call this from commands that need audit-aware impersonation notices (CLI-TEN-49-001). + /// + internal static async Task CheckAndDisplayImpersonationBannerAsync( + IAuthorityConsoleClient client, + ILogger logger, + string? tenant, + CancellationToken cancellationToken) + { + try + { + var introspection = await client.IntrospectTokenAsync(tenant, cancellationToken).ConfigureAwait(false); + + if (introspection is null || !introspection.Active) + { + return; + } + + if (!string.IsNullOrWhiteSpace(introspection.DelegatedBy)) + { + logger.LogWarning("=== IMPERSONATION NOTICE ==="); + logger.LogWarning("Operating as '{Subject}' delegated by '{DelegatedBy}'.", introspection.Subject, introspection.DelegatedBy); + + if (!string.IsNullOrWhiteSpace(introspection.DelegationReason)) + { + logger.LogWarning("Delegation reason: {Reason}", introspection.DelegationReason); + } + + logger.LogWarning("All actions in this session are audit-logged under the delegation context."); + logger.LogWarning("============================"); + } + } + catch + { + // Silently ignore introspection failures - don't block operations + } + } + public static async Task HandleVulnObservationsAsync( IServiceProvider services, string tenant, @@ -7473,6 +7699,145 @@ internal static class CommandHandlers } } + public static async Task HandlePythonInspectAsync( + IServiceProvider services, + string? rootPath, + string format, + string[]? sitePackages, + bool includeFrameworks, + bool includeCapabilities, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("python-inspect"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + + using var activity = CliActivitySource.Instance.StartActivity("cli.python.inspect", ActivityKind.Internal); + activity?.SetTag("stellaops.cli.command", "python inspect"); + using var duration = CliMetrics.MeasureCommandDuration("python inspect"); + + var outcome = "unknown"; + try + { + var normalizedFormat = string.IsNullOrWhiteSpace(format) + ? "table" + : format.Trim().ToLowerInvariant(); + if (normalizedFormat is not ("table" or "json" or "aoc")) + { + throw new InvalidOperationException("Format must be 'table', 'json', or 'aoc'."); + } + + var targetRoot = string.IsNullOrWhiteSpace(rootPath) + ? Directory.GetCurrentDirectory() + : Path.GetFullPath(rootPath); + if (!Directory.Exists(targetRoot)) + { + throw new DirectoryNotFoundException($"Directory '{targetRoot}' was not found."); + } + + logger.LogInformation("Inspecting Python workspace in {Root}.", targetRoot); + activity?.SetTag("stellaops.cli.python.root", targetRoot); + + var engine = new LanguageAnalyzerEngine(new ILanguageAnalyzer[] { new PythonLanguageAnalyzer() }); + var context = new LanguageAnalyzerContext(targetRoot, TimeProvider.System); + var result = await engine.AnalyzeAsync(context, cancellationToken).ConfigureAwait(false); + var snapshots = result.ToSnapshots(); + + activity?.SetTag("stellaops.cli.python.package_count", snapshots.Count); + + if (string.Equals(normalizedFormat, "json", StringComparison.Ordinal)) + { + var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + WriteIndented = true + }; + Console.WriteLine(JsonSerializer.Serialize(snapshots, options)); + } + else if (string.Equals(normalizedFormat, "aoc", StringComparison.Ordinal)) + { + // AOC format output + var aocResult = new + { + Schema = "python-aoc-v1", + Packages = snapshots.Select(s => new + { + s.Name, + s.Version, + s.Type, + Purl = s.Purl, + s.Metadata + }) + }; + var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + WriteIndented = true + }; + Console.WriteLine(JsonSerializer.Serialize(aocResult, options)); + } + else + { + RenderPythonInspectReport(snapshots); + } + + outcome = snapshots.Count == 0 ? "empty" : "ok"; + Environment.ExitCode = 0; + } + catch (DirectoryNotFoundException ex) + { + outcome = "not_found"; + logger.LogError(ex.Message); + Environment.ExitCode = 71; + } + catch (InvalidOperationException ex) + { + outcome = "invalid"; + logger.LogError(ex.Message); + Environment.ExitCode = 64; + } + catch (Exception ex) + { + outcome = "error"; + logger.LogError(ex, "Python inspect failed."); + Environment.ExitCode = 70; + } + finally + { + verbosity.MinimumLevel = previousLevel; + CliMetrics.RecordPythonInspect(outcome); + } + } + + private static void RenderPythonInspectReport(IReadOnlyList snapshots) + { + if (snapshots.Count == 0) + { + AnsiConsole.MarkupLine("[yellow]No Python packages detected.[/]"); + return; + } + + var table = new Table().Border(TableBorder.Rounded); + table.AddColumn("Package"); + table.AddColumn("Version"); + table.AddColumn("Installer"); + table.AddColumn("Source"); + + foreach (var entry in snapshots) + { + var installer = entry.Metadata.TryGetValue("installer", out var inst) ? inst : "-"; + var source = entry.Metadata.TryGetValue("provenance", out var src) ? src : "-"; + table.AddRow( + Markup.Escape(entry.Name ?? "-"), + Markup.Escape(entry.Version ?? "-"), + Markup.Escape(installer ?? "-"), + Markup.Escape(source ?? "-")); + } + + AnsiConsole.Write(table); + } + private static void RenderPhpInspectReport(PhpInspectReport report) { if (!report.Packages.Any()) @@ -9226,4 +9591,2085 @@ internal static class CommandHandlers private sealed record RiskProfileValidationIssue(string Path, string Error, string Message); #endregion + + #region VEX Consensus (CLI-VEX-30-001) + + public static async Task HandleVexConsensusListAsync( + IServiceProvider services, + string? vulnerabilityId, + string? productKey, + string? purl, + string? status, + string? policyVersion, + int? limit, + int? offset, + string? tenant, + bool emitJson, + bool emitCsv, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vex-consensus"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.vex.consensus.list", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "vex consensus list"); + using var duration = CliMetrics.MeasureCommandDuration("vex consensus list"); + + try + { + // Resolve effective tenant (CLI arg > env var > profile) + var effectiveTenant = TenantProfileStore.GetEffectiveTenant(tenant); + if (!string.IsNullOrWhiteSpace(effectiveTenant)) + { + activity?.SetTag("stellaops.cli.tenant", effectiveTenant); + } + + var request = new VexConsensusListRequest( + VulnerabilityId: vulnerabilityId?.Trim(), + ProductKey: productKey?.Trim(), + Purl: purl?.Trim(), + Status: status?.Trim().ToLowerInvariant(), + PolicyVersion: policyVersion?.Trim(), + Limit: limit ?? 50, + Offset: offset ?? 0); + + logger.LogDebug("Fetching VEX consensus: vuln={VulnId}, product={ProductKey}, purl={Purl}, status={Status}, policy={PolicyVersion}, limit={Limit}, offset={Offset}", + request.VulnerabilityId, request.ProductKey, request.Purl, request.Status, request.PolicyVersion, request.Limit, request.Offset); + + var response = await client.ListVexConsensusAsync(request, effectiveTenant, cancellationToken).ConfigureAwait(false); + + if (emitJson) + { + var json = JsonSerializer.Serialize(response, new JsonSerializerOptions + { + WriteIndented = true + }); + Console.WriteLine(json); + Environment.ExitCode = 0; + return; + } + + if (emitCsv) + { + RenderVexConsensusCsv(response); + Environment.ExitCode = 0; + return; + } + + RenderVexConsensusTable(response); + if (response.HasMore) + { + var nextOffset = response.Offset + response.Limit; + AnsiConsole.MarkupLine($"[yellow]More results available. Continue with[/] [cyan]--offset {nextOffset}[/]"); + } + Environment.ExitCode = 0; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + logger.LogWarning("Operation cancelled by user."); + Environment.ExitCode = 130; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to fetch VEX consensus data."); + AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static void RenderVexConsensusTable(VexConsensusListResponse response) + { + if (response.Items.Count == 0) + { + AnsiConsole.MarkupLine("[yellow]No VEX consensus entries found matching the criteria.[/]"); + return; + } + + var table = new Table(); + table.Border(TableBorder.Rounded); + table.AddColumn(new TableColumn("[bold]Vulnerability[/]").NoWrap()); + table.AddColumn(new TableColumn("[bold]Product[/]")); + table.AddColumn(new TableColumn("[bold]Status[/]")); + table.AddColumn(new TableColumn("[bold]Sources[/]").Centered()); + table.AddColumn(new TableColumn("[bold]Conflicts[/]").Centered()); + table.AddColumn(new TableColumn("[bold]Calculated[/]")); + + foreach (var item in response.Items) + { + var statusColor = item.Status.ToLowerInvariant() switch + { + "not_affected" => "green", + "fixed" => "blue", + "affected" => "red", + "under_investigation" => "yellow", + _ => "grey" + }; + + var productDisplay = item.Product.Name ?? item.Product.Key; + if (!string.IsNullOrWhiteSpace(item.Product.Version)) + { + productDisplay += $" ({item.Product.Version})"; + } + + var conflictCount = item.Conflicts?.Count ?? 0; + var conflictDisplay = conflictCount > 0 ? $"[red]{conflictCount}[/]" : "[grey]0[/]"; + + table.AddRow( + Markup.Escape(item.VulnerabilityId), + Markup.Escape(productDisplay), + $"[{statusColor}]{Markup.Escape(item.Status)}[/]", + item.Sources.Count.ToString(), + conflictDisplay, + item.CalculatedAt.ToString("yyyy-MM-dd HH:mm")); + } + + AnsiConsole.Write(table); + AnsiConsole.MarkupLine($"[grey]Showing {response.Items.Count} of {response.Total} total entries (offset {response.Offset})[/]"); + } + + private static void RenderVexConsensusCsv(VexConsensusListResponse response) + { + Console.WriteLine("vulnerability_id,product_key,product_name,product_version,purl,status,source_count,conflict_count,calculated_at,policy_version"); + foreach (var item in response.Items) + { + var sourceCount = item.Sources.Count; + var conflictCount = item.Conflicts?.Count ?? 0; + Console.WriteLine(string.Join(",", + CsvEscape(item.VulnerabilityId), + CsvEscape(item.Product.Key), + CsvEscape(item.Product.Name ?? string.Empty), + CsvEscape(item.Product.Version ?? string.Empty), + CsvEscape(item.Product.Purl ?? string.Empty), + CsvEscape(item.Status), + sourceCount.ToString(), + conflictCount.ToString(), + item.CalculatedAt.ToString("yyyy-MM-ddTHH:mm:ssZ"), + CsvEscape(item.PolicyVersion ?? string.Empty))); + } + } + + private static string CsvEscape(string value) + { + if (string.IsNullOrEmpty(value)) + return string.Empty; + + if (value.Contains(',') || value.Contains('"') || value.Contains('\n') || value.Contains('\r')) + { + return "\"" + value.Replace("\"", "\"\"") + "\""; + } + + return value; + } + + // CLI-VEX-30-002: VEX consensus show + public static async Task HandleVexConsensusShowAsync( + IServiceProvider services, + string vulnerabilityId, + string productKey, + string? tenant, + bool emitJson, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vex-consensus"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.vex.consensus.show", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "vex consensus show"); + activity?.SetTag("stellaops.cli.vulnerability_id", vulnerabilityId); + activity?.SetTag("stellaops.cli.product_key", productKey); + using var duration = CliMetrics.MeasureCommandDuration("vex consensus show"); + + try + { + var effectiveTenant = TenantProfileStore.GetEffectiveTenant(tenant); + if (!string.IsNullOrWhiteSpace(effectiveTenant)) + { + activity?.SetTag("stellaops.cli.tenant", effectiveTenant); + } + + logger.LogDebug("Fetching VEX consensus detail: vuln={VulnId}, product={ProductKey}", vulnerabilityId, productKey); + + var response = await client.GetVexConsensusAsync(vulnerabilityId, productKey, effectiveTenant, cancellationToken).ConfigureAwait(false); + + if (response is null) + { + AnsiConsole.MarkupLine($"[yellow]No VEX consensus found for vulnerability[/] [cyan]{Markup.Escape(vulnerabilityId)}[/] [yellow]and product[/] [cyan]{Markup.Escape(productKey)}[/]"); + Environment.ExitCode = 0; + return; + } + + if (emitJson) + { + var json = JsonSerializer.Serialize(response, new JsonSerializerOptions + { + WriteIndented = true + }); + Console.WriteLine(json); + Environment.ExitCode = 0; + return; + } + + RenderVexConsensusDetail(response); + Environment.ExitCode = 0; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + logger.LogWarning("Operation cancelled by user."); + Environment.ExitCode = 130; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to fetch VEX consensus detail."); + AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static void RenderVexConsensusDetail(VexConsensusDetailResponse response) + { + // Header panel + var statusColor = response.Status.ToLowerInvariant() switch + { + "not_affected" => "green", + "fixed" => "blue", + "affected" => "red", + "under_investigation" => "yellow", + _ => "grey" + }; + + var headerPanel = new Panel(new Markup($"[bold]{Markup.Escape(response.VulnerabilityId)}[/] → [{statusColor}]{Markup.Escape(response.Status.ToUpperInvariant())}[/]")) + { + Header = new PanelHeader("[bold]VEX Consensus[/]"), + Border = BoxBorder.Rounded + }; + AnsiConsole.Write(headerPanel); + AnsiConsole.WriteLine(); + + // Product information + var productGrid = new Grid(); + productGrid.AddColumn(); + productGrid.AddColumn(); + productGrid.AddRow("[grey]Product Key:[/]", Markup.Escape(response.Product.Key)); + if (!string.IsNullOrWhiteSpace(response.Product.Name)) + productGrid.AddRow("[grey]Name:[/]", Markup.Escape(response.Product.Name)); + if (!string.IsNullOrWhiteSpace(response.Product.Version)) + productGrid.AddRow("[grey]Version:[/]", Markup.Escape(response.Product.Version)); + if (!string.IsNullOrWhiteSpace(response.Product.Purl)) + productGrid.AddRow("[grey]PURL:[/]", Markup.Escape(response.Product.Purl)); + if (!string.IsNullOrWhiteSpace(response.Product.Cpe)) + productGrid.AddRow("[grey]CPE:[/]", Markup.Escape(response.Product.Cpe)); + productGrid.AddRow("[grey]Calculated:[/]", response.CalculatedAt.ToString("yyyy-MM-dd HH:mm:ss UTC")); + if (!string.IsNullOrWhiteSpace(response.PolicyVersion)) + productGrid.AddRow("[grey]Policy Version:[/]", Markup.Escape(response.PolicyVersion)); + + var productPanel = new Panel(productGrid) + { + Header = new PanelHeader("[cyan]Product Information[/]"), + Border = BoxBorder.Rounded + }; + AnsiConsole.Write(productPanel); + AnsiConsole.WriteLine(); + + // Quorum information + if (response.Quorum is not null) + { + var quorum = response.Quorum; + var quorumMet = quorum.Achieved >= quorum.Required; + var quorumStatus = quorumMet ? "[green]MET[/]" : "[red]NOT MET[/]"; + + var quorumGrid = new Grid(); + quorumGrid.AddColumn(); + quorumGrid.AddColumn(); + quorumGrid.AddRow("[grey]Status:[/]", quorumStatus); + quorumGrid.AddRow("[grey]Required:[/]", quorum.Required.ToString()); + quorumGrid.AddRow("[grey]Achieved:[/]", quorum.Achieved.ToString()); + quorumGrid.AddRow("[grey]Threshold:[/]", $"{quorum.Threshold:P0}"); + quorumGrid.AddRow("[grey]Total Weight:[/]", $"{quorum.TotalWeight:F2}"); + quorumGrid.AddRow("[grey]Weight Achieved:[/]", $"{quorum.WeightAchieved:F2}"); + if (quorum.ParticipatingProviders is { Count: > 0 }) + { + quorumGrid.AddRow("[grey]Providers:[/]", string.Join(", ", quorum.ParticipatingProviders.Select(Markup.Escape))); + } + + var quorumPanel = new Panel(quorumGrid) + { + Header = new PanelHeader("[cyan]Quorum[/]"), + Border = BoxBorder.Rounded + }; + AnsiConsole.Write(quorumPanel); + AnsiConsole.WriteLine(); + } + + // Sources (accepted claims) + if (response.Sources.Count > 0) + { + var sourcesTable = new Table(); + sourcesTable.Border(TableBorder.Rounded); + sourcesTable.AddColumn("[bold]Provider[/]"); + sourcesTable.AddColumn("[bold]Status[/]"); + sourcesTable.AddColumn("[bold]Weight[/]"); + sourcesTable.AddColumn("[bold]Justification[/]"); + + foreach (var source in response.Sources) + { + var sourceStatus = source.Status.ToLowerInvariant() switch + { + "not_affected" => "[green]not_affected[/]", + "fixed" => "[blue]fixed[/]", + "affected" => "[red]affected[/]", + _ => Markup.Escape(source.Status) + }; + + sourcesTable.AddRow( + Markup.Escape(source.ProviderId), + sourceStatus, + $"{source.Weight:F2}", + Markup.Escape(source.Justification ?? "-")); + } + + AnsiConsole.MarkupLine("[cyan]Sources (Accepted Claims)[/]"); + AnsiConsole.Write(sourcesTable); + AnsiConsole.WriteLine(); + } + + // Conflicts (rejected claims) + if (response.Conflicts is { Count: > 0 }) + { + var conflictsTable = new Table(); + conflictsTable.Border(TableBorder.Rounded); + conflictsTable.AddColumn("[bold]Provider[/]"); + conflictsTable.AddColumn("[bold]Status[/]"); + conflictsTable.AddColumn("[bold]Reason[/]"); + + foreach (var conflict in response.Conflicts) + { + conflictsTable.AddRow( + Markup.Escape(conflict.ProviderId), + Markup.Escape(conflict.Status), + Markup.Escape(conflict.Reason ?? "-")); + } + + AnsiConsole.MarkupLine("[red]Conflicts (Rejected Claims)[/]"); + AnsiConsole.Write(conflictsTable); + AnsiConsole.WriteLine(); + } + + // Rationale + if (response.Rationale is not null) + { + var rationale = response.Rationale; + var rationaleGrid = new Grid(); + rationaleGrid.AddColumn(); + + if (!string.IsNullOrWhiteSpace(rationale.Text)) + { + rationaleGrid.AddRow(Markup.Escape(rationale.Text)); + } + + if (rationale.Justifications is { Count: > 0 }) + { + rationaleGrid.AddRow(""); + rationaleGrid.AddRow("[grey]Justifications:[/]"); + foreach (var j in rationale.Justifications) + { + rationaleGrid.AddRow($" • {Markup.Escape(j)}"); + } + } + + if (rationale.PolicyRules is { Count: > 0 }) + { + rationaleGrid.AddRow(""); + rationaleGrid.AddRow("[grey]Policy Rules:[/]"); + foreach (var rule in rationale.PolicyRules) + { + rationaleGrid.AddRow($" • {Markup.Escape(rule)}"); + } + } + + var rationalePanel = new Panel(rationaleGrid) + { + Header = new PanelHeader("[cyan]Rationale[/]"), + Border = BoxBorder.Rounded + }; + AnsiConsole.Write(rationalePanel); + AnsiConsole.WriteLine(); + } + + // Signature status + if (response.Signature is not null) + { + var sig = response.Signature; + var sigStatus = sig.Signed ? "[green]SIGNED[/]" : "[yellow]UNSIGNED[/]"; + var verifyStatus = sig.VerificationStatus?.ToLowerInvariant() switch + { + "valid" => "[green]VALID[/]", + "invalid" => "[red]INVALID[/]", + "unknown" => "[yellow]UNKNOWN[/]", + _ => sig.VerificationStatus is not null ? Markup.Escape(sig.VerificationStatus) : "[grey]N/A[/]" + }; + + var sigGrid = new Grid(); + sigGrid.AddColumn(); + sigGrid.AddColumn(); + sigGrid.AddRow("[grey]Status:[/]", sigStatus); + if (sig.Signed) + { + sigGrid.AddRow("[grey]Verification:[/]", verifyStatus); + if (!string.IsNullOrWhiteSpace(sig.Algorithm)) + sigGrid.AddRow("[grey]Algorithm:[/]", Markup.Escape(sig.Algorithm)); + if (!string.IsNullOrWhiteSpace(sig.KeyId)) + sigGrid.AddRow("[grey]Key ID:[/]", Markup.Escape(sig.KeyId)); + if (sig.SignedAt.HasValue) + sigGrid.AddRow("[grey]Signed At:[/]", sig.SignedAt.Value.ToString("yyyy-MM-dd HH:mm:ss UTC")); + } + + var sigPanel = new Panel(sigGrid) + { + Header = new PanelHeader("[cyan]Signature[/]"), + Border = BoxBorder.Rounded + }; + AnsiConsole.Write(sigPanel); + AnsiConsole.WriteLine(); + } + + // Evidence + if (response.Evidence is { Count: > 0 }) + { + var evidenceTable = new Table(); + evidenceTable.Border(TableBorder.Rounded); + evidenceTable.AddColumn("[bold]Type[/]"); + evidenceTable.AddColumn("[bold]Provider[/]"); + evidenceTable.AddColumn("[bold]Document[/]"); + evidenceTable.AddColumn("[bold]Timestamp[/]"); + + foreach (var ev in response.Evidence) + { + var docRef = !string.IsNullOrWhiteSpace(ev.DocumentDigest) + ? (ev.DocumentDigest.Length > 16 ? ev.DocumentDigest[..16] + "..." : ev.DocumentDigest) + : ev.DocumentId ?? "-"; + + evidenceTable.AddRow( + Markup.Escape(ev.Type), + Markup.Escape(ev.ProviderId), + Markup.Escape(docRef), + ev.Timestamp?.ToString("yyyy-MM-dd HH:mm") ?? "-"); + } + + AnsiConsole.MarkupLine("[cyan]Evidence[/]"); + AnsiConsole.Write(evidenceTable); + } + + // Summary + if (!string.IsNullOrWhiteSpace(response.Summary)) + { + AnsiConsole.WriteLine(); + AnsiConsole.MarkupLine($"[grey]Summary:[/] {Markup.Escape(response.Summary)}"); + } + } + + // CLI-VEX-30-003: VEX simulate + public static async Task HandleVexSimulateAsync( + IServiceProvider services, + string? vulnerabilityId, + string? productKey, + string? purl, + double? threshold, + int? quorum, + IReadOnlyList trustOverrides, + IReadOnlyList excludeProviders, + IReadOnlyList includeOnly, + string? tenant, + bool emitJson, + bool changedOnly, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vex-simulate"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.vex.simulate", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "vex simulate"); + using var duration = CliMetrics.MeasureCommandDuration("vex simulate"); + + try + { + var effectiveTenant = TenantProfileStore.GetEffectiveTenant(tenant); + if (!string.IsNullOrWhiteSpace(effectiveTenant)) + { + activity?.SetTag("stellaops.cli.tenant", effectiveTenant); + } + + // Parse trust overrides (format: provider=weight) + Dictionary? parsedTrustOverrides = null; + if (trustOverrides.Count > 0) + { + parsedTrustOverrides = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var entry in trustOverrides) + { + var parts = entry.Split('=', 2); + if (parts.Length != 2) + { + AnsiConsole.MarkupLine($"[red]Invalid trust override format:[/] {Markup.Escape(entry)}. Expected provider=weight"); + Environment.ExitCode = 1; + return; + } + + if (!double.TryParse(parts[1], NumberStyles.Float, CultureInfo.InvariantCulture, out var weight)) + { + AnsiConsole.MarkupLine($"[red]Invalid weight value:[/] {Markup.Escape(parts[1])}"); + Environment.ExitCode = 1; + return; + } + + parsedTrustOverrides[parts[0].Trim()] = weight; + } + } + + var request = new VexSimulationRequest( + VulnerabilityId: vulnerabilityId?.Trim(), + ProductKey: productKey?.Trim(), + Purl: purl?.Trim(), + TrustOverrides: parsedTrustOverrides, + ThresholdOverride: threshold, + QuorumOverride: quorum, + ExcludeProviders: excludeProviders.Count > 0 ? excludeProviders.ToList() : null, + IncludeOnly: includeOnly.Count > 0 ? includeOnly.ToList() : null); + + logger.LogDebug("Running VEX simulation: vuln={VulnId}, product={ProductKey}, threshold={Threshold}, quorum={Quorum}", + request.VulnerabilityId, request.ProductKey, request.ThresholdOverride, request.QuorumOverride); + + var response = await client.SimulateVexConsensusAsync(request, effectiveTenant, cancellationToken).ConfigureAwait(false); + + if (emitJson) + { + var json = JsonSerializer.Serialize(response, new JsonSerializerOptions + { + WriteIndented = true + }); + Console.WriteLine(json); + Environment.ExitCode = 0; + return; + } + + RenderVexSimulationResults(response, changedOnly); + Environment.ExitCode = 0; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + logger.LogWarning("Operation cancelled by user."); + Environment.ExitCode = 130; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to run VEX simulation."); + AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static void RenderVexSimulationResults(VexSimulationResponse response, bool changedOnly) + { + // Summary panel + var summary = response.Summary; + var summaryGrid = new Grid(); + summaryGrid.AddColumn(); + summaryGrid.AddColumn(); + summaryGrid.AddRow("[grey]Total Evaluated:[/]", summary.TotalEvaluated.ToString()); + summaryGrid.AddRow("[grey]Changed:[/]", summary.TotalChanged > 0 ? $"[yellow]{summary.TotalChanged}[/]" : "[green]0[/]"); + summaryGrid.AddRow("[grey]Status Upgrades:[/]", summary.StatusUpgrades > 0 ? $"[green]{summary.StatusUpgrades}[/]" : "0"); + summaryGrid.AddRow("[grey]Status Downgrades:[/]", summary.StatusDowngrades > 0 ? $"[red]{summary.StatusDowngrades}[/]" : "0"); + summaryGrid.AddRow("[grey]No Change:[/]", summary.NoChange.ToString()); + + var summaryPanel = new Panel(summaryGrid) + { + Header = new PanelHeader("[bold]Simulation Summary[/]"), + Border = BoxBorder.Rounded + }; + AnsiConsole.Write(summaryPanel); + AnsiConsole.WriteLine(); + + // Parameters panel + var parameters = response.Parameters; + var paramsGrid = new Grid(); + paramsGrid.AddColumn(); + paramsGrid.AddColumn(); + paramsGrid.AddRow("[grey]Threshold:[/]", $"{parameters.Threshold:P0}"); + paramsGrid.AddRow("[grey]Quorum:[/]", parameters.Quorum.ToString()); + if (parameters.TrustWeights is { Count: > 0 }) + { + var weights = string.Join(", ", parameters.TrustWeights.Select(kv => $"{kv.Key}={kv.Value:F2}")); + paramsGrid.AddRow("[grey]Trust Weights:[/]", Markup.Escape(weights)); + } + if (parameters.ExcludedProviders is { Count: > 0 }) + { + paramsGrid.AddRow("[grey]Excluded:[/]", string.Join(", ", parameters.ExcludedProviders.Select(Markup.Escape))); + } + + var paramsPanel = new Panel(paramsGrid) + { + Header = new PanelHeader("[cyan]Simulation Parameters[/]"), + Border = BoxBorder.Rounded + }; + AnsiConsole.Write(paramsPanel); + AnsiConsole.WriteLine(); + + // Results table + var itemsToShow = changedOnly ? response.Items.Where(i => i.Changed).ToList() : response.Items; + + if (itemsToShow.Count == 0) + { + AnsiConsole.MarkupLine(changedOnly + ? "[green]No status changes detected with the given parameters.[/]" + : "[yellow]No items to display.[/]"); + return; + } + + var table = new Table(); + table.Border(TableBorder.Rounded); + table.AddColumn(new TableColumn("[bold]Vulnerability[/]").NoWrap()); + table.AddColumn(new TableColumn("[bold]Product[/]")); + table.AddColumn(new TableColumn("[bold]Before[/]")); + table.AddColumn(new TableColumn("[bold]After[/]")); + table.AddColumn(new TableColumn("[bold]Change[/]")); + + foreach (var item in itemsToShow) + { + var beforeStatus = GetStatusMarkup(item.Before.Status); + var afterStatus = GetStatusMarkup(item.After.Status); + + var changeIndicator = item.Changed + ? item.ChangeType?.ToLowerInvariant() switch + { + "upgrade" => "[green]UPGRADE[/]", + "downgrade" => "[red]DOWNGRADE[/]", + _ => "[yellow]CHANGED[/]" + } + : "[grey]-[/]"; + + var productDisplay = item.Product.Name ?? item.Product.Key; + if (!string.IsNullOrWhiteSpace(item.Product.Version)) + { + productDisplay += $" ({item.Product.Version})"; + } + + table.AddRow( + Markup.Escape(item.VulnerabilityId), + Markup.Escape(productDisplay), + beforeStatus, + afterStatus, + changeIndicator); + } + + AnsiConsole.Write(table); + + if (changedOnly && response.Items.Count > itemsToShow.Count) + { + AnsiConsole.MarkupLine($"[grey]Showing {itemsToShow.Count} changed items. {response.Items.Count - itemsToShow.Count} unchanged items hidden.[/]"); + } + + static string GetStatusMarkup(string status) => status.ToLowerInvariant() switch + { + "not_affected" => "[green]not_affected[/]", + "fixed" => "[blue]fixed[/]", + "affected" => "[red]affected[/]", + "under_investigation" => "[yellow]under_investigation[/]", + _ => Markup.Escape(status) + }; + } + + // CLI-VEX-30-004: VEX export + public static async Task HandleVexExportAsync( + IServiceProvider services, + IReadOnlyList vulnIds, + IReadOnlyList productKeys, + IReadOnlyList purls, + IReadOnlyList statuses, + string? policyVersion, + string outputPath, + bool signed, + string? tenant, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vex-export"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.vex.export", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "vex export"); + using var duration = CliMetrics.MeasureCommandDuration("vex export"); + + try + { + var effectiveTenant = TenantProfileStore.GetEffectiveTenant(tenant); + if (!string.IsNullOrWhiteSpace(effectiveTenant)) + { + activity?.SetTag("stellaops.cli.tenant", effectiveTenant); + } + + if (string.IsNullOrWhiteSpace(outputPath)) + { + AnsiConsole.MarkupLine("[red]Output path is required.[/]"); + Environment.ExitCode = 1; + return; + } + + var outputDir = Path.GetDirectoryName(outputPath); + if (!string.IsNullOrWhiteSpace(outputDir) && !Directory.Exists(outputDir)) + { + Directory.CreateDirectory(outputDir); + } + + var request = new VexExportRequest( + VulnerabilityIds: vulnIds.Count > 0 ? vulnIds.ToList() : null, + ProductKeys: productKeys.Count > 0 ? productKeys.ToList() : null, + Purls: purls.Count > 0 ? purls.ToList() : null, + Statuses: statuses.Count > 0 ? statuses.ToList() : null, + PolicyVersion: policyVersion?.Trim(), + Signed: signed, + Format: "ndjson"); + + logger.LogDebug("Requesting VEX export: signed={Signed}, vulnIds={VulnCount}, productKeys={ProductCount}", + signed, vulnIds.Count, productKeys.Count); + + await AnsiConsole.Status() + .Spinner(Spinner.Known.Dots) + .StartAsync("Preparing export...", async ctx => + { + var exportResponse = await client.ExportVexConsensusAsync(request, effectiveTenant, cancellationToken).ConfigureAwait(false); + + ctx.Status("Downloading export bundle..."); + + await using var downloadStream = await client.DownloadVexExportAsync(exportResponse.ExportId, effectiveTenant, cancellationToken).ConfigureAwait(false); + await using var fileStream = File.Create(outputPath); + await downloadStream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false); + + AnsiConsole.MarkupLine($"[green]Export complete![/]"); + AnsiConsole.WriteLine(); + + var resultGrid = new Grid(); + resultGrid.AddColumn(); + resultGrid.AddColumn(); + resultGrid.AddRow("[grey]Output File:[/]", Markup.Escape(outputPath)); + resultGrid.AddRow("[grey]Items Exported:[/]", exportResponse.ItemCount.ToString()); + resultGrid.AddRow("[grey]Format:[/]", Markup.Escape(exportResponse.Format)); + resultGrid.AddRow("[grey]Signed:[/]", exportResponse.Signed ? "[green]Yes[/]" : "[yellow]No[/]"); + if (exportResponse.Signed) + { + if (!string.IsNullOrWhiteSpace(exportResponse.SignatureAlgorithm)) + resultGrid.AddRow("[grey]Signature Algorithm:[/]", Markup.Escape(exportResponse.SignatureAlgorithm)); + if (!string.IsNullOrWhiteSpace(exportResponse.SignatureKeyId)) + resultGrid.AddRow("[grey]Key ID:[/]", Markup.Escape(exportResponse.SignatureKeyId)); + } + if (!string.IsNullOrWhiteSpace(exportResponse.Digest)) + { + var digestDisplay = exportResponse.Digest.Length > 32 + ? exportResponse.Digest[..32] + "..." + : exportResponse.Digest; + resultGrid.AddRow("[grey]Digest:[/]", $"{exportResponse.DigestAlgorithm ?? "sha256"}:{Markup.Escape(digestDisplay)}"); + } + + AnsiConsole.Write(resultGrid); + }); + + Environment.ExitCode = 0; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + logger.LogWarning("Operation cancelled by user."); + Environment.ExitCode = 130; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to export VEX consensus data."); + AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleVexVerifyAsync( + IServiceProvider services, + string filePath, + string? expectedDigest, + string? publicKeyPath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vex-verify"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.vex.export.verify", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "vex export verify"); + using var duration = CliMetrics.MeasureCommandDuration("vex export verify"); + + try + { + if (string.IsNullOrWhiteSpace(filePath)) + { + AnsiConsole.MarkupLine("[red]File path is required.[/]"); + Environment.ExitCode = 1; + return; + } + + if (!File.Exists(filePath)) + { + AnsiConsole.MarkupLine($"[red]File not found:[/] {Markup.Escape(filePath)}"); + Environment.ExitCode = 1; + return; + } + + logger.LogDebug("Verifying VEX export: file={FilePath}, expectedDigest={Digest}", filePath, expectedDigest ?? "(none)"); + + // Calculate SHA-256 digest + string actualDigest; + await using (var fileStream = File.OpenRead(filePath)) + { + using var sha256 = SHA256.Create(); + var hashBytes = await sha256.ComputeHashAsync(fileStream, cancellationToken).ConfigureAwait(false); + actualDigest = Convert.ToHexString(hashBytes).ToLowerInvariant(); + } + + var resultGrid = new Grid(); + resultGrid.AddColumn(); + resultGrid.AddColumn(); + resultGrid.AddRow("[grey]File:[/]", Markup.Escape(filePath)); + resultGrid.AddRow("[grey]Actual Digest:[/]", $"sha256:{Markup.Escape(actualDigest)}"); + + var digestValid = true; + if (!string.IsNullOrWhiteSpace(expectedDigest)) + { + var normalizedExpected = expectedDigest.Trim().ToLowerInvariant(); + if (normalizedExpected.StartsWith("sha256:")) + { + normalizedExpected = normalizedExpected[7..]; + } + + digestValid = string.Equals(actualDigest, normalizedExpected, StringComparison.OrdinalIgnoreCase); + resultGrid.AddRow("[grey]Expected Digest:[/]", $"sha256:{Markup.Escape(normalizedExpected)}"); + resultGrid.AddRow("[grey]Digest Match:[/]", digestValid ? "[green]YES[/]" : "[red]NO[/]"); + } + + var sigStatus = "not_verified"; + + if (!string.IsNullOrWhiteSpace(publicKeyPath)) + { + if (!File.Exists(publicKeyPath)) + { + resultGrid.AddRow("[grey]Signature:[/]", $"[red]Public key not found:[/] {Markup.Escape(publicKeyPath)}"); + } + else + { + // Look for .sig file + var sigPath = filePath + ".sig"; + if (File.Exists(sigPath)) + { + // Note: Actual signature verification would require cryptographic operations + // This is a placeholder that shows the structure + resultGrid.AddRow("[grey]Signature File:[/]", Markup.Escape(sigPath)); + resultGrid.AddRow("[grey]Public Key:[/]", Markup.Escape(publicKeyPath)); + resultGrid.AddRow("[grey]Signature Status:[/]", "[yellow]Verification requires runtime crypto support[/]"); + sigStatus = "requires_verification"; + } + else + { + resultGrid.AddRow("[grey]Signature:[/]", "[yellow]No .sig file found[/]"); + sigStatus = "no_signature"; + } + } + } + else + { + resultGrid.AddRow("[grey]Signature:[/]", "[grey]Skipped (no --public-key provided)[/]"); + sigStatus = "skipped"; + } + + var panel = new Panel(resultGrid) + { + Header = new PanelHeader("[bold]VEX Export Verification[/]"), + Border = BoxBorder.Rounded + }; + AnsiConsole.Write(panel); + + if (!digestValid) + { + AnsiConsole.MarkupLine("[red]Verification FAILED: Digest mismatch[/]"); + Environment.ExitCode = 1; + } + else if (sigStatus == "no_signature" && !string.IsNullOrWhiteSpace(publicKeyPath)) + { + AnsiConsole.MarkupLine("[yellow]Warning: No signature file found for verification[/]"); + Environment.ExitCode = 0; + } + else + { + AnsiConsole.MarkupLine("[green]Verification completed[/]"); + Environment.ExitCode = 0; + } + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + logger.LogWarning("Operation cancelled by user."); + Environment.ExitCode = 130; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to verify VEX export."); + AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + #endregion + + #region Vulnerability Explorer (CLI-VULN-29-001) + + // CLI-VULN-29-001: Vulnerability list handler + public static async Task HandleVulnListAsync( + IServiceProvider services, + string? vulnId, + string? severity, + string? status, + string? purl, + string? cpe, + string? sbomId, + string? policyId, + int? policyVersion, + string? groupBy, + int? limit, + int? offset, + string? cursor, + string? tenant, + bool emitJson, + bool emitCsv, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vuln-list"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.vuln.list", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "vuln list"); + using var duration = CliMetrics.MeasureCommandDuration("vuln list"); + + try + { + // Resolve effective tenant (CLI arg > env var > profile) + var effectiveTenant = TenantProfileStore.GetEffectiveTenant(tenant); + if (!string.IsNullOrWhiteSpace(effectiveTenant)) + { + activity?.SetTag("stellaops.cli.tenant", effectiveTenant); + } + + logger.LogDebug("Listing vulnerabilities: vuln={VulnId}, severity={Severity}, status={Status}, purl={Purl}, groupBy={GroupBy}", + vulnId, severity, status, purl, groupBy); + + var request = new VulnListRequest( + VulnerabilityId: vulnId, + Severity: severity, + Status: status, + Purl: purl, + Cpe: cpe, + SbomId: sbomId, + PolicyId: policyId, + PolicyVersion: policyVersion, + GroupBy: groupBy, + Limit: limit, + Offset: offset, + Cursor: cursor); + + var response = await client.ListVulnerabilitiesAsync(request, effectiveTenant, cancellationToken).ConfigureAwait(false); + + if (emitJson) + { + var jsonOptions = new JsonSerializerOptions { WriteIndented = true, PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; + var json = JsonSerializer.Serialize(response, jsonOptions); + AnsiConsole.WriteLine(json); + } + else if (emitCsv) + { + RenderVulnListCsv(response); + } + else + { + RenderVulnListTable(response, groupBy); + } + + Environment.ExitCode = 0; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + logger.LogWarning("Operation cancelled by user."); + Environment.ExitCode = 130; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to list vulnerabilities."); + AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static void RenderVulnListTable(VulnListResponse response, string? groupBy) + { + if (!string.IsNullOrWhiteSpace(groupBy) && response.Grouping != null) + { + // Render grouped summary + var groupTable = new Table(); + groupTable.AddColumn(new TableColumn($"[bold]{Markup.Escape(response.Grouping.Field)}[/]").LeftAligned()); + groupTable.AddColumn(new TableColumn("[bold]Count[/]").RightAligned()); + groupTable.AddColumn(new TableColumn("[bold]Critical[/]").RightAligned()); + groupTable.AddColumn(new TableColumn("[bold]High[/]").RightAligned()); + groupTable.AddColumn(new TableColumn("[bold]Medium[/]").RightAligned()); + groupTable.AddColumn(new TableColumn("[bold]Low[/]").RightAligned()); + + foreach (var group in response.Grouping.Groups) + { + groupTable.AddRow( + Markup.Escape(group.Key), + group.Count.ToString(), + group.CriticalCount?.ToString() ?? "-", + group.HighCount?.ToString() ?? "-", + group.MediumCount?.ToString() ?? "-", + group.LowCount?.ToString() ?? "-"); + } + + AnsiConsole.Write(groupTable); + AnsiConsole.WriteLine(); + AnsiConsole.MarkupLine($"[grey]Grouped by:[/] {Markup.Escape(response.Grouping.Field)} | [grey]Total groups:[/] {response.Grouping.Groups.Count}"); + return; + } + + // Render individual vulnerabilities + var table = new Table(); + table.AddColumn(new TableColumn("[bold]Vulnerability ID[/]").LeftAligned()); + table.AddColumn(new TableColumn("[bold]Severity[/]").Centered()); + table.AddColumn(new TableColumn("[bold]Status[/]").Centered()); + table.AddColumn(new TableColumn("[bold]VEX[/]").Centered()); + table.AddColumn(new TableColumn("[bold]Packages[/]").RightAligned()); + table.AddColumn(new TableColumn("[bold]Updated[/]").RightAligned()); + + foreach (var item in response.Items) + { + var severityColor = GetSeverityColor(item.Severity.Level); + var statusColor = GetVulnStatusColor(item.Status); + var vexDisplay = item.VexStatus ?? "-"; + var vexColor = GetVexStatusColor(item.VexStatus); + var packageCount = item.AffectedPackages.Count.ToString(); + + table.AddRow( + Markup.Escape(item.VulnerabilityId), + $"[{severityColor}]{Markup.Escape(item.Severity.Level.ToUpperInvariant())}[/]", + $"[{statusColor}]{Markup.Escape(item.Status)}[/]", + $"[{vexColor}]{Markup.Escape(vexDisplay)}[/]", + packageCount, + item.UpdatedAt?.ToString("yyyy-MM-dd") ?? "-"); + } + + AnsiConsole.Write(table); + AnsiConsole.WriteLine(); + AnsiConsole.MarkupLine($"[grey]Showing:[/] {response.Items.Count} of {response.Total} | [grey]Offset:[/] {response.Offset}"); + + if (response.HasMore && !string.IsNullOrWhiteSpace(response.NextCursor)) + { + AnsiConsole.MarkupLine($"[grey]Next page:[/] --cursor \"{Markup.Escape(response.NextCursor)}\""); + } + } + + private static void RenderVulnListCsv(VulnListResponse response) + { + Console.WriteLine("VulnerabilityId,Severity,Score,Status,VexStatus,PackageCount,Assignee,UpdatedAt"); + foreach (var item in response.Items) + { + Console.WriteLine($"{CsvEscape(item.VulnerabilityId)},{CsvEscape(item.Severity.Level)},{item.Severity.Score?.ToString("F1") ?? ""},{CsvEscape(item.Status)},{CsvEscape(item.VexStatus ?? "")},{item.AffectedPackages.Count},{CsvEscape(item.Assignee ?? "")},{item.UpdatedAt?.ToString("O") ?? ""}"); + } + } + + private static string GetSeverityColor(string severity) + { + return severity.ToLowerInvariant() switch + { + "critical" => "red bold", + "high" => "red", + "medium" => "yellow", + "low" => "blue", + _ => "grey" + }; + } + + private static string GetVulnStatusColor(string status) + { + return status.ToLowerInvariant() switch + { + "open" => "red", + "triaged" => "yellow", + "accepted" => "green", + "fixed" => "green", + "risk_accepted" => "cyan", + "false_positive" => "grey", + _ => "white" + }; + } + + private static string GetVexStatusColor(string? vexStatus) + { + if (string.IsNullOrWhiteSpace(vexStatus)) return "grey"; + return vexStatus.ToLowerInvariant() switch + { + "not_affected" => "green", + "affected" => "red", + "fixed" => "green", + "under_investigation" => "yellow", + _ => "grey" + }; + } + + // CLI-VULN-29-002: Vulnerability show handler + public static async Task HandleVulnShowAsync( + IServiceProvider services, + string vulnerabilityId, + string? tenant, + bool emitJson, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vuln-show"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.vuln.show", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "vuln show"); + using var duration = CliMetrics.MeasureCommandDuration("vuln show"); + + try + { + if (string.IsNullOrWhiteSpace(vulnerabilityId)) + { + AnsiConsole.MarkupLine("[red]Error:[/] Vulnerability ID is required."); + Environment.ExitCode = 1; + return; + } + + var effectiveTenant = TenantProfileStore.GetEffectiveTenant(tenant); + if (!string.IsNullOrWhiteSpace(effectiveTenant)) + { + activity?.SetTag("stellaops.cli.tenant", effectiveTenant); + } + + logger.LogDebug("Fetching vulnerability details: {VulnId}", vulnerabilityId); + + var response = await client.GetVulnerabilityAsync(vulnerabilityId, effectiveTenant, cancellationToken).ConfigureAwait(false); + + if (response == null) + { + AnsiConsole.MarkupLine($"[yellow]Vulnerability not found:[/] {Markup.Escape(vulnerabilityId)}"); + Environment.ExitCode = 1; + return; + } + + if (emitJson) + { + var json = JsonSerializer.Serialize(response, new JsonSerializerOptions { WriteIndented = true }); + Console.WriteLine(json); + Environment.ExitCode = 0; + return; + } + + RenderVulnDetail(response); + Environment.ExitCode = 0; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + logger.LogWarning("Operation cancelled by user."); + Environment.ExitCode = 130; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to get vulnerability details."); + AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static void RenderVulnDetail(VulnDetailResponse vuln) + { + // Header panel with basic info + var severityColor = GetSeverityColor(vuln.Severity.Level); + var statusColor = GetVulnStatusColor(vuln.Status); + var vexColor = GetVexStatusColor(vuln.VexStatus); + + var headerGrid = new Grid(); + headerGrid.AddColumn(); + headerGrid.AddColumn(); + headerGrid.AddRow("[grey]Vulnerability ID:[/]", $"[bold]{Markup.Escape(vuln.VulnerabilityId)}[/]"); + headerGrid.AddRow("[grey]Status:[/]", $"[{statusColor}]{Markup.Escape(vuln.Status)}[/]"); + headerGrid.AddRow("[grey]Severity:[/]", $"[{severityColor}]{Markup.Escape(vuln.Severity.Level.ToUpperInvariant())}[/]" + + (vuln.Severity.Score.HasValue ? $" ({vuln.Severity.Score:F1})" : "")); + if (!string.IsNullOrWhiteSpace(vuln.VexStatus)) + headerGrid.AddRow("[grey]VEX Status:[/]", $"[{vexColor}]{Markup.Escape(vuln.VexStatus)}[/]"); + if (vuln.Aliases?.Count > 0) + headerGrid.AddRow("[grey]Aliases:[/]", Markup.Escape(string.Join(", ", vuln.Aliases))); + if (!string.IsNullOrWhiteSpace(vuln.Assignee)) + headerGrid.AddRow("[grey]Assignee:[/]", Markup.Escape(vuln.Assignee)); + if (vuln.DueDate.HasValue) + headerGrid.AddRow("[grey]Due Date:[/]", vuln.DueDate.Value.ToString("yyyy-MM-dd")); + if (vuln.PublishedAt.HasValue) + headerGrid.AddRow("[grey]Published:[/]", vuln.PublishedAt.Value.ToString("yyyy-MM-dd HH:mm UTC")); + if (vuln.UpdatedAt.HasValue) + headerGrid.AddRow("[grey]Updated:[/]", vuln.UpdatedAt.Value.ToString("yyyy-MM-dd HH:mm UTC")); + + var headerPanel = new Panel(headerGrid) + { + Header = new PanelHeader("[bold]Vulnerability Details[/]"), + Border = BoxBorder.Rounded + }; + AnsiConsole.Write(headerPanel); + AnsiConsole.WriteLine(); + + // Summary/Description + if (!string.IsNullOrWhiteSpace(vuln.Summary) || !string.IsNullOrWhiteSpace(vuln.Description)) + { + var descPanel = new Panel(Markup.Escape(vuln.Description ?? vuln.Summary ?? "")) + { + Header = new PanelHeader("[bold]Description[/]"), + Border = BoxBorder.Rounded + }; + AnsiConsole.Write(descPanel); + AnsiConsole.WriteLine(); + } + + // Affected Packages + if (vuln.AffectedPackages.Count > 0) + { + var pkgTable = new Table(); + pkgTable.AddColumn("[bold]Package[/]"); + pkgTable.AddColumn("[bold]Version[/]"); + pkgTable.AddColumn("[bold]Fixed In[/]"); + pkgTable.AddColumn("[bold]SBOM[/]"); + + foreach (var pkg in vuln.AffectedPackages) + { + var pkgName = pkg.Purl ?? pkg.Cpe ?? pkg.Name ?? "-"; + pkgTable.AddRow( + Markup.Escape(pkgName.Length > 60 ? pkgName[..57] + "..." : pkgName), + Markup.Escape(pkg.Version ?? "-"), + Markup.Escape(pkg.FixedIn ?? "-"), + Markup.Escape(pkg.SbomId?.Length > 20 ? pkg.SbomId[..17] + "..." : pkg.SbomId ?? "-")); + } + + AnsiConsole.MarkupLine("[bold]Affected Packages[/]"); + AnsiConsole.Write(pkgTable); + AnsiConsole.WriteLine(); + } + + // Policy Rationale + if (vuln.PolicyRationale != null) + { + var rationaleGrid = new Grid(); + rationaleGrid.AddColumn(); + rationaleGrid.AddColumn(); + rationaleGrid.AddRow("[grey]Policy:[/]", Markup.Escape($"{vuln.PolicyRationale.PolicyId} v{vuln.PolicyRationale.PolicyVersion}")); + if (!string.IsNullOrWhiteSpace(vuln.PolicyRationale.Summary)) + rationaleGrid.AddRow("[grey]Summary:[/]", Markup.Escape(vuln.PolicyRationale.Summary)); + + if (vuln.PolicyRationale.Rules?.Count > 0) + { + var rulesText = string.Join("\n", vuln.PolicyRationale.Rules.Select(r => + $" {r.Rule}: {r.Result}" + (r.Weight.HasValue ? $" (weight: {r.Weight:F2})" : ""))); + rationaleGrid.AddRow("[grey]Rules:[/]", Markup.Escape(rulesText)); + } + + var rationalePanel = new Panel(rationaleGrid) + { + Header = new PanelHeader("[bold]Policy Rationale[/]"), + Border = BoxBorder.Rounded + }; + AnsiConsole.Write(rationalePanel); + AnsiConsole.WriteLine(); + } + + // Evidence + if (vuln.Evidence?.Count > 0) + { + var evidenceTable = new Table(); + evidenceTable.AddColumn("[bold]Type[/]"); + evidenceTable.AddColumn("[bold]Source[/]"); + evidenceTable.AddColumn("[bold]Timestamp[/]"); + + foreach (var ev in vuln.Evidence.Take(10)) + { + evidenceTable.AddRow( + Markup.Escape(ev.Type), + Markup.Escape(ev.Source), + ev.Timestamp?.ToString("yyyy-MM-dd HH:mm") ?? "-"); + } + + AnsiConsole.MarkupLine("[bold]Evidence[/]"); + AnsiConsole.Write(evidenceTable); + if (vuln.Evidence.Count > 10) + AnsiConsole.MarkupLine($"[grey]... and {vuln.Evidence.Count - 10} more[/]"); + AnsiConsole.WriteLine(); + } + + // Dependency Paths + if (vuln.DependencyPaths?.Count > 0) + { + AnsiConsole.MarkupLine("[bold]Dependency Paths[/]"); + foreach (var path in vuln.DependencyPaths.Take(5)) + { + var pathStr = string.Join(" -> ", path.Path); + AnsiConsole.MarkupLine($" [grey]>[/] {Markup.Escape(pathStr.Length > 100 ? pathStr[..97] + "..." : pathStr)}"); + } + if (vuln.DependencyPaths.Count > 5) + AnsiConsole.MarkupLine($" [grey]... and {vuln.DependencyPaths.Count - 5} more paths[/]"); + AnsiConsole.WriteLine(); + } + + // Ledger (Workflow History) + if (vuln.Ledger?.Count > 0) + { + var ledgerTable = new Table(); + ledgerTable.AddColumn("[bold]Timestamp[/]"); + ledgerTable.AddColumn("[bold]Action[/]"); + ledgerTable.AddColumn("[bold]Actor[/]"); + ledgerTable.AddColumn("[bold]Status Change[/]"); + + foreach (var entry in vuln.Ledger.Take(10)) + { + var statusChange = !string.IsNullOrWhiteSpace(entry.FromStatus) && !string.IsNullOrWhiteSpace(entry.ToStatus) + ? $"{entry.FromStatus} -> {entry.ToStatus}" + : "-"; + ledgerTable.AddRow( + entry.Timestamp.ToString("yyyy-MM-dd HH:mm"), + Markup.Escape(entry.Action), + Markup.Escape(entry.Actor ?? "-"), + Markup.Escape(statusChange)); + } + + AnsiConsole.MarkupLine("[bold]Workflow History[/]"); + AnsiConsole.Write(ledgerTable); + if (vuln.Ledger.Count > 10) + AnsiConsole.MarkupLine($"[grey]... and {vuln.Ledger.Count - 10} more entries[/]"); + AnsiConsole.WriteLine(); + } + + // References + if (vuln.References?.Count > 0) + { + AnsiConsole.MarkupLine("[bold]References[/]"); + foreach (var refItem in vuln.References.Take(10)) + { + var title = !string.IsNullOrWhiteSpace(refItem.Title) ? refItem.Title : refItem.Type; + AnsiConsole.MarkupLine($" [grey]{Markup.Escape(title)}:[/] {Markup.Escape(refItem.Url)}"); + } + if (vuln.References.Count > 10) + AnsiConsole.MarkupLine($" [grey]... and {vuln.References.Count - 10} more references[/]"); + } + } + + // CLI-VULN-29-003: Vulnerability workflow handler + public static async Task HandleVulnWorkflowAsync( + IServiceProvider services, + string action, + IReadOnlyList vulnIds, + string? filterSeverity, + string? filterStatus, + string? filterPurl, + string? filterSbom, + string? tenant, + string? idempotencyKey, + bool emitJson, + bool verbose, + string? assignee, + string? comment, + string? justification, + string? dueDate, + string? fixVersion, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vuln-workflow"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.vuln.workflow", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", $"vuln {action.Replace("_", "-")}"); + activity?.SetTag("stellaops.cli.workflow.action", action); + using var duration = CliMetrics.MeasureCommandDuration($"vuln {action.Replace("_", "-")}"); + + try + { + var effectiveTenant = TenantProfileStore.GetEffectiveTenant(tenant); + if (!string.IsNullOrWhiteSpace(effectiveTenant)) + { + activity?.SetTag("stellaops.cli.tenant", effectiveTenant); + } + + // Validate that we have either vulnIds or filter criteria + var hasVulnIds = vulnIds.Count > 0; + var hasFilter = !string.IsNullOrWhiteSpace(filterSeverity) || + !string.IsNullOrWhiteSpace(filterStatus) || + !string.IsNullOrWhiteSpace(filterPurl) || + !string.IsNullOrWhiteSpace(filterSbom); + + if (!hasVulnIds && !hasFilter) + { + AnsiConsole.MarkupLine("[red]Error:[/] Either --vuln-id or filter options (--filter-severity, --filter-status, --filter-purl, --filter-sbom) are required."); + Environment.ExitCode = 1; + return; + } + + // Parse due date if provided + DateTimeOffset? parsedDueDate = null; + if (!string.IsNullOrWhiteSpace(dueDate)) + { + if (DateTimeOffset.TryParse(dueDate, out var parsed)) + { + parsedDueDate = parsed; + } + else + { + AnsiConsole.MarkupLine($"[red]Error:[/] Invalid due date format: {Markup.Escape(dueDate)}. Use ISO-8601 format (e.g., 2025-12-31)."); + Environment.ExitCode = 1; + return; + } + } + + // Build filter spec if filters provided + VulnFilterSpec? filterSpec = hasFilter + ? new VulnFilterSpec( + Severity: filterSeverity, + Status: filterStatus, + Purl: filterPurl, + SbomId: filterSbom) + : null; + + // Build request + var request = new VulnWorkflowRequest( + Action: action, + VulnerabilityIds: hasVulnIds ? vulnIds.ToList() : null, + Filter: filterSpec, + Assignee: assignee, + Comment: comment, + DueDate: parsedDueDate, + Justification: justification, + FixVersion: fixVersion, + IdempotencyKey: idempotencyKey); + + logger.LogDebug("Executing vulnerability workflow: action={Action}, vulnIds={VulnCount}, hasFilter={HasFilter}", + action, vulnIds.Count, hasFilter); + + var response = await client.ExecuteVulnWorkflowAsync(request, effectiveTenant, cancellationToken).ConfigureAwait(false); + + if (emitJson) + { + var jsonOptions = new JsonSerializerOptions { WriteIndented = true, PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; + var json = JsonSerializer.Serialize(response, jsonOptions); + AnsiConsole.WriteLine(json); + Environment.ExitCode = response.Success ? 0 : 1; + return; + } + + // Render result + var actionDisplay = action.Replace("_", " "); + if (response.Success) + { + AnsiConsole.MarkupLine($"[green]Success![/] {Markup.Escape(char.ToUpperInvariant(actionDisplay[0]) + actionDisplay[1..])} completed."); + } + else + { + AnsiConsole.MarkupLine($"[red]Operation completed with errors.[/]"); + } + + AnsiConsole.WriteLine(); + + var resultGrid = new Grid(); + resultGrid.AddColumn(); + resultGrid.AddColumn(); + resultGrid.AddRow("[grey]Action:[/]", Markup.Escape(actionDisplay)); + resultGrid.AddRow("[grey]Affected:[/]", response.AffectedCount.ToString()); + if (!string.IsNullOrWhiteSpace(response.IdempotencyKey)) + resultGrid.AddRow("[grey]Idempotency Key:[/]", Markup.Escape(response.IdempotencyKey)); + + AnsiConsole.Write(resultGrid); + + // Show affected IDs if not too many + if (response.AffectedIds != null && response.AffectedIds.Count > 0 && response.AffectedIds.Count <= 20) + { + AnsiConsole.WriteLine(); + AnsiConsole.MarkupLine("[bold]Affected Vulnerabilities:[/]"); + foreach (var id in response.AffectedIds) + { + AnsiConsole.MarkupLine($" [grey]>[/] {Markup.Escape(id)}"); + } + } + else if (response.AffectedIds != null && response.AffectedIds.Count > 20) + { + AnsiConsole.WriteLine(); + AnsiConsole.MarkupLine($"[grey]Affected {response.AffectedIds.Count} vulnerabilities (use --json to see full list)[/]"); + } + + // Show errors if any + if (response.Errors != null && response.Errors.Count > 0) + { + AnsiConsole.WriteLine(); + AnsiConsole.MarkupLine("[bold red]Errors:[/]"); + + var errorTable = new Table(); + errorTable.AddColumn("[bold]Vulnerability ID[/]"); + errorTable.AddColumn("[bold]Code[/]"); + errorTable.AddColumn("[bold]Message[/]"); + + foreach (var error in response.Errors.Take(10)) + { + errorTable.AddRow( + Markup.Escape(error.VulnerabilityId), + Markup.Escape(error.Code), + Markup.Escape(error.Message)); + } + + AnsiConsole.Write(errorTable); + + if (response.Errors.Count > 10) + { + AnsiConsole.MarkupLine($"[grey]... and {response.Errors.Count - 10} more errors[/]"); + } + } + + Environment.ExitCode = response.Success ? 0 : 1; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + logger.LogWarning("Operation cancelled by user."); + Environment.ExitCode = 130; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to execute vulnerability workflow action."); + AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + // CLI-VULN-29-004: Vulnerability simulate handler + public static async Task HandleVulnSimulateAsync( + IServiceProvider services, + string? policyId, + int? policyVersion, + IReadOnlyList vexOverrides, + string? severityThreshold, + IReadOnlyList sbomIds, + bool outputMarkdown, + bool changedOnly, + string? tenant, + bool emitJson, + string? outputFile, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vuln-simulate"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.vuln.simulate", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "vuln simulate"); + using var duration = CliMetrics.MeasureCommandDuration("vuln simulate"); + + try + { + var effectiveTenant = TenantProfileStore.GetEffectiveTenant(tenant); + if (!string.IsNullOrWhiteSpace(effectiveTenant)) + { + activity?.SetTag("stellaops.cli.tenant", effectiveTenant); + } + + // Parse VEX overrides + Dictionary? parsedVexOverrides = null; + if (vexOverrides.Count > 0) + { + parsedVexOverrides = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var override_ in vexOverrides) + { + var parts = override_.Split('=', 2); + if (parts.Length != 2) + { + AnsiConsole.MarkupLine($"[red]Error:[/] Invalid VEX override format: {Markup.Escape(override_)}. Use vulnId=status format."); + Environment.ExitCode = 1; + return; + } + parsedVexOverrides[parts[0].Trim()] = parts[1].Trim(); + } + } + + logger.LogDebug("Running vulnerability simulation: policyId={PolicyId}, policyVersion={PolicyVersion}, vexOverrides={OverrideCount}, sbomIds={SbomCount}", + policyId, policyVersion, vexOverrides.Count, sbomIds.Count); + + var request = new VulnSimulationRequest( + PolicyId: policyId, + PolicyVersion: policyVersion, + VexOverrides: parsedVexOverrides, + SeverityThreshold: severityThreshold, + SbomIds: sbomIds.Count > 0 ? sbomIds.ToList() : null, + OutputMarkdown: outputMarkdown || !string.IsNullOrWhiteSpace(outputFile)); + + var response = await client.SimulateVulnerabilitiesAsync(request, effectiveTenant, cancellationToken).ConfigureAwait(false); + + if (emitJson) + { + var jsonOptions = new JsonSerializerOptions { WriteIndented = true, PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; + var json = JsonSerializer.Serialize(response, jsonOptions); + AnsiConsole.WriteLine(json); + Environment.ExitCode = 0; + return; + } + + // Write markdown report to file if requested + if (!string.IsNullOrWhiteSpace(outputFile) && !string.IsNullOrWhiteSpace(response.MarkdownReport)) + { + var outputDir = Path.GetDirectoryName(outputFile); + if (!string.IsNullOrWhiteSpace(outputDir) && !Directory.Exists(outputDir)) + { + Directory.CreateDirectory(outputDir); + } + await File.WriteAllTextAsync(outputFile, response.MarkdownReport, cancellationToken).ConfigureAwait(false); + AnsiConsole.MarkupLine($"[green]Markdown report written to:[/] {Markup.Escape(outputFile)}"); + AnsiConsole.WriteLine(); + } + + // Render summary panel + var summaryGrid = new Grid(); + summaryGrid.AddColumn(); + summaryGrid.AddColumn(); + summaryGrid.AddRow("[grey]Total Evaluated:[/]", response.Summary.TotalEvaluated.ToString()); + summaryGrid.AddRow("[grey]Total Changed:[/]", response.Summary.TotalChanged > 0 + ? $"[yellow]{response.Summary.TotalChanged}[/]" + : "[green]0[/]"); + summaryGrid.AddRow("[grey]Status Upgrades:[/]", response.Summary.StatusUpgrades > 0 + ? $"[green]+{response.Summary.StatusUpgrades}[/]" + : "0"); + summaryGrid.AddRow("[grey]Status Downgrades:[/]", response.Summary.StatusDowngrades > 0 + ? $"[red]-{response.Summary.StatusDowngrades}[/]" + : "0"); + summaryGrid.AddRow("[grey]No Change:[/]", response.Summary.NoChange.ToString()); + + if (!string.IsNullOrWhiteSpace(policyId)) + summaryGrid.AddRow("[grey]Policy:[/]", $"{Markup.Escape(policyId)}" + (policyVersion.HasValue ? $" v{policyVersion}" : "")); + if (!string.IsNullOrWhiteSpace(severityThreshold)) + summaryGrid.AddRow("[grey]Severity Threshold:[/]", Markup.Escape(severityThreshold)); + + var summaryPanel = new Panel(summaryGrid) + { + Header = new PanelHeader("[bold]Simulation Summary[/]"), + Border = BoxBorder.Rounded + }; + AnsiConsole.Write(summaryPanel); + AnsiConsole.WriteLine(); + + // Render delta table + var items = changedOnly + ? response.Items.Where(i => i.Changed).ToList() + : response.Items; + + if (items.Count > 0) + { + var table = new Table(); + table.AddColumn(new TableColumn("[bold]Vulnerability ID[/]").LeftAligned()); + table.AddColumn(new TableColumn("[bold]Before[/]").Centered()); + table.AddColumn(new TableColumn("[bold]After[/]").Centered()); + table.AddColumn(new TableColumn("[bold]Change[/]").Centered()); + table.AddColumn(new TableColumn("[bold]Reason[/]").LeftAligned()); + + foreach (var item in items.Take(50)) + { + var beforeColor = GetVulnStatusColor(item.BeforeStatus); + var afterColor = GetVulnStatusColor(item.AfterStatus); + var changeIndicator = item.Changed + ? (IsStatusUpgrade(item.BeforeStatus, item.AfterStatus) ? "[green]UPGRADE[/]" : "[red]DOWNGRADE[/]") + : "[grey]--[/]"; + + table.AddRow( + Markup.Escape(item.VulnerabilityId), + $"[{beforeColor}]{Markup.Escape(item.BeforeStatus)}[/]", + $"[{afterColor}]{Markup.Escape(item.AfterStatus)}[/]", + changeIndicator, + Markup.Escape(item.ChangeReason ?? "-")); + } + + AnsiConsole.Write(table); + + if (items.Count > 50) + { + AnsiConsole.MarkupLine($"[grey]... and {items.Count - 50} more items (use --json for full list)[/]"); + } + + AnsiConsole.WriteLine(); + } + else if (changedOnly) + { + AnsiConsole.MarkupLine("[green]No vulnerabilities would change status with the simulated configuration.[/]"); + } + else + { + AnsiConsole.MarkupLine("[grey]No vulnerabilities in simulation scope.[/]"); + } + + // Print markdown to console if requested and not written to file + if (outputMarkdown && string.IsNullOrWhiteSpace(outputFile) && !string.IsNullOrWhiteSpace(response.MarkdownReport)) + { + AnsiConsole.WriteLine(); + AnsiConsole.MarkupLine("[bold]Markdown Report:[/]"); + AnsiConsole.WriteLine(response.MarkdownReport); + } + + Environment.ExitCode = 0; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + logger.LogWarning("Operation cancelled by user."); + Environment.ExitCode = 130; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to run vulnerability simulation."); + AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static bool IsStatusUpgrade(string before, string after) + { + // Status priority (lower is better): fixed > risk_accepted > false_positive > triaged > open + static int GetPriority(string status) => status.ToLowerInvariant() switch + { + "fixed" => 0, + "risk_accepted" => 1, + "false_positive" => 2, + "accepted" => 3, + "triaged" => 4, + "open" => 5, + _ => 10 + }; + + return GetPriority(after) < GetPriority(before); + } + + // CLI-VULN-29-005: Vulnerability export handler + public static async Task HandleVulnExportAsync( + IServiceProvider services, + IReadOnlyList vulnIds, + IReadOnlyList sbomIds, + string? policyId, + string format, + bool includeEvidence, + bool includeLedger, + bool signed, + string outputPath, + string? tenant, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vuln-export"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.vuln.export", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "vuln export"); + using var duration = CliMetrics.MeasureCommandDuration("vuln export"); + + try + { + var effectiveTenant = TenantProfileStore.GetEffectiveTenant(tenant); + if (!string.IsNullOrWhiteSpace(effectiveTenant)) + { + activity?.SetTag("stellaops.cli.tenant", effectiveTenant); + } + + if (string.IsNullOrWhiteSpace(outputPath)) + { + AnsiConsole.MarkupLine("[red]Error:[/] Output path is required."); + Environment.ExitCode = 1; + return; + } + + var outputDir = Path.GetDirectoryName(outputPath); + if (!string.IsNullOrWhiteSpace(outputDir) && !Directory.Exists(outputDir)) + { + Directory.CreateDirectory(outputDir); + } + + logger.LogDebug("Exporting vulnerability bundle: vulnIds={VulnCount}, sbomIds={SbomCount}, format={Format}, signed={Signed}", + vulnIds.Count, sbomIds.Count, format, signed); + + var request = new VulnExportRequest( + VulnerabilityIds: vulnIds.Count > 0 ? vulnIds.ToList() : null, + SbomIds: sbomIds.Count > 0 ? sbomIds.ToList() : null, + PolicyId: policyId, + Format: format, + IncludeEvidence: includeEvidence, + IncludeLedger: includeLedger, + Signed: signed); + + await AnsiConsole.Status() + .Spinner(Spinner.Known.Dots) + .StartAsync("Preparing export...", async ctx => + { + var exportResponse = await client.ExportVulnerabilitiesAsync(request, effectiveTenant, cancellationToken).ConfigureAwait(false); + + ctx.Status("Downloading export bundle..."); + + await using var downloadStream = await client.DownloadVulnExportAsync(exportResponse.ExportId, effectiveTenant, cancellationToken).ConfigureAwait(false); + await using var fileStream = File.Create(outputPath); + await downloadStream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false); + + AnsiConsole.MarkupLine($"[green]Export complete![/]"); + AnsiConsole.WriteLine(); + + var resultGrid = new Grid(); + resultGrid.AddColumn(); + resultGrid.AddColumn(); + resultGrid.AddRow("[grey]Output File:[/]", Markup.Escape(outputPath)); + resultGrid.AddRow("[grey]Items Exported:[/]", exportResponse.ItemCount.ToString()); + resultGrid.AddRow("[grey]Format:[/]", Markup.Escape(exportResponse.Format)); + resultGrid.AddRow("[grey]Signed:[/]", exportResponse.Signed ? "[green]Yes[/]" : "[yellow]No[/]"); + if (exportResponse.Signed) + { + if (!string.IsNullOrWhiteSpace(exportResponse.SignatureAlgorithm)) + resultGrid.AddRow("[grey]Signature Algorithm:[/]", Markup.Escape(exportResponse.SignatureAlgorithm)); + if (!string.IsNullOrWhiteSpace(exportResponse.SignatureKeyId)) + resultGrid.AddRow("[grey]Key ID:[/]", Markup.Escape(exportResponse.SignatureKeyId)); + } + if (!string.IsNullOrWhiteSpace(exportResponse.Digest)) + { + var digestDisplay = exportResponse.Digest.Length > 32 + ? exportResponse.Digest[..32] + "..." + : exportResponse.Digest; + resultGrid.AddRow("[grey]Digest:[/]", $"{exportResponse.DigestAlgorithm ?? "sha256"}:{Markup.Escape(digestDisplay)}"); + } + if (exportResponse.ExpiresAt.HasValue) + { + resultGrid.AddRow("[grey]Expires:[/]", exportResponse.ExpiresAt.Value.ToString("yyyy-MM-dd HH:mm UTC")); + } + + AnsiConsole.Write(resultGrid); + }); + + Environment.ExitCode = 0; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + logger.LogWarning("Operation cancelled by user."); + Environment.ExitCode = 130; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to export vulnerability bundle."); + AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + // CLI-VULN-29-005: Vulnerability export verify handler + public static async Task HandleVulnExportVerifyAsync( + IServiceProvider services, + string filePath, + string? expectedDigest, + string? publicKeyPath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vuln-export-verify"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.vuln.export.verify", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "vuln export verify"); + using var duration = CliMetrics.MeasureCommandDuration("vuln export verify"); + + try + { + if (string.IsNullOrWhiteSpace(filePath)) + { + AnsiConsole.MarkupLine("[red]Error:[/] File path is required."); + Environment.ExitCode = 1; + return; + } + + if (!File.Exists(filePath)) + { + AnsiConsole.MarkupLine($"[red]Error:[/] File not found: {Markup.Escape(filePath)}"); + Environment.ExitCode = 1; + return; + } + + logger.LogDebug("Verifying vulnerability export: file={FilePath}, expectedDigest={Digest}", filePath, expectedDigest ?? "(none)"); + + // Calculate SHA-256 digest + string actualDigest; + await using (var fileStream = File.OpenRead(filePath)) + { + using var sha256 = SHA256.Create(); + var hashBytes = await sha256.ComputeHashAsync(fileStream, cancellationToken).ConfigureAwait(false); + actualDigest = Convert.ToHexString(hashBytes).ToLowerInvariant(); + } + + var resultGrid = new Grid(); + resultGrid.AddColumn(); + resultGrid.AddColumn(); + resultGrid.AddRow("[grey]File:[/]", Markup.Escape(filePath)); + resultGrid.AddRow("[grey]Actual Digest:[/]", $"sha256:{Markup.Escape(actualDigest)}"); + + var digestValid = true; + if (!string.IsNullOrWhiteSpace(expectedDigest)) + { + var normalizedExpected = expectedDigest.Trim().ToLowerInvariant(); + if (normalizedExpected.StartsWith("sha256:")) + { + normalizedExpected = normalizedExpected[7..]; + } + + digestValid = string.Equals(actualDigest, normalizedExpected, StringComparison.OrdinalIgnoreCase); + resultGrid.AddRow("[grey]Expected Digest:[/]", $"sha256:{Markup.Escape(normalizedExpected)}"); + resultGrid.AddRow("[grey]Digest Match:[/]", digestValid ? "[green]YES[/]" : "[red]NO[/]"); + } + + var sigStatus = "not_verified"; + + if (!string.IsNullOrWhiteSpace(publicKeyPath)) + { + if (!File.Exists(publicKeyPath)) + { + resultGrid.AddRow("[grey]Signature:[/]", $"[red]Public key not found:[/] {Markup.Escape(publicKeyPath)}"); + } + else + { + // Look for .sig file + var sigPath = filePath + ".sig"; + if (File.Exists(sigPath)) + { + // Note: Actual signature verification would require cryptographic operations + // This is a placeholder that shows the structure + resultGrid.AddRow("[grey]Signature File:[/]", Markup.Escape(sigPath)); + resultGrid.AddRow("[grey]Public Key:[/]", Markup.Escape(publicKeyPath)); + resultGrid.AddRow("[grey]Signature Status:[/]", "[yellow]Verification requires runtime crypto support[/]"); + sigStatus = "requires_verification"; + } + else + { + resultGrid.AddRow("[grey]Signature:[/]", "[yellow]No .sig file found[/]"); + sigStatus = "no_signature"; + } + } + } + else + { + resultGrid.AddRow("[grey]Signature:[/]", "[grey]Skipped (no --public-key provided)[/]"); + sigStatus = "skipped"; + } + + var panel = new Panel(resultGrid) + { + Header = new PanelHeader("[bold]Vulnerability Export Verification[/]"), + Border = BoxBorder.Rounded + }; + AnsiConsole.Write(panel); + + if (!digestValid) + { + AnsiConsole.MarkupLine("[red]Verification FAILED: Digest mismatch[/]"); + Environment.ExitCode = 1; + } + else if (sigStatus == "no_signature" && !string.IsNullOrWhiteSpace(publicKeyPath)) + { + AnsiConsole.MarkupLine("[yellow]Warning: No signature file found for verification[/]"); + Environment.ExitCode = 0; + } + else + { + AnsiConsole.MarkupLine("[green]Verification completed[/]"); + Environment.ExitCode = 0; + } + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + logger.LogWarning("Operation cancelled by user."); + Environment.ExitCode = 130; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to verify vulnerability export."); + AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + #endregion } diff --git a/src/Cli/StellaOps.Cli/Services/AuthorityConsoleClient.cs b/src/Cli/StellaOps.Cli/Services/AuthorityConsoleClient.cs index d69c76f1f..bd5f24149 100644 --- a/src/Cli/StellaOps.Cli/Services/AuthorityConsoleClient.cs +++ b/src/Cli/StellaOps.Cli/Services/AuthorityConsoleClient.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; using System.Net.Http; using System.Net.Http.Json; +using System.Text.Json; using System.Threading; using System.Threading.Tasks; using StellaOps.Cli.Services.Models; @@ -9,11 +10,12 @@ using StellaOps.Cli.Services.Models; namespace StellaOps.Cli.Services; /// -/// HTTP client for Authority console endpoints (CLI-TEN-47-001). +/// HTTP client for Authority console endpoints (CLI-TEN-47-001, CLI-TEN-49-001). /// internal sealed class AuthorityConsoleClient : IAuthorityConsoleClient { private readonly HttpClient _httpClient; + private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web); public AuthorityConsoleClient(HttpClient httpClient) { @@ -38,4 +40,73 @@ internal sealed class AuthorityConsoleClient : IAuthorityConsoleClient return result?.Tenants ?? Array.Empty(); } + + public async Task MintTokenAsync(TokenMintRequest request, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + + using var httpRequest = new HttpRequestMessage(HttpMethod.Post, "console/token/mint") + { + Content = JsonContent.Create(request, options: JsonOptions) + }; + + if (!string.IsNullOrWhiteSpace(request.Tenant)) + { + httpRequest.Headers.Add("X-StellaOps-Tenant", request.Tenant.Trim().ToLowerInvariant()); + } + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + var result = await response.Content + .ReadFromJsonAsync(JsonOptions, cancellationToken) + .ConfigureAwait(false); + + return result ?? throw new InvalidOperationException("Token mint response was empty."); + } + + public async Task DelegateTokenAsync(TokenDelegateRequest request, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + + using var httpRequest = new HttpRequestMessage(HttpMethod.Post, "console/token/delegate") + { + Content = JsonContent.Create(request, options: JsonOptions) + }; + + if (!string.IsNullOrWhiteSpace(request.Tenant)) + { + httpRequest.Headers.Add("X-StellaOps-Tenant", request.Tenant.Trim().ToLowerInvariant()); + } + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + var result = await response.Content + .ReadFromJsonAsync(JsonOptions, cancellationToken) + .ConfigureAwait(false); + + return result ?? throw new InvalidOperationException("Token delegation response was empty."); + } + + public async Task IntrospectTokenAsync(string? tenant, CancellationToken cancellationToken) + { + using var httpRequest = new HttpRequestMessage(HttpMethod.Post, "console/token/introspect"); + + if (!string.IsNullOrWhiteSpace(tenant)) + { + httpRequest.Headers.Add("X-StellaOps-Tenant", tenant.Trim().ToLowerInvariant()); + } + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + return null; + } + + return await response.Content + .ReadFromJsonAsync(JsonOptions, cancellationToken) + .ConfigureAwait(false); + } } diff --git a/src/Cli/StellaOps.Cli/Services/BackendOperationsClient.cs b/src/Cli/StellaOps.Cli/Services/BackendOperationsClient.cs index 912d9132e..4699be130 100644 --- a/src/Cli/StellaOps.Cli/Services/BackendOperationsClient.cs +++ b/src/Cli/StellaOps.Cli/Services/BackendOperationsClient.cs @@ -18,10 +18,10 @@ using Microsoft.Extensions.Logging; using StellaOps.Auth.Abstractions; using StellaOps.Auth.Client; using StellaOps.Cli.Configuration; -using StellaOps.Cli.Services.Models; -using StellaOps.Cli.Services.Models.AdvisoryAi; -using StellaOps.Cli.Services.Models.Ruby; -using StellaOps.Cli.Services.Models.Transport; +using StellaOps.Cli.Services.Models; +using StellaOps.Cli.Services.Models.AdvisoryAi; +using StellaOps.Cli.Services.Models.Ruby; +using StellaOps.Cli.Services.Models.Transport; namespace StellaOps.Cli.Services; @@ -32,12 +32,12 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient private static readonly IReadOnlyDictionary EmptyMetadata = new ReadOnlyDictionary(new Dictionary(0, StringComparer.OrdinalIgnoreCase)); - private const string OperatorReasonParameterName = "operator_reason"; - private const string OperatorTicketParameterName = "operator_ticket"; - private const string BackfillReasonParameterName = "backfill_reason"; - private const string BackfillTicketParameterName = "backfill_ticket"; - private const string AdvisoryScopesHeader = "X-StellaOps-Scopes"; - private const string AdvisoryRunScope = "advisory:run"; + private const string OperatorReasonParameterName = "operator_reason"; + private const string OperatorTicketParameterName = "operator_ticket"; + private const string BackfillReasonParameterName = "backfill_reason"; + private const string BackfillTicketParameterName = "backfill_ticket"; + private const string AdvisoryScopesHeader = "X-StellaOps-Scopes"; + private const string AdvisoryRunScope = "advisory:run"; private readonly HttpClient _httpClient; private readonly StellaOpsCliOptions _options; @@ -859,9 +859,9 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient return MapPolicyFindingExplain(document); } - public async Task GetEntryTraceAsync(string scanId, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); + public async Task GetEntryTraceAsync(string scanId, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); if (string.IsNullOrWhiteSpace(scanId)) { @@ -883,174 +883,174 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient throw new InvalidOperationException(failure); } - var result = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - if (result is null) - { - throw new InvalidOperationException("EntryTrace response payload was empty."); - } - - return result; - } - - public async Task GetRubyPackagesAsync(string scanId, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(scanId)) - { - throw new ArgumentException("Scan identifier is required.", nameof(scanId)); - } - - var encodedScanId = Uri.EscapeDataString(scanId); - using var request = CreateRequest(HttpMethod.Get, $"api/scans/{encodedScanId}/ruby-packages"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (response.StatusCode == HttpStatusCode.NotFound) - { - return null; - } - - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - var inventory = await response.Content - .ReadFromJsonAsync(SerializerOptions, cancellationToken) - .ConfigureAwait(false); - - if (inventory is null) - { - throw new InvalidOperationException("Ruby package response payload was empty."); - } - - var normalizedScanId = string.IsNullOrWhiteSpace(inventory.ScanId) ? scanId : inventory.ScanId; - var normalizedDigest = inventory.ImageDigest ?? string.Empty; - var packages = inventory.Packages ?? Array.Empty(); - - return inventory with - { - ScanId = normalizedScanId, - ImageDigest = normalizedDigest, - Packages = packages - }; - } - - public async Task CreateAdvisoryPipelinePlanAsync( - AdvisoryAiTaskType taskType, - AdvisoryPipelinePlanRequestModel request, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(request); - - var taskSegment = taskType.ToString().ToLowerInvariant(); - var relative = $"v1/advisory-ai/pipeline/{taskSegment}"; - - var payload = new AdvisoryPipelinePlanRequestModel - { - TaskType = taskType, - AdvisoryKey = string.IsNullOrWhiteSpace(request.AdvisoryKey) ? string.Empty : request.AdvisoryKey.Trim(), - ArtifactId = string.IsNullOrWhiteSpace(request.ArtifactId) ? null : request.ArtifactId!.Trim(), - ArtifactPurl = string.IsNullOrWhiteSpace(request.ArtifactPurl) ? null : request.ArtifactPurl!.Trim(), - PolicyVersion = string.IsNullOrWhiteSpace(request.PolicyVersion) ? null : request.PolicyVersion!.Trim(), - Profile = string.IsNullOrWhiteSpace(request.Profile) ? "default" : request.Profile!.Trim(), - PreferredSections = request.PreferredSections is null - ? null - : request.PreferredSections - .Where(static section => !string.IsNullOrWhiteSpace(section)) - .Select(static section => section.Trim()) - .ToArray(), - ForceRefresh = request.ForceRefresh - }; - - using var httpRequest = CreateRequest(HttpMethod.Post, relative); - ApplyAdvisoryAiEndpoint(httpRequest, taskType); - await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); - httpRequest.Content = JsonContent.Create(payload, options: SerializerOptions); - - using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - try - { - var plan = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - if (plan is null) - { - throw new InvalidOperationException("Advisory AI plan response was empty."); - } - - return plan; - } - catch (JsonException ex) - { - var raw = response.Content is null - ? string.Empty - : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse advisory plan response. {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - } - - public async Task TryGetAdvisoryPipelineOutputAsync( - string cacheKey, - AdvisoryAiTaskType taskType, - string profile, - CancellationToken cancellationToken) - { - if (string.IsNullOrWhiteSpace(cacheKey)) - { - throw new ArgumentException("Cache key is required.", nameof(cacheKey)); - } - - var encodedKey = Uri.EscapeDataString(cacheKey); - var taskSegment = Uri.EscapeDataString(taskType.ToString().ToLowerInvariant()); - var resolvedProfile = string.IsNullOrWhiteSpace(profile) ? "default" : profile.Trim(); - var relative = $"v1/advisory-ai/outputs/{encodedKey}?taskType={taskSegment}&profile={Uri.EscapeDataString(resolvedProfile)}"; - - using var request = CreateRequest(HttpMethod.Get, relative); - ApplyAdvisoryAiEndpoint(request, taskType); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (response.StatusCode == HttpStatusCode.NotFound) - { - return null; - } - - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - try - { - return await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = response.Content is null - ? string.Empty - : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse advisory output response. {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - } - - public async Task> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - + var result = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + if (result is null) + { + throw new InvalidOperationException("EntryTrace response payload was empty."); + } + + return result; + } + + public async Task GetRubyPackagesAsync(string scanId, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (string.IsNullOrWhiteSpace(scanId)) + { + throw new ArgumentException("Scan identifier is required.", nameof(scanId)); + } + + var encodedScanId = Uri.EscapeDataString(scanId); + using var request = CreateRequest(HttpMethod.Get, $"api/scans/{encodedScanId}/ruby-packages"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (response.StatusCode == HttpStatusCode.NotFound) + { + return null; + } + + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + var inventory = await response.Content + .ReadFromJsonAsync(SerializerOptions, cancellationToken) + .ConfigureAwait(false); + + if (inventory is null) + { + throw new InvalidOperationException("Ruby package response payload was empty."); + } + + var normalizedScanId = string.IsNullOrWhiteSpace(inventory.ScanId) ? scanId : inventory.ScanId; + var normalizedDigest = inventory.ImageDigest ?? string.Empty; + var packages = inventory.Packages ?? Array.Empty(); + + return inventory with + { + ScanId = normalizedScanId, + ImageDigest = normalizedDigest, + Packages = packages + }; + } + + public async Task CreateAdvisoryPipelinePlanAsync( + AdvisoryAiTaskType taskType, + AdvisoryPipelinePlanRequestModel request, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + + var taskSegment = taskType.ToString().ToLowerInvariant(); + var relative = $"v1/advisory-ai/pipeline/{taskSegment}"; + + var payload = new AdvisoryPipelinePlanRequestModel + { + TaskType = taskType, + AdvisoryKey = string.IsNullOrWhiteSpace(request.AdvisoryKey) ? string.Empty : request.AdvisoryKey.Trim(), + ArtifactId = string.IsNullOrWhiteSpace(request.ArtifactId) ? null : request.ArtifactId!.Trim(), + ArtifactPurl = string.IsNullOrWhiteSpace(request.ArtifactPurl) ? null : request.ArtifactPurl!.Trim(), + PolicyVersion = string.IsNullOrWhiteSpace(request.PolicyVersion) ? null : request.PolicyVersion!.Trim(), + Profile = string.IsNullOrWhiteSpace(request.Profile) ? "default" : request.Profile!.Trim(), + PreferredSections = request.PreferredSections is null + ? null + : request.PreferredSections + .Where(static section => !string.IsNullOrWhiteSpace(section)) + .Select(static section => section.Trim()) + .ToArray(), + ForceRefresh = request.ForceRefresh + }; + + using var httpRequest = CreateRequest(HttpMethod.Post, relative); + ApplyAdvisoryAiEndpoint(httpRequest, taskType); + await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); + httpRequest.Content = JsonContent.Create(payload, options: SerializerOptions); + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + try + { + var plan = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + if (plan is null) + { + throw new InvalidOperationException("Advisory AI plan response was empty."); + } + + return plan; + } + catch (JsonException ex) + { + var raw = response.Content is null + ? string.Empty + : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse advisory plan response. {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + } + + public async Task TryGetAdvisoryPipelineOutputAsync( + string cacheKey, + AdvisoryAiTaskType taskType, + string profile, + CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(cacheKey)) + { + throw new ArgumentException("Cache key is required.", nameof(cacheKey)); + } + + var encodedKey = Uri.EscapeDataString(cacheKey); + var taskSegment = Uri.EscapeDataString(taskType.ToString().ToLowerInvariant()); + var resolvedProfile = string.IsNullOrWhiteSpace(profile) ? "default" : profile.Trim(); + var relative = $"v1/advisory-ai/outputs/{encodedKey}?taskType={taskSegment}&profile={Uri.EscapeDataString(resolvedProfile)}"; + + using var request = CreateRequest(HttpMethod.Get, relative); + ApplyAdvisoryAiEndpoint(request, taskType); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (response.StatusCode == HttpStatusCode.NotFound) + { + return null; + } + + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + try + { + return await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = response.Content is null + ? string.Empty + : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse advisory output response. {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + } + + public async Task> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + var query = includeDisabled ? "?includeDisabled=true" : string.Empty; using var request = CreateRequest(HttpMethod.Get, $"excititor/providers{query}"); await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); @@ -1937,44 +1937,44 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient return string.IsNullOrWhiteSpace(value) ? null : value.Trim(); } - private void ApplyAdvisoryAiEndpoint(HttpRequestMessage request, AdvisoryAiTaskType taskType) - { - if (request is null) - { - throw new ArgumentNullException(nameof(request)); - } - - var requestUri = request.RequestUri ?? throw new InvalidOperationException("Request URI was not initialized."); - - if (!string.IsNullOrWhiteSpace(_options.AdvisoryAiUrl) && - Uri.TryCreate(_options.AdvisoryAiUrl, UriKind.Absolute, out var advisoryBase)) - { - if (!requestUri.IsAbsoluteUri) - { - request.RequestUri = new Uri(advisoryBase, requestUri.ToString()); - } - } - else if (!string.IsNullOrWhiteSpace(_options.AdvisoryAiUrl)) - { - throw new InvalidOperationException($"Advisory AI URL '{_options.AdvisoryAiUrl}' is not a valid absolute URI."); - } - else - { - EnsureBackendConfigured(); - } - - var taskScope = $"advisory:{taskType.ToString().ToLowerInvariant()}"; - var combined = $"{AdvisoryRunScope} {taskScope}"; - - if (request.Headers.Contains(AdvisoryScopesHeader)) - { - request.Headers.Remove(AdvisoryScopesHeader); - } - - request.Headers.TryAddWithoutValidation(AdvisoryScopesHeader, combined); - } - - private HttpRequestMessage CreateRequest(HttpMethod method, string relativeUri) + private void ApplyAdvisoryAiEndpoint(HttpRequestMessage request, AdvisoryAiTaskType taskType) + { + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + var requestUri = request.RequestUri ?? throw new InvalidOperationException("Request URI was not initialized."); + + if (!string.IsNullOrWhiteSpace(_options.AdvisoryAiUrl) && + Uri.TryCreate(_options.AdvisoryAiUrl, UriKind.Absolute, out var advisoryBase)) + { + if (!requestUri.IsAbsoluteUri) + { + request.RequestUri = new Uri(advisoryBase, requestUri.ToString()); + } + } + else if (!string.IsNullOrWhiteSpace(_options.AdvisoryAiUrl)) + { + throw new InvalidOperationException($"Advisory AI URL '{_options.AdvisoryAiUrl}' is not a valid absolute URI."); + } + else + { + EnsureBackendConfigured(); + } + + var taskScope = $"advisory:{taskType.ToString().ToLowerInvariant()}"; + var combined = $"{AdvisoryRunScope} {taskScope}"; + + if (request.Headers.Contains(AdvisoryScopesHeader)) + { + request.Headers.Remove(AdvisoryScopesHeader); + } + + request.Headers.TryAddWithoutValidation(AdvisoryScopesHeader, combined); + } + + private HttpRequestMessage CreateRequest(HttpMethod method, string relativeUri) { if (!Uri.TryCreate(relativeUri, UriKind.RelativeOrAbsolute, out var requestUri)) { @@ -2857,4 +2857,469 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient var fallbackSeconds = Math.Min(60, Math.Pow(2, attempt)); return TimeSpan.FromSeconds(fallbackSeconds); } + + // CLI-VEX-30-001: VEX consensus list + public async Task ListVexConsensusAsync(VexConsensusListRequest request, string? tenant, CancellationToken cancellationToken) + { + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + EnsureBackendConfigured(); + + var queryParams = new List(); + if (!string.IsNullOrWhiteSpace(request.VulnerabilityId)) + queryParams.Add($"vulnerabilityId={Uri.EscapeDataString(request.VulnerabilityId)}"); + if (!string.IsNullOrWhiteSpace(request.ProductKey)) + queryParams.Add($"productKey={Uri.EscapeDataString(request.ProductKey)}"); + if (!string.IsNullOrWhiteSpace(request.Purl)) + queryParams.Add($"purl={Uri.EscapeDataString(request.Purl)}"); + if (!string.IsNullOrWhiteSpace(request.Status)) + queryParams.Add($"status={Uri.EscapeDataString(request.Status)}"); + if (!string.IsNullOrWhiteSpace(request.PolicyVersion)) + queryParams.Add($"policyVersion={Uri.EscapeDataString(request.PolicyVersion)}"); + if (request.Limit.HasValue) + queryParams.Add($"limit={request.Limit.Value}"); + if (request.Offset.HasValue) + queryParams.Add($"offset={request.Offset.Value}"); + + var queryString = queryParams.Count > 0 ? "?" + string.Join("&", queryParams) : string.Empty; + var relative = $"api/vex/consensus{queryString}"; + + using var httpRequest = CreateRequest(HttpMethod.Get, relative); + if (!string.IsNullOrWhiteSpace(tenant)) + { + httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim()); + } + + await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"VEX consensus list failed: {message}"); + } + + VexConsensusListResponse? result; + try + { + result = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse VEX consensus list response: {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + + if (result is null) + { + throw new InvalidOperationException("VEX consensus list response was empty."); + } + + return result; + } + + // CLI-VEX-30-002: VEX consensus detail + public async Task GetVexConsensusAsync(string vulnerabilityId, string productKey, string? tenant, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(vulnerabilityId)) + { + throw new ArgumentException("Vulnerability ID must be provided.", nameof(vulnerabilityId)); + } + + if (string.IsNullOrWhiteSpace(productKey)) + { + throw new ArgumentException("Product key must be provided.", nameof(productKey)); + } + + EnsureBackendConfigured(); + + var encodedVulnId = Uri.EscapeDataString(vulnerabilityId.Trim()); + var encodedProductKey = Uri.EscapeDataString(productKey.Trim()); + var relative = $"api/vex/consensus/{encodedVulnId}/{encodedProductKey}"; + + using var httpRequest = CreateRequest(HttpMethod.Get, relative); + if (!string.IsNullOrWhiteSpace(tenant)) + { + httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim()); + } + + await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + if (response.StatusCode == HttpStatusCode.NotFound) + { + return null; + } + + if (!response.IsSuccessStatusCode) + { + var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"VEX consensus get failed: {message}"); + } + + VexConsensusDetailResponse? result; + try + { + result = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse VEX consensus detail response: {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + + return result; + } + + // CLI-VEX-30-003: VEX simulation + public async Task SimulateVexConsensusAsync(VexSimulationRequest request, string? tenant, CancellationToken cancellationToken) + { + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + EnsureBackendConfigured(); + + var relative = "api/vex/consensus/simulate"; + + using var httpRequest = CreateRequest(HttpMethod.Post, relative); + if (!string.IsNullOrWhiteSpace(tenant)) + { + httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim()); + } + + var jsonContent = JsonSerializer.Serialize(request, SerializerOptions); + httpRequest.Content = new StringContent(jsonContent, Encoding.UTF8, "application/json"); + + await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"VEX consensus simulation failed: {message}"); + } + + VexSimulationResponse? result; + try + { + result = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse VEX simulation response: {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + + if (result is null) + { + throw new InvalidOperationException("VEX simulation response was empty."); + } + + return result; + } + + // CLI-VEX-30-004: VEX export + public async Task ExportVexConsensusAsync(VexExportRequest request, string? tenant, CancellationToken cancellationToken) + { + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + EnsureBackendConfigured(); + + var relative = "api/vex/consensus/export"; + + using var httpRequest = CreateRequest(HttpMethod.Post, relative); + if (!string.IsNullOrWhiteSpace(tenant)) + { + httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim()); + } + + var jsonContent = JsonSerializer.Serialize(request, SerializerOptions); + httpRequest.Content = new StringContent(jsonContent, Encoding.UTF8, "application/json"); + + await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"VEX consensus export failed: {message}"); + } + + VexExportResponse? result; + try + { + result = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse VEX export response: {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + + if (result is null) + { + throw new InvalidOperationException("VEX export response was empty."); + } + + return result; + } + + public async Task DownloadVexExportAsync(string exportId, string? tenant, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(exportId)) + { + throw new ArgumentException("Export ID must be provided.", nameof(exportId)); + } + + EnsureBackendConfigured(); + + var encodedExportId = Uri.EscapeDataString(exportId.Trim()); + var relative = $"api/vex/consensus/export/{encodedExportId}/download"; + + using var httpRequest = CreateRequest(HttpMethod.Get, relative); + if (!string.IsNullOrWhiteSpace(tenant)) + { + httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim()); + } + + await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); + + var response = await _httpClient.SendAsync(httpRequest, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"VEX export download failed: {message}"); + } + + return await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + } + + // CLI-VULN-29-001: Vulnerability explorer list + public async Task ListVulnerabilitiesAsync(VulnListRequest request, string? tenant, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + var queryParams = new List(); + if (!string.IsNullOrWhiteSpace(request.VulnerabilityId)) + queryParams.Add($"vulnerabilityId={Uri.EscapeDataString(request.VulnerabilityId)}"); + if (!string.IsNullOrWhiteSpace(request.Severity)) + queryParams.Add($"severity={Uri.EscapeDataString(request.Severity)}"); + if (!string.IsNullOrWhiteSpace(request.Status)) + queryParams.Add($"status={Uri.EscapeDataString(request.Status)}"); + if (!string.IsNullOrWhiteSpace(request.Purl)) + queryParams.Add($"purl={Uri.EscapeDataString(request.Purl)}"); + if (!string.IsNullOrWhiteSpace(request.Cpe)) + queryParams.Add($"cpe={Uri.EscapeDataString(request.Cpe)}"); + if (!string.IsNullOrWhiteSpace(request.SbomId)) + queryParams.Add($"sbomId={Uri.EscapeDataString(request.SbomId)}"); + if (!string.IsNullOrWhiteSpace(request.PolicyId)) + queryParams.Add($"policyId={Uri.EscapeDataString(request.PolicyId)}"); + if (request.PolicyVersion.HasValue) + queryParams.Add($"policyVersion={request.PolicyVersion.Value}"); + if (!string.IsNullOrWhiteSpace(request.GroupBy)) + queryParams.Add($"groupBy={Uri.EscapeDataString(request.GroupBy)}"); + if (request.Limit.HasValue) + queryParams.Add($"limit={request.Limit.Value}"); + if (request.Offset.HasValue) + queryParams.Add($"offset={request.Offset.Value}"); + if (!string.IsNullOrWhiteSpace(request.Cursor)) + queryParams.Add($"cursor={Uri.EscapeDataString(request.Cursor)}"); + + var relative = "api/vuln"; + if (queryParams.Count > 0) + relative += "?" + string.Join("&", queryParams); + + using var httpRequest = CreateRequest(HttpMethod.Get, relative); + if (!string.IsNullOrWhiteSpace(tenant)) + { + httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim()); + } + + await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to list vulnerabilities: {message}"); + } + + var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + var result = JsonSerializer.Deserialize(json, SerializerOptions); + return result ?? new VulnListResponse(Array.Empty(), 0, 0, 0, false); + } + + // CLI-VULN-29-002: Vulnerability detail + public async Task GetVulnerabilityAsync(string vulnerabilityId, string? tenant, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(vulnerabilityId)) + { + throw new ArgumentException("Vulnerability ID must be provided.", nameof(vulnerabilityId)); + } + + EnsureBackendConfigured(); + + var encodedVulnId = Uri.EscapeDataString(vulnerabilityId.Trim()); + var relative = $"api/vuln/{encodedVulnId}"; + + using var httpRequest = CreateRequest(HttpMethod.Get, relative); + if (!string.IsNullOrWhiteSpace(tenant)) + { + httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim()); + } + + await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + if (response.StatusCode == System.Net.HttpStatusCode.NotFound) + { + return null; + } + + if (!response.IsSuccessStatusCode) + { + var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to get vulnerability details: {message}"); + } + + var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + return JsonSerializer.Deserialize(json, SerializerOptions); + } + + // CLI-VULN-29-003: Vulnerability workflow operations + public async Task ExecuteVulnWorkflowAsync(VulnWorkflowRequest request, string? tenant, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + var relative = "api/vuln/workflow"; + var jsonPayload = JsonSerializer.Serialize(request, SerializerOptions); + + using var httpRequest = CreateRequest(HttpMethod.Post, relative); + httpRequest.Content = new StringContent(jsonPayload, Encoding.UTF8, "application/json"); + if (!string.IsNullOrWhiteSpace(tenant)) + { + httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim()); + } + + await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Workflow operation failed: {message}"); + } + + var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + var result = JsonSerializer.Deserialize(json, SerializerOptions); + return result ?? new VulnWorkflowResponse(false, request.Action, 0); + } + + // CLI-VULN-29-004: Vulnerability simulation + public async Task SimulateVulnerabilitiesAsync(VulnSimulationRequest request, string? tenant, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + var relative = "api/vuln/simulate"; + var jsonPayload = JsonSerializer.Serialize(request, SerializerOptions); + + using var httpRequest = CreateRequest(HttpMethod.Post, relative); + httpRequest.Content = new StringContent(jsonPayload, Encoding.UTF8, "application/json"); + if (!string.IsNullOrWhiteSpace(tenant)) + { + httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim()); + } + + await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Vulnerability simulation failed: {message}"); + } + + var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + var result = JsonSerializer.Deserialize(json, SerializerOptions); + return result ?? new VulnSimulationResponse(Array.Empty(), new VulnSimulationSummary(0, 0, 0, 0, 0)); + } + + // CLI-VULN-29-005: Vulnerability export + public async Task ExportVulnerabilitiesAsync(VulnExportRequest request, string? tenant, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + var relative = "api/vuln/export"; + var jsonPayload = JsonSerializer.Serialize(request, SerializerOptions); + + using var httpRequest = CreateRequest(HttpMethod.Post, relative); + httpRequest.Content = new StringContent(jsonPayload, Encoding.UTF8, "application/json"); + if (!string.IsNullOrWhiteSpace(tenant)) + { + httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim()); + } + + await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Vulnerability export failed: {message}"); + } + + var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + var result = JsonSerializer.Deserialize(json, SerializerOptions); + return result ?? throw new InvalidOperationException("Failed to parse export response."); + } + + public async Task DownloadVulnExportAsync(string exportId, string? tenant, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(exportId)) + { + throw new ArgumentException("Export ID must be provided.", nameof(exportId)); + } + + EnsureBackendConfigured(); + + var encodedExportId = Uri.EscapeDataString(exportId.Trim()); + var relative = $"api/vuln/export/{encodedExportId}/download"; + + using var httpRequest = CreateRequest(HttpMethod.Get, relative); + if (!string.IsNullOrWhiteSpace(tenant)) + { + httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim()); + } + + await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); + + var response = await _httpClient.SendAsync(httpRequest, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Vulnerability export download failed: {message}"); + } + + return await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + } } diff --git a/src/Cli/StellaOps.Cli/Services/IAuthorityConsoleClient.cs b/src/Cli/StellaOps.Cli/Services/IAuthorityConsoleClient.cs index 71f53ba51..f9661e80e 100644 --- a/src/Cli/StellaOps.Cli/Services/IAuthorityConsoleClient.cs +++ b/src/Cli/StellaOps.Cli/Services/IAuthorityConsoleClient.cs @@ -6,7 +6,7 @@ using StellaOps.Cli.Services.Models; namespace StellaOps.Cli.Services; /// -/// Client for Authority console endpoints (CLI-TEN-47-001). +/// Client for Authority console endpoints (CLI-TEN-47-001, CLI-TEN-49-001). /// internal interface IAuthorityConsoleClient { @@ -14,4 +14,19 @@ internal interface IAuthorityConsoleClient /// Lists available tenants for the authenticated principal. /// Task> ListTenantsAsync(string tenant, CancellationToken cancellationToken); + + /// + /// Mints a service account token (CLI-TEN-49-001). + /// + Task MintTokenAsync(TokenMintRequest request, CancellationToken cancellationToken); + + /// + /// Delegates a token to another principal (CLI-TEN-49-001). + /// + Task DelegateTokenAsync(TokenDelegateRequest request, CancellationToken cancellationToken); + + /// + /// Introspects the current token for impersonation/delegation info (CLI-TEN-49-001). + /// + Task IntrospectTokenAsync(string? tenant, CancellationToken cancellationToken); } diff --git a/src/Cli/StellaOps.Cli/Services/IBackendOperationsClient.cs b/src/Cli/StellaOps.Cli/Services/IBackendOperationsClient.cs index a3ca79f92..cde81ff5a 100644 --- a/src/Cli/StellaOps.Cli/Services/IBackendOperationsClient.cs +++ b/src/Cli/StellaOps.Cli/Services/IBackendOperationsClient.cs @@ -6,11 +6,11 @@ using StellaOps.Cli.Configuration; using StellaOps.Cli.Services.Models; using StellaOps.Cli.Services.Models.AdvisoryAi; using StellaOps.Cli.Services.Models.Ruby; - -namespace StellaOps.Cli.Services; - -internal interface IBackendOperationsClient -{ + +namespace StellaOps.Cli.Services; + +internal interface IBackendOperationsClient +{ Task DownloadScannerAsync(string channel, string outputPath, bool overwrite, bool verbose, CancellationToken cancellationToken); Task UploadScanResultsAsync(string filePath, CancellationToken cancellationToken); @@ -54,4 +54,33 @@ internal interface IBackendOperationsClient Task CreateAdvisoryPipelinePlanAsync(AdvisoryAiTaskType taskType, AdvisoryPipelinePlanRequestModel request, CancellationToken cancellationToken); Task TryGetAdvisoryPipelineOutputAsync(string cacheKey, AdvisoryAiTaskType taskType, string profile, CancellationToken cancellationToken); + + // CLI-VEX-30-001: VEX consensus operations + Task ListVexConsensusAsync(VexConsensusListRequest request, string? tenant, CancellationToken cancellationToken); + + // CLI-VEX-30-002: VEX consensus detail + Task GetVexConsensusAsync(string vulnerabilityId, string productKey, string? tenant, CancellationToken cancellationToken); + + // CLI-VEX-30-003: VEX simulation + Task SimulateVexConsensusAsync(VexSimulationRequest request, string? tenant, CancellationToken cancellationToken); + + // CLI-VEX-30-004: VEX export + Task ExportVexConsensusAsync(VexExportRequest request, string? tenant, CancellationToken cancellationToken); + Task DownloadVexExportAsync(string exportId, string? tenant, CancellationToken cancellationToken); + + // CLI-VULN-29-001: Vulnerability explorer list + Task ListVulnerabilitiesAsync(VulnListRequest request, string? tenant, CancellationToken cancellationToken); + + // CLI-VULN-29-002: Vulnerability detail + Task GetVulnerabilityAsync(string vulnerabilityId, string? tenant, CancellationToken cancellationToken); + + // CLI-VULN-29-003: Vulnerability workflow operations + Task ExecuteVulnWorkflowAsync(VulnWorkflowRequest request, string? tenant, CancellationToken cancellationToken); + + // CLI-VULN-29-004: Vulnerability simulation + Task SimulateVulnerabilitiesAsync(VulnSimulationRequest request, string? tenant, CancellationToken cancellationToken); + + // CLI-VULN-29-005: Vulnerability export + Task ExportVulnerabilitiesAsync(VulnExportRequest request, string? tenant, CancellationToken cancellationToken); + Task DownloadVulnExportAsync(string exportId, string? tenant, CancellationToken cancellationToken); } diff --git a/src/Cli/StellaOps.Cli/Services/Models/TenantModels.cs b/src/Cli/StellaOps.Cli/Services/Models/TenantModels.cs index 23fcb53c4..8e6f5ed05 100644 --- a/src/Cli/StellaOps.Cli/Services/Models/TenantModels.cs +++ b/src/Cli/StellaOps.Cli/Services/Models/TenantModels.cs @@ -35,3 +35,60 @@ internal sealed record TenantProfile [JsonPropertyName("lastUpdated")] public DateTimeOffset? LastUpdated { get; init; } } + +// CLI-TEN-49-001: Token minting and delegation models + +/// +/// Request to mint a service account token. +/// +internal sealed record TokenMintRequest( + [property: JsonPropertyName("serviceAccountId")] string ServiceAccountId, + [property: JsonPropertyName("scopes")] IReadOnlyList Scopes, + [property: JsonPropertyName("expiresInSeconds")] int? ExpiresInSeconds = null, + [property: JsonPropertyName("tenant")] string? Tenant = null, + [property: JsonPropertyName("reason")] string? Reason = null); + +/// +/// Response from token minting. +/// +internal sealed record TokenMintResponse( + [property: JsonPropertyName("accessToken")] string AccessToken, + [property: JsonPropertyName("tokenType")] string TokenType, + [property: JsonPropertyName("expiresAt")] DateTimeOffset ExpiresAt, + [property: JsonPropertyName("scopes")] IReadOnlyList Scopes, + [property: JsonPropertyName("tokenId")] string? TokenId = null); + +/// +/// Request to delegate a token to another principal. +/// +internal sealed record TokenDelegateRequest( + [property: JsonPropertyName("delegateTo")] string DelegateTo, + [property: JsonPropertyName("scopes")] IReadOnlyList Scopes, + [property: JsonPropertyName("expiresInSeconds")] int? ExpiresInSeconds = null, + [property: JsonPropertyName("tenant")] string? Tenant = null, + [property: JsonPropertyName("reason")] string? Reason = null); + +/// +/// Response from token delegation. +/// +internal sealed record TokenDelegateResponse( + [property: JsonPropertyName("accessToken")] string AccessToken, + [property: JsonPropertyName("tokenType")] string TokenType, + [property: JsonPropertyName("expiresAt")] DateTimeOffset ExpiresAt, + [property: JsonPropertyName("delegationId")] string DelegationId, + [property: JsonPropertyName("originalSubject")] string OriginalSubject, + [property: JsonPropertyName("delegatedSubject")] string DelegatedSubject, + [property: JsonPropertyName("scopes")] IReadOnlyList Scopes); + +/// +/// Token introspection response for impersonation banner. +/// +internal sealed record TokenIntrospectionResponse( + [property: JsonPropertyName("active")] bool Active, + [property: JsonPropertyName("sub")] string? Subject = null, + [property: JsonPropertyName("clientId")] string? ClientId = null, + [property: JsonPropertyName("scope")] string? Scope = null, + [property: JsonPropertyName("exp")] long? ExpiresAt = null, + [property: JsonPropertyName("iat")] long? IssuedAt = null, + [property: JsonPropertyName("delegatedBy")] string? DelegatedBy = null, + [property: JsonPropertyName("delegationReason")] string? DelegationReason = null); diff --git a/src/Cli/StellaOps.Cli/Services/Models/VexModels.cs b/src/Cli/StellaOps.Cli/Services/Models/VexModels.cs new file mode 100644 index 000000000..e54b72ecf --- /dev/null +++ b/src/Cli/StellaOps.Cli/Services/Models/VexModels.cs @@ -0,0 +1,258 @@ +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Cli.Services.Models; + +// CLI-VEX-30-001: VEX consensus models for CLI + +/// +/// VEX consensus list request parameters. +/// +internal sealed record VexConsensusListRequest( + [property: JsonPropertyName("vulnerabilityId")] string? VulnerabilityId = null, + [property: JsonPropertyName("productKey")] string? ProductKey = null, + [property: JsonPropertyName("purl")] string? Purl = null, + [property: JsonPropertyName("status")] string? Status = null, + [property: JsonPropertyName("policyVersion")] string? PolicyVersion = null, + [property: JsonPropertyName("limit")] int? Limit = null, + [property: JsonPropertyName("offset")] int? Offset = null); + +/// +/// Paginated VEX consensus list response. +/// +internal sealed record VexConsensusListResponse( + [property: JsonPropertyName("items")] IReadOnlyList Items, + [property: JsonPropertyName("total")] int Total, + [property: JsonPropertyName("limit")] int Limit, + [property: JsonPropertyName("offset")] int Offset, + [property: JsonPropertyName("hasMore")] bool HasMore); + +/// +/// VEX consensus item from the API. +/// +internal sealed record VexConsensusItem( + [property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId, + [property: JsonPropertyName("product")] VexProductInfo Product, + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("calculatedAt")] DateTimeOffset CalculatedAt, + [property: JsonPropertyName("sources")] IReadOnlyList Sources, + [property: JsonPropertyName("conflicts")] IReadOnlyList? Conflicts = null, + [property: JsonPropertyName("policyVersion")] string? PolicyVersion = null, + [property: JsonPropertyName("policyDigest")] string? PolicyDigest = null, + [property: JsonPropertyName("summary")] string? Summary = null); + +/// +/// VEX product information. +/// +internal sealed record VexProductInfo( + [property: JsonPropertyName("key")] string Key, + [property: JsonPropertyName("name")] string? Name = null, + [property: JsonPropertyName("version")] string? Version = null, + [property: JsonPropertyName("purl")] string? Purl = null, + [property: JsonPropertyName("cpe")] string? Cpe = null); + +/// +/// VEX consensus source (accepted claim). +/// +internal sealed record VexConsensusSourceInfo( + [property: JsonPropertyName("providerId")] string ProviderId, + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("documentDigest")] string? DocumentDigest = null, + [property: JsonPropertyName("weight")] double Weight = 1.0, + [property: JsonPropertyName("justification")] string? Justification = null, + [property: JsonPropertyName("detail")] string? Detail = null, + [property: JsonPropertyName("confidence")] VexConfidenceInfo? Confidence = null); + +/// +/// VEX consensus conflict (rejected claim). +/// +internal sealed record VexConsensusConflictInfo( + [property: JsonPropertyName("providerId")] string ProviderId, + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("documentDigest")] string? DocumentDigest = null, + [property: JsonPropertyName("justification")] string? Justification = null, + [property: JsonPropertyName("detail")] string? Detail = null, + [property: JsonPropertyName("reason")] string? Reason = null); + +/// +/// VEX confidence information. +/// +internal sealed record VexConfidenceInfo( + [property: JsonPropertyName("level")] string? Level = null, + [property: JsonPropertyName("score")] double? Score = null, + [property: JsonPropertyName("method")] string? Method = null); + +// CLI-VEX-30-002: VEX consensus detail models + +/// +/// Detailed VEX consensus response including quorum, evidence, rationale, and signature status. +/// +internal sealed record VexConsensusDetailResponse( + [property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId, + [property: JsonPropertyName("product")] VexProductInfo Product, + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("calculatedAt")] DateTimeOffset CalculatedAt, + [property: JsonPropertyName("sources")] IReadOnlyList Sources, + [property: JsonPropertyName("conflicts")] IReadOnlyList? Conflicts = null, + [property: JsonPropertyName("policyVersion")] string? PolicyVersion = null, + [property: JsonPropertyName("policyDigest")] string? PolicyDigest = null, + [property: JsonPropertyName("summary")] string? Summary = null, + [property: JsonPropertyName("quorum")] VexQuorumInfo? Quorum = null, + [property: JsonPropertyName("rationale")] VexRationaleInfo? Rationale = null, + [property: JsonPropertyName("signature")] VexSignatureInfo? Signature = null, + [property: JsonPropertyName("evidence")] IReadOnlyList? Evidence = null); + +/// +/// VEX quorum information showing how consensus was reached. +/// +internal sealed record VexQuorumInfo( + [property: JsonPropertyName("required")] int Required, + [property: JsonPropertyName("achieved")] int Achieved, + [property: JsonPropertyName("threshold")] double Threshold, + [property: JsonPropertyName("totalWeight")] double TotalWeight, + [property: JsonPropertyName("weightAchieved")] double WeightAchieved, + [property: JsonPropertyName("participatingProviders")] IReadOnlyList? ParticipatingProviders = null); + +/// +/// VEX rationale explaining the consensus decision. +/// +internal sealed record VexRationaleInfo( + [property: JsonPropertyName("text")] string? Text = null, + [property: JsonPropertyName("justifications")] IReadOnlyList? Justifications = null, + [property: JsonPropertyName("policyRules")] IReadOnlyList? PolicyRules = null); + +/// +/// VEX signature status information. +/// +internal sealed record VexSignatureInfo( + [property: JsonPropertyName("signed")] bool Signed, + [property: JsonPropertyName("algorithm")] string? Algorithm = null, + [property: JsonPropertyName("keyId")] string? KeyId = null, + [property: JsonPropertyName("signedAt")] DateTimeOffset? SignedAt = null, + [property: JsonPropertyName("verificationStatus")] string? VerificationStatus = null, + [property: JsonPropertyName("certificateChain")] IReadOnlyList? CertificateChain = null); + +/// +/// VEX evidence supporting the consensus decision. +/// +internal sealed record VexEvidenceInfo( + [property: JsonPropertyName("type")] string Type, + [property: JsonPropertyName("providerId")] string ProviderId, + [property: JsonPropertyName("documentId")] string? DocumentId = null, + [property: JsonPropertyName("documentDigest")] string? DocumentDigest = null, + [property: JsonPropertyName("timestamp")] DateTimeOffset? Timestamp = null, + [property: JsonPropertyName("content")] string? Content = null); + +// CLI-VEX-30-003: VEX simulation models + +/// +/// VEX simulation request with trust/threshold overrides. +/// +internal sealed record VexSimulationRequest( + [property: JsonPropertyName("vulnerabilityId")] string? VulnerabilityId = null, + [property: JsonPropertyName("productKey")] string? ProductKey = null, + [property: JsonPropertyName("purl")] string? Purl = null, + [property: JsonPropertyName("trustOverrides")] IReadOnlyDictionary? TrustOverrides = null, + [property: JsonPropertyName("thresholdOverride")] double? ThresholdOverride = null, + [property: JsonPropertyName("quorumOverride")] int? QuorumOverride = null, + [property: JsonPropertyName("excludeProviders")] IReadOnlyList? ExcludeProviders = null, + [property: JsonPropertyName("includeOnly")] IReadOnlyList? IncludeOnly = null); + +/// +/// VEX simulation response showing before/after comparison. +/// +internal sealed record VexSimulationResponse( + [property: JsonPropertyName("items")] IReadOnlyList Items, + [property: JsonPropertyName("parameters")] VexSimulationParameters Parameters, + [property: JsonPropertyName("summary")] VexSimulationSummary Summary); + +/// +/// Individual VEX simulation result showing the delta. +/// +internal sealed record VexSimulationResultItem( + [property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId, + [property: JsonPropertyName("product")] VexProductInfo Product, + [property: JsonPropertyName("before")] VexSimulationState Before, + [property: JsonPropertyName("after")] VexSimulationState After, + [property: JsonPropertyName("changed")] bool Changed, + [property: JsonPropertyName("changeType")] string? ChangeType = null); + +/// +/// VEX state for simulation comparison. +/// +internal sealed record VexSimulationState( + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("quorumAchieved")] int QuorumAchieved, + [property: JsonPropertyName("weightAchieved")] double WeightAchieved, + [property: JsonPropertyName("sources")] IReadOnlyList? Sources = null); + +/// +/// Parameters used in the simulation. +/// +internal sealed record VexSimulationParameters( + [property: JsonPropertyName("threshold")] double Threshold, + [property: JsonPropertyName("quorum")] int Quorum, + [property: JsonPropertyName("trustWeights")] IReadOnlyDictionary? TrustWeights = null, + [property: JsonPropertyName("excludedProviders")] IReadOnlyList? ExcludedProviders = null); + +/// +/// Summary of simulation results. +/// +internal sealed record VexSimulationSummary( + [property: JsonPropertyName("totalEvaluated")] int TotalEvaluated, + [property: JsonPropertyName("totalChanged")] int TotalChanged, + [property: JsonPropertyName("statusUpgrades")] int StatusUpgrades, + [property: JsonPropertyName("statusDowngrades")] int StatusDowngrades, + [property: JsonPropertyName("noChange")] int NoChange); + +// CLI-VEX-30-004: VEX export models + +/// +/// VEX export request parameters. +/// +internal sealed record VexExportRequest( + [property: JsonPropertyName("vulnerabilityIds")] IReadOnlyList? VulnerabilityIds = null, + [property: JsonPropertyName("productKeys")] IReadOnlyList? ProductKeys = null, + [property: JsonPropertyName("purls")] IReadOnlyList? Purls = null, + [property: JsonPropertyName("statuses")] IReadOnlyList? Statuses = null, + [property: JsonPropertyName("policyVersion")] string? PolicyVersion = null, + [property: JsonPropertyName("signed")] bool Signed = true, + [property: JsonPropertyName("format")] string Format = "ndjson"); + +/// +/// VEX export response with download information. +/// +internal sealed record VexExportResponse( + [property: JsonPropertyName("exportId")] string ExportId, + [property: JsonPropertyName("downloadUrl")] string? DownloadUrl = null, + [property: JsonPropertyName("format")] string Format = "ndjson", + [property: JsonPropertyName("itemCount")] int ItemCount = 0, + [property: JsonPropertyName("signed")] bool Signed = false, + [property: JsonPropertyName("signatureAlgorithm")] string? SignatureAlgorithm = null, + [property: JsonPropertyName("signatureKeyId")] string? SignatureKeyId = null, + [property: JsonPropertyName("digest")] string? Digest = null, + [property: JsonPropertyName("digestAlgorithm")] string? DigestAlgorithm = null, + [property: JsonPropertyName("expiresAt")] DateTimeOffset? ExpiresAt = null); + +/// +/// VEX export signature verification request. +/// +internal sealed record VexExportVerifyRequest( + [property: JsonPropertyName("filePath")] string FilePath, + [property: JsonPropertyName("signaturePath")] string? SignaturePath = null, + [property: JsonPropertyName("expectedDigest")] string? ExpectedDigest = null, + [property: JsonPropertyName("publicKeyPath")] string? PublicKeyPath = null); + +/// +/// VEX export signature verification result. +/// +internal sealed record VexExportVerifyResult( + [property: JsonPropertyName("valid")] bool Valid, + [property: JsonPropertyName("signatureStatus")] string SignatureStatus, + [property: JsonPropertyName("digestMatch")] bool? DigestMatch = null, + [property: JsonPropertyName("actualDigest")] string? ActualDigest = null, + [property: JsonPropertyName("expectedDigest")] string? ExpectedDigest = null, + [property: JsonPropertyName("keyId")] string? KeyId = null, + [property: JsonPropertyName("signedAt")] DateTimeOffset? SignedAt = null, + [property: JsonPropertyName("errors")] IReadOnlyList? Errors = null); diff --git a/src/Cli/StellaOps.Cli/Services/Models/VulnModels.cs b/src/Cli/StellaOps.Cli/Services/Models/VulnModels.cs new file mode 100644 index 000000000..96a86615a --- /dev/null +++ b/src/Cli/StellaOps.Cli/Services/Models/VulnModels.cs @@ -0,0 +1,291 @@ +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Cli.Services.Models; + +// CLI-VULN-29-001: Vulnerability Explorer models for CLI + +/// +/// Vulnerability list request parameters. +/// +internal sealed record VulnListRequest( + [property: JsonPropertyName("vulnerabilityId")] string? VulnerabilityId = null, + [property: JsonPropertyName("severity")] string? Severity = null, + [property: JsonPropertyName("status")] string? Status = null, + [property: JsonPropertyName("purl")] string? Purl = null, + [property: JsonPropertyName("cpe")] string? Cpe = null, + [property: JsonPropertyName("sbomId")] string? SbomId = null, + [property: JsonPropertyName("policyId")] string? PolicyId = null, + [property: JsonPropertyName("policyVersion")] int? PolicyVersion = null, + [property: JsonPropertyName("groupBy")] string? GroupBy = null, + [property: JsonPropertyName("limit")] int? Limit = null, + [property: JsonPropertyName("offset")] int? Offset = null, + [property: JsonPropertyName("cursor")] string? Cursor = null); + +/// +/// Paginated vulnerability list response. +/// +internal sealed record VulnListResponse( + [property: JsonPropertyName("items")] IReadOnlyList Items, + [property: JsonPropertyName("total")] int Total, + [property: JsonPropertyName("limit")] int Limit, + [property: JsonPropertyName("offset")] int Offset, + [property: JsonPropertyName("hasMore")] bool HasMore, + [property: JsonPropertyName("nextCursor")] string? NextCursor = null, + [property: JsonPropertyName("grouping")] VulnGroupingInfo? Grouping = null); + +/// +/// Individual vulnerability item from the explorer. +/// +internal sealed record VulnItem( + [property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId, + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("severity")] VulnSeverityInfo Severity, + [property: JsonPropertyName("affectedPackages")] IReadOnlyList AffectedPackages, + [property: JsonPropertyName("vexStatus")] string? VexStatus = null, + [property: JsonPropertyName("policyFindingId")] string? PolicyFindingId = null, + [property: JsonPropertyName("aliases")] IReadOnlyList? Aliases = null, + [property: JsonPropertyName("summary")] string? Summary = null, + [property: JsonPropertyName("publishedAt")] DateTimeOffset? PublishedAt = null, + [property: JsonPropertyName("updatedAt")] DateTimeOffset? UpdatedAt = null, + [property: JsonPropertyName("assignee")] string? Assignee = null, + [property: JsonPropertyName("dueDate")] DateTimeOffset? DueDate = null, + [property: JsonPropertyName("tags")] IReadOnlyList? Tags = null); + +/// +/// Vulnerability severity information. +/// +internal sealed record VulnSeverityInfo( + [property: JsonPropertyName("level")] string Level, + [property: JsonPropertyName("score")] double? Score = null, + [property: JsonPropertyName("vector")] string? Vector = null, + [property: JsonPropertyName("source")] string? Source = null); + +/// +/// Affected package information. +/// +internal sealed record VulnAffectedPackage( + [property: JsonPropertyName("purl")] string? Purl = null, + [property: JsonPropertyName("cpe")] string? Cpe = null, + [property: JsonPropertyName("name")] string? Name = null, + [property: JsonPropertyName("version")] string? Version = null, + [property: JsonPropertyName("fixedIn")] string? FixedIn = null, + [property: JsonPropertyName("sbomId")] string? SbomId = null, + [property: JsonPropertyName("pathCount")] int? PathCount = null); + +/// +/// Grouping information for aggregated results. +/// +internal sealed record VulnGroupingInfo( + [property: JsonPropertyName("field")] string Field, + [property: JsonPropertyName("groups")] IReadOnlyList Groups); + +/// +/// A group in aggregated results. +/// +internal sealed record VulnGroup( + [property: JsonPropertyName("key")] string Key, + [property: JsonPropertyName("count")] int Count, + [property: JsonPropertyName("criticalCount")] int? CriticalCount = null, + [property: JsonPropertyName("highCount")] int? HighCount = null, + [property: JsonPropertyName("mediumCount")] int? MediumCount = null, + [property: JsonPropertyName("lowCount")] int? LowCount = null); + +// CLI-VULN-29-002: Vulnerability detail models + +/// +/// Detailed vulnerability response including evidence, rationale, paths, and ledger. +/// +internal sealed record VulnDetailResponse( + [property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId, + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("severity")] VulnSeverityInfo Severity, + [property: JsonPropertyName("affectedPackages")] IReadOnlyList AffectedPackages, + [property: JsonPropertyName("vexStatus")] string? VexStatus = null, + [property: JsonPropertyName("policyFindingId")] string? PolicyFindingId = null, + [property: JsonPropertyName("aliases")] IReadOnlyList? Aliases = null, + [property: JsonPropertyName("summary")] string? Summary = null, + [property: JsonPropertyName("description")] string? Description = null, + [property: JsonPropertyName("publishedAt")] DateTimeOffset? PublishedAt = null, + [property: JsonPropertyName("updatedAt")] DateTimeOffset? UpdatedAt = null, + [property: JsonPropertyName("assignee")] string? Assignee = null, + [property: JsonPropertyName("dueDate")] DateTimeOffset? DueDate = null, + [property: JsonPropertyName("tags")] IReadOnlyList? Tags = null, + [property: JsonPropertyName("evidence")] IReadOnlyList? Evidence = null, + [property: JsonPropertyName("policyRationale")] VulnPolicyRationale? PolicyRationale = null, + [property: JsonPropertyName("dependencyPaths")] IReadOnlyList? DependencyPaths = null, + [property: JsonPropertyName("ledger")] IReadOnlyList? Ledger = null, + [property: JsonPropertyName("references")] IReadOnlyList? References = null); + +/// +/// Evidence supporting the vulnerability assessment. +/// +internal sealed record VulnEvidenceInfo( + [property: JsonPropertyName("type")] string Type, + [property: JsonPropertyName("source")] string Source, + [property: JsonPropertyName("documentId")] string? DocumentId = null, + [property: JsonPropertyName("documentDigest")] string? DocumentDigest = null, + [property: JsonPropertyName("timestamp")] DateTimeOffset? Timestamp = null, + [property: JsonPropertyName("content")] string? Content = null); + +/// +/// Policy rationale explaining the status decision. +/// +internal sealed record VulnPolicyRationale( + [property: JsonPropertyName("policyId")] string PolicyId, + [property: JsonPropertyName("policyVersion")] int PolicyVersion, + [property: JsonPropertyName("rules")] IReadOnlyList? Rules = null, + [property: JsonPropertyName("summary")] string? Summary = null); + +/// +/// Result of a policy rule evaluation. +/// +internal sealed record VulnPolicyRuleResult( + [property: JsonPropertyName("rule")] string Rule, + [property: JsonPropertyName("result")] string Result, + [property: JsonPropertyName("weight")] double? Weight = null, + [property: JsonPropertyName("reason")] string? Reason = null); + +/// +/// Dependency path showing how the vulnerable package is included. +/// +internal sealed record VulnDependencyPath( + [property: JsonPropertyName("path")] IReadOnlyList Path, + [property: JsonPropertyName("sbomId")] string? SbomId = null, + [property: JsonPropertyName("depth")] int? Depth = null); + +/// +/// Ledger entry tracking vulnerability workflow history. +/// +internal sealed record VulnLedgerEntry( + [property: JsonPropertyName("timestamp")] DateTimeOffset Timestamp, + [property: JsonPropertyName("action")] string Action, + [property: JsonPropertyName("actor")] string? Actor = null, + [property: JsonPropertyName("fromStatus")] string? FromStatus = null, + [property: JsonPropertyName("toStatus")] string? ToStatus = null, + [property: JsonPropertyName("comment")] string? Comment = null, + [property: JsonPropertyName("metadata")] IReadOnlyDictionary? Metadata = null); + +/// +/// Reference link for the vulnerability. +/// +internal sealed record VulnReference( + [property: JsonPropertyName("type")] string Type, + [property: JsonPropertyName("url")] string Url, + [property: JsonPropertyName("title")] string? Title = null); + +// CLI-VULN-29-003: Vulnerability workflow models + +/// +/// Workflow action request for vulnerability operations. +/// +internal sealed record VulnWorkflowRequest( + [property: JsonPropertyName("action")] string Action, + [property: JsonPropertyName("vulnerabilityIds")] IReadOnlyList? VulnerabilityIds = null, + [property: JsonPropertyName("filter")] VulnFilterSpec? Filter = null, + [property: JsonPropertyName("assignee")] string? Assignee = null, + [property: JsonPropertyName("comment")] string? Comment = null, + [property: JsonPropertyName("dueDate")] DateTimeOffset? DueDate = null, + [property: JsonPropertyName("justification")] string? Justification = null, + [property: JsonPropertyName("fixVersion")] string? FixVersion = null, + [property: JsonPropertyName("idempotencyKey")] string? IdempotencyKey = null); + +/// +/// Filter specification for bulk workflow operations. +/// +internal sealed record VulnFilterSpec( + [property: JsonPropertyName("severity")] string? Severity = null, + [property: JsonPropertyName("status")] string? Status = null, + [property: JsonPropertyName("purl")] string? Purl = null, + [property: JsonPropertyName("sbomId")] string? SbomId = null, + [property: JsonPropertyName("policyId")] string? PolicyId = null); + +/// +/// Workflow action response with affected items. +/// +internal sealed record VulnWorkflowResponse( + [property: JsonPropertyName("success")] bool Success, + [property: JsonPropertyName("action")] string Action, + [property: JsonPropertyName("affectedCount")] int AffectedCount, + [property: JsonPropertyName("affectedIds")] IReadOnlyList? AffectedIds = null, + [property: JsonPropertyName("errors")] IReadOnlyList? Errors = null, + [property: JsonPropertyName("idempotencyKey")] string? IdempotencyKey = null); + +/// +/// Error detail for workflow operations. +/// +internal sealed record VulnWorkflowError( + [property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId, + [property: JsonPropertyName("code")] string Code, + [property: JsonPropertyName("message")] string Message); + +// CLI-VULN-29-004: Vulnerability simulation models + +/// +/// Simulation request for policy/VEX changes. +/// +internal sealed record VulnSimulationRequest( + [property: JsonPropertyName("policyId")] string? PolicyId = null, + [property: JsonPropertyName("policyVersion")] int? PolicyVersion = null, + [property: JsonPropertyName("vexOverrides")] IReadOnlyDictionary? VexOverrides = null, + [property: JsonPropertyName("severityThreshold")] string? SeverityThreshold = null, + [property: JsonPropertyName("sbomIds")] IReadOnlyList? SbomIds = null, + [property: JsonPropertyName("outputMarkdown")] bool OutputMarkdown = false); + +/// +/// Simulation response showing deltas. +/// +internal sealed record VulnSimulationResponse( + [property: JsonPropertyName("items")] IReadOnlyList Items, + [property: JsonPropertyName("summary")] VulnSimulationSummary Summary, + [property: JsonPropertyName("markdownReport")] string? MarkdownReport = null); + +/// +/// Individual delta in simulation results. +/// +internal sealed record VulnSimulationDelta( + [property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId, + [property: JsonPropertyName("beforeStatus")] string BeforeStatus, + [property: JsonPropertyName("afterStatus")] string AfterStatus, + [property: JsonPropertyName("changed")] bool Changed, + [property: JsonPropertyName("changeReason")] string? ChangeReason = null); + +/// +/// Summary of simulation results. +/// +internal sealed record VulnSimulationSummary( + [property: JsonPropertyName("totalEvaluated")] int TotalEvaluated, + [property: JsonPropertyName("totalChanged")] int TotalChanged, + [property: JsonPropertyName("statusUpgrades")] int StatusUpgrades, + [property: JsonPropertyName("statusDowngrades")] int StatusDowngrades, + [property: JsonPropertyName("noChange")] int NoChange); + +// CLI-VULN-29-005: Vulnerability export models + +/// +/// Export request for vulnerability evidence bundles. +/// +internal sealed record VulnExportRequest( + [property: JsonPropertyName("vulnerabilityIds")] IReadOnlyList? VulnerabilityIds = null, + [property: JsonPropertyName("sbomIds")] IReadOnlyList? SbomIds = null, + [property: JsonPropertyName("policyId")] string? PolicyId = null, + [property: JsonPropertyName("format")] string Format = "ndjson", + [property: JsonPropertyName("includeEvidence")] bool IncludeEvidence = true, + [property: JsonPropertyName("includeLedger")] bool IncludeLedger = true, + [property: JsonPropertyName("signed")] bool Signed = true); + +/// +/// Export response with download information. +/// +internal sealed record VulnExportResponse( + [property: JsonPropertyName("exportId")] string ExportId, + [property: JsonPropertyName("downloadUrl")] string? DownloadUrl = null, + [property: JsonPropertyName("format")] string Format = "ndjson", + [property: JsonPropertyName("itemCount")] int ItemCount = 0, + [property: JsonPropertyName("signed")] bool Signed = false, + [property: JsonPropertyName("signatureAlgorithm")] string? SignatureAlgorithm = null, + [property: JsonPropertyName("signatureKeyId")] string? SignatureKeyId = null, + [property: JsonPropertyName("digest")] string? Digest = null, + [property: JsonPropertyName("digestAlgorithm")] string? DigestAlgorithm = null, + [property: JsonPropertyName("expiresAt")] DateTimeOffset? ExpiresAt = null); diff --git a/src/Cli/StellaOps.Cli/Telemetry/CliMetrics.cs b/src/Cli/StellaOps.Cli/Telemetry/CliMetrics.cs index 127b144d0..ddbc6225b 100644 --- a/src/Cli/StellaOps.Cli/Telemetry/CliMetrics.cs +++ b/src/Cli/StellaOps.Cli/Telemetry/CliMetrics.cs @@ -27,6 +27,7 @@ internal static class CliMetrics private static readonly Counter RubyInspectCounter = Meter.CreateCounter("stellaops.cli.ruby.inspect.count"); private static readonly Counter RubyResolveCounter = Meter.CreateCounter("stellaops.cli.ruby.resolve.count"); private static readonly Counter PhpInspectCounter = Meter.CreateCounter("stellaops.cli.php.inspect.count"); + private static readonly Counter PythonInspectCounter = Meter.CreateCounter("stellaops.cli.python.inspect.count"); private static readonly Histogram CommandDurationHistogram = Meter.CreateHistogram("stellaops.cli.command.duration.ms"); public static void RecordScannerDownload(string channel, bool fromCache) @@ -150,6 +151,12 @@ internal static class CliMetrics new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome) }); + public static void RecordPythonInspect(string outcome) + => PythonInspectCounter.Add(1, new KeyValuePair[] + { + new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome) + }); + public static IDisposable MeasureCommandDuration(string command) { var start = DateTime.UtcNow; diff --git a/src/Concelier/StellaOps.Concelier.WebService/Program.cs b/src/Concelier/StellaOps.Concelier.WebService/Program.cs index f3c4dc990..3907be5a3 100644 --- a/src/Concelier/StellaOps.Concelier.WebService/Program.cs +++ b/src/Concelier/StellaOps.Concelier.WebService/Program.cs @@ -3284,6 +3284,20 @@ private readonly record struct LinksetObservationSummary( static async Task InitializeMongoAsync(WebApplication app) { + // Skip Mongo initialization in testing/bypass mode. + var isTesting = string.Equals( + Environment.GetEnvironmentVariable("DOTNET_ENVIRONMENT"), + "Testing", + StringComparison.OrdinalIgnoreCase); + var bypass = string.Equals( + Environment.GetEnvironmentVariable("CONCELIER_BYPASS_MONGO"), + "1", + StringComparison.OrdinalIgnoreCase); + if (isTesting || bypass) + { + return; + } + await using var scope = app.Services.CreateAsyncScope(); var bootstrapper = scope.ServiceProvider.GetRequiredService(); var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("MongoBootstrapper"); diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/AdvisoryLinksetUpdatedEvent.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/AdvisoryLinksetUpdatedEvent.cs index 2d3ca1e66..619fa1523 100644 --- a/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/AdvisoryLinksetUpdatedEvent.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/AdvisoryLinksetUpdatedEvent.cs @@ -10,16 +10,20 @@ namespace StellaOps.Concelier.Core.Linksets; /// /// Contract-matching payload for advisory.linkset.updated@1 events. /// Per LNM-21-005, emits delta descriptions + observation ids (tenant + provenance only). +/// Enhanced per CONCELIER-POLICY-23-002 with idempotent IDs, confidence summaries, and tenant metadata. /// public sealed record AdvisoryLinksetUpdatedEvent( Guid EventId, + string IdempotencyKey, string TenantId, + AdvisoryLinksetTenantMetadata TenantMetadata, string LinksetId, string AdvisoryId, string Source, ImmutableArray ObservationIds, AdvisoryLinksetDelta Delta, double? Confidence, + AdvisoryLinksetConfidenceSummary ConfidenceSummary, ImmutableArray Conflicts, AdvisoryLinksetProvenanceSummary Provenance, DateTimeOffset CreatedAt, @@ -43,16 +47,22 @@ public sealed record AdvisoryLinksetUpdatedEvent( var delta = ComputeDelta(linkset, previousLinkset); var conflicts = BuildConflictSummaries(linkset.Conflicts); var provenance = BuildProvenance(linkset.Provenance); + var tenantMetadata = BuildTenantMetadata(linkset.TenantId, tenantUrn); + var confidenceSummary = BuildConfidenceSummary(linkset.Confidence, conflicts.Length); + var idempotencyKey = ComputeIdempotencyKey(linksetId, linkset, delta); return new AdvisoryLinksetUpdatedEvent( EventId: Guid.NewGuid(), + IdempotencyKey: idempotencyKey, TenantId: tenantUrn, + TenantMetadata: tenantMetadata, LinksetId: linksetId, AdvisoryId: linkset.AdvisoryId, Source: linkset.Source, ObservationIds: linkset.ObservationIds, Delta: delta, Confidence: linkset.Confidence, + ConfidenceSummary: confidenceSummary, Conflicts: conflicts, Provenance: provenance, CreatedAt: linkset.CreatedAt, @@ -61,6 +71,139 @@ public sealed record AdvisoryLinksetUpdatedEvent( TraceId: traceId); } + /// + /// Computes a deterministic idempotency key for safe replay. + /// The key is derived from linkset identity + content hash so replaying the same change yields the same key. + /// + private static string ComputeIdempotencyKey(string linksetId, AdvisoryLinkset linkset, AdvisoryLinksetDelta delta) + { + var sb = new StringBuilder(256); + sb.Append(linksetId); + sb.Append('|'); + sb.Append(linkset.TenantId); + sb.Append('|'); + sb.Append(linkset.AdvisoryId); + sb.Append('|'); + sb.Append(linkset.Source); + sb.Append('|'); + sb.Append(linkset.CreatedAt.ToUniversalTime().Ticks); + sb.Append('|'); + sb.Append(delta.Type); + sb.Append('|'); + + // Include observation IDs in sorted order for determinism + foreach (var obsId in linkset.ObservationIds.OrderBy(id => id, StringComparer.Ordinal)) + { + sb.Append(obsId); + sb.Append(','); + } + + // Include provenance hash if available + if (linkset.Provenance?.PolicyHash is not null) + { + sb.Append('|'); + sb.Append(linkset.Provenance.PolicyHash); + } + + var input = Encoding.UTF8.GetBytes(sb.ToString()); + var hash = SHA256.HashData(input); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + /// + /// Builds tenant metadata for policy consumers. + /// + private static AdvisoryLinksetTenantMetadata BuildTenantMetadata(string tenantId, string tenantUrn) + { + // Extract tenant identifier from URN if present + var rawId = tenantUrn.StartsWith("urn:tenant:", StringComparison.Ordinal) + ? tenantUrn["urn:tenant:".Length..] + : tenantId; + + return new AdvisoryLinksetTenantMetadata( + TenantUrn: tenantUrn, + TenantId: rawId, + Namespace: ExtractNamespace(rawId)); + } + + /// + /// Extracts namespace prefix from tenant ID (e.g., "org:acme" → "org"). + /// + private static string? ExtractNamespace(string tenantId) + { + var colonIndex = tenantId.IndexOf(':'); + return colonIndex > 0 ? tenantId[..colonIndex] : null; + } + + /// + /// Builds confidence summary with tier classification and contributing factors. + /// + private static AdvisoryLinksetConfidenceSummary BuildConfidenceSummary(double? confidence, int conflictCount) + { + var tier = ClassifyConfidenceTier(confidence); + var factors = BuildConfidenceFactors(confidence, conflictCount); + + return new AdvisoryLinksetConfidenceSummary( + Value: confidence, + Tier: tier, + ConflictCount: conflictCount, + Factors: factors); + } + + /// + /// Classifies confidence into tiers for policy rules. + /// + private static string ClassifyConfidenceTier(double? confidence) => confidence switch + { + null => "unknown", + >= 0.9 => "high", + >= 0.7 => "medium", + >= 0.5 => "low", + _ => "very-low" + }; + + /// + /// Builds human-readable factors contributing to confidence score. + /// + private static ImmutableArray BuildConfidenceFactors(double? confidence, int conflictCount) + { + var factors = ImmutableArray.CreateBuilder(); + + if (confidence is null) + { + factors.Add("no-confidence-data"); + return factors.ToImmutable(); + } + + if (confidence >= 0.9) + { + factors.Add("strong-alias-correlation"); + } + else if (confidence >= 0.7) + { + factors.Add("moderate-alias-correlation"); + } + else if (confidence >= 0.5) + { + factors.Add("weak-alias-correlation"); + } + else + { + factors.Add("minimal-correlation"); + } + + if (conflictCount > 0) + { + factors.Add($"has-{conflictCount}-conflict{(conflictCount > 1 ? "s" : "")}"); + } + else + { + factors.Add("no-conflicts"); + } + + return factors.ToImmutable(); + } + private static AdvisoryLinksetDelta ComputeDelta(AdvisoryLinkset current, AdvisoryLinkset? previous) { if (previous is null) @@ -166,3 +309,26 @@ public sealed record AdvisoryLinksetProvenanceSummary( ImmutableArray ObservationHashes, string? ToolVersion, string? PolicyHash); + +/// +/// Tenant metadata for policy replay and multi-tenant filtering. +/// Per CONCELIER-POLICY-23-002. +/// +public sealed record AdvisoryLinksetTenantMetadata( + string TenantUrn, + string TenantId, + string? Namespace); + +/// +/// Confidence summary with tier classification for policy rules. +/// Per CONCELIER-POLICY-23-002. +/// +/// Raw confidence score (0.0 - 1.0). +/// Confidence tier: high (≥0.9), medium (≥0.7), low (≥0.5), very-low (<0.5), unknown (null). +/// Number of conflicts detected in the linkset. +/// Human-readable factors contributing to confidence score. +public sealed record AdvisoryLinksetConfidenceSummary( + double? Value, + string Tier, + int ConflictCount, + ImmutableArray Factors); diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/IPolicyDeltaCheckpointStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/IPolicyDeltaCheckpointStore.cs new file mode 100644 index 000000000..23803c1db --- /dev/null +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/IPolicyDeltaCheckpointStore.cs @@ -0,0 +1,48 @@ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Concelier.Core.Linksets; + +/// +/// Stores and retrieves policy delta checkpoints for deterministic replay. +/// Consumers use checkpoints to track their position in the linkset stream. +/// +public interface IPolicyDeltaCheckpointStore +{ + /// + /// Gets a checkpoint by consumer and tenant, creating one if it does not exist. + /// + Task GetOrCreateAsync( + string tenantId, + string consumerId, + CancellationToken cancellationToken); + + /// + /// Gets a checkpoint by its unique ID. + /// + Task GetAsync( + string checkpointId, + CancellationToken cancellationToken); + + /// + /// Updates a checkpoint after processing a batch of linksets. + /// + Task UpdateAsync( + PolicyDeltaCheckpoint checkpoint, + CancellationToken cancellationToken); + + /// + /// Lists all checkpoints for a given tenant. + /// + Task> ListByTenantAsync( + string tenantId, + CancellationToken cancellationToken); + + /// + /// Deletes a checkpoint (for cleanup or reset scenarios). + /// + Task DeleteAsync( + string checkpointId, + CancellationToken cancellationToken); +} diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/PolicyDeltaCheckpoint.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/PolicyDeltaCheckpoint.cs new file mode 100644 index 000000000..f62826c63 --- /dev/null +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/PolicyDeltaCheckpoint.cs @@ -0,0 +1,86 @@ +using System; + +namespace StellaOps.Concelier.Core.Linksets; + +/// +/// Represents a checkpoint for tracking policy delta consumption. +/// Enables deterministic replay by persisting the last processed position. +/// +public sealed record PolicyDeltaCheckpoint( + /// Unique identifier for this checkpoint (typically consumerId + tenant). + string CheckpointId, + + /// Tenant scope for this checkpoint. + string TenantId, + + /// Consumer identifier (e.g., "policy-engine", "vuln-explorer"). + string ConsumerId, + + /// Last processed linkset CreatedAt timestamp for cursor-based pagination. + DateTimeOffset? LastCreatedAt, + + /// Last processed advisory ID (tie-breaker when CreatedAt matches). + string? LastAdvisoryId, + + /// MongoDB change-stream resume token for real-time delta subscriptions. + string? ResumeToken, + + /// Sequence number for ordering events within the same timestamp. + long SequenceNumber, + + /// When this checkpoint was last updated. + DateTimeOffset UpdatedAt, + + /// Count of linksets processed since checkpoint creation. + long ProcessedCount, + + /// Hash of the last processed batch for integrity verification. + string? LastBatchHash) +{ + public static PolicyDeltaCheckpoint CreateNew(string tenantId, string consumerId, DateTimeOffset now) => + new( + CheckpointId: $"{consumerId}:{tenantId}", + TenantId: tenantId, + ConsumerId: consumerId, + LastCreatedAt: null, + LastAdvisoryId: null, + ResumeToken: null, + SequenceNumber: 0, + UpdatedAt: now, + ProcessedCount: 0, + LastBatchHash: null); + + /// + /// Creates an from this checkpoint for pagination. + /// Returns null if no position has been recorded yet. + /// + public AdvisoryLinksetCursor? ToCursor() => + LastCreatedAt.HasValue && !string.IsNullOrEmpty(LastAdvisoryId) + ? new AdvisoryLinksetCursor(LastCreatedAt.Value, LastAdvisoryId) + : null; + + /// + /// Advances the checkpoint to a new position after processing a batch. + /// + public PolicyDeltaCheckpoint Advance( + DateTimeOffset lastCreatedAt, + string lastAdvisoryId, + long batchCount, + string? batchHash, + DateTimeOffset now) => + this with + { + LastCreatedAt = lastCreatedAt, + LastAdvisoryId = lastAdvisoryId, + SequenceNumber = SequenceNumber + batchCount, + UpdatedAt = now, + ProcessedCount = ProcessedCount + batchCount, + LastBatchHash = batchHash + }; + + /// + /// Updates the resume token for change-stream subscriptions. + /// + public PolicyDeltaCheckpoint WithResumeToken(string resumeToken, DateTimeOffset now) => + this with { ResumeToken = resumeToken, UpdatedAt = now }; +} diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Core/Risk/IVendorRiskSignalProvider.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Risk/IVendorRiskSignalProvider.cs new file mode 100644 index 000000000..c74441376 --- /dev/null +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Risk/IVendorRiskSignalProvider.cs @@ -0,0 +1,111 @@ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Concelier.Core.Risk; + +/// +/// Provider interface for extracting vendor risk signals from observations. +/// Per CONCELIER-RISK-66-001, surfaces fact-only CVSS/KEV/fix data with provenance. +/// +public interface IVendorRiskSignalProvider +{ + /// + /// Extracts risk signals from a specific observation. + /// + /// Tenant identifier. + /// Observation identifier. + /// Cancellation token. + /// Risk signal with CVSS, KEV, and fix data. + Task GetByObservationAsync( + string tenantId, + string observationId, + CancellationToken cancellationToken); + + /// + /// Extracts risk signals from all observations for an advisory. + /// + /// Tenant identifier. + /// Advisory identifier (e.g., CVE-2024-1234). + /// Cancellation token. + /// Collection of risk signals from all vendor observations. + Task> GetByAdvisoryAsync( + string tenantId, + string advisoryId, + CancellationToken cancellationToken); + + /// + /// Extracts aggregated risk signals for a linkset. + /// + /// Tenant identifier. + /// Linkset identifier. + /// Cancellation token. + /// Collection of risk signals from linked observations. + Task> GetByLinksetAsync( + string tenantId, + string linksetId, + CancellationToken cancellationToken); +} + +/// +/// Aggregated risk signal view combining multiple vendor observations. +/// +public sealed record AggregatedRiskView( + string TenantId, + string AdvisoryId, + IReadOnlyList VendorSignals) +{ + /// + /// Gets all unique CVSS scores across vendors with their provenance. + /// + public IReadOnlyList AllCvssScores => + VendorSignals + .SelectMany(s => s.CvssScores) + .OrderByDescending(c => c.Score) + .ToList(); + + /// + /// Gets the highest CVSS score from any vendor. + /// + public VendorCvssScore? HighestCvssScore => + AllCvssScores.FirstOrDefault(); + + /// + /// Indicates if any vendor reports KEV status. + /// + public bool IsKnownExploited => + VendorSignals.Any(s => s.IsKnownExploited); + + /// + /// Gets all KEV status entries from vendors. + /// + public IReadOnlyList KevStatuses => + VendorSignals + .Where(s => s.KevStatus is not null) + .Select(s => s.KevStatus!) + .ToList(); + + /// + /// Indicates if any vendor reports a fix available. + /// + public bool HasFixAvailable => + VendorSignals.Any(s => s.HasFixAvailable); + + /// + /// Gets all fix availability entries from vendors. + /// + public IReadOnlyList AllFixAvailability => + VendorSignals + .SelectMany(s => s.FixAvailability) + .ToList(); + + /// + /// Gets vendors that provided risk data. + /// + public IReadOnlyList ContributingVendors => + VendorSignals + .Select(s => s.Provenance.Vendor) + .Distinct(StringComparer.OrdinalIgnoreCase) + .OrderBy(v => v, StringComparer.OrdinalIgnoreCase) + .ToList(); +} diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Core/Risk/VendorRiskSignal.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Risk/VendorRiskSignal.cs new file mode 100644 index 000000000..207509274 --- /dev/null +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Risk/VendorRiskSignal.cs @@ -0,0 +1,169 @@ +using System; +using System.Collections.Immutable; + +namespace StellaOps.Concelier.Core.Risk; + +/// +/// Vendor-provided risk signal for an advisory observation. +/// Per CONCELIER-RISK-66-001, surfaces CVSS/KEV/fix data exactly as published with provenance anchors. +/// +/// +/// This model is fact-only: no inference, weighting, or prioritization. +/// All data traces back to a specific vendor observation with provenance. +/// +public sealed record VendorRiskSignal( + string TenantId, + string AdvisoryId, + string ObservationId, + VendorRiskProvenance Provenance, + ImmutableArray CvssScores, + VendorKevStatus? KevStatus, + ImmutableArray FixAvailability, + DateTimeOffset ExtractedAt) +{ + /// + /// Creates a risk signal with no data (for observations without risk metadata). + /// + public static VendorRiskSignal Empty( + string tenantId, + string advisoryId, + string observationId, + VendorRiskProvenance provenance, + DateTimeOffset extractedAt) + { + return new VendorRiskSignal( + TenantId: tenantId, + AdvisoryId: advisoryId, + ObservationId: observationId, + Provenance: provenance, + CvssScores: ImmutableArray.Empty, + KevStatus: null, + FixAvailability: ImmutableArray.Empty, + ExtractedAt: extractedAt); + } + + /// + /// Gets the highest severity CVSS score if any. + /// + public VendorCvssScore? HighestCvssScore => CvssScores.IsDefaultOrEmpty + ? null + : CvssScores.MaxBy(s => s.Score); + + /// + /// Indicates if any fix is available from any vendor. + /// + public bool HasFixAvailable => !FixAvailability.IsDefaultOrEmpty && + FixAvailability.Any(f => f.Status == FixStatus.Available); + + /// + /// Indicates if this advisory is in the KEV list. + /// + public bool IsKnownExploited => KevStatus?.InKev == true; +} + +/// +/// Provenance anchor for vendor risk data. +/// +public sealed record VendorRiskProvenance( + string Vendor, + string Source, + string ObservationHash, + DateTimeOffset FetchedAt, + string? IngestJobId, + string? UpstreamId); + +/// +/// Vendor-provided CVSS score with version information. +/// +public sealed record VendorCvssScore( + string System, + double Score, + string? Vector, + string? Severity, + VendorRiskProvenance Provenance) +{ + /// + /// Normalizes the system name to a standard format. + /// + public string NormalizedSystem => System?.ToLowerInvariant() switch + { + "cvss_v2" or "cvssv2" or "cvss2" => "cvss_v2", + "cvss_v30" or "cvssv30" or "cvss30" or "cvss_v3" or "cvssv3" or "cvss3" => "cvss_v30", + "cvss_v31" or "cvssv31" or "cvss31" => "cvss_v31", + "cvss_v40" or "cvssv40" or "cvss40" or "cvss_v4" or "cvssv4" or "cvss4" => "cvss_v40", + var s => s ?? "unknown" + }; + + /// + /// Derives severity tier from score (if not provided by vendor). + /// + public string EffectiveSeverity => Severity ?? DeriveFromScore(Score, NormalizedSystem); + + private static string DeriveFromScore(double score, string system) + { + // CVSS v2 uses different thresholds + if (system == "cvss_v2") + { + return score switch + { + >= 7.0 => "high", + >= 4.0 => "medium", + _ => "low" + }; + } + + // CVSS v3.x and v4.x thresholds + return score switch + { + >= 9.0 => "critical", + >= 7.0 => "high", + >= 4.0 => "medium", + >= 0.1 => "low", + _ => "none" + }; + } +} + +/// +/// KEV (Known Exploited Vulnerabilities) status from vendor data. +/// +public sealed record VendorKevStatus( + bool InKev, + DateTimeOffset? DateAdded, + DateTimeOffset? DueDate, + string? KnownRansomwareCampaignUse, + string? Notes, + VendorRiskProvenance Provenance); + +/// +/// Fix availability information from vendor. +/// +public sealed record VendorFixAvailability( + FixStatus Status, + string? FixedVersion, + string? AdvisoryUrl, + DateTimeOffset? FixReleasedAt, + string? Package, + string? Ecosystem, + VendorRiskProvenance Provenance); + +/// +/// Fix availability status. +/// +public enum FixStatus +{ + /// Fix status unknown. + Unknown, + + /// Fix is available. + Available, + + /// No fix available yet. + NotAvailable, + + /// Will not be fixed (end of life, etc.). + WillNotFix, + + /// Fix is in progress. + InProgress +} diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Core/Risk/VendorRiskSignalExtractor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Risk/VendorRiskSignalExtractor.cs new file mode 100644 index 000000000..2bae8c563 --- /dev/null +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Risk/VendorRiskSignalExtractor.cs @@ -0,0 +1,263 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Text.Json; + +namespace StellaOps.Concelier.Core.Risk; + +/// +/// Extracts vendor risk signals from observation data. +/// Per CONCELIER-RISK-66-001, extracts fact-only CVSS/KEV/fix data with provenance. +/// +public static class VendorRiskSignalExtractor +{ + /// + /// Extracts a vendor risk signal from observation data. + /// + /// Tenant identifier. + /// Advisory identifier. + /// Observation identifier. + /// Vendor name. + /// Source identifier. + /// Content hash for provenance. + /// When the data was fetched. + /// Optional ingest job ID. + /// Optional upstream ID. + /// Severity data from observation. + /// Raw JSON content for KEV/fix extraction. + /// Current timestamp. + /// Extracted vendor risk signal. + public static VendorRiskSignal Extract( + string tenantId, + string advisoryId, + string observationId, + string vendor, + string source, + string observationHash, + DateTimeOffset fetchedAt, + string? ingestJobId, + string? upstreamId, + IReadOnlyList? severities, + JsonElement? rawContent, + DateTimeOffset now) + { + var provenance = new VendorRiskProvenance( + Vendor: vendor, + Source: source, + ObservationHash: observationHash, + FetchedAt: fetchedAt, + IngestJobId: ingestJobId, + UpstreamId: upstreamId); + + var cvssScores = ExtractCvssScores(severities, provenance); + var kevStatus = ExtractKevStatus(rawContent, provenance); + var fixAvailability = ExtractFixAvailability(rawContent, provenance); + + return new VendorRiskSignal( + TenantId: tenantId, + AdvisoryId: advisoryId, + ObservationId: observationId, + Provenance: provenance, + CvssScores: cvssScores, + KevStatus: kevStatus, + FixAvailability: fixAvailability, + ExtractedAt: now); + } + + private static ImmutableArray ExtractCvssScores( + IReadOnlyList? severities, + VendorRiskProvenance provenance) + { + if (severities is null || severities.Count == 0) + { + return ImmutableArray.Empty; + } + + var builder = ImmutableArray.CreateBuilder(severities.Count); + + foreach (var severity in severities) + { + if (string.IsNullOrWhiteSpace(severity.System)) + { + continue; + } + + builder.Add(new VendorCvssScore( + System: severity.System, + Score: severity.Score, + Vector: severity.Vector, + Severity: severity.Severity, + Provenance: provenance)); + } + + return builder.ToImmutable(); + } + + private static VendorKevStatus? ExtractKevStatus( + JsonElement? rawContent, + VendorRiskProvenance provenance) + { + if (rawContent is null || rawContent.Value.ValueKind != JsonValueKind.Object) + { + return null; + } + + var content = rawContent.Value; + + // Try common KEV data locations in raw content + // NVD format: cisa_exploit_add, cisa_required_action, cisa_vulnerability_name + if (TryGetProperty(content, "cisa_exploit_add", out var cisaAdd) || + TryGetProperty(content, "database_specific", out var dbSpecific) && TryGetProperty(dbSpecific, "cisa", out cisaAdd)) + { + return new VendorKevStatus( + InKev: true, + DateAdded: TryParseDate(cisaAdd), + DueDate: TryGetDateProperty(content, "cisa_action_due"), + KnownRansomwareCampaignUse: TryGetStringProperty(content, "cisa_ransomware"), + Notes: TryGetStringProperty(content, "cisa_vulnerability_name"), + Provenance: provenance); + } + + // OSV/GitHub format: database_specific.kev + if (TryGetProperty(content, "database_specific", out var osv) && + TryGetProperty(osv, "kev", out var kev)) + { + var inKev = kev.ValueKind == JsonValueKind.True || + (kev.ValueKind == JsonValueKind.Object && TryGetProperty(kev, "in_kev", out var inKevProp) && inKevProp.ValueKind == JsonValueKind.True); + + if (inKev) + { + return new VendorKevStatus( + InKev: true, + DateAdded: kev.ValueKind == JsonValueKind.Object ? TryGetDateProperty(kev, "date_added") : null, + DueDate: kev.ValueKind == JsonValueKind.Object ? TryGetDateProperty(kev, "due_date") : null, + KnownRansomwareCampaignUse: kev.ValueKind == JsonValueKind.Object ? TryGetStringProperty(kev, "ransomware") : null, + Notes: null, + Provenance: provenance); + } + } + + return null; + } + + private static ImmutableArray ExtractFixAvailability( + JsonElement? rawContent, + VendorRiskProvenance provenance) + { + if (rawContent is null || rawContent.Value.ValueKind != JsonValueKind.Object) + { + return ImmutableArray.Empty; + } + + var content = rawContent.Value; + var builder = ImmutableArray.CreateBuilder(); + + // OSV format: affected[].ranges[].events[{fixed: "version"}] + if (TryGetProperty(content, "affected", out var affected) && affected.ValueKind == JsonValueKind.Array) + { + foreach (var aff in affected.EnumerateArray()) + { + var package = TryGetStringProperty(aff, "package", "name") ?? TryGetStringProperty(aff, "purl"); + var ecosystem = TryGetStringProperty(aff, "package", "ecosystem"); + + if (TryGetProperty(aff, "ranges", out var ranges) && ranges.ValueKind == JsonValueKind.Array) + { + foreach (var range in ranges.EnumerateArray()) + { + if (TryGetProperty(range, "events", out var events) && events.ValueKind == JsonValueKind.Array) + { + foreach (var evt in events.EnumerateArray()) + { + if (TryGetProperty(evt, "fixed", out var fixedVersion)) + { + builder.Add(new VendorFixAvailability( + Status: FixStatus.Available, + FixedVersion: fixedVersion.GetString(), + AdvisoryUrl: null, + FixReleasedAt: null, + Package: package, + Ecosystem: ecosystem, + Provenance: provenance)); + } + } + } + } + } + + // Also check versions[] for fixed versions + if (TryGetProperty(aff, "versions", out var versions) && versions.ValueKind == JsonValueKind.Array) + { + // Fixed versions may be indicated by absence from versions array + // This is less reliable, so we only use it if no range data exists + } + } + } + + // NVD format: configurations with fix status + if (TryGetProperty(content, "configurations", out var configs) && configs.ValueKind == JsonValueKind.Array) + { + // NVD configurations don't directly indicate fixes, but CPE matches can imply them + // This would require more complex parsing - defer to vendor-specific connectors + } + + return builder.ToImmutable(); + } + + private static bool TryGetProperty(JsonElement element, string propertyName, out JsonElement value) + { + value = default; + if (element.ValueKind != JsonValueKind.Object) + { + return false; + } + + return element.TryGetProperty(propertyName, out value); + } + + private static string? TryGetStringProperty(JsonElement element, params string[] path) + { + var current = element; + foreach (var segment in path) + { + if (!TryGetProperty(current, segment, out current)) + { + return null; + } + } + + return current.ValueKind == JsonValueKind.String ? current.GetString() : null; + } + + private static DateTimeOffset? TryGetDateProperty(JsonElement element, string propertyName) + { + if (!TryGetProperty(element, propertyName, out var value)) + { + return null; + } + + return TryParseDate(value); + } + + private static DateTimeOffset? TryParseDate(JsonElement element) + { + if (element.ValueKind == JsonValueKind.String) + { + var str = element.GetString(); + if (DateTimeOffset.TryParse(str, out var date)) + { + return date; + } + } + + return null; + } +} + +/// +/// Input for severity extraction from observation data. +/// +public sealed record SeverityInput( + string System, + double Score, + string? Vector, + string? Severity); diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Core/Tenancy/TenantCapabilitiesEndpoint.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Tenancy/TenantCapabilitiesEndpoint.cs new file mode 100644 index 000000000..e1d4648c8 --- /dev/null +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Tenancy/TenantCapabilitiesEndpoint.cs @@ -0,0 +1,109 @@ +using System.Collections.Immutable; + +namespace StellaOps.Concelier.Core.Tenancy; + +/// +/// Response model for /capabilities/tenant endpoint. +/// Per AUTH-TEN-47-001 and CONCELIER-TEN-48-001: echoes tenantId, scopes, and mergeAllowed=false when LNM is enabled. +/// +public sealed record TenantCapabilitiesResponse( + string TenantId, + string TenantUrn, + ImmutableArray Scopes, + bool MergeAllowed, + bool OfflineAllowed, + TenantCapabilitiesMode Mode, + DateTimeOffset GeneratedAt) +{ + /// + /// Creates a Link-Not-Merge capabilities response. + /// + public static TenantCapabilitiesResponse ForLinkNotMerge( + TenantScope scope, + DateTimeOffset now) + { + return new TenantCapabilitiesResponse( + TenantId: scope.TenantId, + TenantUrn: scope.TenantUrn, + Scopes: scope.Scopes, + MergeAllowed: false, // Always false in LNM mode + OfflineAllowed: scope.Capabilities.OfflineAllowed, + Mode: TenantCapabilitiesMode.LinkNotMerge, + GeneratedAt: now); + } +} + +/// +/// Operating mode for tenant capabilities. +/// +public enum TenantCapabilitiesMode +{ + /// Link-Not-Merge mode - no advisory merging. + LinkNotMerge, + + /// Legacy merge mode (deprecated). + LegacyMerge +} + +/// +/// Interface for tenant capabilities provider. +/// +public interface ITenantCapabilitiesProvider +{ + /// + /// Gets the current capabilities for the tenant scope. + /// + TenantCapabilitiesResponse GetCapabilities(TenantScope scope); + + /// + /// Validates that the tenant scope is allowed to perform the requested operation. + /// + /// Tenant scope to validate. + /// Required scopes for the operation. + /// Thrown if validation fails. + void ValidateScope(TenantScope scope, params string[] requiredScopes); +} + +/// +/// Default implementation of tenant capabilities provider for Link-Not-Merge mode. +/// +public sealed class LinkNotMergeTenantCapabilitiesProvider : ITenantCapabilitiesProvider +{ + private readonly TimeProvider _timeProvider; + + public LinkNotMergeTenantCapabilitiesProvider(TimeProvider timeProvider) + { + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + } + + public TenantCapabilitiesResponse GetCapabilities(TenantScope scope) + { + ArgumentNullException.ThrowIfNull(scope); + scope.Validate(); + + // In Link-Not-Merge mode, merge is never allowed + // This enforces the contract even if the token claims mergeAllowed=true + return TenantCapabilitiesResponse.ForLinkNotMerge(scope, _timeProvider.GetUtcNow()); + } + + public void ValidateScope(TenantScope scope, params string[] requiredScopes) + { + ArgumentNullException.ThrowIfNull(scope); + scope.Validate(); + + if (requiredScopes.Length == 0) + { + return; + } + + var hasRequired = requiredScopes.Any(required => + scope.Scopes.Any(s => s.Equals(required, StringComparison.OrdinalIgnoreCase))); + + if (!hasRequired) + { + throw new TenantScopeException( + "auth/insufficient-scope", + $"Required scope missing. Need one of: {string.Join(", ", requiredScopes)}"); + } + } +} diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Core/Tenancy/TenantScope.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Tenancy/TenantScope.cs new file mode 100644 index 000000000..5472290ae --- /dev/null +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Tenancy/TenantScope.cs @@ -0,0 +1,123 @@ +using System; +using System.Collections.Immutable; + +namespace StellaOps.Concelier.Core.Tenancy; + +/// +/// Tenant scope data per AUTH-TEN-47-001 contract. +/// Per CONCELIER-TEN-48-001, enforces tenant scoping through normalization/linking. +/// +public sealed record TenantScope( + string TenantId, + string Issuer, + ImmutableArray Scopes, + TenantCapabilities Capabilities, + TenantAttribution? Attribution, + DateTimeOffset IssuedAt, + DateTimeOffset ExpiresAt) +{ + /// + /// Validates that the tenant scope is well-formed. + /// + public void Validate() + { + if (string.IsNullOrWhiteSpace(TenantId)) + { + throw new TenantScopeException("auth/tenant-scope-missing", "TenantId is required"); + } + + if (string.IsNullOrWhiteSpace(Issuer)) + { + throw new TenantScopeException("auth/tenant-scope-missing", "Issuer is required"); + } + + if (Scopes.IsDefaultOrEmpty) + { + throw new TenantScopeException("auth/tenant-scope-missing", "Scopes are required"); + } + + if (!HasRequiredScope()) + { + throw new TenantScopeException("auth/tenant-scope-missing", "Required concelier scope missing"); + } + + if (ExpiresAt <= DateTimeOffset.UtcNow) + { + throw new TenantScopeException("auth/token-expired", "Token has expired"); + } + } + + /// + /// Checks if the scope has at least one required Concelier scope. + /// + public bool HasRequiredScope() + { + return Scopes.Any(s => + s.StartsWith("concelier.", StringComparison.OrdinalIgnoreCase)); + } + + /// + /// Checks if the scope allows read access. + /// + public bool CanRead => + Scopes.Any(s => s.Equals("concelier.read", StringComparison.OrdinalIgnoreCase) || + s.Equals("concelier.linkset.read", StringComparison.OrdinalIgnoreCase)); + + /// + /// Checks if the scope allows write access. + /// + public bool CanWrite => + Scopes.Any(s => s.Equals("concelier.linkset.write", StringComparison.OrdinalIgnoreCase)); + + /// + /// Checks if the scope allows tenant admin access. + /// + public bool CanAdminTenant => + Scopes.Any(s => s.Equals("concelier.tenant.admin", StringComparison.OrdinalIgnoreCase)); + + /// + /// Gets the canonical tenant URN format. + /// + public string TenantUrn => TenantId.StartsWith("urn:tenant:", StringComparison.Ordinal) + ? TenantId + : $"urn:tenant:{TenantId}"; +} + +/// +/// Tenant capabilities per AUTH-TEN-47-001 contract. +/// +public sealed record TenantCapabilities( + bool MergeAllowed = false, + bool OfflineAllowed = true) +{ + /// + /// Default capabilities for Link-Not-Merge mode. + /// + public static TenantCapabilities Default { get; } = new( + MergeAllowed: false, + OfflineAllowed: true); +} + +/// +/// Tenant attribution for audit logging. +/// +public sealed record TenantAttribution( + string? Actor, + string? TraceId); + +/// +/// Exception thrown when tenant scope validation fails. +/// +public sealed class TenantScopeException : Exception +{ + public TenantScopeException(string errorCode, string message) + : base(message) + { + ErrorCode = errorCode; + } + + /// + /// Error code for API responses (e.g., auth/tenant-scope-missing). + /// + public string ErrorCode { get; } +} diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Core/Tenancy/TenantScopeNormalizer.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Tenancy/TenantScopeNormalizer.cs new file mode 100644 index 000000000..9035b3c8b --- /dev/null +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Tenancy/TenantScopeNormalizer.cs @@ -0,0 +1,105 @@ +using System; + +namespace StellaOps.Concelier.Core.Tenancy; + +/// +/// Normalizes tenant identifiers for consistent storage and lookup. +/// Per CONCELIER-TEN-48-001: enforces tenant scoping through normalization. +/// +public static class TenantScopeNormalizer +{ + private const string TenantUrnPrefix = "urn:tenant:"; + + /// + /// Normalizes a tenant identifier to canonical URN format. + /// + /// Raw tenant identifier. + /// Normalized tenant URN. + public static string NormalizeToUrn(string tenantId) + { + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant ID cannot be empty", nameof(tenantId)); + } + + var trimmed = tenantId.Trim(); + + // Already in URN format + if (trimmed.StartsWith(TenantUrnPrefix, StringComparison.Ordinal)) + { + return trimmed.ToLowerInvariant(); + } + + // Convert to URN format + return $"{TenantUrnPrefix}{trimmed.ToLowerInvariant()}"; + } + + /// + /// Extracts the raw tenant identifier from a URN. + /// + /// Tenant URN. + /// Raw tenant identifier. + public static string ExtractFromUrn(string tenantUrn) + { + if (string.IsNullOrWhiteSpace(tenantUrn)) + { + throw new ArgumentException("Tenant URN cannot be empty", nameof(tenantUrn)); + } + + var trimmed = tenantUrn.Trim(); + + if (trimmed.StartsWith(TenantUrnPrefix, StringComparison.OrdinalIgnoreCase)) + { + return trimmed[TenantUrnPrefix.Length..].ToLowerInvariant(); + } + + return trimmed.ToLowerInvariant(); + } + + /// + /// Normalizes a tenant identifier for storage (lowercase, no URN prefix). + /// + /// Raw tenant identifier or URN. + /// Normalized tenant ID for storage. + public static string NormalizeForStorage(string tenantId) + { + return ExtractFromUrn(tenantId); + } + + /// + /// Validates that two tenant identifiers refer to the same tenant. + /// + /// First tenant identifier. + /// Second tenant identifier. + /// True if both refer to the same tenant. + public static bool AreEqual(string? tenantId1, string? tenantId2) + { + if (string.IsNullOrWhiteSpace(tenantId1) || string.IsNullOrWhiteSpace(tenantId2)) + { + return false; + } + + var normalized1 = NormalizeForStorage(tenantId1); + var normalized2 = NormalizeForStorage(tenantId2); + + return string.Equals(normalized1, normalized2, StringComparison.Ordinal); + } + + /// + /// Validates that the provided tenant ID matches the scope's tenant. + /// + /// Tenant ID from request. + /// Authenticated tenant scope. + /// Thrown if tenant IDs don't match. + public static void ValidateTenantMatch(string requestTenantId, TenantScope scope) + { + ArgumentNullException.ThrowIfNull(scope); + + if (!AreEqual(requestTenantId, scope.TenantId)) + { + throw new TenantScopeException( + "auth/tenant-mismatch", + "Request tenant ID does not match authenticated tenant scope"); + } + } +} diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MIGRATIONS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MIGRATIONS.md index 47f3aa079..66586df11 100644 --- a/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MIGRATIONS.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MIGRATIONS.md @@ -31,6 +31,9 @@ This module owns the persistent shape of Concelier's MongoDB database. Upgrades | `20251117_advisory_linksets_tenant_lower` | Lowercases `advisory_linksets.tenantId` to align writes with lookup filters. | | `20251116_link_not_merge_collections` | Ensures `advisory_observations` and `advisory_linksets` collections exist with JSON schema validators and baseline indexes for LNM. | | `20251127_lnm_sharding_and_ttl` | Adds hashed shard key indexes on `tenantId` for horizontal scaling and optional TTL indexes on `ingestedAt`/`createdAt` for storage retention. Creates `advisory_linkset_events` collection for linkset event outbox (LNM-21-101-DEV). | +| `20251127_lnm_legacy_backfill` | Backfills `advisory_observations` from `advisory_raw` documents and creates/updates `advisory_linksets` by grouping observations. Seeds `backfill_marker` tombstones on migrated documents for rollback tracking (LNM-21-102-DEV). | +| `20251128_policy_delta_checkpoints` | Creates `policy_delta_checkpoints` collection with tenant/consumer indexes for deterministic policy delta tracking. Supports cursor-based pagination and change-stream resume tokens for policy consumers (CONCELIER-POLICY-20-003). | +| `20251128_policy_lookup_indexes` | Adds secondary indexes for policy lookup patterns: alias multikey index on observations, confidence/severity indexes on linksets. Supports efficient policy joins without cached verdicts (CONCELIER-POLICY-23-001). | ## Operator Runbook @@ -44,6 +47,11 @@ This module owns the persistent shape of Concelier's MongoDB database. Upgrades - To re-run a migration in a lab, delete the corresponding document from `schema_migrations` and restart the service. **Do not** do this in production unless the migration body is known to be idempotent and safe. - When changing retention settings (`RawDocumentRetention`), deploy the new configuration and restart Concelier. The migration runner will adjust indexes on the next boot. - For the event-log collections (`advisory_statements`, `advisory_conflicts`), rollback is simply `db.advisory_statements.drop()` / `db.advisory_conflicts.drop()` followed by a restart if you must revert to the pre-event-log schema (only in labs). Production rollbacks should instead gate merge features that rely on these collections. +- For `20251127_lnm_legacy_backfill` rollback, use the provided Offline Kit script: + ```bash + mongo concelier ops/devops/scripts/rollback-lnm-backfill.js + ``` + This script removes backfilled observations and linksets by querying the `backfill_marker` field (`lnm_21_102_dev`), then clears the tombstone markers from `advisory_raw`. After rollback, delete `20251127_lnm_legacy_backfill` from `schema_migrations` and restart. - If migrations fail, restart with `Logging__LogLevel__StellaOps.Concelier.Storage.Mongo.Migrations=Debug` to surface diagnostic output. Remediate underlying index/collection drift before retrying. ## Validating an Upgrade diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsurePolicyDeltaCheckpointsCollectionMigration.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsurePolicyDeltaCheckpointsCollectionMigration.cs new file mode 100644 index 000000000..b89f99fe8 --- /dev/null +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsurePolicyDeltaCheckpointsCollectionMigration.cs @@ -0,0 +1,81 @@ +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Concelier.Storage.Mongo.PolicyDelta; + +namespace StellaOps.Concelier.Storage.Mongo.Migrations; + +/// +/// Creates the policy_delta_checkpoints collection with indexes for deterministic policy delta tracking. +/// +internal sealed class EnsurePolicyDeltaCheckpointsCollectionMigration : IMongoMigration +{ + public string Id => "20251128_policy_delta_checkpoints"; + + public string Description => + "Creates policy_delta_checkpoints collection with tenant/consumer indexes for deterministic policy deltas (CONCELIER-POLICY-20-003)."; + + public async Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken) + { + var collectionName = MongoStorageDefaults.Collections.PolicyDeltaCheckpoints; + + // Ensure collection exists + var collectionNames = await database + .ListCollectionNames(cancellationToken: cancellationToken) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + var exists = collectionNames.Contains(collectionName); + if (!exists) + { + await database.CreateCollectionAsync(collectionName, cancellationToken: cancellationToken) + .ConfigureAwait(false); + } + + var collection = database.GetCollection(collectionName); + + // Index: tenantId for listing checkpoints by tenant + var tenantIndex = new CreateIndexModel( + Builders.IndexKeys.Ascending(d => d.TenantId), + new CreateIndexOptions + { + Name = "ix_tenantId", + Background = true + }); + + // Index: consumerId for querying checkpoints by consumer + var consumerIndex = new CreateIndexModel( + Builders.IndexKeys.Ascending(d => d.ConsumerId), + new CreateIndexOptions + { + Name = "ix_consumerId", + Background = true + }); + + // Compound index: (tenantId, consumerId) for efficient lookups + var compoundIndex = new CreateIndexModel( + Builders.IndexKeys + .Ascending(d => d.TenantId) + .Ascending(d => d.ConsumerId), + new CreateIndexOptions + { + Name = "ix_tenantId_consumerId", + Background = true + }); + + // Index: updatedAt for maintenance queries (stale checkpoint detection) + var updatedAtIndex = new CreateIndexModel( + Builders.IndexKeys.Ascending(d => d.UpdatedAt), + new CreateIndexOptions + { + Name = "ix_updatedAt", + Background = true + }); + + await collection.Indexes.CreateManyAsync( + [tenantIndex, consumerIndex, compoundIndex, updatedAtIndex], + cancellationToken) + .ConfigureAwait(false); + } +} diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsurePolicyLookupIndexesMigration.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsurePolicyLookupIndexesMigration.cs new file mode 100644 index 000000000..54c9b199f --- /dev/null +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsurePolicyLookupIndexesMigration.cs @@ -0,0 +1,131 @@ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Bson; +using MongoDB.Driver; + +namespace StellaOps.Concelier.Storage.Mongo.Migrations; + +/// +/// Adds secondary indexes for policy lookup patterns: alias lookups, confidence filtering, and severity-based queries. +/// Supports efficient policy joins without cached verdicts per CONCELIER-POLICY-23-001. +/// +/// +/// Query patterns supported: +/// +/// Find observations by alias (CVE-ID, GHSA-ID): db.advisory_observations.find({"linkset.aliases": "cve-2024-1234"}) +/// Find linksets by confidence range: db.advisory_linksets.find({"confidence": {$gte: 0.7}}) +/// Find linksets by provider severity: db.advisory_linksets.find({"normalized.severities.system": "cvss_v31", "normalized.severities.score": {$gte: 7.0}}) +/// Find linksets by tenant and advisory with confidence: db.advisory_linksets.find({"tenantId": "...", "advisoryId": "...", "confidence": {$gte: 0.5}}) +/// +/// +internal sealed class EnsurePolicyLookupIndexesMigration : IMongoMigration +{ + public string Id => "20251128_policy_lookup_indexes"; + + public string Description => "Add secondary indexes for alias, confidence, and severity-based policy lookups (CONCELIER-POLICY-23-001)"; + + public async Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(database); + + await EnsureObservationPolicyIndexesAsync(database, cancellationToken).ConfigureAwait(false); + await EnsureLinksetPolicyIndexesAsync(database, cancellationToken).ConfigureAwait(false); + } + + private static async Task EnsureObservationPolicyIndexesAsync(IMongoDatabase database, CancellationToken ct) + { + var collection = database.GetCollection(MongoStorageDefaults.Collections.AdvisoryObservations); + + var indexes = new List> + { + // Multikey index on linkset.aliases for alias-based lookups (CVE-ID, GHSA-ID, etc.) + // Query pattern: db.advisory_observations.find({"linkset.aliases": "cve-2024-1234"}) + new(new BsonDocument("linkset.aliases", 1), + new CreateIndexOptions + { + Name = "obs_linkset_aliases", + Background = true, + Sparse = true + }), + + // Compound index for tenant + alias lookups + // Query pattern: db.advisory_observations.find({"tenant": "...", "linkset.aliases": "cve-2024-1234"}) + new(new BsonDocument { { "tenant", 1 }, { "linkset.aliases", 1 } }, + new CreateIndexOptions + { + Name = "obs_tenant_aliases", + Background = true + }) + }; + + await collection.Indexes.CreateManyAsync(indexes, cancellationToken: ct).ConfigureAwait(false); + } + + private static async Task EnsureLinksetPolicyIndexesAsync(IMongoDatabase database, CancellationToken ct) + { + var collection = database.GetCollection(MongoStorageDefaults.Collections.AdvisoryLinksets); + + var indexes = new List> + { + // Index on confidence for confidence-based filtering + // Query pattern: db.advisory_linksets.find({"confidence": {$gte: 0.7}}) + new(new BsonDocument("confidence", -1), + new CreateIndexOptions + { + Name = "linkset_confidence", + Background = true, + Sparse = true + }), + + // Compound index for tenant + confidence lookups + // Query pattern: db.advisory_linksets.find({"tenantId": "...", "confidence": {$gte: 0.7}}) + new(new BsonDocument { { "tenantId", 1 }, { "confidence", -1 } }, + new CreateIndexOptions + { + Name = "linkset_tenant_confidence", + Background = true + }), + + // Index on normalized.severities.system for severity system filtering + // Query pattern: db.advisory_linksets.find({"normalized.severities.system": "cvss_v31"}) + new(new BsonDocument("normalized.severities.system", 1), + new CreateIndexOptions + { + Name = "linkset_severity_system", + Background = true, + Sparse = true + }), + + // Compound index for severity system + score for range queries + // Query pattern: db.advisory_linksets.find({"normalized.severities.system": "cvss_v31", "normalized.severities.score": {$gte: 7.0}}) + new(new BsonDocument { { "normalized.severities.system", 1 }, { "normalized.severities.score", -1 } }, + new CreateIndexOptions + { + Name = "linkset_severity_system_score", + Background = true, + Sparse = true + }), + + // Compound index for tenant + advisory + confidence (policy delta queries) + // Query pattern: db.advisory_linksets.find({"tenantId": "...", "advisoryId": "...", "confidence": {$gte: 0.5}}) + new(new BsonDocument { { "tenantId", 1 }, { "advisoryId", 1 }, { "confidence", -1 } }, + new CreateIndexOptions + { + Name = "linkset_tenant_advisory_confidence", + Background = true + }), + + // Index for createdAt-based pagination (policy delta cursors) + // Query pattern: db.advisory_linksets.find({"tenantId": "...", "createdAt": {$gt: ISODate("...")}}).sort({"createdAt": 1}) + new(new BsonDocument { { "tenantId", 1 }, { "createdAt", 1 } }, + new CreateIndexOptions + { + Name = "linkset_tenant_createdAt", + Background = true + }) + }; + + await collection.Indexes.CreateManyAsync(indexes, cancellationToken: ct).ConfigureAwait(false); + } +} diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoStorageDefaults.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoStorageDefaults.cs index 466b3a9a8..af5a01288 100644 --- a/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoStorageDefaults.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoStorageDefaults.cs @@ -1,13 +1,13 @@ -namespace StellaOps.Concelier.Storage.Mongo; - -public static class MongoStorageDefaults -{ - public const string DefaultDatabaseName = "concelier"; - - public static class Collections - { - public const string Source = "source"; - public const string SourceState = "source_state"; +namespace StellaOps.Concelier.Storage.Mongo; + +public static class MongoStorageDefaults +{ + public const string DefaultDatabaseName = "concelier"; + + public static class Collections + { + public const string Source = "source"; + public const string SourceState = "source_state"; public const string Document = "document"; public const string Dto = "dto"; public const string Advisory = "advisory"; @@ -15,10 +15,10 @@ public static class MongoStorageDefaults public const string Alias = "alias"; public const string Affected = "affected"; public const string Reference = "reference"; - public const string KevFlag = "kev_flag"; - public const string RuFlags = "ru_flags"; - public const string JpFlags = "jp_flags"; - public const string PsirtFlags = "psirt_flags"; + public const string KevFlag = "kev_flag"; + public const string RuFlags = "ru_flags"; + public const string JpFlags = "jp_flags"; + public const string PsirtFlags = "psirt_flags"; public const string MergeEvent = "merge_event"; public const string ExportState = "export_state"; public const string Locks = "locks"; @@ -33,5 +33,6 @@ public static class MongoStorageDefaults public const string OrchestratorRegistry = "orchestrator_registry"; public const string OrchestratorCommands = "orchestrator_commands"; public const string OrchestratorHeartbeats = "orchestrator_heartbeats"; + public const string PolicyDeltaCheckpoints = "policy_delta_checkpoints"; } } diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/PolicyDelta/MongoPolicyDeltaCheckpointStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/PolicyDelta/MongoPolicyDeltaCheckpointStore.cs new file mode 100644 index 000000000..7068c2050 --- /dev/null +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/PolicyDelta/MongoPolicyDeltaCheckpointStore.cs @@ -0,0 +1,135 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Driver; +using StellaOps.Concelier.Core.Linksets; + +namespace StellaOps.Concelier.Storage.Mongo.PolicyDelta; + +/// +/// MongoDB implementation of . +/// +internal sealed class MongoPolicyDeltaCheckpointStore : IPolicyDeltaCheckpointStore +{ + private readonly IMongoCollection _collection; + private readonly TimeProvider _timeProvider; + + public MongoPolicyDeltaCheckpointStore(IMongoDatabase database, TimeProvider timeProvider) + { + ArgumentNullException.ThrowIfNull(database); + ArgumentNullException.ThrowIfNull(timeProvider); + + _collection = database.GetCollection( + MongoStorageDefaults.Collections.PolicyDeltaCheckpoints); + _timeProvider = timeProvider; + } + + public async Task GetOrCreateAsync( + string tenantId, + string consumerId, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(consumerId); + + var checkpointId = $"{consumerId}:{tenantId}"; + var existing = await _collection + .Find(d => d.Id == checkpointId) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + if (existing is not null) + { + return existing.ToRecord(); + } + + var now = _timeProvider.GetUtcNow(); + var checkpoint = PolicyDeltaCheckpoint.CreateNew(tenantId, consumerId, now); + var document = PolicyDeltaCheckpointDocument.FromRecord(checkpoint); + + try + { + await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + return checkpoint; + } + catch (MongoWriteException ex) when (ex.WriteError?.Category == ServerErrorCategory.DuplicateKey) + { + // Race condition: another process created the checkpoint concurrently. + existing = await _collection + .Find(d => d.Id == checkpointId) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + return existing?.ToRecord() ?? checkpoint; + } + } + + public async Task GetAsync( + string checkpointId, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(checkpointId); + + var document = await _collection + .Find(d => d.Id == checkpointId) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + return document?.ToRecord(); + } + + public async Task UpdateAsync( + PolicyDeltaCheckpoint checkpoint, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(checkpoint); + + var document = PolicyDeltaCheckpointDocument.FromRecord(checkpoint); + var options = new ReplaceOptions { IsUpsert = true }; + + await _collection + .ReplaceOneAsync( + d => d.Id == checkpoint.CheckpointId, + document, + options, + cancellationToken) + .ConfigureAwait(false); + + return checkpoint; + } + + public async Task> ListByTenantAsync( + string tenantId, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + + var documents = await _collection + .Find(d => d.TenantId == tenantId) + .SortBy(d => d.ConsumerId) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + var results = new List(documents.Count); + foreach (var doc in documents) + { + results.Add(doc.ToRecord()); + } + + return results; + } + + public async Task DeleteAsync( + string checkpointId, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(checkpointId); + + var result = await _collection + .DeleteOneAsync(d => d.Id == checkpointId, cancellationToken) + .ConfigureAwait(false); + + return result.DeletedCount > 0; + } +} diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/PolicyDelta/PolicyDeltaCheckpointDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/PolicyDelta/PolicyDeltaCheckpointDocument.cs new file mode 100644 index 000000000..76b884074 --- /dev/null +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/PolicyDelta/PolicyDeltaCheckpointDocument.cs @@ -0,0 +1,78 @@ +using System; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; +using StellaOps.Concelier.Core.Linksets; + +namespace StellaOps.Concelier.Storage.Mongo.PolicyDelta; + +/// +/// MongoDB document for storing policy delta checkpoints. +/// +[BsonIgnoreExtraElements] +internal sealed class PolicyDeltaCheckpointDocument +{ + /// + /// Unique identifier: {consumerId}:{tenantId} + /// + [BsonId] + public string Id { get; set; } = string.Empty; + + [BsonElement("tenantId")] + public string TenantId { get; set; } = string.Empty; + + [BsonElement("consumerId")] + public string ConsumerId { get; set; } = string.Empty; + + [BsonElement("lastCreatedAt")] + [BsonIgnoreIfNull] + public DateTime? LastCreatedAt { get; set; } + + [BsonElement("lastAdvisoryId")] + [BsonIgnoreIfNull] + public string? LastAdvisoryId { get; set; } + + [BsonElement("resumeToken")] + [BsonIgnoreIfNull] + public string? ResumeToken { get; set; } + + [BsonElement("sequenceNumber")] + public long SequenceNumber { get; set; } + + [BsonElement("updatedAt")] + public DateTime UpdatedAt { get; set; } + + [BsonElement("processedCount")] + public long ProcessedCount { get; set; } + + [BsonElement("lastBatchHash")] + [BsonIgnoreIfNull] + public string? LastBatchHash { get; set; } + + public PolicyDeltaCheckpoint ToRecord() => + new( + CheckpointId: Id, + TenantId: TenantId, + ConsumerId: ConsumerId, + LastCreatedAt: LastCreatedAt.HasValue ? new DateTimeOffset(LastCreatedAt.Value, TimeSpan.Zero) : null, + LastAdvisoryId: LastAdvisoryId, + ResumeToken: ResumeToken, + SequenceNumber: SequenceNumber, + UpdatedAt: new DateTimeOffset(UpdatedAt, TimeSpan.Zero), + ProcessedCount: ProcessedCount, + LastBatchHash: LastBatchHash); + + public static PolicyDeltaCheckpointDocument FromRecord(PolicyDeltaCheckpoint record) => + new() + { + Id = record.CheckpointId, + TenantId = record.TenantId, + ConsumerId = record.ConsumerId, + LastCreatedAt = record.LastCreatedAt?.UtcDateTime, + LastAdvisoryId = record.LastAdvisoryId, + ResumeToken = record.ResumeToken, + SequenceNumber = record.SequenceNumber, + UpdatedAt = record.UpdatedAt.UtcDateTime, + ProcessedCount = record.ProcessedCount, + LastBatchHash = record.LastBatchHash + }; +} diff --git a/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ServiceCollectionExtensions.cs index 2c35ce772..bcf5325e6 100644 --- a/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ServiceCollectionExtensions.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ServiceCollectionExtensions.cs @@ -24,6 +24,8 @@ using StellaOps.Concelier.Storage.Mongo.Observations; using StellaOps.Concelier.Core.Observations; using StellaOps.Concelier.Storage.Mongo.Linksets; using StellaOps.Concelier.Storage.Mongo.Orchestrator; +using StellaOps.Concelier.Storage.Mongo.PolicyDelta; +using StellaOps.Concelier.Core.Linksets; namespace StellaOps.Concelier.Storage.Mongo; @@ -190,8 +192,12 @@ public static class ServiceCollectionExtensions services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); services.AddSingleton(); + services.AddSingleton(); services.AddSingleton(); diff --git a/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyCompileMetadata.cs b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyCompileMetadata.cs new file mode 100644 index 000000000..2e389fc0e --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyCompileMetadata.cs @@ -0,0 +1,220 @@ +using System.Collections.Immutable; +using System.Security.Cryptography; +using System.Text; +using StellaOps.PolicyDsl; + +namespace StellaOps.Policy.Engine.Compilation; + +/// +/// Extended compile output metadata for policy analysis, coverage tracking, and editor support. +/// +public sealed record PolicyCompileMetadata( + PolicySymbolTable SymbolTable, + PolicyRuleIndex RuleIndex, + PolicyDocumentation Documentation, + PolicyRuleCoverageMetadata CoverageMetadata, + PolicyDeterministicHashes Hashes); + +/// +/// Deterministic hashes for policy identity and change detection. +/// +public sealed record PolicyDeterministicHashes( + /// SHA256 of canonical IR JSON representation. + string ContentHash, + /// SHA256 of rule structure only (names, priorities, conditions). + string StructureHash, + /// SHA256 of rule names and priorities (for ordering verification). + string OrderingHash, + /// Combined hash for complete identity verification. + string IdentityHash); + +/// +/// Symbol table containing all identifiers, functions, and their usages. +/// +public sealed record PolicySymbolTable( + ImmutableArray Symbols, + ImmutableArray BuiltInFunctions, + ImmutableArray Variables, + ImmutableDictionary> ReferencesByName); + +/// +/// A symbol in the policy DSL (identifier, function, variable, etc.). +/// +public sealed record PolicySymbol( + string Name, + PolicySymbolKind Kind, + string? Type, + PolicySymbolScope Scope, + ImmutableArray References); + +/// +/// Symbol kinds in the policy DSL. +/// +public enum PolicySymbolKind +{ + Variable, + Function, + Profile, + ProfileMap, + ProfileEnv, + ProfileScalar, + Rule, + Metadata, + Setting, + Parameter, + BuiltIn +} + +/// +/// Symbol scope information. +/// +public sealed record PolicySymbolScope( + string? RuleName, + string? ProfileName, + bool IsGlobal); + +/// +/// Reference to a symbol usage in the policy. +/// +public sealed record PolicySymbolReference( + string SymbolName, + string Context, + int? LineNumber, + int? ColumnNumber, + PolicySymbolUsage Usage); + +/// +/// How a symbol is used. +/// +public enum PolicySymbolUsage +{ + Definition, + Read, + Write, + Invocation, + MemberAccess +} + +/// +/// Built-in function signature for autocomplete. +/// +public sealed record PolicyFunctionSignature( + string Name, + string Description, + ImmutableArray Parameters, + string ReturnType, + ImmutableArray Examples); + +/// +/// Parameter information for function signatures. +/// +public sealed record PolicyParameterInfo( + string Name, + string Type, + bool IsOptional, + string? DefaultValue, + string Description); + +/// +/// Variable definition extracted from policy. +/// +public sealed record PolicyVariableDefinition( + string Name, + string? InferredType, + string? InitialValue, + string DefinedInRule, + bool IsAssignment); + +/// +/// Rule index for fast lookup and editor autocomplete. +/// +public sealed record PolicyRuleIndex( + ImmutableArray Rules, + ImmutableDictionary ByName, + ImmutableDictionary> ByPriority, + ImmutableArray ActionTypes, + ImmutableArray UsedIdentifiers); + +/// +/// Index entry for a single rule. +/// +public sealed record PolicyRuleEntry( + string Name, + int Priority, + int Index, + string ConditionSummary, + ImmutableArray ThenActionTypes, + ImmutableArray ElseActionTypes, + string Justification, + ImmutableArray ReferencedIdentifiers, + ImmutableArray ReferencedFunctions); + +/// +/// Extracted documentation from policy source. +/// +public sealed record PolicyDocumentation( + string? PolicyDescription, + ImmutableArray Tags, + string? Author, + ImmutableDictionary CustomMetadata, + ImmutableArray RuleDocumentation, + ImmutableArray ProfileDocumentation); + +/// +/// Documentation for a single rule. +/// +public sealed record PolicyRuleDocumentation( + string RuleName, + int Priority, + string Justification, + string ConditionDescription, + ImmutableArray ActionDescriptions); + +/// +/// Documentation for a profile. +/// +public sealed record PolicyProfileDocumentation( + string ProfileName, + ImmutableArray MapNames, + ImmutableArray EnvNames, + ImmutableArray ScalarNames); + +/// +/// Rule coverage metadata for tracking test coverage. +/// +public sealed record PolicyRuleCoverageMetadata( + ImmutableArray Rules, + int TotalRules, + int TotalConditions, + int TotalActions, + ImmutableDictionary ActionTypeCounts, + ImmutableArray CoveragePaths); + +/// +/// Coverage entry for a single rule. +/// +public sealed record PolicyRuleCoverageEntry( + string RuleName, + int Priority, + string ConditionHash, + int ThenActionCount, + int ElseActionCount, + bool HasElseBranch, + ImmutableArray CoveragePoints); + +/// +/// A coverage path through the policy (for test generation). +/// +public sealed record PolicyCoveragePath( + string PathId, + ImmutableArray RuleSequence, + ImmutableArray Decisions, + string PathHash); + +/// +/// A branch decision point. +/// +public sealed record PolicyBranchDecision( + string RuleName, + bool TookThenBranch, + string ConditionHash); diff --git a/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyMetadataExtractor.cs b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyMetadataExtractor.cs new file mode 100644 index 000000000..5b8269d10 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyMetadataExtractor.cs @@ -0,0 +1,988 @@ +using System.Collections.Immutable; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using StellaOps.PolicyDsl; + +namespace StellaOps.Policy.Engine.Compilation; + +/// +/// Extracts comprehensive metadata from compiled policy IR documents. +/// Generates symbol tables, rule indices, documentation, coverage metadata, and deterministic hashes. +/// +internal sealed class PolicyMetadataExtractor +{ + private static readonly JsonSerializerOptions JsonOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + WriteIndented = false + }; + + /// + /// Extracts all metadata from a compiled policy document. + /// + public PolicyCompileMetadata Extract(PolicyIrDocument document, ImmutableArray canonicalRepresentation) + { + ArgumentNullException.ThrowIfNull(document); + + var symbolTable = ExtractSymbolTable(document); + var ruleIndex = BuildRuleIndex(document); + var documentation = ExtractDocumentation(document); + var coverageMetadata = BuildCoverageMetadata(document); + var hashes = ComputeHashes(document, canonicalRepresentation); + + return new PolicyCompileMetadata( + symbolTable, + ruleIndex, + documentation, + coverageMetadata, + hashes); + } + + #region Symbol Table Extraction + + private PolicySymbolTable ExtractSymbolTable(PolicyIrDocument document) + { + var symbols = new List(); + var variables = new List(); + var referencesByName = new Dictionary>(); + + // Extract profile symbols + if (!document.Profiles.IsDefaultOrEmpty) + { + foreach (var profile in document.Profiles) + { + symbols.Add(new PolicySymbol( + profile.Name, + PolicySymbolKind.Profile, + "profile", + new PolicySymbolScope(null, profile.Name, true), + ImmutableArray.Empty)); + + if (!profile.Maps.IsDefaultOrEmpty) + { + foreach (var map in profile.Maps) + { + symbols.Add(new PolicySymbol( + map.Name, + PolicySymbolKind.ProfileMap, + "map", + new PolicySymbolScope(null, profile.Name, false), + ImmutableArray.Empty)); + } + } + + if (!profile.Environments.IsDefaultOrEmpty) + { + foreach (var env in profile.Environments) + { + symbols.Add(new PolicySymbol( + env.Name, + PolicySymbolKind.ProfileEnv, + "env", + new PolicySymbolScope(null, profile.Name, false), + ImmutableArray.Empty)); + + // Extract identifiers from environment conditions + if (!env.Entries.IsDefaultOrEmpty) + { + foreach (var entry in env.Entries) + { + ExtractExpressionReferences(entry.Condition, null, profile.Name, referencesByName); + } + } + } + } + + if (!profile.Scalars.IsDefaultOrEmpty) + { + foreach (var scalar in profile.Scalars) + { + symbols.Add(new PolicySymbol( + scalar.Name, + PolicySymbolKind.ProfileScalar, + InferLiteralType(scalar.Value), + new PolicySymbolScope(null, profile.Name, false), + ImmutableArray.Empty)); + } + } + } + } + + // Extract rule symbols and variable definitions + if (!document.Rules.IsDefaultOrEmpty) + { + foreach (var rule in document.Rules) + { + symbols.Add(new PolicySymbol( + rule.Name, + PolicySymbolKind.Rule, + "rule", + new PolicySymbolScope(rule.Name, null, true), + ImmutableArray.Empty)); + + // Extract identifiers from rule condition + ExtractExpressionReferences(rule.When, rule.Name, null, referencesByName); + + // Extract from then actions + if (!rule.ThenActions.IsDefaultOrEmpty) + { + foreach (var action in rule.ThenActions) + { + ExtractActionReferences(action, rule.Name, referencesByName, variables); + } + } + + // Extract from else actions + if (!rule.ElseActions.IsDefaultOrEmpty) + { + foreach (var action in rule.ElseActions) + { + ExtractActionReferences(action, rule.Name, referencesByName, variables); + } + } + } + } + + // Extract metadata symbols + foreach (var (key, _) in document.Metadata) + { + symbols.Add(new PolicySymbol( + key, + PolicySymbolKind.Metadata, + "metadata", + new PolicySymbolScope(null, null, true), + ImmutableArray.Empty)); + } + + // Extract settings symbols + foreach (var (key, _) in document.Settings) + { + symbols.Add(new PolicySymbol( + key, + PolicySymbolKind.Setting, + "setting", + new PolicySymbolScope(null, null, true), + ImmutableArray.Empty)); + } + + return new PolicySymbolTable( + symbols.ToImmutableArray(), + GetBuiltInFunctions(), + variables.ToImmutableArray(), + referencesByName.ToImmutableDictionary( + kvp => kvp.Key, + kvp => kvp.Value.ToImmutableArray())); + } + + private void ExtractExpressionReferences( + PolicyExpression? expression, + string? ruleName, + string? profileName, + Dictionary> referencesByName) + { + if (expression is null) return; + + switch (expression) + { + case PolicyIdentifierExpression identifier: + AddReference(referencesByName, identifier.Name, ruleName, profileName, PolicySymbolUsage.Read); + break; + + case PolicyMemberAccessExpression member: + ExtractExpressionReferences(member.Target, ruleName, profileName, referencesByName); + // Member name is not a standalone identifier + break; + + case PolicyInvocationExpression invocation: + ExtractExpressionReferences(invocation.Target, ruleName, profileName, referencesByName); + if (!invocation.Arguments.IsDefaultOrEmpty) + { + foreach (var arg in invocation.Arguments) + { + ExtractExpressionReferences(arg, ruleName, profileName, referencesByName); + } + } + break; + + case PolicyIndexerExpression indexer: + ExtractExpressionReferences(indexer.Target, ruleName, profileName, referencesByName); + ExtractExpressionReferences(indexer.Index, ruleName, profileName, referencesByName); + break; + + case PolicyUnaryExpression unary: + ExtractExpressionReferences(unary.Operand, ruleName, profileName, referencesByName); + break; + + case PolicyBinaryExpression binary: + ExtractExpressionReferences(binary.Left, ruleName, profileName, referencesByName); + ExtractExpressionReferences(binary.Right, ruleName, profileName, referencesByName); + break; + + case PolicyListExpression list when !list.Items.IsDefaultOrEmpty: + foreach (var item in list.Items) + { + ExtractExpressionReferences(item, ruleName, profileName, referencesByName); + } + break; + } + } + + private void ExtractActionReferences( + PolicyIrAction action, + string ruleName, + Dictionary> referencesByName, + List variables) + { + switch (action) + { + case PolicyIrAssignmentAction assignment: + if (!assignment.Target.IsDefaultOrEmpty) + { + var varName = string.Join(".", assignment.Target); + AddReference(referencesByName, varName, ruleName, null, PolicySymbolUsage.Write); + variables.Add(new PolicyVariableDefinition( + varName, + InferExpressionType(assignment.Value), + SummarizeExpression(assignment.Value), + ruleName, + true)); + } + ExtractExpressionReferences(assignment.Value, ruleName, null, referencesByName); + break; + + case PolicyIrAnnotateAction annotate: + if (!annotate.Target.IsDefaultOrEmpty) + { + var targetName = string.Join(".", annotate.Target); + AddReference(referencesByName, targetName, ruleName, null, PolicySymbolUsage.Write); + } + ExtractExpressionReferences(annotate.Value, ruleName, null, referencesByName); + break; + + case PolicyIrIgnoreAction ignore: + ExtractExpressionReferences(ignore.Until, ruleName, null, referencesByName); + break; + + case PolicyIrEscalateAction escalate: + ExtractExpressionReferences(escalate.To, ruleName, null, referencesByName); + ExtractExpressionReferences(escalate.When, ruleName, null, referencesByName); + break; + + case PolicyIrRequireVexAction require: + foreach (var condition in require.Conditions.Values) + { + ExtractExpressionReferences(condition, ruleName, null, referencesByName); + } + break; + + case PolicyIrWarnAction warn: + ExtractExpressionReferences(warn.Message, ruleName, null, referencesByName); + break; + + case PolicyIrDeferAction defer: + ExtractExpressionReferences(defer.Until, ruleName, null, referencesByName); + break; + } + } + + private static void AddReference( + Dictionary> referencesByName, + string symbolName, + string? ruleName, + string? profileName, + PolicySymbolUsage usage) + { + if (!referencesByName.TryGetValue(symbolName, out var refs)) + { + refs = []; + referencesByName[symbolName] = refs; + } + + refs.Add(new PolicySymbolReference( + symbolName, + ruleName ?? profileName ?? "global", + null, + null, + usage)); + } + + private static string? InferLiteralType(PolicyIrLiteral literal) => literal switch + { + PolicyIrStringLiteral => "string", + PolicyIrNumberLiteral => "number", + PolicyIrBooleanLiteral => "boolean", + PolicyIrListLiteral => "list", + _ => null + }; + + private static string? InferExpressionType(PolicyExpression? expression) => expression switch + { + PolicyLiteralExpression lit => lit.Value switch + { + string => "string", + decimal or double or float or int or long => "number", + bool => "boolean", + null => "null", + _ => "unknown" + }, + PolicyListExpression => "list", + PolicyBinaryExpression bin => bin.Operator switch + { + PolicyBinaryOperator.And or PolicyBinaryOperator.Or or PolicyBinaryOperator.Equal or + PolicyBinaryOperator.NotEqual or PolicyBinaryOperator.LessThan or PolicyBinaryOperator.LessThanOrEqual or + PolicyBinaryOperator.GreaterThan or PolicyBinaryOperator.GreaterThanOrEqual or + PolicyBinaryOperator.In or PolicyBinaryOperator.NotIn => "boolean", + _ => "unknown" + }, + PolicyUnaryExpression { Operator: PolicyUnaryOperator.Not } => "boolean", + _ => null + }; + + private static ImmutableArray GetBuiltInFunctions() + { + return + [ + new PolicyFunctionSignature( + "contains", + "Checks if a string contains a substring or a list contains an element", + [ + new PolicyParameterInfo("haystack", "string|list", false, null, "The string or list to search in"), + new PolicyParameterInfo("needle", "any", false, null, "The value to search for") + ], + "boolean", + ["contains(advisory.id, \"CVE\")", "contains(tags, \"critical\")"]), + + new PolicyFunctionSignature( + "startsWith", + "Checks if a string starts with a prefix", + [ + new PolicyParameterInfo("value", "string", false, null, "The string to check"), + new PolicyParameterInfo("prefix", "string", false, null, "The prefix to match") + ], + "boolean", + ["startsWith(component.purl, \"pkg:npm\")"]), + + new PolicyFunctionSignature( + "endsWith", + "Checks if a string ends with a suffix", + [ + new PolicyParameterInfo("value", "string", false, null, "The string to check"), + new PolicyParameterInfo("suffix", "string", false, null, "The suffix to match") + ], + "boolean", + ["endsWith(component.name, \"-dev\")"]), + + new PolicyFunctionSignature( + "matches", + "Checks if a string matches a regex pattern", + [ + new PolicyParameterInfo("value", "string", false, null, "The string to check"), + new PolicyParameterInfo("pattern", "string", false, null, "The regex pattern") + ], + "boolean", + ["matches(advisory.id, \"^CVE-202[3-9]\")"]), + + new PolicyFunctionSignature( + "length", + "Returns the length of a string or list", + [ + new PolicyParameterInfo("value", "string|list", false, null, "The value to measure") + ], + "number", + ["length(component.name)", "length(tags)"]), + + new PolicyFunctionSignature( + "lower", + "Converts a string to lowercase", + [ + new PolicyParameterInfo("value", "string", false, null, "The string to convert") + ], + "string", + ["lower(component.ecosystem)"]), + + new PolicyFunctionSignature( + "upper", + "Converts a string to uppercase", + [ + new PolicyParameterInfo("value", "string", false, null, "The string to convert") + ], + "string", + ["upper(severity)"]), + + new PolicyFunctionSignature( + "now", + "Returns the current evaluation timestamp (deterministic within a run)", + [], + "datetime", + ["now()"]), + + new PolicyFunctionSignature( + "days", + "Creates a duration in days", + [ + new PolicyParameterInfo("count", "number", false, null, "Number of days") + ], + "duration", + ["days(30)", "days(7)"]), + + new PolicyFunctionSignature( + "semver", + "Parses a semantic version string", + [ + new PolicyParameterInfo("version", "string", false, null, "The version string to parse") + ], + "semver", + ["semver(component.version)"]), + + new PolicyFunctionSignature( + "semverCompare", + "Compares two semantic versions", + [ + new PolicyParameterInfo("left", "string|semver", false, null, "First version"), + new PolicyParameterInfo("right", "string|semver", false, null, "Second version") + ], + "number", + ["semverCompare(component.version, \"1.0.0\")"]) + ]; + } + + #endregion + + #region Rule Index Building + + private PolicyRuleIndex BuildRuleIndex(PolicyIrDocument document) + { + var rules = new List(); + var byName = new Dictionary(StringComparer.Ordinal); + var byPriority = new Dictionary>(); + var allActionTypes = new HashSet(); + var allIdentifiers = new HashSet(); + + if (!document.Rules.IsDefaultOrEmpty) + { + for (var i = 0; i < document.Rules.Length; i++) + { + var rule = document.Rules[i]; + var thenActionTypes = GetActionTypes(rule.ThenActions, allActionTypes); + var elseActionTypes = GetActionTypes(rule.ElseActions, allActionTypes); + var (identifiers, functions) = ExtractRuleReferences(rule); + + foreach (var id in identifiers) + { + allIdentifiers.Add(id); + } + + var entry = new PolicyRuleEntry( + rule.Name, + rule.Priority, + i, + SummarizeExpression(rule.When) ?? "true", + thenActionTypes, + elseActionTypes, + rule.Because, + identifiers, + functions); + + rules.Add(entry); + byName[rule.Name] = entry; + + if (!byPriority.TryGetValue(rule.Priority, out var priorityList)) + { + priorityList = []; + byPriority[rule.Priority] = priorityList; + } + priorityList.Add(entry); + } + } + + return new PolicyRuleIndex( + rules.ToImmutableArray(), + byName.ToImmutableDictionary(), + byPriority.ToImmutableDictionary(kvp => kvp.Key, kvp => kvp.Value.ToImmutableArray()), + allActionTypes.Order().ToImmutableArray(), + allIdentifiers.Order().ToImmutableArray()); + } + + private static ImmutableArray GetActionTypes( + ImmutableArray actions, + HashSet allActionTypes) + { + if (actions.IsDefaultOrEmpty) return []; + + var types = new List(); + foreach (var action in actions) + { + var typeName = action switch + { + PolicyIrAssignmentAction => "assign", + PolicyIrAnnotateAction => "annotate", + PolicyIrIgnoreAction => "ignore", + PolicyIrEscalateAction => "escalate", + PolicyIrRequireVexAction => "requireVex", + PolicyIrWarnAction => "warn", + PolicyIrDeferAction => "defer", + _ => "unknown" + }; + types.Add(typeName); + allActionTypes.Add(typeName); + } + return types.ToImmutableArray(); + } + + private static (ImmutableArray Identifiers, ImmutableArray Functions) ExtractRuleReferences(PolicyIrRule rule) + { + var identifiers = new HashSet(); + var functions = new HashSet(); + + CollectExpressionReferences(rule.When, identifiers, functions); + + if (!rule.ThenActions.IsDefaultOrEmpty) + { + foreach (var action in rule.ThenActions) + { + CollectActionReferences(action, identifiers, functions); + } + } + + if (!rule.ElseActions.IsDefaultOrEmpty) + { + foreach (var action in rule.ElseActions) + { + CollectActionReferences(action, identifiers, functions); + } + } + + return (identifiers.Order().ToImmutableArray(), functions.Order().ToImmutableArray()); + } + + private static void CollectExpressionReferences( + PolicyExpression? expression, + HashSet identifiers, + HashSet functions) + { + if (expression is null) return; + + switch (expression) + { + case PolicyIdentifierExpression id: + identifiers.Add(id.Name); + break; + case PolicyMemberAccessExpression member: + CollectExpressionReferences(member.Target, identifiers, functions); + break; + case PolicyInvocationExpression invocation: + if (invocation.Target is PolicyIdentifierExpression funcId) + { + functions.Add(funcId.Name); + } + else + { + CollectExpressionReferences(invocation.Target, identifiers, functions); + } + if (!invocation.Arguments.IsDefaultOrEmpty) + { + foreach (var arg in invocation.Arguments) + { + CollectExpressionReferences(arg, identifiers, functions); + } + } + break; + case PolicyIndexerExpression indexer: + CollectExpressionReferences(indexer.Target, identifiers, functions); + CollectExpressionReferences(indexer.Index, identifiers, functions); + break; + case PolicyUnaryExpression unary: + CollectExpressionReferences(unary.Operand, identifiers, functions); + break; + case PolicyBinaryExpression binary: + CollectExpressionReferences(binary.Left, identifiers, functions); + CollectExpressionReferences(binary.Right, identifiers, functions); + break; + case PolicyListExpression list when !list.Items.IsDefaultOrEmpty: + foreach (var item in list.Items) + { + CollectExpressionReferences(item, identifiers, functions); + } + break; + } + } + + private static void CollectActionReferences( + PolicyIrAction action, + HashSet identifiers, + HashSet functions) + { + switch (action) + { + case PolicyIrAssignmentAction assign: + CollectExpressionReferences(assign.Value, identifiers, functions); + break; + case PolicyIrAnnotateAction annotate: + CollectExpressionReferences(annotate.Value, identifiers, functions); + break; + case PolicyIrIgnoreAction ignore: + CollectExpressionReferences(ignore.Until, identifiers, functions); + break; + case PolicyIrEscalateAction escalate: + CollectExpressionReferences(escalate.To, identifiers, functions); + CollectExpressionReferences(escalate.When, identifiers, functions); + break; + case PolicyIrRequireVexAction require: + foreach (var condition in require.Conditions.Values) + { + CollectExpressionReferences(condition, identifiers, functions); + } + break; + case PolicyIrWarnAction warn: + CollectExpressionReferences(warn.Message, identifiers, functions); + break; + case PolicyIrDeferAction defer: + CollectExpressionReferences(defer.Until, identifiers, functions); + break; + } + } + + #endregion + + #region Documentation Extraction + + private PolicyDocumentation ExtractDocumentation(PolicyIrDocument document) + { + string? description = null; + var tags = ImmutableArray.Empty; + string? author = null; + var customMetadata = new Dictionary(); + + // Extract from metadata + if (document.Metadata.TryGetValue("description", out var descLit) && descLit is PolicyIrStringLiteral descStr) + { + description = descStr.Value; + } + + if (document.Metadata.TryGetValue("author", out var authorLit) && authorLit is PolicyIrStringLiteral authorStr) + { + author = authorStr.Value; + } + + if (document.Metadata.TryGetValue("tags", out var tagsLit) && tagsLit is PolicyIrListLiteral tagsList) + { + tags = tagsList.Items + .OfType() + .Select(s => s.Value) + .ToImmutableArray(); + } + + foreach (var (key, value) in document.Metadata) + { + if (key is not ("description" or "author" or "tags") && value is PolicyIrStringLiteral strVal) + { + customMetadata[key] = strVal.Value; + } + } + + // Extract rule documentation + var ruleDocs = new List(); + if (!document.Rules.IsDefaultOrEmpty) + { + foreach (var rule in document.Rules) + { + var actionDescs = new List(); + if (!rule.ThenActions.IsDefaultOrEmpty) + { + foreach (var action in rule.ThenActions) + { + actionDescs.Add($"then: {DescribeAction(action)}"); + } + } + if (!rule.ElseActions.IsDefaultOrEmpty) + { + foreach (var action in rule.ElseActions) + { + actionDescs.Add($"else: {DescribeAction(action)}"); + } + } + + ruleDocs.Add(new PolicyRuleDocumentation( + rule.Name, + rule.Priority, + rule.Because, + SummarizeExpression(rule.When) ?? "true", + actionDescs.ToImmutableArray())); + } + } + + // Extract profile documentation + var profileDocs = new List(); + if (!document.Profiles.IsDefaultOrEmpty) + { + foreach (var profile in document.Profiles) + { + profileDocs.Add(new PolicyProfileDocumentation( + profile.Name, + profile.Maps.IsDefaultOrEmpty + ? [] + : profile.Maps.Select(m => m.Name).ToImmutableArray(), + profile.Environments.IsDefaultOrEmpty + ? [] + : profile.Environments.Select(e => e.Name).ToImmutableArray(), + profile.Scalars.IsDefaultOrEmpty + ? [] + : profile.Scalars.Select(s => s.Name).ToImmutableArray())); + } + } + + return new PolicyDocumentation( + description, + tags, + author, + customMetadata.ToImmutableDictionary(), + ruleDocs.ToImmutableArray(), + profileDocs.ToImmutableArray()); + } + + private static string DescribeAction(PolicyIrAction action) => action switch + { + PolicyIrAssignmentAction a => $"assign {string.Join(".", a.Target)} = {SummarizeExpression(a.Value)}", + PolicyIrAnnotateAction a => $"annotate {string.Join(".", a.Target)} = {SummarizeExpression(a.Value)}", + PolicyIrIgnoreAction a => $"ignore{(a.Until is not null ? $" until {SummarizeExpression(a.Until)}" : "")}{(a.Because is not null ? $" because \"{a.Because}\"" : "")}", + PolicyIrEscalateAction a => $"escalate{(a.To is not null ? $" to {SummarizeExpression(a.To)}" : "")}{(a.When is not null ? $" when {SummarizeExpression(a.When)}" : "")}", + PolicyIrRequireVexAction a => $"requireVex({string.Join(", ", a.Conditions.Keys)})", + PolicyIrWarnAction a => $"warn {SummarizeExpression(a.Message)}", + PolicyIrDeferAction a => $"defer{(a.Until is not null ? $" until {SummarizeExpression(a.Until)}" : "")}", + _ => "unknown" + }; + + #endregion + + #region Coverage Metadata Building + + private PolicyRuleCoverageMetadata BuildCoverageMetadata(PolicyIrDocument document) + { + var rules = new List(); + var actionTypeCounts = new Dictionary(); + var totalConditions = 0; + var totalActions = 0; + + if (!document.Rules.IsDefaultOrEmpty) + { + foreach (var rule in document.Rules) + { + totalConditions++; + var thenCount = rule.ThenActions.IsDefaultOrEmpty ? 0 : rule.ThenActions.Length; + var elseCount = rule.ElseActions.IsDefaultOrEmpty ? 0 : rule.ElseActions.Length; + totalActions += thenCount + elseCount; + + // Count action types + CountActionTypes(rule.ThenActions, actionTypeCounts); + CountActionTypes(rule.ElseActions, actionTypeCounts); + + // Generate coverage points + var coveragePoints = new List + { + $"{rule.Name}:condition" + }; + + if (thenCount > 0) + { + coveragePoints.Add($"{rule.Name}:then"); + for (var i = 0; i < thenCount; i++) + { + coveragePoints.Add($"{rule.Name}:then[{i}]"); + } + } + + if (elseCount > 0) + { + coveragePoints.Add($"{rule.Name}:else"); + for (var i = 0; i < elseCount; i++) + { + coveragePoints.Add($"{rule.Name}:else[{i}]"); + } + } + + rules.Add(new PolicyRuleCoverageEntry( + rule.Name, + rule.Priority, + ComputeExpressionHash(rule.When), + thenCount, + elseCount, + elseCount > 0, + coveragePoints.ToImmutableArray())); + } + } + + // Generate coverage paths (simplified - exhaustive paths for small policies) + var coveragePaths = GenerateCoveragePaths(document.Rules); + + return new PolicyRuleCoverageMetadata( + rules.ToImmutableArray(), + rules.Count, + totalConditions, + totalActions, + actionTypeCounts.ToImmutableDictionary(), + coveragePaths); + } + + private static void CountActionTypes(ImmutableArray actions, Dictionary counts) + { + if (actions.IsDefaultOrEmpty) return; + + foreach (var action in actions) + { + var typeName = action switch + { + PolicyIrAssignmentAction => "assign", + PolicyIrAnnotateAction => "annotate", + PolicyIrIgnoreAction => "ignore", + PolicyIrEscalateAction => "escalate", + PolicyIrRequireVexAction => "requireVex", + PolicyIrWarnAction => "warn", + PolicyIrDeferAction => "defer", + _ => "unknown" + }; + + counts.TryGetValue(typeName, out var count); + counts[typeName] = count + 1; + } + } + + private static ImmutableArray GenerateCoveragePaths(ImmutableArray rules) + { + if (rules.IsDefaultOrEmpty) return []; + + var paths = new List(); + + // For small policies, generate all 2^n paths + // For larger policies, generate key paths only + var ruleCount = rules.Length; + var maxPaths = ruleCount <= 10 ? (1 << ruleCount) : 100; + + for (var pathIndex = 0; pathIndex < maxPaths && pathIndex < (1 << ruleCount); pathIndex++) + { + var sequence = new List(); + var decisions = new List(); + var pathHashBuilder = new StringBuilder(); + + for (var ruleIndex = 0; ruleIndex < ruleCount; ruleIndex++) + { + var rule = rules[ruleIndex]; + var tookThen = (pathIndex & (1 << ruleIndex)) != 0; + + sequence.Add(rule.Name); + decisions.Add(new PolicyBranchDecision( + rule.Name, + tookThen, + ComputeExpressionHash(rule.When))); + + pathHashBuilder.Append(rule.Name); + pathHashBuilder.Append(tookThen ? ":T" : ":F"); + pathHashBuilder.Append('|'); + } + + var pathId = $"path_{pathIndex:D4}"; + var pathHash = ComputeStringHash(pathHashBuilder.ToString()); + + paths.Add(new PolicyCoveragePath( + pathId, + sequence.ToImmutableArray(), + decisions.ToImmutableArray(), + pathHash)); + } + + return paths.ToImmutableArray(); + } + + #endregion + + #region Hash Computation + + private PolicyDeterministicHashes ComputeHashes(PolicyIrDocument document, ImmutableArray canonicalRepresentation) + { + // Content hash from canonical representation + var contentHash = ComputeHash(canonicalRepresentation.AsSpan()); + + // Structure hash (rules only) + var structureBuilder = new StringBuilder(); + if (!document.Rules.IsDefaultOrEmpty) + { + foreach (var rule in document.Rules) + { + structureBuilder.Append(rule.Name); + structureBuilder.Append(':'); + structureBuilder.Append(rule.Priority); + structureBuilder.Append(':'); + structureBuilder.Append(ComputeExpressionHash(rule.When)); + structureBuilder.Append('|'); + } + } + var structureHash = ComputeStringHash(structureBuilder.ToString()); + + // Ordering hash (names and priorities only) + var orderingBuilder = new StringBuilder(); + if (!document.Rules.IsDefaultOrEmpty) + { + foreach (var rule in document.Rules) + { + orderingBuilder.Append(rule.Name); + orderingBuilder.Append(':'); + orderingBuilder.Append(rule.Priority); + orderingBuilder.Append('|'); + } + } + var orderingHash = ComputeStringHash(orderingBuilder.ToString()); + + // Identity hash (combination) + var identityBuilder = new StringBuilder(); + identityBuilder.Append(document.Name); + identityBuilder.Append(':'); + identityBuilder.Append(document.Syntax); + identityBuilder.Append(':'); + identityBuilder.Append(contentHash); + var identityHash = ComputeStringHash(identityBuilder.ToString()); + + return new PolicyDeterministicHashes(contentHash, structureHash, orderingHash, identityHash); + } + + private static string ComputeExpressionHash(PolicyExpression? expression) + { + if (expression is null) return "null"; + var summary = SummarizeExpression(expression) ?? "empty"; + return ComputeStringHash(summary); + } + + private static string ComputeStringHash(string value) + { + var bytes = Encoding.UTF8.GetBytes(value); + return ComputeHash(bytes); + } + + private static string ComputeHash(ReadOnlySpan bytes) + { + Span hash = stackalloc byte[32]; + SHA256.HashData(bytes, hash); + return Convert.ToHexStringLower(hash); + } + + private static string? SummarizeExpression(PolicyExpression? expression, int maxLength = 100) + { + if (expression is null) return null; + + var summary = expression switch + { + PolicyLiteralExpression lit => lit.Value?.ToString() ?? "null", + PolicyIdentifierExpression id => id.Name, + PolicyMemberAccessExpression member => $"{SummarizeExpression(member.Target)}.{member.Member}", + PolicyInvocationExpression inv => $"{SummarizeExpression(inv.Target)}({string.Join(", ", inv.Arguments.IsDefaultOrEmpty ? [] : inv.Arguments.Select(a => SummarizeExpression(a)))})", + PolicyIndexerExpression idx => $"{SummarizeExpression(idx.Target)}[{SummarizeExpression(idx.Index)}]", + PolicyUnaryExpression unary => $"{unary.Operator} {SummarizeExpression(unary.Operand)}", + PolicyBinaryExpression binary => $"{SummarizeExpression(binary.Left)} {binary.Operator} {SummarizeExpression(binary.Right)}", + PolicyListExpression list => $"[{string.Join(", ", list.Items.IsDefaultOrEmpty ? [] : list.Items.Take(3).Select(i => SummarizeExpression(i)))}{(list.Items.Length > 3 ? ", ..." : "")}]", + _ => expression.GetType().Name + }; + + return summary.Length > maxLength ? summary[..(maxLength - 3)] + "..." : summary; + } + + #endregion +} diff --git a/src/Policy/StellaOps.Policy.Engine/DependencyInjection/PolicyEngineServiceCollectionExtensions.cs b/src/Policy/StellaOps.Policy.Engine/DependencyInjection/PolicyEngineServiceCollectionExtensions.cs new file mode 100644 index 000000000..7df817ca8 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/DependencyInjection/PolicyEngineServiceCollectionExtensions.cs @@ -0,0 +1,154 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using StellaOps.Policy.Engine.Caching; +using StellaOps.Policy.Engine.EffectiveDecisionMap; +using StellaOps.Policy.Engine.Events; +using StellaOps.Policy.Engine.ExceptionCache; +using StellaOps.Policy.Engine.Options; +using StellaOps.Policy.Engine.Services; +using StellaOps.Policy.Engine.WhatIfSimulation; +using StellaOps.Policy.Engine.Workers; +using StackExchange.Redis; + +namespace StellaOps.Policy.Engine.DependencyInjection; + +/// +/// Extension methods for registering Policy Engine services. +/// +public static class PolicyEngineServiceCollectionExtensions +{ + /// + /// Adds the core Policy Engine services to the service collection. + /// Includes TimeProvider, cache, and core evaluation services. + /// + public static IServiceCollection AddPolicyEngineCore(this IServiceCollection services) + { + // Time provider + services.TryAddSingleton(TimeProvider.System); + + // Core compilation and evaluation services + services.TryAddSingleton(); + + // Cache + services.TryAddSingleton(); + + // Runtime evaluation + services.TryAddSingleton(); + + // Bundle service + services.TryAddSingleton(); + + // Decision service + services.TryAddSingleton(); + + return services; + } + + /// + /// Adds the Policy Engine event pipeline services. + /// Includes event processor and job scheduler. + /// + public static IServiceCollection AddPolicyEngineEventPipeline(this IServiceCollection services) + { + // Event processor (implements both IPolicyEffectiveEventPublisher and IReEvaluationJobScheduler) + services.TryAddSingleton(); + services.TryAddSingleton(sp => + sp.GetRequiredService()); + services.TryAddSingleton(sp => + sp.GetRequiredService()); + + return services; + } + + /// + /// Adds the Policy Engine evaluation worker services. + /// Includes background host for continuous job processing. + /// + public static IServiceCollection AddPolicyEngineWorker(this IServiceCollection services) + { + // Worker service + services.TryAddSingleton(); + + // Background host + services.AddHostedService(); + + return services; + } + + /// + /// Adds the Policy Engine explainer services. + /// Requires IExplainTraceRepository and IPolicyPackRepository to be registered. + /// + public static IServiceCollection AddPolicyEngineExplainer(this IServiceCollection services) + { + services.TryAddSingleton(); + return services; + } + + /// + /// Adds the effective decision map services for Graph overlays. + /// Requires Redis connection to be registered. + /// + public static IServiceCollection AddEffectiveDecisionMap(this IServiceCollection services) + { + services.TryAddSingleton(); + return services; + } + + /// + /// Adds the exception effective cache for fast exception lookups during policy evaluation. + /// Requires Redis connection and IExceptionRepository to be registered. + /// + public static IServiceCollection AddExceptionEffectiveCache(this IServiceCollection services) + { + services.TryAddSingleton(); + return services; + } + + /// + /// Adds the What-If simulation service for Graph APIs. + /// Supports hypothetical SBOM diffs and draft policies without persisting results. + /// + public static IServiceCollection AddWhatIfSimulation(this IServiceCollection services) + { + services.TryAddSingleton(); + return services; + } + + /// + /// Adds Redis connection for effective decision map and evaluation cache. + /// + public static IServiceCollection AddPolicyEngineRedis( + this IServiceCollection services, + string connectionString) + { + services.TryAddSingleton(sp => + ConnectionMultiplexer.Connect(connectionString)); + + return services; + } + + /// + /// Adds all Policy Engine services with default configuration. + /// + public static IServiceCollection AddPolicyEngine(this IServiceCollection services) + { + services.AddPolicyEngineCore(); + services.AddPolicyEngineEventPipeline(); + services.AddPolicyEngineWorker(); + services.AddPolicyEngineExplainer(); + + return services; + } + + /// + /// Adds all Policy Engine services with configuration binding. + /// + public static IServiceCollection AddPolicyEngine( + this IServiceCollection services, + Action configure) + { + services.Configure(configure); + return services.AddPolicyEngine(); + } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Domain/PolicyPackRecord.cs b/src/Policy/StellaOps.Policy.Engine/Domain/PolicyPackRecord.cs index dc06a88c2..ce59c093a 100644 --- a/src/Policy/StellaOps.Policy.Engine/Domain/PolicyPackRecord.cs +++ b/src/Policy/StellaOps.Policy.Engine/Domain/PolicyPackRecord.cs @@ -1,5 +1,6 @@ using System.Collections.Concurrent; using System.Collections.Immutable; +using StellaOps.PolicyDsl; namespace StellaOps.Policy.Engine.Domain; @@ -113,6 +114,7 @@ internal sealed record PolicyBundleRecord( int Size, DateTimeOffset CreatedAt, ImmutableArray Payload, + PolicyIrDocument? CompiledDocument = null, PolicyAocMetadata? AocMetadata = null); /// diff --git a/src/Policy/StellaOps.Policy.Engine/EffectiveDecisionMap/EffectiveDecisionModels.cs b/src/Policy/StellaOps.Policy.Engine/EffectiveDecisionMap/EffectiveDecisionModels.cs new file mode 100644 index 000000000..6e224c4a2 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/EffectiveDecisionMap/EffectiveDecisionModels.cs @@ -0,0 +1,221 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.Policy.Engine.EffectiveDecisionMap; + +/// +/// Represents an effective policy decision for an asset/snapshot. +/// Stored in Redis for Graph overlay lookups. +/// +public sealed record EffectiveDecisionEntry +{ + /// + /// Tenant identifier. + /// + [JsonPropertyName("tenant_id")] + public required string TenantId { get; init; } + + /// + /// Asset identifier (PURL or SBOM ID). + /// + [JsonPropertyName("asset_id")] + public required string AssetId { get; init; } + + /// + /// Snapshot identifier (SBOM version or evaluation run). + /// + [JsonPropertyName("snapshot_id")] + public required string SnapshotId { get; init; } + + /// + /// Policy pack ID that produced this decision. + /// + [JsonPropertyName("pack_id")] + public required string PackId { get; init; } + + /// + /// Policy pack version. + /// + [JsonPropertyName("pack_version")] + public required int PackVersion { get; init; } + + /// + /// Final decision status (allow, warn, deny, blocked). + /// + [JsonPropertyName("status")] + public required string Status { get; init; } + + /// + /// Severity level if applicable. + /// + [JsonPropertyName("severity")] + public string? Severity { get; init; } + + /// + /// Rule name that determined the decision. + /// + [JsonPropertyName("rule_name")] + public string? RuleName { get; init; } + + /// + /// Priority of the applied rule. + /// + [JsonPropertyName("priority")] + public int? Priority { get; init; } + + /// + /// Exception ID if an exception was applied. + /// + [JsonPropertyName("exception_id")] + public string? ExceptionId { get; init; } + + /// + /// Count of advisories affecting this asset. + /// + [JsonPropertyName("advisory_count")] + public int AdvisoryCount { get; init; } + + /// + /// Count of critical/high severity findings. + /// + [JsonPropertyName("high_severity_count")] + public int HighSeverityCount { get; init; } + + /// + /// Aggregated annotations from the decision. + /// + [JsonPropertyName("annotations")] + public ImmutableDictionary Annotations { get; init; } = ImmutableDictionary.Empty; + + /// + /// Version counter for cache coherency. + /// + [JsonPropertyName("version")] + public required long Version { get; init; } + + /// + /// When this entry was evaluated. + /// + [JsonPropertyName("evaluated_at")] + public required DateTimeOffset EvaluatedAt { get; init; } + + /// + /// When this entry expires. + /// + [JsonPropertyName("expires_at")] + public required DateTimeOffset ExpiresAt { get; init; } + + /// + /// Correlation ID for tracing. + /// + [JsonPropertyName("correlation_id")] + public string? CorrelationId { get; init; } +} + +/// +/// Result of an effective decision map query. +/// +public sealed record EffectiveDecisionQueryResult +{ + /// + /// Found entries mapped by asset ID. + /// + public required IReadOnlyDictionary Entries { get; init; } + + /// + /// Asset IDs that were not found. + /// + public required IReadOnlyList NotFound { get; init; } + + /// + /// Current version of the decision map. + /// + public long MapVersion { get; init; } + + /// + /// Whether the result came from cache. + /// + public bool FromCache { get; init; } +} + +/// +/// Summary statistics for a snapshot's effective decisions. +/// +public sealed record EffectiveDecisionSummary +{ + /// + /// Snapshot ID. + /// + public required string SnapshotId { get; init; } + + /// + /// Total assets evaluated. + /// + public int TotalAssets { get; init; } + + /// + /// Count by status. + /// + public required IReadOnlyDictionary StatusCounts { get; init; } + + /// + /// Count by severity. + /// + public required IReadOnlyDictionary SeverityCounts { get; init; } + + /// + /// Assets with exceptions applied. + /// + public int ExceptionCount { get; init; } + + /// + /// Map version at time of summary. + /// + public long MapVersion { get; init; } + + /// + /// When this summary was computed. + /// + public DateTimeOffset ComputedAt { get; init; } +} + +/// +/// Filter options for querying effective decisions. +/// +public sealed record EffectiveDecisionFilter +{ + /// + /// Filter by status values. + /// + public IReadOnlyList? Statuses { get; init; } + + /// + /// Filter by severity values. + /// + public IReadOnlyList? Severities { get; init; } + + /// + /// Include only assets with exceptions. + /// + public bool? HasException { get; init; } + + /// + /// Filter by minimum advisory count. + /// + public int? MinAdvisoryCount { get; init; } + + /// + /// Filter by minimum high severity count. + /// + public int? MinHighSeverityCount { get; init; } + + /// + /// Maximum results to return. + /// + public int Limit { get; init; } = 1000; + + /// + /// Offset for pagination. + /// + public int Offset { get; init; } = 0; +} diff --git a/src/Policy/StellaOps.Policy.Engine/EffectiveDecisionMap/IEffectiveDecisionMap.cs b/src/Policy/StellaOps.Policy.Engine/EffectiveDecisionMap/IEffectiveDecisionMap.cs new file mode 100644 index 000000000..819c7ea0d --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/EffectiveDecisionMap/IEffectiveDecisionMap.cs @@ -0,0 +1,144 @@ +namespace StellaOps.Policy.Engine.EffectiveDecisionMap; + +/// +/// Interface for effective decision map storage. +/// Maintains policy decisions per asset/snapshot for Graph overlays. +/// +public interface IEffectiveDecisionMap +{ + /// + /// Sets an effective decision entry. + /// + Task SetAsync( + string tenantId, + string snapshotId, + EffectiveDecisionEntry entry, + CancellationToken cancellationToken = default); + + /// + /// Sets multiple effective decision entries. + /// + Task SetBatchAsync( + string tenantId, + string snapshotId, + IEnumerable entries, + CancellationToken cancellationToken = default); + + /// + /// Gets an effective decision entry. + /// + Task GetAsync( + string tenantId, + string snapshotId, + string assetId, + CancellationToken cancellationToken = default); + + /// + /// Gets multiple effective decision entries. + /// + Task GetBatchAsync( + string tenantId, + string snapshotId, + IReadOnlyList assetIds, + CancellationToken cancellationToken = default); + + /// + /// Gets all effective decisions for a snapshot. + /// + Task> GetAllForSnapshotAsync( + string tenantId, + string snapshotId, + EffectiveDecisionFilter? filter = null, + CancellationToken cancellationToken = default); + + /// + /// Gets a summary of effective decisions for a snapshot. + /// + Task GetSummaryAsync( + string tenantId, + string snapshotId, + CancellationToken cancellationToken = default); + + /// + /// Invalidates a specific entry. + /// + Task InvalidateAsync( + string tenantId, + string snapshotId, + string assetId, + CancellationToken cancellationToken = default); + + /// + /// Invalidates all entries for a snapshot. + /// + Task InvalidateSnapshotAsync( + string tenantId, + string snapshotId, + CancellationToken cancellationToken = default); + + /// + /// Invalidates all entries for a tenant. + /// + Task InvalidateTenantAsync( + string tenantId, + CancellationToken cancellationToken = default); + + /// + /// Gets the current map version for a snapshot. + /// + Task GetVersionAsync( + string tenantId, + string snapshotId, + CancellationToken cancellationToken = default); + + /// + /// Increments and returns the new map version for a snapshot. + /// + Task IncrementVersionAsync( + string tenantId, + string snapshotId, + CancellationToken cancellationToken = default); + + /// + /// Gets statistics about the effective decision map. + /// + Task GetStatsAsync( + string? tenantId = null, + CancellationToken cancellationToken = default); +} + +/// +/// Statistics about the effective decision map. +/// +public sealed record EffectiveDecisionMapStats +{ + /// + /// Total entries across all tenants/snapshots. + /// + public long TotalEntries { get; init; } + + /// + /// Total snapshots tracked. + /// + public long TotalSnapshots { get; init; } + + /// + /// Memory used in bytes (if available). + /// + public long? MemoryUsedBytes { get; init; } + + /// + /// Entries expiring in the next hour. + /// + public long ExpiringWithinHour { get; init; } + + /// + /// Last eviction timestamp. + /// + public DateTimeOffset? LastEvictionAt { get; init; } + + /// + /// Count of entries evicted in last eviction run. + /// + public long LastEvictionCount { get; init; } +} diff --git a/src/Policy/StellaOps.Policy.Engine/EffectiveDecisionMap/RedisEffectiveDecisionMap.cs b/src/Policy/StellaOps.Policy.Engine/EffectiveDecisionMap/RedisEffectiveDecisionMap.cs new file mode 100644 index 000000000..3e87a5ccb --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/EffectiveDecisionMap/RedisEffectiveDecisionMap.cs @@ -0,0 +1,501 @@ +using System.Text.Json; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Policy.Engine.Options; +using StellaOps.Policy.Engine.Telemetry; +using StackExchange.Redis; + +namespace StellaOps.Policy.Engine.EffectiveDecisionMap; + +/// +/// Redis-backed effective decision map with versioning and TTL-based eviction. +/// Key structure: +/// - Entry: stellaops:edm:{tenant}:{snapshot}:e:{asset} -> JSON entry +/// - Version: stellaops:edm:{tenant}:{snapshot}:v -> integer version +/// - Index: stellaops:edm:{tenant}:{snapshot}:idx -> sorted set of assets by evaluated_at +/// +internal sealed class RedisEffectiveDecisionMap : IEffectiveDecisionMap +{ + private readonly IConnectionMultiplexer _redis; + private readonly ILogger _logger; + private readonly EffectiveDecisionMapOptions _options; + private readonly TimeProvider _timeProvider; + + private const string KeyPrefix = "stellaops:edm"; + private static readonly JsonSerializerOptions JsonOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + WriteIndented = false, + }; + + public RedisEffectiveDecisionMap( + IConnectionMultiplexer redis, + ILogger logger, + IOptions options, + TimeProvider timeProvider) + { + _redis = redis ?? throw new ArgumentNullException(nameof(redis)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _options = options?.Value.EffectiveDecisionMap ?? new EffectiveDecisionMapOptions(); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + } + + public async Task SetAsync( + string tenantId, + string snapshotId, + EffectiveDecisionEntry entry, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(entry); + + var db = _redis.GetDatabase(); + var entryKey = GetEntryKey(tenantId, snapshotId, entry.AssetId); + var indexKey = GetIndexKey(tenantId, snapshotId); + + var json = JsonSerializer.Serialize(entry, JsonOptions); + var ttl = entry.ExpiresAt - _timeProvider.GetUtcNow(); + if (ttl <= TimeSpan.Zero) + { + ttl = TimeSpan.FromMinutes(_options.DefaultTtlMinutes); + } + + var tasks = new List + { + db.StringSetAsync(entryKey, json, ttl), + db.SortedSetAddAsync(indexKey, entry.AssetId, entry.EvaluatedAt.ToUnixTimeMilliseconds()), + db.KeyExpireAsync(indexKey, ttl + TimeSpan.FromMinutes(5)), // Index lives slightly longer + }; + + await Task.WhenAll(tasks).ConfigureAwait(false); + + PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(1, + new KeyValuePair("operation", "set"), + new KeyValuePair("tenant_id", tenantId)); + } + + public async Task SetBatchAsync( + string tenantId, + string snapshotId, + IEnumerable entries, + CancellationToken cancellationToken = default) + { + var db = _redis.GetDatabase(); + var batch = db.CreateBatch(); + var indexKey = GetIndexKey(tenantId, snapshotId); + var now = _timeProvider.GetUtcNow(); + var count = 0; + + var sortedSetEntries = new List(); + + foreach (var entry in entries) + { + var entryKey = GetEntryKey(tenantId, snapshotId, entry.AssetId); + var json = JsonSerializer.Serialize(entry, JsonOptions); + var ttl = entry.ExpiresAt - now; + if (ttl <= TimeSpan.Zero) + { + ttl = TimeSpan.FromMinutes(_options.DefaultTtlMinutes); + } + + _ = batch.StringSetAsync(entryKey, json, ttl); + sortedSetEntries.Add(new SortedSetEntry(entry.AssetId, entry.EvaluatedAt.ToUnixTimeMilliseconds())); + count++; + } + + if (sortedSetEntries.Count > 0) + { + _ = batch.SortedSetAddAsync(indexKey, sortedSetEntries.ToArray()); + _ = batch.KeyExpireAsync(indexKey, TimeSpan.FromMinutes(_options.DefaultTtlMinutes + 5)); + } + + batch.Execute(); + await Task.CompletedTask; // Batch operations are synchronous + + // Increment version after batch write + await IncrementVersionAsync(tenantId, snapshotId, cancellationToken).ConfigureAwait(false); + + PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(count, + new KeyValuePair("operation", "set_batch"), + new KeyValuePair("tenant_id", tenantId)); + + _logger.LogDebug("Set {Count} effective decisions for snapshot {SnapshotId}", count, snapshotId); + } + + public async Task GetAsync( + string tenantId, + string snapshotId, + string assetId, + CancellationToken cancellationToken = default) + { + var db = _redis.GetDatabase(); + var entryKey = GetEntryKey(tenantId, snapshotId, assetId); + + var json = await db.StringGetAsync(entryKey).ConfigureAwait(false); + + PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(1, + new KeyValuePair("operation", "get"), + new KeyValuePair("tenant_id", tenantId), + new KeyValuePair("cache_hit", json.HasValue)); + + if (!json.HasValue) + { + return null; + } + + return JsonSerializer.Deserialize((string)json!, JsonOptions); + } + + public async Task GetBatchAsync( + string tenantId, + string snapshotId, + IReadOnlyList assetIds, + CancellationToken cancellationToken = default) + { + var db = _redis.GetDatabase(); + var keys = assetIds.Select(id => (RedisKey)GetEntryKey(tenantId, snapshotId, id)).ToArray(); + + var values = await db.StringGetAsync(keys).ConfigureAwait(false); + + var entries = new Dictionary(); + var notFound = new List(); + + for (int i = 0; i < assetIds.Count; i++) + { + if (values[i].HasValue) + { + var entry = JsonSerializer.Deserialize((string)values[i]!, JsonOptions); + if (entry != null) + { + entries[assetIds[i]] = entry; + } + } + else + { + notFound.Add(assetIds[i]); + } + } + + var version = await GetVersionAsync(tenantId, snapshotId, cancellationToken).ConfigureAwait(false); + + PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(assetIds.Count, + new KeyValuePair("operation", "get_batch"), + new KeyValuePair("tenant_id", tenantId)); + + return new EffectiveDecisionQueryResult + { + Entries = entries, + NotFound = notFound, + MapVersion = version, + FromCache = true, + }; + } + + public async Task> GetAllForSnapshotAsync( + string tenantId, + string snapshotId, + EffectiveDecisionFilter? filter = null, + CancellationToken cancellationToken = default) + { + var db = _redis.GetDatabase(); + var indexKey = GetIndexKey(tenantId, snapshotId); + + // Get all asset IDs from the index + var assetIds = await db.SortedSetRangeByRankAsync(indexKey, 0, -1, Order.Descending) + .ConfigureAwait(false); + + if (assetIds.Length == 0) + { + return Array.Empty(); + } + + // Get all entries + var keys = assetIds.Select(id => (RedisKey)GetEntryKey(tenantId, snapshotId, id!)).ToArray(); + var values = await db.StringGetAsync(keys).ConfigureAwait(false); + + var entries = new List(); + + foreach (var value in values) + { + if (!value.HasValue) continue; + + var entry = JsonSerializer.Deserialize((string)value!, JsonOptions); + if (entry is null) continue; + + // Apply filters + if (filter != null) + { + if (filter.Statuses?.Count > 0 && + !filter.Statuses.Contains(entry.Status, StringComparer.OrdinalIgnoreCase)) + { + continue; + } + + if (filter.Severities?.Count > 0 && + (entry.Severity is null || !filter.Severities.Contains(entry.Severity, StringComparer.OrdinalIgnoreCase))) + { + continue; + } + + if (filter.HasException == true && entry.ExceptionId is null) + { + continue; + } + + if (filter.HasException == false && entry.ExceptionId is not null) + { + continue; + } + + if (filter.MinAdvisoryCount.HasValue && entry.AdvisoryCount < filter.MinAdvisoryCount) + { + continue; + } + + if (filter.MinHighSeverityCount.HasValue && entry.HighSeverityCount < filter.MinHighSeverityCount) + { + continue; + } + } + + entries.Add(entry); + + // Apply limit + if (filter?.Limit > 0 && entries.Count >= filter.Limit + (filter?.Offset ?? 0)) + { + break; + } + } + + // Apply offset + if (filter?.Offset > 0) + { + entries = entries.Skip(filter.Offset).ToList(); + } + + // Apply final limit + if (filter?.Limit > 0) + { + entries = entries.Take(filter.Limit).ToList(); + } + + PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(1, + new KeyValuePair("operation", "get_all"), + new KeyValuePair("tenant_id", tenantId)); + + return entries; + } + + public async Task GetSummaryAsync( + string tenantId, + string snapshotId, + CancellationToken cancellationToken = default) + { + var entries = await GetAllForSnapshotAsync(tenantId, snapshotId, null, cancellationToken) + .ConfigureAwait(false); + + var statusCounts = entries + .GroupBy(e => e.Status, StringComparer.OrdinalIgnoreCase) + .ToDictionary(g => g.Key, g => g.Count(), StringComparer.OrdinalIgnoreCase); + + var severityCounts = entries + .Where(e => e.Severity is not null) + .GroupBy(e => e.Severity!, StringComparer.OrdinalIgnoreCase) + .ToDictionary(g => g.Key, g => g.Count(), StringComparer.OrdinalIgnoreCase); + + var version = await GetVersionAsync(tenantId, snapshotId, cancellationToken).ConfigureAwait(false); + + return new EffectiveDecisionSummary + { + SnapshotId = snapshotId, + TotalAssets = entries.Count, + StatusCounts = statusCounts, + SeverityCounts = severityCounts, + ExceptionCount = entries.Count(e => e.ExceptionId is not null), + MapVersion = version, + ComputedAt = _timeProvider.GetUtcNow(), + }; + } + + public async Task InvalidateAsync( + string tenantId, + string snapshotId, + string assetId, + CancellationToken cancellationToken = default) + { + var db = _redis.GetDatabase(); + var entryKey = GetEntryKey(tenantId, snapshotId, assetId); + var indexKey = GetIndexKey(tenantId, snapshotId); + + await Task.WhenAll( + db.KeyDeleteAsync(entryKey), + db.SortedSetRemoveAsync(indexKey, assetId) + ).ConfigureAwait(false); + + await IncrementVersionAsync(tenantId, snapshotId, cancellationToken).ConfigureAwait(false); + + PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(1, + new KeyValuePair("operation", "invalidate"), + new KeyValuePair("tenant_id", tenantId)); + } + + public async Task InvalidateSnapshotAsync( + string tenantId, + string snapshotId, + CancellationToken cancellationToken = default) + { + var db = _redis.GetDatabase(); + var indexKey = GetIndexKey(tenantId, snapshotId); + + // Get all asset IDs from the index + var assetIds = await db.SortedSetRangeByRankAsync(indexKey).ConfigureAwait(false); + + if (assetIds.Length > 0) + { + var keys = assetIds + .Select(id => (RedisKey)GetEntryKey(tenantId, snapshotId, id!)) + .Append(indexKey) + .Append(GetVersionKey(tenantId, snapshotId)) + .ToArray(); + + await db.KeyDeleteAsync(keys).ConfigureAwait(false); + } + + PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(assetIds.Length, + new KeyValuePair("operation", "invalidate_snapshot"), + new KeyValuePair("tenant_id", tenantId)); + + _logger.LogInformation("Invalidated {Count} entries for snapshot {SnapshotId}", assetIds.Length, snapshotId); + } + + public async Task InvalidateTenantAsync( + string tenantId, + CancellationToken cancellationToken = default) + { + var server = _redis.GetServer(_redis.GetEndPoints().First()); + var pattern = $"{KeyPrefix}:{tenantId}:*"; + var keys = server.Keys(pattern: pattern).ToArray(); + + if (keys.Length > 0) + { + var db = _redis.GetDatabase(); + await db.KeyDeleteAsync(keys).ConfigureAwait(false); + } + + PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(keys.Length, + new KeyValuePair("operation", "invalidate_tenant"), + new KeyValuePair("tenant_id", tenantId)); + + _logger.LogInformation("Invalidated {Count} keys for tenant {TenantId}", keys.Length, tenantId); + } + + public async Task GetVersionAsync( + string tenantId, + string snapshotId, + CancellationToken cancellationToken = default) + { + var db = _redis.GetDatabase(); + var versionKey = GetVersionKey(tenantId, snapshotId); + + var version = await db.StringGetAsync(versionKey).ConfigureAwait(false); + return version.HasValue ? (long)version : 0; + } + + public async Task IncrementVersionAsync( + string tenantId, + string snapshotId, + CancellationToken cancellationToken = default) + { + var db = _redis.GetDatabase(); + var versionKey = GetVersionKey(tenantId, snapshotId); + + var newVersion = await db.StringIncrementAsync(versionKey).ConfigureAwait(false); + + // Set TTL on version key if not already set + await db.KeyExpireAsync(versionKey, TimeSpan.FromMinutes(_options.DefaultTtlMinutes + 10), ExpireWhen.HasNoExpiry) + .ConfigureAwait(false); + + return newVersion; + } + + public async Task GetStatsAsync( + string? tenantId = null, + CancellationToken cancellationToken = default) + { + var server = _redis.GetServer(_redis.GetEndPoints().First()); + var pattern = tenantId != null + ? $"{KeyPrefix}:{tenantId}:*:e:*" + : $"{KeyPrefix}:*:e:*"; + + var entryCount = server.Keys(pattern: pattern).Count(); + + var snapshotPattern = tenantId != null + ? $"{KeyPrefix}:{tenantId}:*:idx" + : $"{KeyPrefix}:*:idx"; + + var snapshotCount = server.Keys(pattern: snapshotPattern).Count(); + + long? memoryUsed = null; + try + { + var info = server.Info("memory"); + var memorySection = info.FirstOrDefault(s => s.Key == "Memory"); + if (memorySection is not null) + { + var usedMemory = memorySection.FirstOrDefault(p => p.Key == "used_memory"); + if (usedMemory.Key is not null && long.TryParse(usedMemory.Value, out var bytes)) + { + memoryUsed = bytes; + } + } + } + catch + { + // Ignore - memory info not available + } + + return new EffectiveDecisionMapStats + { + TotalEntries = entryCount, + TotalSnapshots = snapshotCount, + MemoryUsedBytes = memoryUsed, + ExpiringWithinHour = 0, // Would require scanning TTLs + LastEvictionAt = null, + LastEvictionCount = 0, + }; + } + + private static string GetEntryKey(string tenantId, string snapshotId, string assetId) => + $"{KeyPrefix}:{tenantId}:{snapshotId}:e:{assetId}"; + + private static string GetIndexKey(string tenantId, string snapshotId) => + $"{KeyPrefix}:{tenantId}:{snapshotId}:idx"; + + private static string GetVersionKey(string tenantId, string snapshotId) => + $"{KeyPrefix}:{tenantId}:{snapshotId}:v"; +} + +/// +/// Configuration options for effective decision map. +/// +public sealed class EffectiveDecisionMapOptions +{ + /// + /// Default TTL for entries in minutes. + /// + public int DefaultTtlMinutes { get; set; } = 60; + + /// + /// Maximum entries per snapshot. + /// + public int MaxEntriesPerSnapshot { get; set; } = 100000; + + /// + /// Whether to enable automatic eviction of expired entries. + /// + public bool EnableAutoEviction { get; set; } = true; + + /// + /// Eviction check interval in minutes. + /// + public int EvictionIntervalMinutes { get; set; } = 5; +} diff --git a/src/Policy/StellaOps.Policy.Engine/Events/PolicyEffectiveEventModels.cs b/src/Policy/StellaOps.Policy.Engine/Events/PolicyEffectiveEventModels.cs new file mode 100644 index 000000000..1e893f0cf --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Events/PolicyEffectiveEventModels.cs @@ -0,0 +1,184 @@ +using System.Collections.Immutable; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json.Serialization; + +namespace StellaOps.Policy.Engine.Events; + +/// +/// Type of policy effective event. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum PolicyEffectiveEventType +{ + /// Policy decision changed for a subject. + [JsonPropertyName("policy.effective.updated")] + EffectiveUpdated, + + /// Policy decision added for new subject. + [JsonPropertyName("policy.effective.added")] + EffectiveAdded, + + /// Policy decision removed (subject no longer affected). + [JsonPropertyName("policy.effective.removed")] + EffectiveRemoved, + + /// Batch re-evaluation completed. + [JsonPropertyName("policy.effective.batch_completed")] + BatchCompleted +} + +/// +/// Base class for policy effective events. +/// +public abstract record PolicyEffectiveEvent( + [property: JsonPropertyName("event_id")] string EventId, + [property: JsonPropertyName("event_type")] PolicyEffectiveEventType EventType, + [property: JsonPropertyName("tenant_id")] string TenantId, + [property: JsonPropertyName("timestamp")] DateTimeOffset Timestamp, + [property: JsonPropertyName("correlation_id")] string? CorrelationId); + +/// +/// Event emitted when a policy decision is updated for a subject. +/// +public sealed record PolicyEffectiveUpdatedEvent( + string EventId, + string TenantId, + DateTimeOffset Timestamp, + string? CorrelationId, + [property: JsonPropertyName("pack_id")] string PackId, + [property: JsonPropertyName("pack_version")] int PackVersion, + [property: JsonPropertyName("subject_purl")] string SubjectPurl, + [property: JsonPropertyName("advisory_id")] string AdvisoryId, + [property: JsonPropertyName("trigger_type")] string TriggerType, + [property: JsonPropertyName("diff")] PolicyDecisionDiff Diff) + : PolicyEffectiveEvent(EventId, PolicyEffectiveEventType.EffectiveUpdated, TenantId, Timestamp, CorrelationId); + +/// +/// Diff metadata for policy decision changes. +/// +public sealed record PolicyDecisionDiff( + [property: JsonPropertyName("old_status")] string? OldStatus, + [property: JsonPropertyName("new_status")] string NewStatus, + [property: JsonPropertyName("old_severity")] string? OldSeverity, + [property: JsonPropertyName("new_severity")] string? NewSeverity, + [property: JsonPropertyName("old_rule")] string? OldRule, + [property: JsonPropertyName("new_rule")] string? NewRule, + [property: JsonPropertyName("old_priority")] int? OldPriority, + [property: JsonPropertyName("new_priority")] int? NewPriority, + [property: JsonPropertyName("status_changed")] bool StatusChanged, + [property: JsonPropertyName("severity_changed")] bool SeverityChanged, + [property: JsonPropertyName("rule_changed")] bool RuleChanged, + [property: JsonPropertyName("annotations_added")] ImmutableArray AnnotationsAdded, + [property: JsonPropertyName("annotations_removed")] ImmutableArray AnnotationsRemoved) +{ + /// + /// Creates a diff between two policy decisions. + /// + public static PolicyDecisionDiff Create( + string? oldStatus, string newStatus, + string? oldSeverity, string? newSeverity, + string? oldRule, string? newRule, + int? oldPriority, int? newPriority, + ImmutableDictionary? oldAnnotations, + ImmutableDictionary? newAnnotations) + { + var oldKeys = oldAnnotations?.Keys ?? Enumerable.Empty(); + var newKeys = newAnnotations?.Keys ?? Enumerable.Empty(); + + var annotationsAdded = newKeys + .Where(k => oldAnnotations?.ContainsKey(k) != true) + .OrderBy(k => k) + .ToImmutableArray(); + + var annotationsRemoved = oldKeys + .Where(k => newAnnotations?.ContainsKey(k) != true) + .OrderBy(k => k) + .ToImmutableArray(); + + return new PolicyDecisionDiff( + OldStatus: oldStatus, + NewStatus: newStatus, + OldSeverity: oldSeverity, + NewSeverity: newSeverity, + OldRule: oldRule, + NewRule: newRule, + OldPriority: oldPriority, + NewPriority: newPriority, + StatusChanged: !string.Equals(oldStatus, newStatus, StringComparison.Ordinal), + SeverityChanged: !string.Equals(oldSeverity, newSeverity, StringComparison.Ordinal), + RuleChanged: !string.Equals(oldRule, newRule, StringComparison.Ordinal), + AnnotationsAdded: annotationsAdded, + AnnotationsRemoved: annotationsRemoved); + } +} + +/// +/// Event emitted when batch re-evaluation completes. +/// +public sealed record PolicyBatchCompletedEvent( + string EventId, + string TenantId, + DateTimeOffset Timestamp, + string? CorrelationId, + [property: JsonPropertyName("batch_id")] string BatchId, + [property: JsonPropertyName("trigger_type")] string TriggerType, + [property: JsonPropertyName("subjects_evaluated")] int SubjectsEvaluated, + [property: JsonPropertyName("decisions_changed")] int DecisionsChanged, + [property: JsonPropertyName("duration_ms")] long DurationMs, + [property: JsonPropertyName("summary")] PolicyBatchSummary Summary) + : PolicyEffectiveEvent(EventId, PolicyEffectiveEventType.BatchCompleted, TenantId, Timestamp, CorrelationId); + +/// +/// Summary of changes in a batch re-evaluation. +/// +public sealed record PolicyBatchSummary( + [property: JsonPropertyName("status_upgrades")] int StatusUpgrades, + [property: JsonPropertyName("status_downgrades")] int StatusDowngrades, + [property: JsonPropertyName("new_blocks")] int NewBlocks, + [property: JsonPropertyName("blocks_removed")] int BlocksRemoved, + [property: JsonPropertyName("affected_advisories")] ImmutableArray AffectedAdvisories, + [property: JsonPropertyName("affected_purls")] ImmutableArray AffectedPurls); + +/// +/// Request to schedule a re-evaluation job. +/// +public sealed record ReEvaluationJobRequest( + string JobId, + string TenantId, + string PackId, + int PackVersion, + string TriggerType, + string? CorrelationId, + DateTimeOffset CreatedAt, + PolicyChangePriority Priority, + ImmutableArray AdvisoryIds, + ImmutableArray SubjectPurls, + ImmutableArray SbomIds, + ImmutableDictionary Metadata) +{ + /// + /// Creates a deterministic job ID. + /// + public static string CreateJobId( + string tenantId, + string packId, + int packVersion, + string triggerType, + DateTimeOffset createdAt) + { + var seed = $"{tenantId}|{packId}|{packVersion}|{triggerType}|{createdAt:O}"; + var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(seed)); + return $"rej-{Convert.ToHexStringLower(bytes)[..16]}"; + } +} + +/// +/// Policy change priority from IncrementalOrchestrator namespace. +/// +public enum PolicyChangePriority +{ + Normal = 0, + High = 1, + Emergency = 2 +} diff --git a/src/Policy/StellaOps.Policy.Engine/Events/PolicyEventProcessor.cs b/src/Policy/StellaOps.Policy.Engine/Events/PolicyEventProcessor.cs new file mode 100644 index 000000000..58bcb251f --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Events/PolicyEventProcessor.cs @@ -0,0 +1,454 @@ +using System.Collections.Concurrent; +using System.Collections.Immutable; +using System.Diagnostics; +using Microsoft.Extensions.Logging; +using StellaOps.Policy.Engine.IncrementalOrchestrator; +using StellaOps.Policy.Engine.Services; +using StellaOps.Policy.Engine.Telemetry; + +namespace StellaOps.Policy.Engine.Events; + +/// +/// Interface for publishing policy effective events. +/// +public interface IPolicyEffectiveEventPublisher +{ + /// + /// Publishes a policy effective updated event. + /// + Task PublishEffectiveUpdatedAsync(PolicyEffectiveUpdatedEvent evt, CancellationToken cancellationToken = default); + + /// + /// Publishes a batch completed event. + /// + Task PublishBatchCompletedAsync(PolicyBatchCompletedEvent evt, CancellationToken cancellationToken = default); + + /// + /// Registers a handler for effective events. + /// + void RegisterHandler(Func handler); +} + +/// +/// Interface for scheduling re-evaluation jobs. +/// +public interface IReEvaluationJobScheduler +{ + /// + /// Schedules a re-evaluation job. + /// + Task ScheduleAsync(ReEvaluationJobRequest request, CancellationToken cancellationToken = default); + + /// + /// Gets pending job count. + /// + int GetPendingJobCount(); + + /// + /// Gets job by ID. + /// + ReEvaluationJobRequest? GetJob(string jobId); +} + +/// +/// Processes policy change events, schedules re-evaluations, and emits effective events. +/// +public sealed class PolicyEventProcessor : IPolicyEffectiveEventPublisher, IReEvaluationJobScheduler +{ + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + private readonly ConcurrentQueue _jobQueue; + private readonly ConcurrentDictionary _jobIndex; + private readonly ConcurrentQueue _eventStream; + private readonly List> _eventHandlers; + private readonly object _handlersLock = new(); + + private const int MaxQueueSize = 10000; + private const int MaxEventStreamSize = 50000; + + public PolicyEventProcessor( + ILogger logger, + TimeProvider timeProvider) + { + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _jobQueue = new ConcurrentQueue(); + _jobIndex = new ConcurrentDictionary(StringComparer.OrdinalIgnoreCase); + _eventStream = new ConcurrentQueue(); + _eventHandlers = new List>(); + } + + /// + /// Processes a policy change event and schedules re-evaluation if needed. + /// + public async Task ProcessChangeEventAsync( + PolicyChangeEvent changeEvent, + string packId, + int packVersion, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(changeEvent); + + using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity("policy_event.process", ActivityKind.Internal); + activity?.SetTag("event.id", changeEvent.EventId); + activity?.SetTag("event.type", changeEvent.ChangeType.ToString()); + activity?.SetTag("tenant.id", changeEvent.TenantId); + + _logger.LogDebug( + "Processing policy change event {EventId}: {ChangeType} for tenant {TenantId}", + changeEvent.EventId, changeEvent.ChangeType, changeEvent.TenantId); + + // Skip if event targets no subjects + if (changeEvent.AffectedPurls.IsDefaultOrEmpty && + changeEvent.AffectedSbomIds.IsDefaultOrEmpty && + changeEvent.AffectedProductKeys.IsDefaultOrEmpty) + { + _logger.LogDebug("Skipping event {EventId}: no affected subjects", changeEvent.EventId); + return null; + } + + // Create re-evaluation job request + var jobId = ReEvaluationJobRequest.CreateJobId( + changeEvent.TenantId, + packId, + packVersion, + changeEvent.ChangeType.ToString(), + _timeProvider.GetUtcNow()); + + var jobRequest = new ReEvaluationJobRequest( + JobId: jobId, + TenantId: changeEvent.TenantId, + PackId: packId, + PackVersion: packVersion, + TriggerType: changeEvent.ChangeType.ToString(), + CorrelationId: changeEvent.CorrelationId, + CreatedAt: _timeProvider.GetUtcNow(), + Priority: MapPriority(changeEvent.Priority), + AdvisoryIds: changeEvent.AdvisoryId is not null + ? ImmutableArray.Create(changeEvent.AdvisoryId) + : ImmutableArray.Empty, + SubjectPurls: changeEvent.AffectedPurls, + SbomIds: changeEvent.AffectedSbomIds, + Metadata: changeEvent.Metadata); + + // Schedule the job + var scheduledId = await ScheduleAsync(jobRequest, cancellationToken).ConfigureAwait(false); + + activity?.SetTag("job.id", scheduledId); + PolicyEngineTelemetry.PolicyEventsProcessed.Add(1); + + return scheduledId; + } + + /// + /// Processes results from a re-evaluation and emits effective events. + /// + public async Task ProcessReEvaluationResultsAsync( + string jobId, + string tenantId, + string packId, + int packVersion, + string triggerType, + string? correlationId, + IReadOnlyList changes, + long durationMs, + CancellationToken cancellationToken = default) + { + using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity("policy_event.emit_results", ActivityKind.Internal); + activity?.SetTag("job.id", jobId); + activity?.SetTag("changes.count", changes.Count); + + var now = _timeProvider.GetUtcNow(); + var changedCount = 0; + + // Emit individual effective events for each changed decision + foreach (var change in changes) + { + if (!change.HasChanged) + { + continue; + } + + changedCount++; + + var diff = PolicyDecisionDiff.Create( + change.OldStatus, change.NewStatus, + change.OldSeverity, change.NewSeverity, + change.OldRule, change.NewRule, + change.OldPriority, change.NewPriority, + change.OldAnnotations, change.NewAnnotations); + + var evt = new PolicyEffectiveUpdatedEvent( + EventId: GenerateEventId(), + TenantId: tenantId, + Timestamp: now, + CorrelationId: correlationId, + PackId: packId, + PackVersion: packVersion, + SubjectPurl: change.SubjectPurl, + AdvisoryId: change.AdvisoryId, + TriggerType: triggerType, + Diff: diff); + + await PublishEffectiveUpdatedAsync(evt, cancellationToken).ConfigureAwait(false); + } + + // Emit batch completed event + var summary = ComputeBatchSummary(changes); + var batchEvent = new PolicyBatchCompletedEvent( + EventId: GenerateEventId(), + TenantId: tenantId, + Timestamp: now, + CorrelationId: correlationId, + BatchId: jobId, + TriggerType: triggerType, + SubjectsEvaluated: changes.Count, + DecisionsChanged: changedCount, + DurationMs: durationMs, + Summary: summary); + + await PublishBatchCompletedAsync(batchEvent, cancellationToken).ConfigureAwait(false); + + activity?.SetTag("decisions.changed", changedCount); + _logger.LogInformation( + "Re-evaluation {JobId} completed: {Evaluated} subjects, {Changed} decisions changed in {Duration}ms", + jobId, changes.Count, changedCount, durationMs); + } + + /// + public async Task PublishEffectiveUpdatedAsync( + PolicyEffectiveUpdatedEvent evt, + CancellationToken cancellationToken = default) + { + await PublishEventAsync(evt).ConfigureAwait(false); + } + + /// + public async Task PublishBatchCompletedAsync( + PolicyBatchCompletedEvent evt, + CancellationToken cancellationToken = default) + { + await PublishEventAsync(evt).ConfigureAwait(false); + } + + /// + public void RegisterHandler(Func handler) + { + ArgumentNullException.ThrowIfNull(handler); + + lock (_handlersLock) + { + _eventHandlers.Add(handler); + } + } + + /// + public Task ScheduleAsync(ReEvaluationJobRequest request, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + // Check for duplicate + if (_jobIndex.ContainsKey(request.JobId)) + { + _logger.LogDebug("Duplicate job {JobId} ignored", request.JobId); + return Task.FromResult(request.JobId); + } + + // Enforce queue limit + if (_jobQueue.Count >= MaxQueueSize) + { + _logger.LogWarning("Job queue full, rejecting job {JobId}", request.JobId); + throw new InvalidOperationException("Re-evaluation job queue is full"); + } + + _jobIndex[request.JobId] = request; + _jobQueue.Enqueue(request); + + PolicyEngineTelemetry.ReEvaluationJobsScheduled.Add(1); + _logger.LogDebug( + "Scheduled re-evaluation job {JobId}: {TriggerType} for {TenantId}/{PackId}@{Version}", + request.JobId, request.TriggerType, request.TenantId, request.PackId, request.PackVersion); + + return Task.FromResult(request.JobId); + } + + /// + public int GetPendingJobCount() => _jobQueue.Count; + + /// + public ReEvaluationJobRequest? GetJob(string jobId) + { + _jobIndex.TryGetValue(jobId, out var job); + return job; + } + + /// + /// Dequeues the next job for processing. + /// + public ReEvaluationJobRequest? DequeueJob() + { + if (_jobQueue.TryDequeue(out var job)) + { + _jobIndex.TryRemove(job.JobId, out _); + return job; + } + + return null; + } + + /// + /// Gets recent effective events. + /// + public IReadOnlyList GetRecentEvents(int limit = 100) + { + return _eventStream + .ToArray() + .OrderByDescending(e => e.Timestamp) + .Take(limit) + .ToList() + .AsReadOnly(); + } + + private async Task PublishEventAsync(PolicyEffectiveEvent evt) + { + // Add to stream + _eventStream.Enqueue(evt); + + // Trim if too large + while (_eventStream.Count > MaxEventStreamSize) + { + _eventStream.TryDequeue(out _); + } + + // Invoke handlers + List> handlers; + lock (_handlersLock) + { + handlers = _eventHandlers.ToList(); + } + + foreach (var handler in handlers) + { + try + { + await handler(evt).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error invoking event handler for {EventType}", evt.EventType); + } + } + + PolicyEngineTelemetry.PolicyEffectiveEventsPublished.Add(1); + } + + private static PolicyBatchSummary ComputeBatchSummary(IReadOnlyList changes) + { + var statusUpgrades = 0; + var statusDowngrades = 0; + var newBlocks = 0; + var blocksRemoved = 0; + var advisories = new HashSet(StringComparer.OrdinalIgnoreCase); + var purls = new HashSet(StringComparer.Ordinal); + + foreach (var change in changes) + { + advisories.Add(change.AdvisoryId); + purls.Add(change.SubjectPurl); + + if (!change.HasChanged) + { + continue; + } + + var severityChange = CompareSeverity(change.OldStatus, change.NewStatus); + if (severityChange > 0) + { + statusUpgrades++; + } + else if (severityChange < 0) + { + statusDowngrades++; + } + + if (IsBlockStatus(change.NewStatus) && !IsBlockStatus(change.OldStatus)) + { + newBlocks++; + } + else if (IsBlockStatus(change.OldStatus) && !IsBlockStatus(change.NewStatus)) + { + blocksRemoved++; + } + } + + return new PolicyBatchSummary( + StatusUpgrades: statusUpgrades, + StatusDowngrades: statusDowngrades, + NewBlocks: newBlocks, + BlocksRemoved: blocksRemoved, + AffectedAdvisories: advisories.OrderBy(a => a).ToImmutableArray(), + AffectedPurls: purls.OrderBy(p => p).Take(100).ToImmutableArray()); + } + + private static int CompareSeverity(string? oldStatus, string? newStatus) + { + var oldSeverity = GetStatusSeverityLevel(oldStatus); + var newSeverity = GetStatusSeverityLevel(newStatus); + return newSeverity.CompareTo(oldSeverity); + } + + private static int GetStatusSeverityLevel(string? status) => status?.ToLowerInvariant() switch + { + "blocked" => 4, + "deny" => 4, + "warn" => 3, + "affected" => 2, + "allow" => 1, + "ignored" => 0, + _ => 1 + }; + + private static bool IsBlockStatus(string? status) => + string.Equals(status, "blocked", StringComparison.OrdinalIgnoreCase) || + string.Equals(status, "deny", StringComparison.OrdinalIgnoreCase); + + private static Events.PolicyChangePriority MapPriority(IncrementalOrchestrator.PolicyChangePriority priority) => + priority switch + { + IncrementalOrchestrator.PolicyChangePriority.Emergency => Events.PolicyChangePriority.Emergency, + IncrementalOrchestrator.PolicyChangePriority.High => Events.PolicyChangePriority.High, + _ => Events.PolicyChangePriority.Normal + }; + + private static string GenerateEventId() + { + var guid = Guid.NewGuid().ToByteArray(); + return $"pee-{Convert.ToHexStringLower(guid)[..16]}"; + } +} + +/// +/// Represents a change in policy decision for a subject. +/// +public sealed record PolicyDecisionChange( + string SubjectPurl, + string AdvisoryId, + string? OldStatus, + string NewStatus, + string? OldSeverity, + string? NewSeverity, + string? OldRule, + string? NewRule, + int? OldPriority, + int? NewPriority, + ImmutableDictionary? OldAnnotations, + ImmutableDictionary? NewAnnotations) +{ + /// + /// Whether the decision has changed. + /// + public bool HasChanged => + !string.Equals(OldStatus, NewStatus, StringComparison.Ordinal) || + !string.Equals(OldSeverity, NewSeverity, StringComparison.Ordinal) || + !string.Equals(OldRule, NewRule, StringComparison.Ordinal); +} diff --git a/src/Policy/StellaOps.Policy.Engine/ExceptionCache/ExceptionCacheModels.cs b/src/Policy/StellaOps.Policy.Engine/ExceptionCache/ExceptionCacheModels.cs new file mode 100644 index 000000000..58f29a555 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/ExceptionCache/ExceptionCacheModels.cs @@ -0,0 +1,225 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.Policy.Engine.ExceptionCache; + +/// +/// Cached exception entry for fast lookup during policy evaluation. +/// +public sealed record ExceptionCacheEntry +{ + /// + /// Exception identifier. + /// + [JsonPropertyName("exception_id")] + public required string ExceptionId { get; init; } + + /// + /// Asset identifier this exception applies to. + /// + [JsonPropertyName("asset_id")] + public required string AssetId { get; init; } + + /// + /// Advisory ID covered (null if applies to all advisories for asset). + /// + [JsonPropertyName("advisory_id")] + public string? AdvisoryId { get; init; } + + /// + /// CVE ID covered (null if applies to all CVEs for asset). + /// + [JsonPropertyName("cve_id")] + public string? CveId { get; init; } + + /// + /// Decision override applied by this exception. + /// + [JsonPropertyName("decision_override")] + public required string DecisionOverride { get; init; } + + /// + /// Exception type: waiver, override, temporary, permanent. + /// + [JsonPropertyName("exception_type")] + public required string ExceptionType { get; init; } + + /// + /// Priority for conflict resolution (higher = more precedence). + /// + [JsonPropertyName("priority")] + public int Priority { get; init; } + + /// + /// When the exception becomes effective. + /// + [JsonPropertyName("effective_from")] + public DateTimeOffset EffectiveFrom { get; init; } + + /// + /// When the exception expires (null = no expiration). + /// + [JsonPropertyName("expires_at")] + public DateTimeOffset? ExpiresAt { get; init; } + + /// + /// When this cache entry was created. + /// + [JsonPropertyName("cached_at")] + public DateTimeOffset CachedAt { get; init; } + + /// + /// Original exception name for display. + /// + [JsonPropertyName("exception_name")] + public string? ExceptionName { get; init; } +} + +/// +/// Result of querying exceptions for an asset. +/// +public sealed record ExceptionCacheQueryResult +{ + /// + /// Applicable exceptions for the asset, ordered by priority (highest first). + /// + public required ImmutableArray Entries { get; init; } + + /// + /// Whether the result came from cache. + /// + public bool FromCache { get; init; } + + /// + /// Cache version at time of query. + /// + public long CacheVersion { get; init; } + + /// + /// Time taken to query in milliseconds. + /// + public long QueryDurationMs { get; init; } +} + +/// +/// Summary of cached exceptions for a tenant. +/// +public sealed record ExceptionCacheSummary +{ + /// + /// Tenant identifier. + /// + public required string TenantId { get; init; } + + /// + /// Total cached exception entries. + /// + public int TotalEntries { get; init; } + + /// + /// Unique exceptions in cache. + /// + public int UniqueExceptions { get; init; } + + /// + /// Unique assets with exceptions. + /// + public int UniqueAssets { get; init; } + + /// + /// Counts by exception type. + /// + public required IReadOnlyDictionary ByType { get; init; } + + /// + /// Counts by decision override. + /// + public required IReadOnlyDictionary ByDecision { get; init; } + + /// + /// Entries expiring within the next hour. + /// + public int ExpiringWithinHour { get; init; } + + /// + /// Cache version. + /// + public long CacheVersion { get; init; } + + /// + /// When summary was computed. + /// + public DateTimeOffset ComputedAt { get; init; } +} + +/// +/// Options for exception cache operations. +/// +public sealed record ExceptionCacheOptions +{ + /// + /// Default TTL for cache entries in minutes. + /// + public int DefaultTtlMinutes { get; set; } = 60; + + /// + /// Whether to enable automatic cache warming. + /// + public bool EnableAutoWarm { get; set; } = true; + + /// + /// Warm interval in minutes. + /// + public int WarmIntervalMinutes { get; set; } = 15; + + /// + /// Maximum entries per tenant. + /// + public int MaxEntriesPerTenant { get; set; } = 50000; + + /// + /// Whether to invalidate cache on exception events. + /// + public bool InvalidateOnEvents { get; set; } = true; +} + +/// +/// Statistics for the exception cache. +/// +public sealed record ExceptionCacheStats +{ + /// + /// Total entries in cache. + /// + public int TotalEntries { get; init; } + + /// + /// Total tenants with cached data. + /// + public int TotalTenants { get; init; } + + /// + /// Memory used by cache in bytes (if available). + /// + public long? MemoryUsedBytes { get; init; } + + /// + /// Cache hit count since last reset. + /// + public long HitCount { get; init; } + + /// + /// Cache miss count since last reset. + /// + public long MissCount { get; init; } + + /// + /// Last warm operation timestamp. + /// + public DateTimeOffset? LastWarmAt { get; init; } + + /// + /// Last invalidation timestamp. + /// + public DateTimeOffset? LastInvalidationAt { get; init; } +} diff --git a/src/Policy/StellaOps.Policy.Engine/ExceptionCache/IExceptionEffectiveCache.cs b/src/Policy/StellaOps.Policy.Engine/ExceptionCache/IExceptionEffectiveCache.cs new file mode 100644 index 000000000..79b9cdc56 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/ExceptionCache/IExceptionEffectiveCache.cs @@ -0,0 +1,156 @@ +using System.Collections.Immutable; + +namespace StellaOps.Policy.Engine.ExceptionCache; + +/// +/// Interface for caching effective exception decisions per asset. +/// Supports warm/invalidation logic reacting to exception events. +/// +internal interface IExceptionEffectiveCache +{ + /// + /// Gets applicable exceptions for an asset at a given time. + /// + Task GetForAssetAsync( + string tenantId, + string assetId, + string? advisoryId, + DateTimeOffset asOf, + CancellationToken cancellationToken = default); + + /// + /// Gets applicable exceptions for multiple assets. + /// + Task> GetBatchAsync( + string tenantId, + IReadOnlyList assetIds, + DateTimeOffset asOf, + CancellationToken cancellationToken = default); + + /// + /// Sets a cache entry. + /// + Task SetAsync( + string tenantId, + ExceptionCacheEntry entry, + CancellationToken cancellationToken = default); + + /// + /// Sets multiple cache entries in batch. + /// + Task SetBatchAsync( + string tenantId, + IEnumerable entries, + CancellationToken cancellationToken = default); + + /// + /// Invalidates cache entries for an exception. + /// Called when an exception is modified/revoked/expired. + /// + Task InvalidateExceptionAsync( + string tenantId, + string exceptionId, + CancellationToken cancellationToken = default); + + /// + /// Invalidates cache entries for an asset. + /// Called when asset exceptions need re-evaluation. + /// + Task InvalidateAssetAsync( + string tenantId, + string assetId, + CancellationToken cancellationToken = default); + + /// + /// Invalidates all cache entries for a tenant. + /// + Task InvalidateTenantAsync( + string tenantId, + CancellationToken cancellationToken = default); + + /// + /// Warms the cache for a tenant by loading active exceptions from the repository. + /// + Task WarmAsync( + string tenantId, + CancellationToken cancellationToken = default); + + /// + /// Gets cache summary for a tenant. + /// + Task GetSummaryAsync( + string tenantId, + CancellationToken cancellationToken = default); + + /// + /// Gets cache statistics. + /// + Task GetStatsAsync( + string? tenantId = null, + CancellationToken cancellationToken = default); + + /// + /// Gets the current cache version for a tenant. + /// + Task GetVersionAsync( + string tenantId, + CancellationToken cancellationToken = default); + + /// + /// Processes an exception event and updates cache accordingly. + /// + Task HandleExceptionEventAsync( + ExceptionEvent exceptionEvent, + CancellationToken cancellationToken = default); +} + +/// +/// Event representing a change to an exception. +/// +public sealed record ExceptionEvent +{ + /// + /// Event type: activated, expired, revoked, updated, created, deleted. + /// + public required string EventType { get; init; } + + /// + /// Tenant identifier. + /// + public required string TenantId { get; init; } + + /// + /// Exception identifier. + /// + public required string ExceptionId { get; init; } + + /// + /// Exception name. + /// + public string? ExceptionName { get; init; } + + /// + /// Exception type. + /// + public string? ExceptionType { get; init; } + + /// + /// Affected asset IDs (if known). + /// + public ImmutableArray AffectedAssetIds { get; init; } = ImmutableArray.Empty; + + /// + /// Affected advisory IDs (if known). + /// + public ImmutableArray AffectedAdvisoryIds { get; init; } = ImmutableArray.Empty; + + /// + /// When the event occurred. + /// + public DateTimeOffset OccurredAt { get; init; } + + /// + /// Correlation ID for tracing. + /// + public string? CorrelationId { get; init; } +} diff --git a/src/Policy/StellaOps.Policy.Engine/ExceptionCache/RedisExceptionEffectiveCache.cs b/src/Policy/StellaOps.Policy.Engine/ExceptionCache/RedisExceptionEffectiveCache.cs new file mode 100644 index 000000000..3c02f5246 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/ExceptionCache/RedisExceptionEffectiveCache.cs @@ -0,0 +1,725 @@ +using System.Collections.Immutable; +using System.Diagnostics; +using System.Text.Json; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Policy.Engine.Options; +using StellaOps.Policy.Engine.Storage.Mongo.Repositories; +using StellaOps.Policy.Engine.Telemetry; +using StackExchange.Redis; + +namespace StellaOps.Policy.Engine.ExceptionCache; + +/// +/// Redis-backed exception effective cache with warm/invalidation support. +/// Key structure: +/// - Entry by asset: stellaops:exc:{tenant}:a:{asset}:{advisory|all} -> JSON array of entries +/// - Entry by exception: stellaops:exc:{tenant}:e:{exceptionId} -> JSON entry +/// - Index by exception: stellaops:exc:{tenant}:idx:e:{exceptionId} -> set of asset keys +/// - Version: stellaops:exc:{tenant}:v -> integer version +/// - Stats: stellaops:exc:{tenant}:stats -> JSON stats +/// +internal sealed class RedisExceptionEffectiveCache : IExceptionEffectiveCache +{ + private readonly IConnectionMultiplexer _redis; + private readonly IExceptionRepository _repository; + private readonly ILogger _logger; + private readonly ExceptionCacheOptions _options; + private readonly TimeProvider _timeProvider; + + private const string KeyPrefix = "stellaops:exc"; + private static readonly JsonSerializerOptions JsonOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + WriteIndented = false, + }; + + public RedisExceptionEffectiveCache( + IConnectionMultiplexer redis, + IExceptionRepository repository, + ILogger logger, + IOptions options, + TimeProvider timeProvider) + { + _redis = redis ?? throw new ArgumentNullException(nameof(redis)); + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _options = options?.Value.ExceptionCache ?? new ExceptionCacheOptions(); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + } + + public async Task GetForAssetAsync( + string tenantId, + string assetId, + string? advisoryId, + DateTimeOffset asOf, + CancellationToken cancellationToken = default) + { + var sw = Stopwatch.StartNew(); + var db = _redis.GetDatabase(); + + // Try specific advisory key first, then fall back to "all" + var entries = new List(); + var fromCache = false; + + if (advisoryId is not null) + { + var specificKey = GetAssetKey(tenantId, assetId, advisoryId); + var specificJson = await db.StringGetAsync(specificKey).ConfigureAwait(false); + if (specificJson.HasValue) + { + var specificEntries = JsonSerializer.Deserialize>((string)specificJson!, JsonOptions); + if (specificEntries is not null) + { + entries.AddRange(specificEntries); + fromCache = true; + } + } + } + + // Also get "all" entries (exceptions without specific advisory) + var allKey = GetAssetKey(tenantId, assetId, null); + var allJson = await db.StringGetAsync(allKey).ConfigureAwait(false); + if (allJson.HasValue) + { + var allEntries = JsonSerializer.Deserialize>((string)allJson!, JsonOptions); + if (allEntries is not null) + { + entries.AddRange(allEntries); + fromCache = true; + } + } + + // Filter by time and sort by priority + var validEntries = entries + .Where(e => e.EffectiveFrom <= asOf && (e.ExpiresAt is null || e.ExpiresAt > asOf)) + .OrderByDescending(e => e.Priority) + .ToImmutableArray(); + + var version = await GetVersionAsync(tenantId, cancellationToken).ConfigureAwait(false); + + sw.Stop(); + + PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, fromCache ? "hit" : "miss"); + + return new ExceptionCacheQueryResult + { + Entries = validEntries, + FromCache = fromCache, + CacheVersion = version, + QueryDurationMs = sw.ElapsedMilliseconds, + }; + } + + public async Task> GetBatchAsync( + string tenantId, + IReadOnlyList assetIds, + DateTimeOffset asOf, + CancellationToken cancellationToken = default) + { + var results = new Dictionary(StringComparer.OrdinalIgnoreCase); + var db = _redis.GetDatabase(); + + // Get all "all" keys for assets + var keys = assetIds.Select(id => (RedisKey)GetAssetKey(tenantId, id, null)).ToArray(); + var values = await db.StringGetAsync(keys).ConfigureAwait(false); + + var version = await GetVersionAsync(tenantId, cancellationToken).ConfigureAwait(false); + + for (int i = 0; i < assetIds.Count; i++) + { + var entries = ImmutableArray.Empty; + var fromCache = false; + + if (values[i].HasValue) + { + var cachedEntries = JsonSerializer.Deserialize>((string)values[i]!, JsonOptions); + if (cachedEntries is not null) + { + entries = cachedEntries + .Where(e => e.EffectiveFrom <= asOf && (e.ExpiresAt is null || e.ExpiresAt > asOf)) + .OrderByDescending(e => e.Priority) + .ToImmutableArray(); + fromCache = true; + } + } + + results[assetIds[i]] = new ExceptionCacheQueryResult + { + Entries = entries, + FromCache = fromCache, + CacheVersion = version, + QueryDurationMs = 0, + }; + } + + PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, "batch_get"); + + return results; + } + + public async Task SetAsync( + string tenantId, + ExceptionCacheEntry entry, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(entry); + + var db = _redis.GetDatabase(); + var assetKey = GetAssetKey(tenantId, entry.AssetId, entry.AdvisoryId); + var exceptionIndexKey = GetExceptionIndexKey(tenantId, entry.ExceptionId); + + // Get existing entries for this asset + var existingJson = await db.StringGetAsync(assetKey).ConfigureAwait(false); + var entries = existingJson.HasValue + ? JsonSerializer.Deserialize>((string)existingJson!, JsonOptions) ?? new List() + : new List(); + + // Remove existing entry for same exception if any + entries.RemoveAll(e => e.ExceptionId == entry.ExceptionId); + + // Add new entry + entries.Add(entry); + + var ttl = ComputeTtl(entry); + var json = JsonSerializer.Serialize(entries, JsonOptions); + + var tasks = new List + { + db.StringSetAsync(assetKey, json, ttl), + db.SetAddAsync(exceptionIndexKey, assetKey), + db.KeyExpireAsync(exceptionIndexKey, ttl + TimeSpan.FromMinutes(5)), + }; + + await Task.WhenAll(tasks).ConfigureAwait(false); + + PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, "set"); + } + + public async Task SetBatchAsync( + string tenantId, + IEnumerable entries, + CancellationToken cancellationToken = default) + { + var db = _redis.GetDatabase(); + var batch = db.CreateBatch(); + var count = 0; + + // Group entries by asset+advisory + var groupedEntries = entries + .GroupBy(e => GetAssetKey(tenantId, e.AssetId, e.AdvisoryId)) + .ToDictionary(g => g.Key, g => g.ToList()); + + foreach (var (assetKey, assetEntries) in groupedEntries) + { + var ttl = assetEntries.Max(e => ComputeTtl(e)); + var json = JsonSerializer.Serialize(assetEntries, JsonOptions); + + _ = batch.StringSetAsync(assetKey, json, ttl); + + // Update exception indexes + foreach (var entry in assetEntries) + { + var exceptionIndexKey = GetExceptionIndexKey(tenantId, entry.ExceptionId); + _ = batch.SetAddAsync(exceptionIndexKey, assetKey); + _ = batch.KeyExpireAsync(exceptionIndexKey, ttl + TimeSpan.FromMinutes(5)); + } + + count += assetEntries.Count; + } + + batch.Execute(); + + // Increment version + await IncrementVersionAsync(tenantId, cancellationToken).ConfigureAwait(false); + + PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, "set_batch"); + + _logger.LogDebug("Set {Count} exception cache entries for tenant {TenantId}", count, tenantId); + } + + public async Task InvalidateExceptionAsync( + string tenantId, + string exceptionId, + CancellationToken cancellationToken = default) + { + var db = _redis.GetDatabase(); + var exceptionIndexKey = GetExceptionIndexKey(tenantId, exceptionId); + + // Get all asset keys affected by this exception + var assetKeys = await db.SetMembersAsync(exceptionIndexKey).ConfigureAwait(false); + + if (assetKeys.Length > 0) + { + // For each asset key, remove entries for this exception + foreach (var assetKey in assetKeys) + { + var json = await db.StringGetAsync((string)assetKey!).ConfigureAwait(false); + if (json.HasValue) + { + var entries = JsonSerializer.Deserialize>((string)json!, JsonOptions); + if (entries is not null) + { + entries.RemoveAll(e => e.ExceptionId == exceptionId); + if (entries.Count > 0) + { + await db.StringSetAsync((string)assetKey!, JsonSerializer.Serialize(entries, JsonOptions)) + .ConfigureAwait(false); + } + else + { + await db.KeyDeleteAsync((string)assetKey!).ConfigureAwait(false); + } + } + } + } + } + + // Delete the exception index + await db.KeyDeleteAsync(exceptionIndexKey).ConfigureAwait(false); + + // Increment version + await IncrementVersionAsync(tenantId, cancellationToken).ConfigureAwait(false); + + PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, "invalidate_exception"); + + _logger.LogInformation( + "Invalidated exception {ExceptionId} affecting {Count} assets for tenant {TenantId}", + exceptionId, assetKeys.Length, tenantId); + } + + public async Task InvalidateAssetAsync( + string tenantId, + string assetId, + CancellationToken cancellationToken = default) + { + var db = _redis.GetDatabase(); + var server = _redis.GetServer(_redis.GetEndPoints().First()); + + // Find all keys for this asset (all advisory variants) + var pattern = $"{KeyPrefix}:{tenantId}:a:{assetId}:*"; + var keys = server.Keys(pattern: pattern).ToArray(); + + if (keys.Length > 0) + { + await db.KeyDeleteAsync(keys).ConfigureAwait(false); + } + + // Increment version + await IncrementVersionAsync(tenantId, cancellationToken).ConfigureAwait(false); + + PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, "invalidate_asset"); + + _logger.LogDebug("Invalidated {Count} cache keys for asset {AssetId}", keys.Length, assetId); + } + + public async Task InvalidateTenantAsync( + string tenantId, + CancellationToken cancellationToken = default) + { + var server = _redis.GetServer(_redis.GetEndPoints().First()); + var pattern = $"{KeyPrefix}:{tenantId}:*"; + var keys = server.Keys(pattern: pattern).ToArray(); + + if (keys.Length > 0) + { + var db = _redis.GetDatabase(); + await db.KeyDeleteAsync(keys).ConfigureAwait(false); + } + + PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, "invalidate_tenant"); + + _logger.LogInformation("Invalidated {Count} cache keys for tenant {TenantId}", keys.Length, tenantId); + } + + public async Task WarmAsync( + string tenantId, + CancellationToken cancellationToken = default) + { + using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity( + "exception.cache.warm", ActivityKind.Internal); + activity?.SetTag("tenant_id", tenantId); + + var sw = Stopwatch.StartNew(); + var now = _timeProvider.GetUtcNow(); + + _logger.LogInformation("Starting cache warm for tenant {TenantId}", tenantId); + + try + { + // Get all active exceptions from repository + var exceptions = await _repository.ListExceptionsAsync( + tenantId, + new ExceptionQueryOptions + { + Statuses = ImmutableArray.Create("active"), + IncludeExpired = false, + Limit = _options.MaxEntriesPerTenant, + }, + cancellationToken).ConfigureAwait(false); + + if (exceptions.Length == 0) + { + _logger.LogDebug("No active exceptions to warm for tenant {TenantId}", tenantId); + return; + } + + // Get bindings for all exceptions + var entries = new List(); + + foreach (var exception in exceptions) + { + var bindings = await _repository.GetBindingsForExceptionAsync( + tenantId, exception.Id, cancellationToken).ConfigureAwait(false); + + foreach (var binding in bindings.Where(b => b.Status == "active")) + { + entries.Add(new ExceptionCacheEntry + { + ExceptionId = exception.Id, + AssetId = binding.AssetId, + AdvisoryId = binding.AdvisoryId, + CveId = binding.CveId, + DecisionOverride = binding.DecisionOverride, + ExceptionType = exception.ExceptionType, + Priority = exception.Priority, + EffectiveFrom = binding.EffectiveFrom, + ExpiresAt = binding.ExpiresAt ?? exception.ExpiresAt, + CachedAt = now, + ExceptionName = exception.Name, + }); + } + + // Also add entries for scope-based exceptions without explicit bindings + if (exception.Scope.ApplyToAll || exception.Scope.AssetIds.Count > 0) + { + foreach (var assetId in exception.Scope.AssetIds) + { + foreach (var advisoryId in exception.Scope.AdvisoryIds.DefaultIfEmpty(null!)) + { + entries.Add(new ExceptionCacheEntry + { + ExceptionId = exception.Id, + AssetId = assetId, + AdvisoryId = advisoryId, + CveId = null, + DecisionOverride = "allow", + ExceptionType = exception.ExceptionType, + Priority = exception.Priority, + EffectiveFrom = exception.EffectiveFrom ?? exception.CreatedAt, + ExpiresAt = exception.ExpiresAt, + CachedAt = now, + ExceptionName = exception.Name, + }); + } + } + } + } + + if (entries.Count > 0) + { + await SetBatchAsync(tenantId, entries, cancellationToken).ConfigureAwait(false); + } + + sw.Stop(); + + // Update warm stats + await UpdateWarmStatsAsync(tenantId, now, entries.Count).ConfigureAwait(false); + + PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, "warm"); + + _logger.LogInformation( + "Warmed cache with {Count} entries from {ExceptionCount} exceptions for tenant {TenantId} in {Duration}ms", + entries.Count, exceptions.Length, tenantId, sw.ElapsedMilliseconds); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to warm cache for tenant {TenantId}", tenantId); + PolicyEngineTelemetry.RecordError("exception_cache_warm", tenantId); + throw; + } + } + + public async Task GetSummaryAsync( + string tenantId, + CancellationToken cancellationToken = default) + { + var server = _redis.GetServer(_redis.GetEndPoints().First()); + var db = _redis.GetDatabase(); + var now = _timeProvider.GetUtcNow(); + + // Count asset keys + var assetPattern = $"{KeyPrefix}:{tenantId}:a:*"; + var assetKeys = server.Keys(pattern: assetPattern).ToArray(); + + // Count exception index keys + var exceptionPattern = $"{KeyPrefix}:{tenantId}:idx:e:*"; + var exceptionKeys = server.Keys(pattern: exceptionPattern).ToArray(); + + // Aggregate stats + var byType = new Dictionary(StringComparer.OrdinalIgnoreCase); + var byDecision = new Dictionary(StringComparer.OrdinalIgnoreCase); + var totalEntries = 0; + var expiringWithinHour = 0; + var uniqueAssets = new HashSet(StringComparer.OrdinalIgnoreCase); + + foreach (var key in assetKeys.Take(1000)) // Limit scan for performance + { + var json = await db.StringGetAsync(key).ConfigureAwait(false); + if (!json.HasValue) continue; + + var entries = JsonSerializer.Deserialize>((string)json!, JsonOptions); + if (entries is null) continue; + + foreach (var entry in entries) + { + totalEntries++; + uniqueAssets.Add(entry.AssetId); + + byType.TryGetValue(entry.ExceptionType, out var typeCount); + byType[entry.ExceptionType] = typeCount + 1; + + byDecision.TryGetValue(entry.DecisionOverride, out var decisionCount); + byDecision[entry.DecisionOverride] = decisionCount + 1; + + if (entry.ExpiresAt.HasValue && entry.ExpiresAt.Value - now <= TimeSpan.FromHours(1)) + { + expiringWithinHour++; + } + } + } + + var version = await GetVersionAsync(tenantId, cancellationToken).ConfigureAwait(false); + + return new ExceptionCacheSummary + { + TenantId = tenantId, + TotalEntries = totalEntries, + UniqueExceptions = exceptionKeys.Length, + UniqueAssets = uniqueAssets.Count, + ByType = byType, + ByDecision = byDecision, + ExpiringWithinHour = expiringWithinHour, + CacheVersion = version, + ComputedAt = now, + }; + } + + public async Task GetStatsAsync( + string? tenantId = null, + CancellationToken cancellationToken = default) + { + var server = _redis.GetServer(_redis.GetEndPoints().First()); + + var pattern = tenantId != null + ? $"{KeyPrefix}:{tenantId}:a:*" + : $"{KeyPrefix}:*:a:*"; + + var entryCount = server.Keys(pattern: pattern).Count(); + + var tenantPattern = tenantId != null + ? $"{KeyPrefix}:{tenantId}:v" + : $"{KeyPrefix}:*:v"; + + var tenantCount = server.Keys(pattern: tenantPattern).Count(); + + long? memoryUsed = null; + try + { + var info = server.Info("memory"); + var memorySection = info.FirstOrDefault(s => s.Key == "Memory"); + if (memorySection is not null) + { + var usedMemory = memorySection.FirstOrDefault(p => p.Key == "used_memory"); + if (usedMemory.Key is not null && long.TryParse(usedMemory.Value, out var bytes)) + { + memoryUsed = bytes; + } + } + } + catch + { + // Ignore - memory info not available + } + + return new ExceptionCacheStats + { + TotalEntries = entryCount, + TotalTenants = tenantCount, + MemoryUsedBytes = memoryUsed, + HitCount = 0, // Would need to track separately + MissCount = 0, + LastWarmAt = null, + LastInvalidationAt = null, + }; + } + + public async Task GetVersionAsync( + string tenantId, + CancellationToken cancellationToken = default) + { + var db = _redis.GetDatabase(); + var versionKey = GetVersionKey(tenantId); + + var version = await db.StringGetAsync(versionKey).ConfigureAwait(false); + return version.HasValue ? (long)version : 0; + } + + public async Task HandleExceptionEventAsync( + ExceptionEvent exceptionEvent, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(exceptionEvent); + + using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity( + "exception.cache.handle_event", ActivityKind.Internal); + activity?.SetTag("tenant_id", exceptionEvent.TenantId); + activity?.SetTag("event_type", exceptionEvent.EventType); + activity?.SetTag("exception_id", exceptionEvent.ExceptionId); + + _logger.LogDebug( + "Handling exception event {EventType} for exception {ExceptionId} tenant {TenantId}", + exceptionEvent.EventType, exceptionEvent.ExceptionId, exceptionEvent.TenantId); + + switch (exceptionEvent.EventType.ToLowerInvariant()) + { + case "activated": + // Warm the cache with the new exception + await WarmExceptionAsync(exceptionEvent.TenantId, exceptionEvent.ExceptionId, cancellationToken) + .ConfigureAwait(false); + break; + + case "expired": + case "revoked": + case "deleted": + // Invalidate cache entries for this exception + await InvalidateExceptionAsync(exceptionEvent.TenantId, exceptionEvent.ExceptionId, cancellationToken) + .ConfigureAwait(false); + break; + + case "updated": + // Invalidate and re-warm + await InvalidateExceptionAsync(exceptionEvent.TenantId, exceptionEvent.ExceptionId, cancellationToken) + .ConfigureAwait(false); + await WarmExceptionAsync(exceptionEvent.TenantId, exceptionEvent.ExceptionId, cancellationToken) + .ConfigureAwait(false); + break; + + case "created": + // Only warm if already active + var exception = await _repository.GetExceptionAsync( + exceptionEvent.TenantId, exceptionEvent.ExceptionId, cancellationToken).ConfigureAwait(false); + if (exception?.Status == "active") + { + await WarmExceptionAsync(exceptionEvent.TenantId, exceptionEvent.ExceptionId, cancellationToken) + .ConfigureAwait(false); + } + break; + + default: + _logger.LogWarning("Unknown exception event type: {EventType}", exceptionEvent.EventType); + break; + } + + PolicyEngineTelemetry.RecordExceptionCacheOperation(exceptionEvent.TenantId, $"event_{exceptionEvent.EventType}"); + } + + private async Task WarmExceptionAsync(string tenantId, string exceptionId, CancellationToken cancellationToken) + { + var exception = await _repository.GetExceptionAsync(tenantId, exceptionId, cancellationToken) + .ConfigureAwait(false); + + if (exception is null || exception.Status != "active") + { + return; + } + + var now = _timeProvider.GetUtcNow(); + var entries = new List(); + + var bindings = await _repository.GetBindingsForExceptionAsync(tenantId, exceptionId, cancellationToken) + .ConfigureAwait(false); + + foreach (var binding in bindings.Where(b => b.Status == "active")) + { + entries.Add(new ExceptionCacheEntry + { + ExceptionId = exception.Id, + AssetId = binding.AssetId, + AdvisoryId = binding.AdvisoryId, + CveId = binding.CveId, + DecisionOverride = binding.DecisionOverride, + ExceptionType = exception.ExceptionType, + Priority = exception.Priority, + EffectiveFrom = binding.EffectiveFrom, + ExpiresAt = binding.ExpiresAt ?? exception.ExpiresAt, + CachedAt = now, + ExceptionName = exception.Name, + }); + } + + if (entries.Count > 0) + { + await SetBatchAsync(tenantId, entries, cancellationToken).ConfigureAwait(false); + } + + _logger.LogDebug( + "Warmed cache with {Count} entries for exception {ExceptionId}", + entries.Count, exceptionId); + } + + private async Task IncrementVersionAsync(string tenantId, CancellationToken cancellationToken) + { + var db = _redis.GetDatabase(); + var versionKey = GetVersionKey(tenantId); + + var newVersion = await db.StringIncrementAsync(versionKey).ConfigureAwait(false); + + // Set TTL on version key if not already set + await db.KeyExpireAsync(versionKey, TimeSpan.FromMinutes(_options.DefaultTtlMinutes + 10), ExpireWhen.HasNoExpiry) + .ConfigureAwait(false); + + return newVersion; + } + + private async Task UpdateWarmStatsAsync(string tenantId, DateTimeOffset warmAt, int count) + { + var db = _redis.GetDatabase(); + var statsKey = GetStatsKey(tenantId); + + var stats = new Dictionary + { + ["lastWarmAt"] = warmAt.ToString("O"), + ["lastWarmCount"] = count.ToString(), + }; + + await db.HashSetAsync(statsKey, stats.Select(kv => new HashEntry(kv.Key, kv.Value)).ToArray()) + .ConfigureAwait(false); + } + + private TimeSpan ComputeTtl(ExceptionCacheEntry entry) + { + if (entry.ExpiresAt.HasValue) + { + var ttl = entry.ExpiresAt.Value - _timeProvider.GetUtcNow(); + if (ttl > TimeSpan.Zero) + { + return ttl; + } + } + + return TimeSpan.FromMinutes(_options.DefaultTtlMinutes); + } + + private static string GetAssetKey(string tenantId, string assetId, string? advisoryId) => + $"{KeyPrefix}:{tenantId}:a:{assetId}:{advisoryId ?? "all"}"; + + private static string GetExceptionIndexKey(string tenantId, string exceptionId) => + $"{KeyPrefix}:{tenantId}:idx:e:{exceptionId}"; + + private static string GetVersionKey(string tenantId) => + $"{KeyPrefix}:{tenantId}:v"; + + private static string GetStatsKey(string tenantId) => + $"{KeyPrefix}:{tenantId}:stats"; +} diff --git a/src/Policy/StellaOps.Policy.Engine/Options/PolicyEngineOptions.cs b/src/Policy/StellaOps.Policy.Engine/Options/PolicyEngineOptions.cs index 2313a797e..16f381da8 100644 --- a/src/Policy/StellaOps.Policy.Engine/Options/PolicyEngineOptions.cs +++ b/src/Policy/StellaOps.Policy.Engine/Options/PolicyEngineOptions.cs @@ -1,6 +1,8 @@ using System.Collections.ObjectModel; using StellaOps.Auth.Abstractions; using StellaOps.Policy.Engine.Caching; +using StellaOps.Policy.Engine.EffectiveDecisionMap; +using StellaOps.Policy.Engine.ExceptionCache; using StellaOps.Policy.Engine.ReachabilityFacts; using StellaOps.Policy.Engine.Telemetry; @@ -33,6 +35,10 @@ public sealed class PolicyEngineOptions public PolicyEvaluationCacheOptions EvaluationCache { get; } = new(); + public EffectiveDecisionMapOptions EffectiveDecisionMap { get; } = new(); + + public ExceptionCacheOptions ExceptionCache { get; } = new(); + public void Validate() { Authority.Validate(); diff --git a/src/Policy/StellaOps.Policy.Engine/Services/PolicyBundleService.cs b/src/Policy/StellaOps.Policy.Engine/Services/PolicyBundleService.cs index 101135277..4aa989f68 100644 --- a/src/Policy/StellaOps.Policy.Engine/Services/PolicyBundleService.cs +++ b/src/Policy/StellaOps.Policy.Engine/Services/PolicyBundleService.cs @@ -79,6 +79,7 @@ internal sealed class PolicyBundleService Size: payload.Length, CreatedAt: createdAt, Payload: payload.ToImmutableArray(), + CompiledDocument: compileResult.Document, AocMetadata: aocMetadata); await _repository.StoreBundleAsync(packId, version, record, cancellationToken).ConfigureAwait(false); diff --git a/src/Policy/StellaOps.Policy.Engine/Services/PolicyCompilationService.cs b/src/Policy/StellaOps.Policy.Engine/Services/PolicyCompilationService.cs index bcc0ecea6..d7528de59 100644 --- a/src/Policy/StellaOps.Policy.Engine/Services/PolicyCompilationService.cs +++ b/src/Policy/StellaOps.Policy.Engine/Services/PolicyCompilationService.cs @@ -1,9 +1,12 @@ using System; using System.Collections.Immutable; +using System.Diagnostics; +using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using StellaOps.Policy; using StellaOps.Policy.Engine.Compilation; using StellaOps.Policy.Engine.Options; +using StellaOps.Policy.Engine.Telemetry; using StellaOps.PolicyDsl; using DslCompiler = StellaOps.PolicyDsl.PolicyCompiler; using DslCompilationResult = StellaOps.PolicyDsl.PolicyCompilationResult; @@ -27,19 +30,25 @@ internal sealed class PolicyCompilationService { private readonly DslCompiler compiler; private readonly PolicyComplexityAnalyzer complexityAnalyzer; + private readonly PolicyMetadataExtractor metadataExtractor; private readonly IOptionsMonitor optionsMonitor; private readonly TimeProvider timeProvider; + private readonly ILogger _logger; public PolicyCompilationService( DslCompiler compiler, PolicyComplexityAnalyzer complexityAnalyzer, + PolicyMetadataExtractor metadataExtractor, IOptionsMonitor optionsMonitor, - TimeProvider timeProvider) + TimeProvider timeProvider, + ILogger? logger = null) { this.compiler = compiler ?? throw new ArgumentNullException(nameof(compiler)); this.complexityAnalyzer = complexityAnalyzer ?? throw new ArgumentNullException(nameof(complexityAnalyzer)); + this.metadataExtractor = metadataExtractor ?? throw new ArgumentNullException(nameof(metadataExtractor)); this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); this.timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger.Instance; } public PolicyCompilationResultDto Compile(PolicyCompileRequest request) @@ -56,6 +65,9 @@ internal sealed class PolicyCompilationService if (!string.Equals(request.Dsl.Syntax, "stella-dsl@1", StringComparison.Ordinal)) { + PolicyEngineTelemetry.RecordCompilation("unsupported_syntax", 0); + PolicyEngineTelemetry.RecordError("compilation"); + _logger.LogWarning("Compilation rejected: unsupported syntax {Syntax}", request.Dsl.Syntax ?? "null"); return PolicyCompilationResultDto.FromFailure( ImmutableArray.Create(PolicyIssue.Error( DiagnosticCodes.UnsupportedSyntaxVersion, @@ -65,13 +77,23 @@ internal sealed class PolicyCompilationService durationMilliseconds: 0); } + using var activity = PolicyEngineTelemetry.StartCompileActivity(policyId: null, version: request.Dsl.Syntax); + var start = timeProvider.GetTimestamp(); var result = compiler.Compile(request.Dsl.Source); var elapsed = timeProvider.GetElapsedTime(start, timeProvider.GetTimestamp()); var durationMilliseconds = (long)Math.Ceiling(elapsed.TotalMilliseconds); + var durationSeconds = elapsed.TotalSeconds; if (!result.Success || result.Document is null) { + PolicyEngineTelemetry.RecordCompilation("failure", durationSeconds); + PolicyEngineTelemetry.RecordError("compilation"); + activity?.SetStatus(ActivityStatusCode.Error, "Compilation failed"); + _logger.LogWarning( + "Policy compilation failed in {DurationMs}ms with {DiagnosticCount} diagnostics", + durationMilliseconds, + result.Diagnostics.IsDefault ? 0 : result.Diagnostics.Length); return PolicyCompilationResultDto.FromFailure(result.Diagnostics, null, durationMilliseconds); } @@ -79,6 +101,9 @@ internal sealed class PolicyCompilationService var diagnostics = result.Diagnostics.IsDefault ? ImmutableArray.Empty : result.Diagnostics; var limits = optionsMonitor.CurrentValue?.Compilation ?? new PolicyEngineCompilationOptions(); + activity?.SetTag("policy.rule_count", result.Document.Rules.Length); + activity?.SetTag("policy.complexity_score", complexity.Score); + if (limits.EnforceComplexity && complexity.Score > limits.MaxComplexityScore) { var diagnostic = PolicyIssue.Error( @@ -86,6 +111,12 @@ internal sealed class PolicyCompilationService $"Policy complexity score {complexity.Score:F2} exceeds configured maximum {limits.MaxComplexityScore:F2}. Reduce rule count or expression depth.", "$.rules"); diagnostics = AppendDiagnostic(diagnostics, diagnostic); + PolicyEngineTelemetry.RecordCompilation("complexity_exceeded", durationSeconds); + PolicyEngineTelemetry.RecordError("compilation"); + activity?.SetStatus(ActivityStatusCode.Error, "Complexity exceeded"); + _logger.LogWarning( + "Policy compilation rejected: complexity {Score:F2} exceeds limit {MaxScore:F2}", + complexity.Score, limits.MaxComplexityScore); return PolicyCompilationResultDto.FromFailure(diagnostics, complexity, durationMilliseconds); } @@ -96,10 +127,27 @@ internal sealed class PolicyCompilationService $"Policy compilation time {durationMilliseconds} ms exceeded limit {limits.MaxDurationMilliseconds} ms.", "$.dsl"); diagnostics = AppendDiagnostic(diagnostics, diagnostic); + PolicyEngineTelemetry.RecordCompilation("duration_exceeded", durationSeconds); + PolicyEngineTelemetry.RecordError("compilation"); + activity?.SetStatus(ActivityStatusCode.Error, "Duration exceeded"); + _logger.LogWarning( + "Policy compilation rejected: duration {DurationMs}ms exceeds limit {MaxDurationMs}ms", + durationMilliseconds, limits.MaxDurationMilliseconds); return PolicyCompilationResultDto.FromFailure(diagnostics, complexity, durationMilliseconds); } - return PolicyCompilationResultDto.FromSuccess(result, complexity, durationMilliseconds); + // Extract extended metadata (symbol table, rule index, documentation, coverage, hashes) + var metadata = metadataExtractor.Extract(result.Document, result.CanonicalRepresentation); + + PolicyEngineTelemetry.RecordCompilation("success", durationSeconds); + activity?.SetStatus(ActivityStatusCode.Ok); + activity?.SetTag("policy.symbol_count", metadata.SymbolTable.Symbols.Length); + activity?.SetTag("policy.coverage_paths", metadata.CoverageMetadata.CoveragePaths.Length); + _logger.LogDebug( + "Policy compiled successfully in {DurationMs}ms: {RuleCount} rules, complexity {Score:F2}, {SymbolCount} symbols", + durationMilliseconds, result.Document.Rules.Length, complexity.Score, metadata.SymbolTable.Symbols.Length); + + return PolicyCompilationResultDto.FromSuccess(result, complexity, metadata, durationMilliseconds); } private static ImmutableArray AppendDiagnostic(ImmutableArray diagnostics, PolicyIssue diagnostic) @@ -119,17 +167,20 @@ internal sealed record PolicyCompilationResultDto( ImmutableArray CanonicalRepresentation, ImmutableArray Diagnostics, PolicyComplexityReport? Complexity, - long DurationMilliseconds) + long DurationMilliseconds, + IrDocument? Document = null, + PolicyCompileMetadata? Metadata = null) { public static PolicyCompilationResultDto FromFailure( ImmutableArray diagnostics, PolicyComplexityReport? complexity, long durationMilliseconds) => - new(false, null, null, ImmutableArray.Empty, diagnostics, complexity, durationMilliseconds); + new(false, null, null, ImmutableArray.Empty, diagnostics, complexity, durationMilliseconds, null, null); public static PolicyCompilationResultDto FromSuccess( DslCompilationResult compilationResult, PolicyComplexityReport complexity, + PolicyCompileMetadata metadata, long durationMilliseconds) { if (compilationResult.Document is null) @@ -145,7 +196,9 @@ internal sealed record PolicyCompilationResultDto( compilationResult.CanonicalRepresentation, compilationResult.Diagnostics, complexity, - durationMilliseconds); + durationMilliseconds, + compilationResult.Document, + metadata); } } diff --git a/src/Policy/StellaOps.Policy.Engine/Services/PolicyExplainerService.cs b/src/Policy/StellaOps.Policy.Engine/Services/PolicyExplainerService.cs new file mode 100644 index 000000000..68e22a16b --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Services/PolicyExplainerService.cs @@ -0,0 +1,497 @@ +using System.Collections.Immutable; +using Microsoft.Extensions.Logging; +using StellaOps.Policy.Engine.Domain; +using StellaOps.Policy.Engine.Telemetry; + +namespace StellaOps.Policy.Engine.Services; + +/// +/// Query options for retrieving explain traces. +/// +public sealed record ExplainQueryOptions +{ + /// + /// Filter by policy ID. + /// + public string? PolicyId { get; init; } + + /// + /// Filter by policy version. + /// + public int? PolicyVersion { get; init; } + + /// + /// Filter by run ID. + /// + public string? RunId { get; init; } + + /// + /// Filter by component PURL. + /// + public string? ComponentPurl { get; init; } + + /// + /// Filter by vulnerability ID. + /// + public string? VulnerabilityId { get; init; } + + /// + /// Filter by final outcome. + /// + public string? FinalOutcome { get; init; } + + /// + /// Filter by evaluation time range start. + /// + public DateTimeOffset? FromTime { get; init; } + + /// + /// Filter by evaluation time range end. + /// + public DateTimeOffset? ToTime { get; init; } + + /// + /// Maximum number of results to return. + /// + public int Limit { get; init; } = 100; + + /// + /// Number of results to skip for pagination. + /// + public int Skip { get; init; } = 0; + + /// + /// Include rule steps in results (can be large). + /// + public bool IncludeRuleSteps { get; init; } = true; + + /// + /// Include VEX evidence in results. + /// + public bool IncludeVexEvidence { get; init; } = true; +} + +/// +/// Stored explain trace with AOC chain reference. +/// +public sealed record StoredExplainTrace +{ + /// + /// Unique identifier. + /// + public required string Id { get; init; } + + /// + /// The explain trace data. + /// + public required ExplainTrace Trace { get; init; } + + /// + /// Reference to the AOC chain for this decision. + /// + public ExplainAocChain? AocChain { get; init; } + + /// + /// When this trace was stored. + /// + public required DateTimeOffset StoredAt { get; init; } +} + +/// +/// AOC chain linking a decision to its attestation chain. +/// +public sealed record ExplainAocChain +{ + /// + /// Compilation ID that produced the policy bundle. + /// + public required string CompilationId { get; init; } + + /// + /// Compiler version used. + /// + public required string CompilerVersion { get; init; } + + /// + /// Source digest of the policy document. + /// + public required string SourceDigest { get; init; } + + /// + /// Artifact digest of the compiled bundle. + /// + public required string ArtifactDigest { get; init; } + + /// + /// Reference to the signed attestation. + /// + public ExplainAttestationRef? AttestationRef { get; init; } + + /// + /// Provenance information. + /// + public ExplainProvenance? Provenance { get; init; } +} + +/// +/// Attestation reference for AOC chain. +/// +public sealed record ExplainAttestationRef( + string AttestationId, + string EnvelopeDigest, + string? Uri, + string? SigningKeyId); + +/// +/// Provenance for AOC chain. +/// +public sealed record ExplainProvenance( + string SourceType, + string? SourceUrl, + string? Submitter, + string? CommitSha, + string? Branch); + +/// +/// Repository interface for explain trace persistence. +/// +public interface IExplainTraceRepository +{ + /// + /// Stores an explain trace. + /// + Task StoreAsync( + string tenantId, + ExplainTrace trace, + ExplainAocChain? aocChain, + TimeSpan? retention, + CancellationToken cancellationToken); + + /// + /// Retrieves an explain trace by ID. + /// + Task GetByIdAsync( + string tenantId, + string id, + CancellationToken cancellationToken); + + /// + /// Retrieves an explain trace by run ID and subject hash. + /// + Task GetByRunAndSubjectAsync( + string tenantId, + string runId, + string subjectHash, + CancellationToken cancellationToken); + + /// + /// Queries explain traces with filtering and pagination. + /// + Task> QueryAsync( + string tenantId, + ExplainQueryOptions options, + CancellationToken cancellationToken); + + /// + /// Gets all explain traces for a policy run. + /// + Task> GetByRunIdAsync( + string tenantId, + string runId, + CancellationToken cancellationToken); + + /// + /// Deletes explain traces older than the specified retention period. + /// + Task PruneExpiredAsync( + string tenantId, + CancellationToken cancellationToken); +} + +/// +/// Service for persisting and retrieving policy explain traces with AOC chain linkage. +/// +internal sealed class PolicyExplainerService +{ + private readonly IExplainTraceRepository _repository; + private readonly IPolicyPackRepository _policyRepository; + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + private readonly TimeSpan _defaultRetention; + + public PolicyExplainerService( + IExplainTraceRepository repository, + IPolicyPackRepository policyRepository, + ILogger logger, + TimeProvider timeProvider, + TimeSpan? defaultRetention = null) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _policyRepository = policyRepository ?? throw new ArgumentNullException(nameof(policyRepository)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _defaultRetention = defaultRetention ?? TimeSpan.FromDays(30); + } + + /// + /// Stores an explain trace and links it to the AOC chain from the policy bundle. + /// + public async Task StoreExplainTraceAsync( + string tenantId, + ExplainTrace trace, + TimeSpan? retention = null, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(tenantId); + ArgumentNullException.ThrowIfNull(trace); + + _logger.LogDebug( + "Storing explain trace for run {RunId}, policy {PolicyId}:{Version}, tenant {TenantId}", + trace.RunId, trace.PolicyId, trace.PolicyVersion, tenantId); + + // Try to get AOC chain from the policy bundle + ExplainAocChain? aocChain = null; + if (trace.PolicyVersion.HasValue) + { + var revision = await _policyRepository.GetRevisionAsync( + trace.PolicyId, + trace.PolicyVersion.Value, + cancellationToken).ConfigureAwait(false); + + if (revision?.Bundle?.AocMetadata is not null) + { + var aoc = revision.Bundle.AocMetadata; + aocChain = new ExplainAocChain + { + CompilationId = aoc.CompilationId, + CompilerVersion = aoc.CompilerVersion, + SourceDigest = aoc.SourceDigest, + ArtifactDigest = aoc.ArtifactDigest, + AttestationRef = aoc.AttestationRef is not null + ? new ExplainAttestationRef( + aoc.AttestationRef.AttestationId, + aoc.AttestationRef.EnvelopeDigest, + aoc.AttestationRef.Uri, + aoc.AttestationRef.SigningKeyId) + : null, + Provenance = aoc.Provenance is not null + ? new ExplainProvenance( + aoc.Provenance.SourceType, + aoc.Provenance.SourceUrl, + aoc.Provenance.Submitter, + aoc.Provenance.CommitSha, + aoc.Provenance.Branch) + : null + }; + + _logger.LogDebug( + "Linked explain trace to AOC chain: compilation {CompilationId}, attestation {AttestationId}", + aocChain.CompilationId, + aocChain.AttestationRef?.AttestationId ?? "(none)"); + } + } + + var stored = await _repository.StoreAsync( + tenantId, + trace, + aocChain, + retention ?? _defaultRetention, + cancellationToken).ConfigureAwait(false); + + PolicyEngineTelemetry.ExplainTracesStored.Add(1, + new KeyValuePair("tenant_id", tenantId), + new KeyValuePair("policy_id", trace.PolicyId)); + + return stored; + } + + /// + /// Retrieves an explain trace by its ID. + /// + public Task GetExplainTraceAsync( + string tenantId, + string traceId, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(tenantId); + ArgumentNullException.ThrowIfNull(traceId); + + return _repository.GetByIdAsync(tenantId, traceId, cancellationToken); + } + + /// + /// Retrieves an explain trace for a specific decision. + /// + public Task GetExplainTraceForDecisionAsync( + string tenantId, + string runId, + string subjectHash, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(tenantId); + ArgumentNullException.ThrowIfNull(runId); + ArgumentNullException.ThrowIfNull(subjectHash); + + return _repository.GetByRunAndSubjectAsync(tenantId, runId, subjectHash, cancellationToken); + } + + /// + /// Gets all explain traces for a policy run. + /// + public Task> GetExplainTracesForRunAsync( + string tenantId, + string runId, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(tenantId); + ArgumentNullException.ThrowIfNull(runId); + + return _repository.GetByRunIdAsync(tenantId, runId, cancellationToken); + } + + /// + /// Queries explain traces with filtering and pagination. + /// + public Task> QueryExplainTracesAsync( + string tenantId, + ExplainQueryOptions options, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(tenantId); + options ??= new ExplainQueryOptions(); + + return _repository.QueryAsync(tenantId, options, cancellationToken); + } + + /// + /// Gets the AOC chain for a stored explain trace. + /// + public async Task GetAocChainForTraceAsync( + string tenantId, + string traceId, + CancellationToken cancellationToken = default) + { + var trace = await GetExplainTraceAsync(tenantId, traceId, cancellationToken).ConfigureAwait(false); + return trace?.AocChain; + } + + /// + /// Validates that an explain trace's AOC chain is intact. + /// + public async Task ValidateAocChainAsync( + string tenantId, + string traceId, + CancellationToken cancellationToken = default) + { + var trace = await GetExplainTraceAsync(tenantId, traceId, cancellationToken).ConfigureAwait(false); + if (trace is null) + { + return new AocChainValidationResult( + IsValid: false, + ValidationMessage: "Explain trace not found", + PolicyFound: false, + BundleIntact: false, + AttestationAvailable: false); + } + + if (trace.AocChain is null) + { + return new AocChainValidationResult( + IsValid: false, + ValidationMessage: "No AOC chain linked to this trace", + PolicyFound: true, + BundleIntact: false, + AttestationAvailable: false); + } + + // Verify the policy revision still exists + if (!trace.Trace.PolicyVersion.HasValue) + { + return new AocChainValidationResult( + IsValid: false, + ValidationMessage: "Trace has no policy version", + PolicyFound: false, + BundleIntact: false, + AttestationAvailable: false); + } + + var revision = await _policyRepository.GetRevisionAsync( + trace.Trace.PolicyId, + trace.Trace.PolicyVersion.Value, + cancellationToken).ConfigureAwait(false); + + if (revision is null) + { + return new AocChainValidationResult( + IsValid: false, + ValidationMessage: $"Policy revision {trace.Trace.PolicyId}:{trace.Trace.PolicyVersion} no longer exists", + PolicyFound: false, + BundleIntact: false, + AttestationAvailable: false); + } + + // Verify bundle digest matches + var bundleIntact = revision.Bundle?.Digest == trace.AocChain.ArtifactDigest; + if (!bundleIntact) + { + return new AocChainValidationResult( + IsValid: false, + ValidationMessage: "Bundle digest mismatch - policy bundle has been modified", + PolicyFound: true, + BundleIntact: false, + AttestationAvailable: trace.AocChain.AttestationRef is not null); + } + + // Verify AOC metadata matches + var aocMatches = revision.Bundle?.AocMetadata?.CompilationId == trace.AocChain.CompilationId && + revision.Bundle?.AocMetadata?.SourceDigest == trace.AocChain.SourceDigest; + + if (!aocMatches) + { + return new AocChainValidationResult( + IsValid: false, + ValidationMessage: "AOC metadata mismatch - compilation chain has been modified", + PolicyFound: true, + BundleIntact: true, + AttestationAvailable: trace.AocChain.AttestationRef is not null); + } + + return new AocChainValidationResult( + IsValid: true, + ValidationMessage: "AOC chain is intact and verifiable", + PolicyFound: true, + BundleIntact: true, + AttestationAvailable: trace.AocChain.AttestationRef is not null); + } + + /// + /// Prunes expired explain traces for a tenant. + /// + public async Task PruneExpiredTracesAsync( + string tenantId, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(tenantId); + + var pruned = await _repository.PruneExpiredAsync(tenantId, cancellationToken).ConfigureAwait(false); + + if (pruned > 0) + { + _logger.LogInformation( + "Pruned {Count} expired explain traces for tenant {TenantId}", + pruned, tenantId); + } + + return pruned; + } +} + +/// +/// Result of AOC chain validation. +/// +public sealed record AocChainValidationResult( + bool IsValid, + string ValidationMessage, + bool PolicyFound, + bool BundleIntact, + bool AttestationAvailable); diff --git a/src/Policy/StellaOps.Policy.Engine/Services/PolicyRuntimeEvaluationService.cs b/src/Policy/StellaOps.Policy.Engine/Services/PolicyRuntimeEvaluationService.cs index 1abdac8bd..975c2dd32 100644 --- a/src/Policy/StellaOps.Policy.Engine/Services/PolicyRuntimeEvaluationService.cs +++ b/src/Policy/StellaOps.Policy.Engine/Services/PolicyRuntimeEvaluationService.cs @@ -1,4 +1,5 @@ using System.Collections.Immutable; +using System.Diagnostics; using System.Security.Cryptography; using System.Text; using System.Text.Json; @@ -6,6 +7,7 @@ using Microsoft.Extensions.Logging; using StellaOps.Policy.Engine.Caching; using StellaOps.Policy.Engine.Domain; using StellaOps.Policy.Engine.Evaluation; +using StellaOps.Policy.Engine.Telemetry; using StellaOps.PolicyDsl; namespace StellaOps.Policy.Engine.Services; @@ -88,6 +90,12 @@ internal sealed class PolicyRuntimeEvaluationService { ArgumentNullException.ThrowIfNull(request); + using var activity = PolicyEngineTelemetry.StartEvaluateActivity( + request.TenantId, request.PackId, runId: null); + activity?.SetTag("policy.version", request.Version); + activity?.SetTag("subject.purl", request.SubjectPurl); + activity?.SetTag("advisory.id", request.AdvisoryId); + var startTimestamp = _timeProvider.GetTimestamp(); var evaluationTimestamp = request.EvaluationTimestamp ?? _timeProvider.GetUtcNow(); @@ -97,6 +105,9 @@ internal sealed class PolicyRuntimeEvaluationService if (bundle is null) { + PolicyEngineTelemetry.RecordError("evaluation", request.TenantId); + PolicyEngineTelemetry.RecordEvaluationFailure(request.TenantId, request.PackId, "bundle_not_found"); + activity?.SetStatus(ActivityStatusCode.Error, "Bundle not found"); throw new InvalidOperationException( $"Policy bundle not found for pack '{request.PackId}' version {request.Version}."); } @@ -113,6 +124,12 @@ internal sealed class PolicyRuntimeEvaluationService if (cacheResult.CacheHit && cacheResult.Entry is not null) { var duration = GetElapsedMilliseconds(startTimestamp); + var durationSeconds = duration / 1000.0; + PolicyEngineTelemetry.RecordEvaluationLatency(durationSeconds, request.TenantId, request.PackId); + PolicyEngineTelemetry.RecordEvaluation(request.TenantId, request.PackId, "cached"); + activity?.SetTag("cache.hit", true); + activity?.SetTag("cache.source", cacheResult.Source.ToString()); + activity?.SetStatus(ActivityStatusCode.Ok); _logger.LogDebug( "Cache hit for evaluation {PackId}@{Version} subject {Subject} from {Source}", request.PackId, request.Version, request.SubjectPurl, cacheResult.Source); @@ -122,12 +139,17 @@ internal sealed class PolicyRuntimeEvaluationService } } + activity?.SetTag("cache.hit", false); + // Cache miss - perform evaluation - var document = DeserializeCompiledPolicy(bundle.Payload); + var document = bundle.CompiledDocument; if (document is null) { + PolicyEngineTelemetry.RecordError("evaluation", request.TenantId); + PolicyEngineTelemetry.RecordEvaluationFailure(request.TenantId, request.PackId, "document_not_found"); + activity?.SetStatus(ActivityStatusCode.Error, "Document not found"); throw new InvalidOperationException( - $"Failed to deserialize compiled policy for pack '{request.PackId}' version {request.Version}."); + $"Compiled policy document not found for pack '{request.PackId}' version {request.Version}."); } var context = new PolicyEvaluationContext( @@ -162,6 +184,21 @@ internal sealed class PolicyRuntimeEvaluationService await _cache.SetAsync(cacheKey, cacheEntry, cancellationToken).ConfigureAwait(false); var evalDuration = GetElapsedMilliseconds(startTimestamp); + var evalDurationSeconds = evalDuration / 1000.0; + + // Record metrics + PolicyEngineTelemetry.RecordEvaluationLatency(evalDurationSeconds, request.TenantId, request.PackId); + PolicyEngineTelemetry.RecordEvaluation(request.TenantId, request.PackId, "full"); + if (!string.IsNullOrEmpty(result.RuleName)) + { + PolicyEngineTelemetry.RecordRuleFired(request.PackId, result.RuleName); + } + + activity?.SetTag("evaluation.status", result.Status); + activity?.SetTag("evaluation.rule", result.RuleName ?? "none"); + activity?.SetTag("evaluation.duration_ms", evalDuration); + activity?.SetStatus(ActivityStatusCode.Ok); + _logger.LogDebug( "Evaluated {PackId}@{Version} subject {Subject} in {Duration}ms - {Status}", request.PackId, request.Version, request.SubjectPurl, evalDuration, result.Status); @@ -195,7 +232,13 @@ internal sealed class PolicyRuntimeEvaluationService return Array.Empty(); } + using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity("policy.evaluate_batch", ActivityKind.Internal); + activity?.SetTag("batch.size", requests.Count); + + var batchStartTimestamp = _timeProvider.GetTimestamp(); var results = new List(requests.Count); + var cacheHits = 0; + var cacheMisses = 0; // Group by pack/version for bundle loading efficiency var groups = requests.GroupBy(r => (r.PackId, r.Version)); @@ -210,6 +253,7 @@ internal sealed class PolicyRuntimeEvaluationService { foreach (var request in group) { + PolicyEngineTelemetry.RecordEvaluationFailure(request.TenantId, packId, "bundle_not_found"); _logger.LogWarning( "Policy bundle not found for pack '{PackId}' version {Version}, skipping evaluation", packId, version); @@ -217,11 +261,12 @@ internal sealed class PolicyRuntimeEvaluationService continue; } - var document = DeserializeCompiledPolicy(bundle.Payload); + var document = bundle.CompiledDocument; if (document is null) { + PolicyEngineTelemetry.RecordEvaluationFailure("default", packId, "document_not_found"); _logger.LogWarning( - "Failed to deserialize policy bundle for pack '{PackId}' version {Version}", + "Compiled policy document not found for pack '{PackId}' version {Version}", packId, version); continue; } @@ -249,6 +294,8 @@ internal sealed class PolicyRuntimeEvaluationService { var response = CreateResponseFromCache(request, bundle.Digest, entry, CacheSource.InMemory, 0); results.Add(response); + cacheHits++; + PolicyEngineTelemetry.RecordEvaluation(request.TenantId, packId, "cached"); } else { @@ -294,6 +341,15 @@ internal sealed class PolicyRuntimeEvaluationService expiresAt); entriesToCache[key] = cacheEntry; + cacheMisses++; + + // Record metrics for each evaluation + PolicyEngineTelemetry.RecordEvaluationLatency(duration / 1000.0, request.TenantId, packId); + PolicyEngineTelemetry.RecordEvaluation(request.TenantId, packId, "full"); + if (!string.IsNullOrEmpty(result.RuleName)) + { + PolicyEngineTelemetry.RecordRuleFired(packId, result.RuleName); + } results.Add(new RuntimeEvaluationResponse( request.PackId, @@ -319,6 +375,17 @@ internal sealed class PolicyRuntimeEvaluationService } } + // Record batch-level metrics + var batchDuration = GetElapsedMilliseconds(batchStartTimestamp); + activity?.SetTag("batch.cache_hits", cacheHits); + activity?.SetTag("batch.cache_misses", cacheMisses); + activity?.SetTag("batch.duration_ms", batchDuration); + activity?.SetStatus(ActivityStatusCode.Ok); + + _logger.LogDebug( + "Batch evaluation completed: {Total} subjects, {CacheHits} cache hits, {CacheMisses} evaluated in {Duration}ms", + requests.Count, cacheHits, cacheMisses, batchDuration); + return results; } @@ -398,24 +465,6 @@ internal sealed class PolicyRuntimeEvaluationService return Convert.ToHexString(hash); } - private static PolicyIrDocument? DeserializeCompiledPolicy(ImmutableArray payload) - { - if (payload.IsDefaultOrEmpty) - { - return null; - } - - try - { - var json = Encoding.UTF8.GetString(payload.AsSpan()); - return JsonSerializer.Deserialize(json); - } - catch - { - return null; - } - } - private long GetElapsedMilliseconds(long startTimestamp) { var elapsed = _timeProvider.GetElapsedTime(startTimestamp); diff --git a/src/Policy/StellaOps.Policy.Engine/Simulation/RiskSimulationModels.cs b/src/Policy/StellaOps.Policy.Engine/Simulation/RiskSimulationModels.cs index b8a7ea72a..f96acdde4 100644 --- a/src/Policy/StellaOps.Policy.Engine/Simulation/RiskSimulationModels.cs +++ b/src/Policy/StellaOps.Policy.Engine/Simulation/RiskSimulationModels.cs @@ -61,7 +61,8 @@ public sealed record RiskSimulationResult( [property: JsonPropertyName("distribution")] RiskDistribution? Distribution, [property: JsonPropertyName("top_movers")] IReadOnlyList? TopMovers, [property: JsonPropertyName("aggregate_metrics")] AggregateRiskMetrics AggregateMetrics, - [property: JsonPropertyName("execution_time_ms")] double ExecutionTimeMs); + [property: JsonPropertyName("execution_time_ms")] double ExecutionTimeMs, + [property: JsonPropertyName("analytics")] SimulationAnalytics? Analytics = null); /// /// Computed risk score for a finding. diff --git a/src/Policy/StellaOps.Policy.Engine/Simulation/SimulationAnalytics.cs b/src/Policy/StellaOps.Policy.Engine/Simulation/SimulationAnalytics.cs new file mode 100644 index 000000000..027fa1646 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Simulation/SimulationAnalytics.cs @@ -0,0 +1,236 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.Policy.Engine.Simulation; + +/// +/// Extended simulation analytics including rule firing counts, heatmaps, traces, and delta summaries. +/// +public sealed record SimulationAnalytics( + [property: JsonPropertyName("rule_firing_counts")] RuleFiringCounts RuleFiringCounts, + [property: JsonPropertyName("heatmap")] SimulationHeatmap Heatmap, + [property: JsonPropertyName("sampled_traces")] SampledExplainTraces SampledTraces, + [property: JsonPropertyName("delta_summary")] SimulationDeltaSummary? DeltaSummary); + +/// +/// Rule firing counts aggregated across simulation runs. +/// +public sealed record RuleFiringCounts( + [property: JsonPropertyName("total_evaluations")] int TotalEvaluations, + [property: JsonPropertyName("total_rules_fired")] int TotalRulesFired, + [property: JsonPropertyName("rules_by_name")] ImmutableDictionary RulesByName, + [property: JsonPropertyName("rules_by_priority")] ImmutableDictionary RulesByPriority, + [property: JsonPropertyName("rules_by_outcome")] ImmutableDictionary RulesByOutcome, + [property: JsonPropertyName("rules_by_category")] ImmutableDictionary RulesByCategory, + [property: JsonPropertyName("top_rules")] ImmutableArray TopRules, + [property: JsonPropertyName("vex_override_counts")] VexOverrideCounts VexOverrides); + +/// +/// Fire count for a single rule. +/// +public sealed record RuleFireCount( + [property: JsonPropertyName("rule_name")] string RuleName, + [property: JsonPropertyName("priority")] int Priority, + [property: JsonPropertyName("category")] string? Category, + [property: JsonPropertyName("fire_count")] int FireCount, + [property: JsonPropertyName("fire_percentage")] double FirePercentage, + [property: JsonPropertyName("outcomes")] ImmutableDictionary OutcomeBreakdown, + [property: JsonPropertyName("avg_evaluation_us")] double AverageEvaluationMicroseconds); + +/// +/// VEX override aggregation. +/// +public sealed record VexOverrideCounts( + [property: JsonPropertyName("total_overrides")] int TotalOverrides, + [property: JsonPropertyName("by_vendor")] ImmutableDictionary ByVendor, + [property: JsonPropertyName("by_status")] ImmutableDictionary ByStatus, + [property: JsonPropertyName("by_justification")] ImmutableDictionary ByJustification); + +/// +/// Heatmap aggregates for visualization. +/// +public sealed record SimulationHeatmap( + [property: JsonPropertyName("rule_severity_matrix")] ImmutableArray RuleSeverityMatrix, + [property: JsonPropertyName("rule_outcome_matrix")] ImmutableArray RuleOutcomeMatrix, + [property: JsonPropertyName("finding_rule_coverage")] FindingRuleCoverage FindingRuleCoverage, + [property: JsonPropertyName("temporal_distribution")] ImmutableArray TemporalDistribution); + +/// +/// A cell in the heatmap matrix. +/// +public sealed record HeatmapCell( + [property: JsonPropertyName("x")] string X, + [property: JsonPropertyName("y")] string Y, + [property: JsonPropertyName("value")] int Value, + [property: JsonPropertyName("normalized")] double Normalized); + +/// +/// Coverage of findings by rules. +/// +public sealed record FindingRuleCoverage( + [property: JsonPropertyName("total_findings")] int TotalFindings, + [property: JsonPropertyName("findings_matched")] int FindingsMatched, + [property: JsonPropertyName("findings_unmatched")] int FindingsUnmatched, + [property: JsonPropertyName("coverage_percentage")] double CoveragePercentage, + [property: JsonPropertyName("rules_never_fired")] ImmutableArray RulesNeverFired, + [property: JsonPropertyName("findings_by_match_count")] ImmutableDictionary FindingsByMatchCount); + +/// +/// Temporal distribution bucket. +/// +public sealed record TemporalBucket( + [property: JsonPropertyName("bucket_start_ms")] long BucketStartMs, + [property: JsonPropertyName("bucket_end_ms")] long BucketEndMs, + [property: JsonPropertyName("evaluation_count")] int EvaluationCount, + [property: JsonPropertyName("rules_fired")] int RulesFired); + +/// +/// Sampled explain traces with deterministic ordering. +/// +public sealed record SampledExplainTraces( + [property: JsonPropertyName("sample_rate")] double SampleRate, + [property: JsonPropertyName("total_traces")] int TotalTraces, + [property: JsonPropertyName("sampled_count")] int SampledCount, + [property: JsonPropertyName("ordering")] TraceOrdering Ordering, + [property: JsonPropertyName("traces")] ImmutableArray Traces, + [property: JsonPropertyName("determinism_hash")] string DeterminismHash); + +/// +/// Deterministic ordering specification. +/// +public sealed record TraceOrdering( + [property: JsonPropertyName("primary_key")] string PrimaryKey, + [property: JsonPropertyName("secondary_key")] string? SecondaryKey, + [property: JsonPropertyName("direction")] string Direction); + +/// +/// A sampled trace with key metadata. +/// +public sealed record SampledTrace( + [property: JsonPropertyName("trace_id")] string TraceId, + [property: JsonPropertyName("finding_id")] string FindingId, + [property: JsonPropertyName("component_purl")] string? ComponentPurl, + [property: JsonPropertyName("advisory_id")] string? AdvisoryId, + [property: JsonPropertyName("final_outcome")] string FinalOutcome, + [property: JsonPropertyName("assigned_severity")] string? AssignedSeverity, + [property: JsonPropertyName("rules_evaluated")] int RulesEvaluated, + [property: JsonPropertyName("rules_fired")] int RulesFired, + [property: JsonPropertyName("vex_applied")] bool VexApplied, + [property: JsonPropertyName("evaluation_ms")] double EvaluationMs, + [property: JsonPropertyName("rule_sequence")] ImmutableArray RuleSequence, + [property: JsonPropertyName("sample_reason")] string SampleReason); + +/// +/// Delta summary comparing simulation results. +/// +public sealed record SimulationDeltaSummary( + [property: JsonPropertyName("comparison_type")] SimulationComparisonType ComparisonType, + [property: JsonPropertyName("base_policy_ref")] string BasePolicyRef, + [property: JsonPropertyName("candidate_policy_ref")] string? CandidatePolicyRef, + [property: JsonPropertyName("total_findings")] int TotalFindings, + [property: JsonPropertyName("outcome_changes")] OutcomeChangeSummary OutcomeChanges, + [property: JsonPropertyName("severity_changes")] SeverityChangeSummary SeverityChanges, + [property: JsonPropertyName("rule_changes")] RuleChangeSummary RuleChanges, + [property: JsonPropertyName("high_impact_findings")] ImmutableArray HighImpactFindings, + [property: JsonPropertyName("determinism_hash")] string DeterminismHash); + +/// +/// Type of simulation comparison. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum SimulationComparisonType +{ + /// Single policy snapshot. + [JsonPropertyName("snapshot")] + Snapshot, + + /// Comparing two policy versions. + [JsonPropertyName("version_compare")] + VersionCompare, + + /// What-if analysis with hypothetical changes. + [JsonPropertyName("whatif")] + WhatIf, + + /// Batch comparison across multiple inputs. + [JsonPropertyName("batch")] + Batch +} + +/// +/// Summary of outcome changes. +/// +public sealed record OutcomeChangeSummary( + [property: JsonPropertyName("unchanged")] int Unchanged, + [property: JsonPropertyName("improved")] int Improved, + [property: JsonPropertyName("regressed")] int Regressed, + [property: JsonPropertyName("transitions")] ImmutableArray Transitions); + +/// +/// A specific outcome transition. +/// +public sealed record OutcomeTransition( + [property: JsonPropertyName("from_outcome")] string FromOutcome, + [property: JsonPropertyName("to_outcome")] string ToOutcome, + [property: JsonPropertyName("count")] int Count, + [property: JsonPropertyName("percentage")] double Percentage, + [property: JsonPropertyName("is_improvement")] bool IsImprovement); + +/// +/// Summary of severity changes. +/// +public sealed record SeverityChangeSummary( + [property: JsonPropertyName("unchanged")] int Unchanged, + [property: JsonPropertyName("escalated")] int Escalated, + [property: JsonPropertyName("deescalated")] int Deescalated, + [property: JsonPropertyName("transitions")] ImmutableArray Transitions); + +/// +/// A specific severity transition. +/// +public sealed record SeverityTransition( + [property: JsonPropertyName("from_severity")] string FromSeverity, + [property: JsonPropertyName("to_severity")] string ToSeverity, + [property: JsonPropertyName("count")] int Count, + [property: JsonPropertyName("percentage")] double Percentage); + +/// +/// Summary of rule behavior changes. +/// +public sealed record RuleChangeSummary( + [property: JsonPropertyName("rules_added")] ImmutableArray RulesAdded, + [property: JsonPropertyName("rules_removed")] ImmutableArray RulesRemoved, + [property: JsonPropertyName("rules_modified")] ImmutableArray RulesModified, + [property: JsonPropertyName("fire_rate_changes")] ImmutableArray FireRateChanges); + +/// +/// A rule modification between versions. +/// +public sealed record RuleModification( + [property: JsonPropertyName("rule_name")] string RuleName, + [property: JsonPropertyName("modification_type")] string ModificationType, + [property: JsonPropertyName("description")] string Description); + +/// +/// Change in rule fire rate. +/// +public sealed record RuleFireRateChange( + [property: JsonPropertyName("rule_name")] string RuleName, + [property: JsonPropertyName("base_fire_rate")] double BaseFireRate, + [property: JsonPropertyName("candidate_fire_rate")] double CandidateFireRate, + [property: JsonPropertyName("change_percentage")] double ChangePercentage, + [property: JsonPropertyName("is_significant")] bool IsSignificant); + +/// +/// A finding with high impact from policy changes. +/// +public sealed record HighImpactFinding( + [property: JsonPropertyName("finding_id")] string FindingId, + [property: JsonPropertyName("component_purl")] string? ComponentPurl, + [property: JsonPropertyName("advisory_id")] string? AdvisoryId, + [property: JsonPropertyName("base_outcome")] string BaseOutcome, + [property: JsonPropertyName("candidate_outcome")] string? CandidateOutcome, + [property: JsonPropertyName("base_severity")] string? BaseSeverity, + [property: JsonPropertyName("candidate_severity")] string? CandidateSeverity, + [property: JsonPropertyName("impact_score")] double ImpactScore, + [property: JsonPropertyName("impact_reason")] string ImpactReason); diff --git a/src/Policy/StellaOps.Policy.Engine/Simulation/SimulationAnalyticsService.cs b/src/Policy/StellaOps.Policy.Engine/Simulation/SimulationAnalyticsService.cs new file mode 100644 index 000000000..e8516e038 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Simulation/SimulationAnalyticsService.cs @@ -0,0 +1,811 @@ +using System.Collections.Immutable; +using System.Security.Cryptography; +using System.Text; +using StellaOps.Policy.Engine.Telemetry; + +namespace StellaOps.Policy.Engine.Simulation; + +/// +/// Service for computing simulation analytics including rule firing counts, heatmaps, +/// sampled traces, and delta summaries. +/// +public sealed class SimulationAnalyticsService +{ + private static readonly ImmutableArray OutcomeSeverityOrder = ImmutableArray.Create( + "allow", "info", "warn", "review", "block", "deny", "critical"); + + private static readonly ImmutableArray SeverityOrder = ImmutableArray.Create( + "informational", "low", "medium", "high", "critical"); + + /// + /// Computes full simulation analytics from rule hit traces. + /// + public SimulationAnalytics ComputeAnalytics( + string policyRef, + IReadOnlyList traces, + IReadOnlyList findings, + SimulationAnalyticsOptions? options = null) + { + options ??= SimulationAnalyticsOptions.Default; + + var firingCounts = ComputeRuleFiringCounts(traces, findings.Count); + var heatmap = ComputeHeatmap(traces, findings, options); + var sampledTraces = ComputeSampledTraces(traces, findings, options); + + return new SimulationAnalytics( + firingCounts, + heatmap, + sampledTraces, + DeltaSummary: null); + } + + /// + /// Computes delta summary comparing base and candidate simulation results. + /// + public SimulationDeltaSummary ComputeDeltaSummary( + string basePolicyRef, + string candidatePolicyRef, + IReadOnlyList baseResults, + IReadOnlyList candidateResults, + SimulationComparisonType comparisonType = SimulationComparisonType.VersionCompare) + { + var baseByFinding = baseResults.ToDictionary(r => r.FindingId); + var candidateByFinding = candidateResults.ToDictionary(r => r.FindingId); + + var outcomeChanges = ComputeOutcomeChanges(baseByFinding, candidateByFinding); + var severityChanges = ComputeSeverityChanges(baseByFinding, candidateByFinding); + var ruleChanges = ComputeRuleChanges(baseResults, candidateResults); + var highImpact = ComputeHighImpactFindings(baseByFinding, candidateByFinding); + + var hashInput = $"{basePolicyRef}:{candidatePolicyRef}:{baseResults.Count}:{candidateResults.Count}"; + var determinismHash = ComputeHash(hashInput); + + return new SimulationDeltaSummary( + comparisonType, + basePolicyRef, + candidatePolicyRef, + TotalFindings: baseResults.Count, + outcomeChanges, + severityChanges, + ruleChanges, + highImpact, + determinismHash); + } + + /// + /// Computes rule firing counts from traces. + /// + public RuleFiringCounts ComputeRuleFiringCounts( + IReadOnlyList traces, + int totalEvaluations) + { + var ruleStats = new Dictionary(); + var byPriority = new Dictionary(); + var byOutcome = new Dictionary(); + var byCategory = new Dictionary(); + var vexByVendor = new Dictionary(); + var vexByStatus = new Dictionary(); + var vexByJustification = new Dictionary(); + var totalFired = 0; + var totalVexOverrides = 0; + + foreach (var trace in traces) + { + if (!trace.ExpressionResult) + { + continue; + } + + totalFired++; + + // Rule stats + if (!ruleStats.TryGetValue(trace.RuleName, out var stats)) + { + stats = new RuleStats(trace.RuleName, trace.RulePriority, trace.RuleCategory); + ruleStats[trace.RuleName] = stats; + } + + stats.FireCount++; + stats.TotalEvaluationUs += trace.EvaluationMicroseconds; + stats.IncrementOutcome(trace.Outcome); + + // Priority aggregation + byPriority.TryGetValue(trace.RulePriority, out var priorityCount); + byPriority[trace.RulePriority] = priorityCount + 1; + + // Outcome aggregation + byOutcome.TryGetValue(trace.Outcome, out var outcomeCount); + byOutcome[trace.Outcome] = outcomeCount + 1; + + // Category aggregation + if (!string.IsNullOrWhiteSpace(trace.RuleCategory)) + { + byCategory.TryGetValue(trace.RuleCategory, out var categoryCount); + byCategory[trace.RuleCategory] = categoryCount + 1; + } + + // VEX overrides + if (trace.IsVexOverride) + { + totalVexOverrides++; + + if (!string.IsNullOrWhiteSpace(trace.VexVendor)) + { + vexByVendor.TryGetValue(trace.VexVendor, out var vendorCount); + vexByVendor[trace.VexVendor] = vendorCount + 1; + } + + if (!string.IsNullOrWhiteSpace(trace.VexStatus)) + { + vexByStatus.TryGetValue(trace.VexStatus, out var statusCount); + vexByStatus[trace.VexStatus] = statusCount + 1; + } + + if (!string.IsNullOrWhiteSpace(trace.VexJustification)) + { + vexByJustification.TryGetValue(trace.VexJustification, out var justCount); + vexByJustification[trace.VexJustification] = justCount + 1; + } + } + } + + // Build rule fire counts + var ruleFireCounts = ruleStats.Values + .Select(s => new RuleFireCount( + s.RuleName, + s.Priority, + s.Category, + s.FireCount, + totalEvaluations > 0 ? (double)s.FireCount / totalEvaluations * 100 : 0, + s.OutcomeCounts.ToImmutableDictionary(), + s.FireCount > 0 ? (double)s.TotalEvaluationUs / s.FireCount : 0)) + .ToImmutableDictionary(r => r.RuleName); + + var topRules = ruleFireCounts.Values + .OrderByDescending(r => r.FireCount) + .Take(10) + .ToImmutableArray(); + + var vexOverrides = new VexOverrideCounts( + totalVexOverrides, + vexByVendor.ToImmutableDictionary(), + vexByStatus.ToImmutableDictionary(), + vexByJustification.ToImmutableDictionary()); + + return new RuleFiringCounts( + totalEvaluations, + totalFired, + ruleFireCounts, + byPriority.ToImmutableDictionary(), + byOutcome.ToImmutableDictionary(), + byCategory.ToImmutableDictionary(), + topRules, + vexOverrides); + } + + /// + /// Computes heatmap aggregates for visualization. + /// + public SimulationHeatmap ComputeHeatmap( + IReadOnlyList traces, + IReadOnlyList findings, + SimulationAnalyticsOptions options) + { + var ruleSeverityMatrix = ComputeRuleSeverityMatrix(traces); + var ruleOutcomeMatrix = ComputeRuleOutcomeMatrix(traces); + var findingCoverage = ComputeFindingRuleCoverage(traces, findings); + var temporalDist = ComputeTemporalDistribution(traces, options.TemporalBucketMs); + + return new SimulationHeatmap( + ruleSeverityMatrix, + ruleOutcomeMatrix, + findingCoverage, + temporalDist); + } + + /// + /// Computes sampled explain traces with deterministic ordering. + /// + public SampledExplainTraces ComputeSampledTraces( + IReadOnlyList traces, + IReadOnlyList findings, + SimulationAnalyticsOptions options) + { + // Group traces by finding + var tracesByFinding = traces + .GroupBy(t => t.ComponentPurl ?? t.AdvisoryId ?? "unknown") + .ToDictionary(g => g.Key, g => g.ToList()); + + var findingsById = findings.ToDictionary(f => f.FindingId); + + // Deterministic ordering by finding_id, then rule_priority + var ordering = new TraceOrdering("finding_id", "rule_priority", "ascending"); + + // Sample traces deterministically + var sampledList = new List(); + var totalTraceCount = 0; + + foreach (var finding in findings.OrderBy(f => f.FindingId, StringComparer.Ordinal)) + { + var key = finding.ComponentPurl ?? finding.AdvisoryId ?? finding.FindingId; + if (!tracesByFinding.TryGetValue(key, out var findingTraces)) + { + continue; + } + + totalTraceCount += findingTraces.Count; + + // Deterministic sampling based on finding_id hash + var sampleHash = ComputeHash(finding.FindingId); + var sampleValue = Math.Abs(sampleHash.GetHashCode()) % 100; + var shouldSample = sampleValue < (int)(options.TraceSampleRate * 100); + + if (!shouldSample && sampledList.Count >= options.MaxSampledTraces) + { + continue; + } + + // Always sample high-impact findings + var hasFiredRule = findingTraces.Any(t => t.ExpressionResult); + var isHighSeverity = findingTraces.Any(t => + t.AssignedSeverity?.Equals("critical", StringComparison.OrdinalIgnoreCase) == true || + t.AssignedSeverity?.Equals("high", StringComparison.OrdinalIgnoreCase) == true); + var hasVexOverride = findingTraces.Any(t => t.IsVexOverride); + + var sampleReason = DetermineSampleReason(shouldSample, isHighSeverity, hasVexOverride); + + if (!shouldSample && !isHighSeverity && !hasVexOverride) + { + continue; + } + + var orderedTraces = findingTraces.OrderBy(t => t.RulePriority).ToList(); + var finalTrace = orderedTraces.LastOrDefault(t => t.ExpressionResult) ?? orderedTraces.LastOrDefault(); + + if (finalTrace == null) + { + continue; + } + + var ruleSequence = orderedTraces + .Where(t => t.ExpressionResult) + .Select(t => t.RuleName) + .ToImmutableArray(); + + sampledList.Add(new SampledTrace( + TraceId: $"{finding.FindingId}:{finalTrace.SpanId}", + FindingId: finding.FindingId, + ComponentPurl: finding.ComponentPurl, + AdvisoryId: finding.AdvisoryId, + FinalOutcome: finalTrace.Outcome, + AssignedSeverity: finalTrace.AssignedSeverity, + RulesEvaluated: findingTraces.Count, + RulesFired: findingTraces.Count(t => t.ExpressionResult), + VexApplied: hasVexOverride, + EvaluationMs: findingTraces.Sum(t => t.EvaluationMicroseconds) / 1000.0, + RuleSequence: ruleSequence, + SampleReason: sampleReason)); + + if (sampledList.Count >= options.MaxSampledTraces) + { + break; + } + } + + // Compute determinism hash from ordered sample + var hashBuilder = new StringBuilder(); + foreach (var sample in sampledList.OrderBy(s => s.FindingId, StringComparer.Ordinal)) + { + hashBuilder.Append(sample.FindingId); + hashBuilder.Append(':'); + hashBuilder.Append(sample.FinalOutcome); + hashBuilder.Append(';'); + } + + var determinismHash = ComputeHash(hashBuilder.ToString()); + + return new SampledExplainTraces( + options.TraceSampleRate, + totalTraceCount, + sampledList.Count, + ordering, + sampledList.ToImmutableArray(), + determinismHash); + } + + private ImmutableArray ComputeRuleSeverityMatrix(IReadOnlyList traces) + { + var matrix = new Dictionary<(string rule, string severity), int>(); + + foreach (var trace in traces.Where(t => t.ExpressionResult && !string.IsNullOrWhiteSpace(t.AssignedSeverity))) + { + var key = (trace.RuleName, trace.AssignedSeverity!); + matrix.TryGetValue(key, out var count); + matrix[key] = count + 1; + } + + var maxValue = matrix.Values.DefaultIfEmpty(1).Max(); + + return matrix + .Select(kvp => new HeatmapCell( + kvp.Key.rule, + kvp.Key.severity, + kvp.Value, + maxValue > 0 ? (double)kvp.Value / maxValue : 0)) + .OrderBy(c => c.X, StringComparer.Ordinal) + .ThenBy(c => SeverityOrder.IndexOf(c.Y.ToLowerInvariant())) + .ToImmutableArray(); + } + + private ImmutableArray ComputeRuleOutcomeMatrix(IReadOnlyList traces) + { + var matrix = new Dictionary<(string rule, string outcome), int>(); + + foreach (var trace in traces.Where(t => t.ExpressionResult)) + { + var key = (trace.RuleName, trace.Outcome); + matrix.TryGetValue(key, out var count); + matrix[key] = count + 1; + } + + var maxValue = matrix.Values.DefaultIfEmpty(1).Max(); + + return matrix + .Select(kvp => new HeatmapCell( + kvp.Key.rule, + kvp.Key.outcome, + kvp.Value, + maxValue > 0 ? (double)kvp.Value / maxValue : 0)) + .OrderBy(c => c.X, StringComparer.Ordinal) + .ThenBy(c => OutcomeSeverityOrder.IndexOf(c.Y.ToLowerInvariant())) + .ToImmutableArray(); + } + + private FindingRuleCoverage ComputeFindingRuleCoverage( + IReadOnlyList traces, + IReadOnlyList findings) + { + var rulesThatFired = traces + .Where(t => t.ExpressionResult) + .Select(t => t.RuleName) + .ToHashSet(); + + var allRules = traces + .Select(t => t.RuleName) + .Distinct() + .ToHashSet(); + + var rulesNeverFired = allRules.Except(rulesThatFired).ToImmutableArray(); + + // Group by finding to count matches per finding + var findingMatchCounts = traces + .Where(t => t.ExpressionResult) + .GroupBy(t => t.ComponentPurl ?? t.AdvisoryId ?? "unknown") + .ToDictionary(g => g.Key, g => g.Select(t => t.RuleName).Distinct().Count()); + + var matchCountDistribution = findingMatchCounts.Values + .GroupBy(c => c) + .ToDictionary(g => g.Key, g => g.Count()) + .ToImmutableDictionary(); + + var findingsMatched = findingMatchCounts.Count; + var findingsUnmatched = findings.Count - findingsMatched; + + return new FindingRuleCoverage( + findings.Count, + findingsMatched, + findingsUnmatched, + findings.Count > 0 ? (double)findingsMatched / findings.Count * 100 : 0, + rulesNeverFired, + matchCountDistribution); + } + + private ImmutableArray ComputeTemporalDistribution( + IReadOnlyList traces, + long bucketMs) + { + if (traces.Count == 0) + { + return ImmutableArray.Empty; + } + + var minTime = traces.Min(t => t.EvaluationTimestamp); + var maxTime = traces.Max(t => t.EvaluationTimestamp); + var totalMs = (long)(maxTime - minTime).TotalMilliseconds; + + if (totalMs <= 0) + { + return ImmutableArray.Create(new TemporalBucket(0, bucketMs, traces.Count, traces.Count(t => t.ExpressionResult))); + } + + var buckets = new Dictionary(); + + foreach (var trace in traces) + { + var offsetMs = (long)(trace.EvaluationTimestamp - minTime).TotalMilliseconds; + var bucketStart = (offsetMs / bucketMs) * bucketMs; + + buckets.TryGetValue(bucketStart, out var counts); + buckets[bucketStart] = (counts.evalCount + 1, counts.fireCount + (trace.ExpressionResult ? 1 : 0)); + } + + return buckets + .OrderBy(kvp => kvp.Key) + .Select(kvp => new TemporalBucket(kvp.Key, kvp.Key + bucketMs, kvp.Value.evalCount, kvp.Value.fireCount)) + .ToImmutableArray(); + } + + private OutcomeChangeSummary ComputeOutcomeChanges( + Dictionary baseResults, + Dictionary candidateResults) + { + var unchanged = 0; + var improved = 0; + var regressed = 0; + var transitionCounts = new Dictionary<(string from, string to), int>(); + + foreach (var (findingId, baseResult) in baseResults) + { + if (!candidateResults.TryGetValue(findingId, out var candidateResult)) + { + continue; + } + + if (baseResult.Outcome == candidateResult.Outcome) + { + unchanged++; + } + else + { + var key = (baseResult.Outcome, candidateResult.Outcome); + transitionCounts.TryGetValue(key, out var count); + transitionCounts[key] = count + 1; + + var isImprovement = IsOutcomeImprovement(baseResult.Outcome, candidateResult.Outcome); + if (isImprovement) + { + improved++; + } + else + { + regressed++; + } + } + } + + var total = baseResults.Count; + var transitions = transitionCounts + .Select(kvp => new OutcomeTransition( + kvp.Key.from, + kvp.Key.to, + kvp.Value, + total > 0 ? (double)kvp.Value / total * 100 : 0, + IsOutcomeImprovement(kvp.Key.from, kvp.Key.to))) + .OrderByDescending(t => t.Count) + .ToImmutableArray(); + + return new OutcomeChangeSummary(unchanged, improved, regressed, transitions); + } + + private SeverityChangeSummary ComputeSeverityChanges( + Dictionary baseResults, + Dictionary candidateResults) + { + var unchanged = 0; + var escalated = 0; + var deescalated = 0; + var transitionCounts = new Dictionary<(string from, string to), int>(); + + foreach (var (findingId, baseResult) in baseResults) + { + if (!candidateResults.TryGetValue(findingId, out var candidateResult)) + { + continue; + } + + var baseSeverity = baseResult.Severity ?? "unknown"; + var candidateSeverity = candidateResult.Severity ?? "unknown"; + + if (baseSeverity == candidateSeverity) + { + unchanged++; + } + else + { + var key = (baseSeverity, candidateSeverity); + transitionCounts.TryGetValue(key, out var count); + transitionCounts[key] = count + 1; + + var baseIdx = SeverityOrder.IndexOf(baseSeverity.ToLowerInvariant()); + var candidateIdx = SeverityOrder.IndexOf(candidateSeverity.ToLowerInvariant()); + + if (candidateIdx > baseIdx) + { + escalated++; + } + else + { + deescalated++; + } + } + } + + var total = baseResults.Count; + var transitions = transitionCounts + .Select(kvp => new SeverityTransition( + kvp.Key.from, + kvp.Key.to, + kvp.Value, + total > 0 ? (double)kvp.Value / total * 100 : 0)) + .OrderByDescending(t => t.Count) + .ToImmutableArray(); + + return new SeverityChangeSummary(unchanged, escalated, deescalated, transitions); + } + + private RuleChangeSummary ComputeRuleChanges( + IReadOnlyList baseResults, + IReadOnlyList candidateResults) + { + var baseRules = baseResults + .SelectMany(r => r.FiredRules ?? Array.Empty()) + .Distinct() + .ToHashSet(); + + var candidateRules = candidateResults + .SelectMany(r => r.FiredRules ?? Array.Empty()) + .Distinct() + .ToHashSet(); + + var rulesAdded = candidateRules.Except(baseRules).ToImmutableArray(); + var rulesRemoved = baseRules.Except(candidateRules).ToImmutableArray(); + + // Compute fire rate changes for common rules + var baseFireRates = ComputeFireRates(baseResults); + var candidateFireRates = ComputeFireRates(candidateResults); + + var fireRateChanges = baseRules.Intersect(candidateRules) + .Select(rule => + { + var baseRate = baseFireRates.GetValueOrDefault(rule, 0); + var candidateRate = candidateFireRates.GetValueOrDefault(rule, 0); + var change = candidateRate - baseRate; + return new RuleFireRateChange( + rule, + baseRate, + candidateRate, + change, + Math.Abs(change) > 5.0); // >5% change is significant + }) + .Where(c => Math.Abs(c.ChangePercentage) > 1.0) // Only show changes > 1% + .OrderByDescending(c => Math.Abs(c.ChangePercentage)) + .Take(20) + .ToImmutableArray(); + + return new RuleChangeSummary( + rulesAdded, + rulesRemoved, + ImmutableArray.Empty, // Would require policy diff analysis + fireRateChanges); + } + + private Dictionary ComputeFireRates(IReadOnlyList results) + { + var ruleCounts = new Dictionary(); + + foreach (var result in results) + { + foreach (var rule in result.FiredRules ?? Array.Empty()) + { + ruleCounts.TryGetValue(rule, out var count); + ruleCounts[rule] = count + 1; + } + } + + var total = results.Count; + return ruleCounts.ToDictionary( + kvp => kvp.Key, + kvp => total > 0 ? (double)kvp.Value / total * 100 : 0); + } + + private ImmutableArray ComputeHighImpactFindings( + Dictionary baseResults, + Dictionary candidateResults) + { + var highImpact = new List(); + + foreach (var (findingId, baseResult) in baseResults) + { + if (!candidateResults.TryGetValue(findingId, out var candidateResult)) + { + continue; + } + + var impactScore = ComputeImpactScore(baseResult, candidateResult); + if (impactScore < 0.3) // Threshold for high impact + { + continue; + } + + var impactReason = DetermineImpactReason(baseResult, candidateResult); + + highImpact.Add(new HighImpactFinding( + findingId, + baseResult.ComponentPurl, + baseResult.AdvisoryId, + baseResult.Outcome, + candidateResult.Outcome, + baseResult.Severity, + candidateResult.Severity, + impactScore, + impactReason)); + } + + return highImpact + .OrderByDescending(f => f.ImpactScore) + .Take(50) + .ToImmutableArray(); + } + + private double ComputeImpactScore(SimulationFindingResult baseResult, SimulationFindingResult candidateResult) + { + var score = 0.0; + + // Outcome change weight + if (baseResult.Outcome != candidateResult.Outcome) + { + var baseIdx = OutcomeSeverityOrder.IndexOf(baseResult.Outcome.ToLowerInvariant()); + var candidateIdx = OutcomeSeverityOrder.IndexOf(candidateResult.Outcome.ToLowerInvariant()); + score += Math.Abs(candidateIdx - baseIdx) * 0.2; + } + + // Severity change weight + var baseSeverity = baseResult.Severity ?? "unknown"; + var candidateSeverity = candidateResult.Severity ?? "unknown"; + if (baseSeverity != candidateSeverity) + { + var baseIdx = SeverityOrder.IndexOf(baseSeverity.ToLowerInvariant()); + var candidateIdx = SeverityOrder.IndexOf(candidateSeverity.ToLowerInvariant()); + score += Math.Abs(candidateIdx - baseIdx) * 0.15; + } + + return Math.Min(1.0, score); + } + + private string DetermineImpactReason(SimulationFindingResult baseResult, SimulationFindingResult candidateResult) + { + var reasons = new List(); + + if (baseResult.Outcome != candidateResult.Outcome) + { + reasons.Add($"Outcome changed from '{baseResult.Outcome}' to '{candidateResult.Outcome}'"); + } + + if (baseResult.Severity != candidateResult.Severity) + { + reasons.Add($"Severity changed from '{baseResult.Severity}' to '{candidateResult.Severity}'"); + } + + return string.Join("; ", reasons); + } + + private bool IsOutcomeImprovement(string from, string to) + { + var fromIdx = OutcomeSeverityOrder.IndexOf(from.ToLowerInvariant()); + var toIdx = OutcomeSeverityOrder.IndexOf(to.ToLowerInvariant()); + + // Lower index = less severe = improvement + return toIdx < fromIdx; + } + + private static string DetermineSampleReason(bool randomSample, bool highSeverity, bool vexOverride) + { + if (vexOverride) + { + return "vex_override"; + } + + if (highSeverity) + { + return "high_severity"; + } + + return randomSample ? "random_sample" : "coverage"; + } + + private static string ComputeHash(string input) + { + var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input)); + return Convert.ToHexString(bytes)[..16].ToLowerInvariant(); + } + + private sealed class RuleStats + { + public string RuleName { get; } + public int Priority { get; } + public string? Category { get; } + public int FireCount { get; set; } + public long TotalEvaluationUs { get; set; } + public Dictionary OutcomeCounts { get; } = new(); + + public RuleStats(string ruleName, int priority, string? category) + { + RuleName = ruleName; + Priority = priority; + Category = category; + } + + public void IncrementOutcome(string outcome) + { + OutcomeCounts.TryGetValue(outcome, out var count); + OutcomeCounts[outcome] = count + 1; + } + } +} + +/// +/// Options for simulation analytics computation. +/// +public sealed record SimulationAnalyticsOptions +{ + /// + /// Sample rate for traces (0.0 to 1.0). + /// + public double TraceSampleRate { get; init; } = 0.1; + + /// + /// Maximum number of sampled traces to include. + /// + public int MaxSampledTraces { get; init; } = 100; + + /// + /// Temporal bucket size in milliseconds. + /// + public long TemporalBucketMs { get; init; } = 100; + + /// + /// Maximum number of top rules to include. + /// + public int MaxTopRules { get; init; } = 10; + + /// + /// Significance threshold for fire rate changes (percentage). + /// + public double FireRateSignificanceThreshold { get; init; } = 5.0; + + /// + /// Default options. + /// + public static SimulationAnalyticsOptions Default { get; } = new(); + + /// + /// Options for quick simulations (lower sampling, faster). + /// + public static SimulationAnalyticsOptions Quick { get; } = new() + { + TraceSampleRate = 0.01, + MaxSampledTraces = 20, + TemporalBucketMs = 500 + }; + + /// + /// Options for batch simulations (balanced). + /// + public static SimulationAnalyticsOptions Batch { get; } = new() + { + TraceSampleRate = 0.05, + MaxSampledTraces = 50, + TemporalBucketMs = 200 + }; +} + +/// +/// Result of a single finding simulation (for delta comparison). +/// +public sealed record SimulationFindingResult( + string FindingId, + string? ComponentPurl, + string? AdvisoryId, + string Outcome, + string? Severity, + IReadOnlyList? FiredRules); diff --git a/src/Policy/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj b/src/Policy/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj index 88f67b821..bc52e3ca3 100644 --- a/src/Policy/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj +++ b/src/Policy/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj @@ -10,6 +10,8 @@ + + diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/EffectiveFindingDocument.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/EffectiveFindingDocument.cs new file mode 100644 index 000000000..e39e64b77 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/EffectiveFindingDocument.cs @@ -0,0 +1,325 @@ +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Documents; + +/// +/// MongoDB document representing an effective finding after policy evaluation. +/// Collection: effective_finding_{policyId} +/// Tenant-scoped with unique constraint on (tenantId, componentPurl, advisoryId). +/// +[BsonIgnoreExtraElements] +public sealed class EffectiveFindingDocument +{ + /// + /// Unique identifier: sha256:{hash of tenantId|policyId|componentPurl|advisoryId} + /// + [BsonId] + [BsonElement("_id")] + public string Id { get; set; } = string.Empty; + + /// + /// Tenant identifier (normalized to lowercase). + /// + [BsonElement("tenantId")] + public string TenantId { get; set; } = string.Empty; + + /// + /// Policy identifier. + /// + [BsonElement("policyId")] + public string PolicyId { get; set; } = string.Empty; + + /// + /// Policy version at time of evaluation. + /// + [BsonElement("policyVersion")] + public int PolicyVersion { get; set; } + + /// + /// Component PURL from the SBOM. + /// + [BsonElement("componentPurl")] + public string ComponentPurl { get; set; } = string.Empty; + + /// + /// Component name. + /// + [BsonElement("componentName")] + public string ComponentName { get; set; } = string.Empty; + + /// + /// Component version. + /// + [BsonElement("componentVersion")] + public string ComponentVersion { get; set; } = string.Empty; + + /// + /// Package ecosystem (npm, maven, pypi, etc.). + /// + [BsonElement("ecosystem")] + [BsonIgnoreIfNull] + public string? Ecosystem { get; set; } + + /// + /// Advisory identifier (CVE, GHSA, etc.). + /// + [BsonElement("advisoryId")] + public string AdvisoryId { get; set; } = string.Empty; + + /// + /// Advisory source. + /// + [BsonElement("advisorySource")] + public string AdvisorySource { get; set; } = string.Empty; + + /// + /// Vulnerability ID (may differ from advisory ID). + /// + [BsonElement("vulnerabilityId")] + [BsonIgnoreIfNull] + public string? VulnerabilityId { get; set; } + + /// + /// Policy evaluation status (affected, blocked, suppressed, etc.). + /// + [BsonElement("status")] + public string Status { get; set; } = string.Empty; + + /// + /// Normalized severity (Critical, High, Medium, Low, None). + /// + [BsonElement("severity")] + [BsonIgnoreIfNull] + public string? Severity { get; set; } + + /// + /// CVSS score (if available). + /// + [BsonElement("cvssScore")] + [BsonIgnoreIfNull] + public double? CvssScore { get; set; } + + /// + /// Rule name that matched. + /// + [BsonElement("ruleName")] + [BsonIgnoreIfNull] + public string? RuleName { get; set; } + + /// + /// Rule priority. + /// + [BsonElement("rulePriority")] + [BsonIgnoreIfNull] + public int? RulePriority { get; set; } + + /// + /// VEX status overlay (if VEX was applied). + /// + [BsonElement("vexStatus")] + [BsonIgnoreIfNull] + public string? VexStatus { get; set; } + + /// + /// VEX justification (if VEX was applied). + /// + [BsonElement("vexJustification")] + [BsonIgnoreIfNull] + public string? VexJustification { get; set; } + + /// + /// VEX provider/vendor. + /// + [BsonElement("vexVendor")] + [BsonIgnoreIfNull] + public string? VexVendor { get; set; } + + /// + /// Whether a VEX override was applied. + /// + [BsonElement("isVexOverride")] + public bool IsVexOverride { get; set; } + + /// + /// SBOM ID where component was found. + /// + [BsonElement("sbomId")] + [BsonIgnoreIfNull] + public string? SbomId { get; set; } + + /// + /// Product key associated with the SBOM. + /// + [BsonElement("productKey")] + [BsonIgnoreIfNull] + public string? ProductKey { get; set; } + + /// + /// Policy evaluation annotations. + /// + [BsonElement("annotations")] + public Dictionary Annotations { get; set; } = new(); + + /// + /// Current history version (incremented on each update). + /// + [BsonElement("historyVersion")] + public long HistoryVersion { get; set; } + + /// + /// Reference to the policy run that produced this finding. + /// + [BsonElement("policyRunId")] + [BsonIgnoreIfNull] + public string? PolicyRunId { get; set; } + + /// + /// Trace ID for distributed tracing. + /// + [BsonElement("traceId")] + [BsonIgnoreIfNull] + public string? TraceId { get; set; } + + /// + /// Span ID for distributed tracing. + /// + [BsonElement("spanId")] + [BsonIgnoreIfNull] + public string? SpanId { get; set; } + + /// + /// When this finding was first created. + /// + [BsonElement("createdAt")] + public DateTimeOffset CreatedAt { get; set; } + + /// + /// When this finding was last updated. + /// + [BsonElement("updatedAt")] + public DateTimeOffset UpdatedAt { get; set; } + + /// + /// Content hash for deduplication and change detection. + /// + [BsonElement("contentHash")] + public string ContentHash { get; set; } = string.Empty; +} + +/// +/// MongoDB document for effective finding history (append-only). +/// Collection: effective_finding_history_{policyId} +/// +[BsonIgnoreExtraElements] +public sealed class EffectiveFindingHistoryDocument +{ + /// + /// Unique identifier: {findingId}:v{version} + /// + [BsonId] + [BsonElement("_id")] + public string Id { get; set; } = string.Empty; + + /// + /// Tenant identifier. + /// + [BsonElement("tenantId")] + public string TenantId { get; set; } = string.Empty; + + /// + /// Reference to the effective finding. + /// + [BsonElement("findingId")] + public string FindingId { get; set; } = string.Empty; + + /// + /// Policy identifier. + /// + [BsonElement("policyId")] + public string PolicyId { get; set; } = string.Empty; + + /// + /// History version number (monotonically increasing). + /// + [BsonElement("version")] + public long Version { get; set; } + + /// + /// Type of change (Created, StatusChanged, SeverityChanged, VexApplied, etc.). + /// + [BsonElement("changeType")] + public string ChangeType { get; set; } = string.Empty; + + /// + /// Previous status (for status changes). + /// + [BsonElement("previousStatus")] + [BsonIgnoreIfNull] + public string? PreviousStatus { get; set; } + + /// + /// New status. + /// + [BsonElement("newStatus")] + public string NewStatus { get; set; } = string.Empty; + + /// + /// Previous severity (for severity changes). + /// + [BsonElement("previousSeverity")] + [BsonIgnoreIfNull] + public string? PreviousSeverity { get; set; } + + /// + /// New severity. + /// + [BsonElement("newSeverity")] + [BsonIgnoreIfNull] + public string? NewSeverity { get; set; } + + /// + /// Previous content hash. + /// + [BsonElement("previousContentHash")] + [BsonIgnoreIfNull] + public string? PreviousContentHash { get; set; } + + /// + /// New content hash. + /// + [BsonElement("newContentHash")] + public string NewContentHash { get; set; } = string.Empty; + + /// + /// Policy run that triggered this change. + /// + [BsonElement("policyRunId")] + [BsonIgnoreIfNull] + public string? PolicyRunId { get; set; } + + /// + /// Trace ID for distributed tracing. + /// + [BsonElement("traceId")] + [BsonIgnoreIfNull] + public string? TraceId { get; set; } + + /// + /// When this change occurred. + /// + [BsonElement("occurredAt")] + public DateTimeOffset OccurredAt { get; set; } + + /// + /// TTL expiration timestamp for automatic cleanup. + /// + [BsonElement("expiresAt")] + [BsonIgnoreIfNull] + public DateTimeOffset? ExpiresAt { get; set; } + + /// + /// Creates the composite ID for a history entry. + /// + public static string CreateId(string findingId, long version) => $"{findingId}:v{version}"; +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyAuditDocument.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyAuditDocument.cs new file mode 100644 index 000000000..c44be4231 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyAuditDocument.cs @@ -0,0 +1,157 @@ +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Documents; + +/// +/// MongoDB document for policy audit log entries. +/// Collection: policy_audit +/// Tracks all policy-related actions for compliance and debugging. +/// +[BsonIgnoreExtraElements] +public sealed class PolicyAuditDocument +{ + /// + /// Unique audit entry identifier. + /// + [BsonId] + [BsonElement("_id")] + public ObjectId Id { get; set; } + + /// + /// Tenant identifier. + /// + [BsonElement("tenantId")] + public string TenantId { get; set; } = string.Empty; + + /// + /// Action type (PolicyCreated, PolicyUpdated, RevisionApproved, RunStarted, etc.). + /// + [BsonElement("action")] + public string Action { get; set; } = string.Empty; + + /// + /// Resource type (Policy, Revision, Bundle, Run, Finding). + /// + [BsonElement("resourceType")] + public string ResourceType { get; set; } = string.Empty; + + /// + /// Resource identifier. + /// + [BsonElement("resourceId")] + public string ResourceId { get; set; } = string.Empty; + + /// + /// Actor identifier (user ID or service account). + /// + [BsonElement("actorId")] + [BsonIgnoreIfNull] + public string? ActorId { get; set; } + + /// + /// Actor type (User, ServiceAccount, System). + /// + [BsonElement("actorType")] + public string ActorType { get; set; } = "System"; + + /// + /// Previous state snapshot (for update actions). + /// + [BsonElement("previousState")] + [BsonIgnoreIfNull] + public BsonDocument? PreviousState { get; set; } + + /// + /// New state snapshot (for create/update actions). + /// + [BsonElement("newState")] + [BsonIgnoreIfNull] + public BsonDocument? NewState { get; set; } + + /// + /// Additional context/metadata. + /// + [BsonElement("metadata")] + public Dictionary Metadata { get; set; } = new(); + + /// + /// Correlation ID for distributed tracing. + /// + [BsonElement("correlationId")] + [BsonIgnoreIfNull] + public string? CorrelationId { get; set; } + + /// + /// Trace ID for OpenTelemetry. + /// + [BsonElement("traceId")] + [BsonIgnoreIfNull] + public string? TraceId { get; set; } + + /// + /// Client IP address. + /// + [BsonElement("clientIp")] + [BsonIgnoreIfNull] + public string? ClientIp { get; set; } + + /// + /// User agent string. + /// + [BsonElement("userAgent")] + [BsonIgnoreIfNull] + public string? UserAgent { get; set; } + + /// + /// When the action occurred. + /// + [BsonElement("occurredAt")] + public DateTimeOffset OccurredAt { get; set; } +} + +/// +/// Audit action types for policy operations. +/// +public static class PolicyAuditActions +{ + public const string PolicyCreated = "PolicyCreated"; + public const string PolicyUpdated = "PolicyUpdated"; + public const string PolicyDeleted = "PolicyDeleted"; + public const string RevisionCreated = "RevisionCreated"; + public const string RevisionApproved = "RevisionApproved"; + public const string RevisionActivated = "RevisionActivated"; + public const string RevisionArchived = "RevisionArchived"; + public const string BundleCompiled = "BundleCompiled"; + public const string RunStarted = "RunStarted"; + public const string RunCompleted = "RunCompleted"; + public const string RunFailed = "RunFailed"; + public const string RunCancelled = "RunCancelled"; + public const string FindingCreated = "FindingCreated"; + public const string FindingUpdated = "FindingUpdated"; + public const string SimulationStarted = "SimulationStarted"; + public const string SimulationCompleted = "SimulationCompleted"; +} + +/// +/// Resource types for policy audit entries. +/// +public static class PolicyAuditResourceTypes +{ + public const string Policy = "Policy"; + public const string Revision = "Revision"; + public const string Bundle = "Bundle"; + public const string Run = "Run"; + public const string Finding = "Finding"; + public const string Simulation = "Simulation"; +} + +/// +/// Actor types for policy audit entries. +/// +public static class PolicyAuditActorTypes +{ + public const string User = "User"; + public const string ServiceAccount = "ServiceAccount"; + public const string System = "System"; +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyDocuments.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyDocuments.cs new file mode 100644 index 000000000..2fb9ad7b1 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyDocuments.cs @@ -0,0 +1,343 @@ +using System.Collections.Immutable; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Documents; + +/// +/// MongoDB document representing a policy pack. +/// Collection: policies +/// +[BsonIgnoreExtraElements] +public sealed class PolicyDocument +{ + /// + /// Unique identifier (packId). + /// + [BsonId] + [BsonElement("_id")] + public string Id { get; set; } = string.Empty; + + /// + /// Tenant identifier (normalized to lowercase). + /// + [BsonElement("tenantId")] + public string TenantId { get; set; } = string.Empty; + + /// + /// Display name for the policy pack. + /// + [BsonElement("displayName")] + [BsonIgnoreIfNull] + public string? DisplayName { get; set; } + + /// + /// Description of the policy pack. + /// + [BsonElement("description")] + [BsonIgnoreIfNull] + public string? Description { get; set; } + + /// + /// Current active revision version (null if none active). + /// + [BsonElement("activeVersion")] + [BsonIgnoreIfNull] + public int? ActiveVersion { get; set; } + + /// + /// Latest revision version. + /// + [BsonElement("latestVersion")] + public int LatestVersion { get; set; } + + /// + /// Tags for categorization and filtering. + /// + [BsonElement("tags")] + public List Tags { get; set; } = []; + + /// + /// Creation timestamp. + /// + [BsonElement("createdAt")] + public DateTimeOffset CreatedAt { get; set; } + + /// + /// Last update timestamp. + /// + [BsonElement("updatedAt")] + public DateTimeOffset UpdatedAt { get; set; } + + /// + /// User who created the policy pack. + /// + [BsonElement("createdBy")] + [BsonIgnoreIfNull] + public string? CreatedBy { get; set; } +} + +/// +/// MongoDB document representing a policy revision. +/// Collection: policy_revisions +/// +[BsonIgnoreExtraElements] +public sealed class PolicyRevisionDocument +{ + /// + /// Unique identifier: {packId}:{version} + /// + [BsonId] + [BsonElement("_id")] + public string Id { get; set; } = string.Empty; + + /// + /// Tenant identifier. + /// + [BsonElement("tenantId")] + public string TenantId { get; set; } = string.Empty; + + /// + /// Reference to policy pack. + /// + [BsonElement("packId")] + public string PackId { get; set; } = string.Empty; + + /// + /// Revision version number. + /// + [BsonElement("version")] + public int Version { get; set; } + + /// + /// Revision status (Draft, Approved, Active, Archived). + /// + [BsonElement("status")] + public string Status { get; set; } = "Draft"; + + /// + /// Whether two-person approval is required. + /// + [BsonElement("requiresTwoPersonApproval")] + public bool RequiresTwoPersonApproval { get; set; } + + /// + /// Approval records. + /// + [BsonElement("approvals")] + public List Approvals { get; set; } = []; + + /// + /// Reference to the compiled bundle. + /// + [BsonElement("bundleId")] + [BsonIgnoreIfNull] + public string? BundleId { get; set; } + + /// + /// SHA256 digest of the bundle. + /// + [BsonElement("bundleDigest")] + [BsonIgnoreIfNull] + public string? BundleDigest { get; set; } + + /// + /// Creation timestamp. + /// + [BsonElement("createdAt")] + public DateTimeOffset CreatedAt { get; set; } + + /// + /// Activation timestamp (when status became Active). + /// + [BsonElement("activatedAt")] + [BsonIgnoreIfNull] + public DateTimeOffset? ActivatedAt { get; set; } + + /// + /// Creates the composite ID for a revision. + /// + public static string CreateId(string packId, int version) => $"{packId}:{version}"; +} + +/// +/// Embedded approval record for policy revisions. +/// +[BsonIgnoreExtraElements] +public sealed class PolicyApprovalRecord +{ + /// + /// User who approved. + /// + [BsonElement("actorId")] + public string ActorId { get; set; } = string.Empty; + + /// + /// Approval timestamp. + /// + [BsonElement("approvedAt")] + public DateTimeOffset ApprovedAt { get; set; } + + /// + /// Optional comment. + /// + [BsonElement("comment")] + [BsonIgnoreIfNull] + public string? Comment { get; set; } +} + +/// +/// MongoDB document for compiled policy bundles. +/// Collection: policy_bundles +/// +[BsonIgnoreExtraElements] +public sealed class PolicyBundleDocument +{ + /// + /// Unique identifier (SHA256 digest). + /// + [BsonId] + [BsonElement("_id")] + public string Id { get; set; } = string.Empty; + + /// + /// Tenant identifier. + /// + [BsonElement("tenantId")] + public string TenantId { get; set; } = string.Empty; + + /// + /// Reference to policy pack. + /// + [BsonElement("packId")] + public string PackId { get; set; } = string.Empty; + + /// + /// Revision version. + /// + [BsonElement("version")] + public int Version { get; set; } + + /// + /// Cryptographic signature. + /// + [BsonElement("signature")] + public string Signature { get; set; } = string.Empty; + + /// + /// Bundle size in bytes. + /// + [BsonElement("sizeBytes")] + public int SizeBytes { get; set; } + + /// + /// Compiled bundle payload (binary). + /// + [BsonElement("payload")] + public byte[] Payload { get; set; } = []; + + /// + /// AOC metadata for compliance tracking. + /// + [BsonElement("aocMetadata")] + [BsonIgnoreIfNull] + public PolicyAocMetadataDocument? AocMetadata { get; set; } + + /// + /// Creation timestamp. + /// + [BsonElement("createdAt")] + public DateTimeOffset CreatedAt { get; set; } +} + +/// +/// Embedded AOC metadata document. +/// +[BsonIgnoreExtraElements] +public sealed class PolicyAocMetadataDocument +{ + [BsonElement("compilationId")] + public string CompilationId { get; set; } = string.Empty; + + [BsonElement("compilerVersion")] + public string CompilerVersion { get; set; } = string.Empty; + + [BsonElement("compiledAt")] + public DateTimeOffset CompiledAt { get; set; } + + [BsonElement("sourceDigest")] + public string SourceDigest { get; set; } = string.Empty; + + [BsonElement("artifactDigest")] + public string ArtifactDigest { get; set; } = string.Empty; + + [BsonElement("complexityScore")] + public double ComplexityScore { get; set; } + + [BsonElement("ruleCount")] + public int RuleCount { get; set; } + + [BsonElement("durationMilliseconds")] + public long DurationMilliseconds { get; set; } + + [BsonElement("provenance")] + [BsonIgnoreIfNull] + public PolicyProvenanceDocument? Provenance { get; set; } + + [BsonElement("attestationRef")] + [BsonIgnoreIfNull] + public PolicyAttestationRefDocument? AttestationRef { get; set; } +} + +/// +/// Embedded provenance document. +/// +[BsonIgnoreExtraElements] +public sealed class PolicyProvenanceDocument +{ + [BsonElement("sourceType")] + public string SourceType { get; set; } = string.Empty; + + [BsonElement("sourceUrl")] + [BsonIgnoreIfNull] + public string? SourceUrl { get; set; } + + [BsonElement("submitter")] + [BsonIgnoreIfNull] + public string? Submitter { get; set; } + + [BsonElement("commitSha")] + [BsonIgnoreIfNull] + public string? CommitSha { get; set; } + + [BsonElement("branch")] + [BsonIgnoreIfNull] + public string? Branch { get; set; } + + [BsonElement("ingestedAt")] + public DateTimeOffset IngestedAt { get; set; } +} + +/// +/// Embedded attestation reference document. +/// +[BsonIgnoreExtraElements] +public sealed class PolicyAttestationRefDocument +{ + [BsonElement("attestationId")] + public string AttestationId { get; set; } = string.Empty; + + [BsonElement("envelopeDigest")] + public string EnvelopeDigest { get; set; } = string.Empty; + + [BsonElement("uri")] + [BsonIgnoreIfNull] + public string? Uri { get; set; } + + [BsonElement("signingKeyId")] + [BsonIgnoreIfNull] + public string? SigningKeyId { get; set; } + + [BsonElement("createdAt")] + public DateTimeOffset CreatedAt { get; set; } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyExceptionDocuments.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyExceptionDocuments.cs new file mode 100644 index 000000000..13c2039bb --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyExceptionDocuments.cs @@ -0,0 +1,482 @@ +using System.Collections.Immutable; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Documents; + +/// +/// MongoDB document representing a policy exception. +/// Collection: exceptions +/// +[BsonIgnoreExtraElements] +public sealed class PolicyExceptionDocument +{ + /// + /// Unique identifier. + /// + [BsonId] + [BsonElement("_id")] + public string Id { get; set; } = string.Empty; + + /// + /// Tenant identifier (normalized to lowercase). + /// + [BsonElement("tenantId")] + public string TenantId { get; set; } = string.Empty; + + /// + /// Human-readable name for the exception. + /// + [BsonElement("name")] + public string Name { get; set; } = string.Empty; + + /// + /// Description and justification for the exception. + /// + [BsonElement("description")] + [BsonIgnoreIfNull] + public string? Description { get; set; } + + /// + /// Exception type: waiver, override, temporary, permanent. + /// + [BsonElement("exceptionType")] + public string ExceptionType { get; set; } = "waiver"; + + /// + /// Exception status: draft, pending_review, approved, active, expired, revoked. + /// + [BsonElement("status")] + public string Status { get; set; } = "draft"; + + /// + /// Scope of the exception (e.g., advisory IDs, PURL patterns, CVE IDs). + /// + [BsonElement("scope")] + public ExceptionScopeDocument Scope { get; set; } = new(); + + /// + /// Risk assessment and mitigation details. + /// + [BsonElement("riskAssessment")] + [BsonIgnoreIfNull] + public ExceptionRiskAssessmentDocument? RiskAssessment { get; set; } + + /// + /// Compensating controls in place while exception is active. + /// + [BsonElement("compensatingControls")] + public List CompensatingControls { get; set; } = []; + + /// + /// Tags for categorization and filtering. + /// + [BsonElement("tags")] + public List Tags { get; set; } = []; + + /// + /// Priority for conflict resolution (higher = more precedence). + /// + [BsonElement("priority")] + public int Priority { get; set; } + + /// + /// When the exception becomes active (null = immediately upon approval). + /// + [BsonElement("effectiveFrom")] + [BsonIgnoreIfNull] + public DateTimeOffset? EffectiveFrom { get; set; } + + /// + /// When the exception expires (null = no expiration). + /// + [BsonElement("expiresAt")] + [BsonIgnoreIfNull] + public DateTimeOffset? ExpiresAt { get; set; } + + /// + /// User who created the exception. + /// + [BsonElement("createdBy")] + public string CreatedBy { get; set; } = string.Empty; + + /// + /// Creation timestamp. + /// + [BsonElement("createdAt")] + public DateTimeOffset CreatedAt { get; set; } + + /// + /// Last update timestamp. + /// + [BsonElement("updatedAt")] + public DateTimeOffset UpdatedAt { get; set; } + + /// + /// When the exception was activated. + /// + [BsonElement("activatedAt")] + [BsonIgnoreIfNull] + public DateTimeOffset? ActivatedAt { get; set; } + + /// + /// When the exception was revoked. + /// + [BsonElement("revokedAt")] + [BsonIgnoreIfNull] + public DateTimeOffset? RevokedAt { get; set; } + + /// + /// User who revoked the exception. + /// + [BsonElement("revokedBy")] + [BsonIgnoreIfNull] + public string? RevokedBy { get; set; } + + /// + /// Reason for revocation. + /// + [BsonElement("revocationReason")] + [BsonIgnoreIfNull] + public string? RevocationReason { get; set; } + + /// + /// Reference to the active review (if pending_review status). + /// + [BsonElement("activeReviewId")] + [BsonIgnoreIfNull] + public string? ActiveReviewId { get; set; } + + /// + /// Correlation ID for tracing. + /// + [BsonElement("correlationId")] + [BsonIgnoreIfNull] + public string? CorrelationId { get; set; } +} + +/// +/// Embedded document for exception scope definition. +/// +[BsonIgnoreExtraElements] +public sealed class ExceptionScopeDocument +{ + /// + /// Advisory IDs covered by this exception. + /// + [BsonElement("advisoryIds")] + public List AdvisoryIds { get; set; } = []; + + /// + /// CVE IDs covered by this exception. + /// + [BsonElement("cveIds")] + public List CveIds { get; set; } = []; + + /// + /// PURL patterns (supports wildcards) covered by this exception. + /// + [BsonElement("purlPatterns")] + public List PurlPatterns { get; set; } = []; + + /// + /// Specific asset IDs covered. + /// + [BsonElement("assetIds")] + public List AssetIds { get; set; } = []; + + /// + /// Repository IDs covered (scope limiter). + /// + [BsonElement("repositoryIds")] + public List RepositoryIds { get; set; } = []; + + /// + /// Snapshot IDs covered (scope limiter). + /// + [BsonElement("snapshotIds")] + public List SnapshotIds { get; set; } = []; + + /// + /// Severity levels to apply exception to. + /// + [BsonElement("severities")] + public List Severities { get; set; } = []; + + /// + /// Whether this exception applies to all assets (tenant-wide). + /// + [BsonElement("applyToAll")] + public bool ApplyToAll { get; set; } +} + +/// +/// Embedded document for risk assessment. +/// +[BsonIgnoreExtraElements] +public sealed class ExceptionRiskAssessmentDocument +{ + /// + /// Original risk level being excepted. + /// + [BsonElement("originalRiskLevel")] + public string OriginalRiskLevel { get; set; } = string.Empty; + + /// + /// Residual risk level after compensating controls. + /// + [BsonElement("residualRiskLevel")] + public string ResidualRiskLevel { get; set; } = string.Empty; + + /// + /// Business justification for accepting the risk. + /// + [BsonElement("businessJustification")] + [BsonIgnoreIfNull] + public string? BusinessJustification { get; set; } + + /// + /// Impact assessment if vulnerability is exploited. + /// + [BsonElement("impactAssessment")] + [BsonIgnoreIfNull] + public string? ImpactAssessment { get; set; } + + /// + /// Exploitability assessment. + /// + [BsonElement("exploitability")] + [BsonIgnoreIfNull] + public string? Exploitability { get; set; } +} + +/// +/// MongoDB document representing an exception review. +/// Collection: exception_reviews +/// +[BsonIgnoreExtraElements] +public sealed class ExceptionReviewDocument +{ + /// + /// Unique identifier. + /// + [BsonId] + [BsonElement("_id")] + public string Id { get; set; } = string.Empty; + + /// + /// Tenant identifier. + /// + [BsonElement("tenantId")] + public string TenantId { get; set; } = string.Empty; + + /// + /// Reference to the exception being reviewed. + /// + [BsonElement("exceptionId")] + public string ExceptionId { get; set; } = string.Empty; + + /// + /// Review status: pending, approved, rejected. + /// + [BsonElement("status")] + public string Status { get; set; } = "pending"; + + /// + /// Type of review: initial, renewal, modification. + /// + [BsonElement("reviewType")] + public string ReviewType { get; set; } = "initial"; + + /// + /// Whether multiple approvers are required. + /// + [BsonElement("requiresMultipleApprovers")] + public bool RequiresMultipleApprovers { get; set; } + + /// + /// Minimum number of approvals required. + /// + [BsonElement("requiredApprovals")] + public int RequiredApprovals { get; set; } = 1; + + /// + /// Designated reviewers (user or group IDs). + /// + [BsonElement("designatedReviewers")] + public List DesignatedReviewers { get; set; } = []; + + /// + /// Individual approval/rejection decisions. + /// + [BsonElement("decisions")] + public List Decisions { get; set; } = []; + + /// + /// User who requested the review. + /// + [BsonElement("requestedBy")] + public string RequestedBy { get; set; } = string.Empty; + + /// + /// When the review was requested. + /// + [BsonElement("requestedAt")] + public DateTimeOffset RequestedAt { get; set; } + + /// + /// When the review was completed. + /// + [BsonElement("completedAt")] + [BsonIgnoreIfNull] + public DateTimeOffset? CompletedAt { get; set; } + + /// + /// Review deadline. + /// + [BsonElement("deadline")] + [BsonIgnoreIfNull] + public DateTimeOffset? Deadline { get; set; } + + /// + /// Notes or comments on the review. + /// + [BsonElement("notes")] + [BsonIgnoreIfNull] + public string? Notes { get; set; } + + /// + /// Creates the composite ID for a review. + /// + public static string CreateId(string exceptionId, string reviewType, DateTimeOffset timestamp) + => $"{exceptionId}:{reviewType}:{timestamp:yyyyMMddHHmmss}"; +} + +/// +/// Embedded document for an individual reviewer's decision. +/// +[BsonIgnoreExtraElements] +public sealed class ReviewDecisionDocument +{ + /// + /// Reviewer ID (user or service account). + /// + [BsonElement("reviewerId")] + public string ReviewerId { get; set; } = string.Empty; + + /// + /// Decision: approved, rejected, abstained. + /// + [BsonElement("decision")] + public string Decision { get; set; } = string.Empty; + + /// + /// Timestamp of the decision. + /// + [BsonElement("decidedAt")] + public DateTimeOffset DecidedAt { get; set; } + + /// + /// Comment explaining the decision. + /// + [BsonElement("comment")] + [BsonIgnoreIfNull] + public string? Comment { get; set; } + + /// + /// Conditions attached to approval. + /// + [BsonElement("conditions")] + public List Conditions { get; set; } = []; +} + +/// +/// MongoDB document representing an exception binding to specific assets. +/// Collection: exception_bindings +/// +[BsonIgnoreExtraElements] +public sealed class ExceptionBindingDocument +{ + /// + /// Unique identifier: {exceptionId}:{assetId}:{advisoryId} + /// + [BsonId] + [BsonElement("_id")] + public string Id { get; set; } = string.Empty; + + /// + /// Tenant identifier. + /// + [BsonElement("tenantId")] + public string TenantId { get; set; } = string.Empty; + + /// + /// Reference to the exception. + /// + [BsonElement("exceptionId")] + public string ExceptionId { get; set; } = string.Empty; + + /// + /// Asset ID (PURL or other identifier) this binding applies to. + /// + [BsonElement("assetId")] + public string AssetId { get; set; } = string.Empty; + + /// + /// Advisory ID this binding covers. + /// + [BsonElement("advisoryId")] + [BsonIgnoreIfNull] + public string? AdvisoryId { get; set; } + + /// + /// CVE ID this binding covers. + /// + [BsonElement("cveId")] + [BsonIgnoreIfNull] + public string? CveId { get; set; } + + /// + /// Snapshot ID where binding was created. + /// + [BsonElement("snapshotId")] + [BsonIgnoreIfNull] + public string? SnapshotId { get; set; } + + /// + /// Binding status: active, expired, revoked. + /// + [BsonElement("status")] + public string Status { get; set; } = "active"; + + /// + /// Policy decision override applied by this binding. + /// + [BsonElement("decisionOverride")] + public string DecisionOverride { get; set; } = "allow"; + + /// + /// When the binding becomes effective. + /// + [BsonElement("effectiveFrom")] + public DateTimeOffset EffectiveFrom { get; set; } + + /// + /// When the binding expires. + /// + [BsonElement("expiresAt")] + [BsonIgnoreIfNull] + public DateTimeOffset? ExpiresAt { get; set; } + + /// + /// When the binding was created. + /// + [BsonElement("createdAt")] + public DateTimeOffset CreatedAt { get; set; } + + /// + /// Creates the composite ID for a binding. + /// + public static string CreateId(string exceptionId, string assetId, string? advisoryId) + => $"{exceptionId}:{assetId}:{advisoryId ?? "all"}"; +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyExplainDocument.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyExplainDocument.cs new file mode 100644 index 000000000..efac7fb26 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyExplainDocument.cs @@ -0,0 +1,383 @@ +using System.Collections.Immutable; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Documents; + +/// +/// MongoDB document for storing policy explain traces. +/// Collection: policy_explains +/// +[BsonIgnoreExtraElements] +public sealed class PolicyExplainDocument +{ + /// + /// Unique identifier (combination of runId and subjectHash). + /// + [BsonId] + [BsonElement("_id")] + public string Id { get; set; } = string.Empty; + + /// + /// Tenant identifier. + /// + [BsonElement("tenantId")] + public string TenantId { get; set; } = string.Empty; + + /// + /// Policy run identifier. + /// + [BsonElement("runId")] + public string RunId { get; set; } = string.Empty; + + /// + /// Policy pack identifier. + /// + [BsonElement("policyId")] + public string PolicyId { get; set; } = string.Empty; + + /// + /// Policy version at time of evaluation. + /// + [BsonElement("policyVersion")] + [BsonIgnoreIfNull] + public int? PolicyVersion { get; set; } + + /// + /// Hash of the evaluation subject (component + advisory). + /// + [BsonElement("subjectHash")] + public string SubjectHash { get; set; } = string.Empty; + + /// + /// Hash of the policy bundle used. + /// + [BsonElement("bundleDigest")] + [BsonIgnoreIfNull] + public string? BundleDigest { get; set; } + + /// + /// Evaluation timestamp (deterministic). + /// + [BsonElement("evaluatedAt")] + public DateTimeOffset EvaluatedAt { get; set; } + + /// + /// Evaluation duration in milliseconds. + /// + [BsonElement("durationMs")] + public long DurationMs { get; set; } + + /// + /// Final outcome of the evaluation. + /// + [BsonElement("finalOutcome")] + public string FinalOutcome { get; set; } = string.Empty; + + /// + /// Input context information. + /// + [BsonElement("inputContext")] + public ExplainInputContextDocument InputContext { get; set; } = new(); + + /// + /// Rule evaluation steps. + /// + [BsonElement("ruleSteps")] + public List RuleSteps { get; set; } = []; + + /// + /// VEX evidence applied. + /// + [BsonElement("vexEvidence")] + public List VexEvidence { get; set; } = []; + + /// + /// Statistics summary. + /// + [BsonElement("statistics")] + public ExplainStatisticsDocument Statistics { get; set; } = new(); + + /// + /// Determinism hash for reproducibility verification. + /// + [BsonElement("determinismHash")] + [BsonIgnoreIfNull] + public string? DeterminismHash { get; set; } + + /// + /// Reference to AOC chain for this evaluation. + /// + [BsonElement("aocChain")] + [BsonIgnoreIfNull] + public ExplainAocChainDocument? AocChain { get; set; } + + /// + /// Additional metadata. + /// + [BsonElement("metadata")] + public Dictionary Metadata { get; set; } = new(); + + /// + /// Creation timestamp. + /// + [BsonElement("createdAt")] + public DateTimeOffset CreatedAt { get; set; } + + /// + /// TTL expiration timestamp for automatic cleanup. + /// + [BsonElement("expiresAt")] + [BsonIgnoreIfNull] + public DateTimeOffset? ExpiresAt { get; set; } + + /// + /// Creates the composite ID for an explain trace. + /// + public static string CreateId(string runId, string subjectHash) => $"{runId}:{subjectHash}"; +} + +/// +/// Input context embedded document. +/// +[BsonIgnoreExtraElements] +public sealed class ExplainInputContextDocument +{ + [BsonElement("componentPurl")] + [BsonIgnoreIfNull] + public string? ComponentPurl { get; set; } + + [BsonElement("componentName")] + [BsonIgnoreIfNull] + public string? ComponentName { get; set; } + + [BsonElement("componentVersion")] + [BsonIgnoreIfNull] + public string? ComponentVersion { get; set; } + + [BsonElement("advisoryId")] + [BsonIgnoreIfNull] + public string? AdvisoryId { get; set; } + + [BsonElement("vulnerabilityId")] + [BsonIgnoreIfNull] + public string? VulnerabilityId { get; set; } + + [BsonElement("inputSeverity")] + [BsonIgnoreIfNull] + public string? InputSeverity { get; set; } + + [BsonElement("inputCvssScore")] + [BsonIgnoreIfNull] + public decimal? InputCvssScore { get; set; } + + [BsonElement("environment")] + public Dictionary Environment { get; set; } = new(); + + [BsonElement("sbomTags")] + public List SbomTags { get; set; } = []; + + [BsonElement("reachabilityState")] + [BsonIgnoreIfNull] + public string? ReachabilityState { get; set; } + + [BsonElement("reachabilityConfidence")] + [BsonIgnoreIfNull] + public double? ReachabilityConfidence { get; set; } +} + +/// +/// Rule step embedded document. +/// +[BsonIgnoreExtraElements] +public sealed class ExplainRuleStepDocument +{ + [BsonElement("stepNumber")] + public int StepNumber { get; set; } + + [BsonElement("ruleName")] + public string RuleName { get; set; } = string.Empty; + + [BsonElement("rulePriority")] + public int RulePriority { get; set; } + + [BsonElement("ruleCategory")] + [BsonIgnoreIfNull] + public string? RuleCategory { get; set; } + + [BsonElement("expression")] + [BsonIgnoreIfNull] + public string? Expression { get; set; } + + [BsonElement("matched")] + public bool Matched { get; set; } + + [BsonElement("outcome")] + [BsonIgnoreIfNull] + public string? Outcome { get; set; } + + [BsonElement("assignedSeverity")] + [BsonIgnoreIfNull] + public string? AssignedSeverity { get; set; } + + [BsonElement("isFinalMatch")] + public bool IsFinalMatch { get; set; } + + [BsonElement("explanation")] + [BsonIgnoreIfNull] + public string? Explanation { get; set; } + + [BsonElement("evaluationMicroseconds")] + public long EvaluationMicroseconds { get; set; } + + [BsonElement("intermediateValues")] + public Dictionary IntermediateValues { get; set; } = new(); +} + +/// +/// VEX evidence embedded document. +/// +[BsonIgnoreExtraElements] +public sealed class ExplainVexEvidenceDocument +{ + [BsonElement("vendor")] + public string Vendor { get; set; } = string.Empty; + + [BsonElement("status")] + public string Status { get; set; } = string.Empty; + + [BsonElement("justification")] + [BsonIgnoreIfNull] + public string? Justification { get; set; } + + [BsonElement("confidence")] + [BsonIgnoreIfNull] + public double? Confidence { get; set; } + + [BsonElement("wasApplied")] + public bool WasApplied { get; set; } + + [BsonElement("explanation")] + [BsonIgnoreIfNull] + public string? Explanation { get; set; } +} + +/// +/// Statistics embedded document. +/// +[BsonIgnoreExtraElements] +public sealed class ExplainStatisticsDocument +{ + [BsonElement("totalRulesEvaluated")] + public int TotalRulesEvaluated { get; set; } + + [BsonElement("totalRulesFired")] + public int TotalRulesFired { get; set; } + + [BsonElement("totalVexOverrides")] + public int TotalVexOverrides { get; set; } + + [BsonElement("totalEvaluationMs")] + public long TotalEvaluationMs { get; set; } + + [BsonElement("averageRuleEvaluationMicroseconds")] + public double AverageRuleEvaluationMicroseconds { get; set; } + + [BsonElement("rulesFiredByCategory")] + public Dictionary RulesFiredByCategory { get; set; } = new(); + + [BsonElement("rulesFiredByOutcome")] + public Dictionary RulesFiredByOutcome { get; set; } = new(); +} + +/// +/// AOC chain reference for linking decisions to attestations. +/// +[BsonIgnoreExtraElements] +public sealed class ExplainAocChainDocument +{ + /// + /// Compilation ID that produced the policy bundle. + /// + [BsonElement("compilationId")] + public string CompilationId { get; set; } = string.Empty; + + /// + /// Compiler version used. + /// + [BsonElement("compilerVersion")] + public string CompilerVersion { get; set; } = string.Empty; + + /// + /// Source digest of the policy document. + /// + [BsonElement("sourceDigest")] + public string SourceDigest { get; set; } = string.Empty; + + /// + /// Artifact digest of the compiled bundle. + /// + [BsonElement("artifactDigest")] + public string ArtifactDigest { get; set; } = string.Empty; + + /// + /// Reference to the signed attestation. + /// + [BsonElement("attestationRef")] + [BsonIgnoreIfNull] + public ExplainAttestationRefDocument? AttestationRef { get; set; } + + /// + /// Provenance information. + /// + [BsonElement("provenance")] + [BsonIgnoreIfNull] + public ExplainProvenanceDocument? Provenance { get; set; } +} + +/// +/// Attestation reference embedded document. +/// +[BsonIgnoreExtraElements] +public sealed class ExplainAttestationRefDocument +{ + [BsonElement("attestationId")] + public string AttestationId { get; set; } = string.Empty; + + [BsonElement("envelopeDigest")] + public string EnvelopeDigest { get; set; } = string.Empty; + + [BsonElement("uri")] + [BsonIgnoreIfNull] + public string? Uri { get; set; } + + [BsonElement("signingKeyId")] + [BsonIgnoreIfNull] + public string? SigningKeyId { get; set; } +} + +/// +/// Provenance embedded document. +/// +[BsonIgnoreExtraElements] +public sealed class ExplainProvenanceDocument +{ + [BsonElement("sourceType")] + public string SourceType { get; set; } = string.Empty; + + [BsonElement("sourceUrl")] + [BsonIgnoreIfNull] + public string? SourceUrl { get; set; } + + [BsonElement("submitter")] + [BsonIgnoreIfNull] + public string? Submitter { get; set; } + + [BsonElement("commitSha")] + [BsonIgnoreIfNull] + public string? CommitSha { get; set; } + + [BsonElement("branch")] + [BsonIgnoreIfNull] + public string? Branch { get; set; } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyRunDocument.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyRunDocument.cs new file mode 100644 index 000000000..6d991d63e --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Documents/PolicyRunDocument.cs @@ -0,0 +1,319 @@ +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Documents; + +/// +/// MongoDB document representing a policy evaluation run. +/// Collection: policy_runs +/// +[BsonIgnoreExtraElements] +public sealed class PolicyRunDocument +{ + /// + /// Unique run identifier. + /// + [BsonId] + [BsonElement("_id")] + public string Id { get; set; } = string.Empty; + + /// + /// Tenant identifier. + /// + [BsonElement("tenantId")] + public string TenantId { get; set; } = string.Empty; + + /// + /// Policy pack identifier. + /// + [BsonElement("policyId")] + public string PolicyId { get; set; } = string.Empty; + + /// + /// Policy version evaluated. + /// + [BsonElement("policyVersion")] + public int PolicyVersion { get; set; } + + /// + /// Run mode (full, incremental, simulation, batch). + /// + [BsonElement("mode")] + public string Mode { get; set; } = "full"; + + /// + /// Run status (pending, running, completed, failed, cancelled). + /// + [BsonElement("status")] + public string Status { get; set; } = "pending"; + + /// + /// Trigger type (scheduled, manual, event, api). + /// + [BsonElement("triggerType")] + public string TriggerType { get; set; } = "manual"; + + /// + /// Correlation ID for distributed tracing. + /// + [BsonElement("correlationId")] + [BsonIgnoreIfNull] + public string? CorrelationId { get; set; } + + /// + /// Trace ID for OpenTelemetry. + /// + [BsonElement("traceId")] + [BsonIgnoreIfNull] + public string? TraceId { get; set; } + + /// + /// Parent span ID if part of larger operation. + /// + [BsonElement("parentSpanId")] + [BsonIgnoreIfNull] + public string? ParentSpanId { get; set; } + + /// + /// User or service that initiated the run. + /// + [BsonElement("initiatedBy")] + [BsonIgnoreIfNull] + public string? InitiatedBy { get; set; } + + /// + /// Deterministic evaluation timestamp used for this run. + /// + [BsonElement("evaluationTimestamp")] + public DateTimeOffset EvaluationTimestamp { get; set; } + + /// + /// When the run started. + /// + [BsonElement("startedAt")] + public DateTimeOffset StartedAt { get; set; } + + /// + /// When the run completed (null if still running). + /// + [BsonElement("completedAt")] + [BsonIgnoreIfNull] + public DateTimeOffset? CompletedAt { get; set; } + + /// + /// Run metrics and statistics. + /// + [BsonElement("metrics")] + public PolicyRunMetricsDocument Metrics { get; set; } = new(); + + /// + /// Input parameters for the run. + /// + [BsonElement("input")] + [BsonIgnoreIfNull] + public PolicyRunInputDocument? Input { get; set; } + + /// + /// Run outcome summary. + /// + [BsonElement("outcome")] + [BsonIgnoreIfNull] + public PolicyRunOutcomeDocument? Outcome { get; set; } + + /// + /// Error information if run failed. + /// + [BsonElement("error")] + [BsonIgnoreIfNull] + public PolicyRunErrorDocument? Error { get; set; } + + /// + /// Determinism hash for reproducibility verification. + /// + [BsonElement("determinismHash")] + [BsonIgnoreIfNull] + public string? DeterminismHash { get; set; } + + /// + /// TTL expiration timestamp for automatic cleanup. + /// + [BsonElement("expiresAt")] + [BsonIgnoreIfNull] + public DateTimeOffset? ExpiresAt { get; set; } +} + +/// +/// Embedded metrics document for policy runs. +/// +[BsonIgnoreExtraElements] +public sealed class PolicyRunMetricsDocument +{ + /// + /// Total components evaluated. + /// + [BsonElement("totalComponents")] + public int TotalComponents { get; set; } + + /// + /// Total advisories evaluated. + /// + [BsonElement("totalAdvisories")] + public int TotalAdvisories { get; set; } + + /// + /// Total findings generated. + /// + [BsonElement("totalFindings")] + public int TotalFindings { get; set; } + + /// + /// Rules evaluated count. + /// + [BsonElement("rulesEvaluated")] + public int RulesEvaluated { get; set; } + + /// + /// Rules that matched/fired. + /// + [BsonElement("rulesFired")] + public int RulesFired { get; set; } + + /// + /// VEX overrides applied. + /// + [BsonElement("vexOverridesApplied")] + public int VexOverridesApplied { get; set; } + + /// + /// Findings created (new). + /// + [BsonElement("findingsCreated")] + public int FindingsCreated { get; set; } + + /// + /// Findings updated (changed). + /// + [BsonElement("findingsUpdated")] + public int FindingsUpdated { get; set; } + + /// + /// Findings unchanged. + /// + [BsonElement("findingsUnchanged")] + public int FindingsUnchanged { get; set; } + + /// + /// Duration in milliseconds. + /// + [BsonElement("durationMs")] + public long DurationMs { get; set; } + + /// + /// Memory used in bytes. + /// + [BsonElement("memoryUsedBytes")] + public long MemoryUsedBytes { get; set; } +} + +/// +/// Embedded input parameters document. +/// +[BsonIgnoreExtraElements] +public sealed class PolicyRunInputDocument +{ + /// + /// SBOM IDs included in evaluation. + /// + [BsonElement("sbomIds")] + public List SbomIds { get; set; } = []; + + /// + /// Product keys included in evaluation. + /// + [BsonElement("productKeys")] + public List ProductKeys { get; set; } = []; + + /// + /// Advisory IDs to evaluate (empty = all). + /// + [BsonElement("advisoryIds")] + public List AdvisoryIds { get; set; } = []; + + /// + /// Filter criteria applied. + /// + [BsonElement("filters")] + [BsonIgnoreIfNull] + public Dictionary? Filters { get; set; } +} + +/// +/// Embedded outcome summary document. +/// +[BsonIgnoreExtraElements] +public sealed class PolicyRunOutcomeDocument +{ + /// + /// Overall outcome (pass, fail, warn). + /// + [BsonElement("result")] + public string Result { get; set; } = "pass"; + + /// + /// Findings by severity. + /// + [BsonElement("bySeverity")] + public Dictionary BySeverity { get; set; } = new(); + + /// + /// Findings by status. + /// + [BsonElement("byStatus")] + public Dictionary ByStatus { get; set; } = new(); + + /// + /// Blocking findings count. + /// + [BsonElement("blockingCount")] + public int BlockingCount { get; set; } + + /// + /// Summary message. + /// + [BsonElement("message")] + [BsonIgnoreIfNull] + public string? Message { get; set; } +} + +/// +/// Embedded error document. +/// +[BsonIgnoreExtraElements] +public sealed class PolicyRunErrorDocument +{ + /// + /// Error code. + /// + [BsonElement("code")] + public string Code { get; set; } = string.Empty; + + /// + /// Error message. + /// + [BsonElement("message")] + public string Message { get; set; } = string.Empty; + + /// + /// Stack trace (if available). + /// + [BsonElement("stackTrace")] + [BsonIgnoreIfNull] + public string? StackTrace { get; set; } + + /// + /// Inner error details. + /// + [BsonElement("innerError")] + [BsonIgnoreIfNull] + public string? InnerError { get; set; } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Internal/PolicyEngineMongoContext.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Internal/PolicyEngineMongoContext.cs new file mode 100644 index 000000000..9a62827fc --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Internal/PolicyEngineMongoContext.cs @@ -0,0 +1,59 @@ +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using StellaOps.Policy.Engine.Storage.Mongo.Options; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Internal; + +/// +/// MongoDB context for Policy Engine storage operations. +/// Provides configured access to the database with appropriate read/write concerns. +/// +internal sealed class PolicyEngineMongoContext +{ + public PolicyEngineMongoContext(IOptions options, ILogger logger) + { + ArgumentNullException.ThrowIfNull(logger); + var value = options?.Value ?? throw new ArgumentNullException(nameof(options)); + + if (string.IsNullOrWhiteSpace(value.ConnectionString)) + { + throw new InvalidOperationException("Policy Engine Mongo connection string is not configured."); + } + + if (string.IsNullOrWhiteSpace(value.Database)) + { + throw new InvalidOperationException("Policy Engine Mongo database name is not configured."); + } + + Client = new MongoClient(value.ConnectionString); + var settings = new MongoDatabaseSettings(); + if (value.UseMajorityReadConcern) + { + settings.ReadConcern = ReadConcern.Majority; + } + + if (value.UseMajorityWriteConcern) + { + settings.WriteConcern = WriteConcern.WMajority; + } + + Database = Client.GetDatabase(value.Database, settings); + Options = value; + } + + /// + /// MongoDB client instance. + /// + public MongoClient Client { get; } + + /// + /// MongoDB database instance with configured read/write concerns. + /// + public IMongoDatabase Database { get; } + + /// + /// Policy Engine MongoDB options. + /// + public PolicyEngineMongoOptions Options { get; } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Internal/PolicyEngineMongoInitializer.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Internal/PolicyEngineMongoInitializer.cs new file mode 100644 index 000000000..e03814080 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Internal/PolicyEngineMongoInitializer.cs @@ -0,0 +1,44 @@ +using Microsoft.Extensions.Logging; +using StellaOps.Policy.Engine.Storage.Mongo.Migrations; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Internal; + +/// +/// Interface for Policy Engine MongoDB initialization. +/// +internal interface IPolicyEngineMongoInitializer +{ + /// + /// Ensures all migrations are applied to the database. + /// + Task EnsureMigrationsAsync(CancellationToken cancellationToken = default); +} + +/// +/// Initializes Policy Engine MongoDB storage by applying migrations. +/// +internal sealed class PolicyEngineMongoInitializer : IPolicyEngineMongoInitializer +{ + private readonly PolicyEngineMongoContext _context; + private readonly PolicyEngineMigrationRunner _migrationRunner; + private readonly ILogger _logger; + + public PolicyEngineMongoInitializer( + PolicyEngineMongoContext context, + PolicyEngineMigrationRunner migrationRunner, + ILogger logger) + { + _context = context ?? throw new ArgumentNullException(nameof(context)); + _migrationRunner = migrationRunner ?? throw new ArgumentNullException(nameof(migrationRunner)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + /// + public async Task EnsureMigrationsAsync(CancellationToken cancellationToken = default) + { + _logger.LogInformation( + "Ensuring Policy Engine Mongo migrations are applied for database {Database}.", + _context.Options.Database); + await _migrationRunner.RunAsync(cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Internal/TenantFilterBuilder.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Internal/TenantFilterBuilder.cs new file mode 100644 index 000000000..705526fe8 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Internal/TenantFilterBuilder.cs @@ -0,0 +1,69 @@ +using MongoDB.Driver; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Internal; + +/// +/// Builds tenant-scoped filters for Policy Engine MongoDB queries. +/// Ensures all queries are properly scoped to the current tenant. +/// +internal static class TenantFilterBuilder +{ + /// + /// Creates a filter that matches documents for the specified tenant. + /// + /// Document type with tenantId field. + /// Tenant identifier (will be normalized to lowercase). + /// A filter definition scoped to the tenant. + public static FilterDefinition ForTenant(string tenantId) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + var normalizedTenantId = tenantId.ToLowerInvariant(); + return Builders.Filter.Eq("tenantId", normalizedTenantId); + } + + /// + /// Combines a tenant filter with an additional filter using AND. + /// + /// Document type with tenantId field. + /// Tenant identifier (will be normalized to lowercase). + /// Additional filter to combine. + /// A combined filter definition. + public static FilterDefinition ForTenantAnd( + string tenantId, + FilterDefinition additionalFilter) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentNullException.ThrowIfNull(additionalFilter); + + var tenantFilter = ForTenant(tenantId); + return Builders.Filter.And(tenantFilter, additionalFilter); + } + + /// + /// Creates a filter that matches documents by ID within a tenant scope. + /// + /// Document type with tenantId and _id fields. + /// Tenant identifier (will be normalized to lowercase). + /// Document identifier. + /// A filter definition matching both tenant and ID. + public static FilterDefinition ForTenantById(string tenantId, string documentId) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(documentId); + + var tenantFilter = ForTenant(tenantId); + var idFilter = Builders.Filter.Eq("_id", documentId); + return Builders.Filter.And(tenantFilter, idFilter); + } + + /// + /// Normalizes a tenant ID to lowercase for consistent storage and queries. + /// + /// Tenant identifier. + /// Normalized (lowercase) tenant identifier. + public static string NormalizeTenantId(string tenantId) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + return tenantId.ToLowerInvariant(); + } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/EffectiveFindingCollectionInitializer.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/EffectiveFindingCollectionInitializer.cs new file mode 100644 index 000000000..e749f9556 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/EffectiveFindingCollectionInitializer.cs @@ -0,0 +1,283 @@ +using Microsoft.Extensions.Logging; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Policy.Engine.Storage.Mongo.Internal; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Migrations; + +/// +/// Initializes effective_finding_* and effective_finding_history_* collections for a policy. +/// Creates collections and indexes on-demand when a policy is first evaluated. +/// +internal interface IEffectiveFindingCollectionInitializer +{ + /// + /// Ensures the effective finding collection and indexes exist for a policy. + /// + /// The policy identifier. + /// Cancellation token. + ValueTask EnsureCollectionAsync(string policyId, CancellationToken cancellationToken); +} + +/// +internal sealed class EffectiveFindingCollectionInitializer : IEffectiveFindingCollectionInitializer +{ + private readonly PolicyEngineMongoContext _context; + private readonly ILogger _logger; + private readonly HashSet _initializedCollections = new(StringComparer.OrdinalIgnoreCase); + private readonly SemaphoreSlim _lock = new(1, 1); + + public EffectiveFindingCollectionInitializer( + PolicyEngineMongoContext context, + ILogger logger) + { + _context = context ?? throw new ArgumentNullException(nameof(context)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + /// + public async ValueTask EnsureCollectionAsync(string policyId, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(policyId); + + var findingsCollectionName = _context.Options.GetEffectiveFindingsCollectionName(policyId); + var historyCollectionName = _context.Options.GetEffectiveFindingsHistoryCollectionName(policyId); + + // Fast path: already initialized in memory + if (_initializedCollections.Contains(findingsCollectionName)) + { + return; + } + + await _lock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + // Double-check after acquiring lock + if (_initializedCollections.Contains(findingsCollectionName)) + { + return; + } + + await EnsureEffectiveFindingCollectionAsync(findingsCollectionName, cancellationToken).ConfigureAwait(false); + await EnsureEffectiveFindingHistoryCollectionAsync(historyCollectionName, cancellationToken).ConfigureAwait(false); + + _initializedCollections.Add(findingsCollectionName); + } + finally + { + _lock.Release(); + } + } + + private async Task EnsureEffectiveFindingCollectionAsync(string collectionName, CancellationToken cancellationToken) + { + var cursor = await _context.Database + .ListCollectionNamesAsync(cancellationToken: cancellationToken) + .ConfigureAwait(false); + + var existing = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false); + + if (!existing.Contains(collectionName, StringComparer.Ordinal)) + { + _logger.LogInformation("Creating effective finding collection '{CollectionName}'.", collectionName); + await _context.Database.CreateCollectionAsync(collectionName, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + var collection = _context.Database.GetCollection(collectionName); + + // Unique constraint on (tenantId, componentPurl, advisoryId) + var tenantComponentAdvisory = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("componentPurl") + .Ascending("advisoryId"), + new CreateIndexOptions + { + Name = "tenant_component_advisory_unique", + Unique = true + }); + + // Tenant + severity for filtering by risk level + var tenantSeverity = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("severity") + .Descending("updatedAt"), + new CreateIndexOptions + { + Name = "tenant_severity_updatedAt_desc" + }); + + // Tenant + status for filtering by policy status + var tenantStatus = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("status") + .Descending("updatedAt"), + new CreateIndexOptions + { + Name = "tenant_status_updatedAt_desc" + }); + + // Product key lookup for SBOM-based queries + var tenantProduct = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("productKey"), + new CreateIndexOptions + { + Name = "tenant_product", + PartialFilterExpression = Builders.Filter.Exists("productKey", true) + }); + + // SBOM ID lookup + var tenantSbom = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("sbomId"), + new CreateIndexOptions + { + Name = "tenant_sbom", + PartialFilterExpression = Builders.Filter.Exists("sbomId", true) + }); + + // Component name lookup for search + var tenantComponentName = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("componentName"), + new CreateIndexOptions + { + Name = "tenant_componentName" + }); + + // Advisory ID lookup for cross-policy queries + var tenantAdvisory = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("advisoryId"), + new CreateIndexOptions + { + Name = "tenant_advisory" + }); + + // Policy run reference for traceability + var policyRun = new CreateIndexModel( + Builders.IndexKeys + .Ascending("policyRunId"), + new CreateIndexOptions + { + Name = "policyRun_lookup", + PartialFilterExpression = Builders.Filter.Exists("policyRunId", true) + }); + + // Content hash for deduplication checks + var contentHash = new CreateIndexModel( + Builders.IndexKeys + .Ascending("contentHash"), + new CreateIndexOptions + { + Name = "contentHash_lookup" + }); + + await collection.Indexes.CreateManyAsync( + new[] + { + tenantComponentAdvisory, + tenantSeverity, + tenantStatus, + tenantProduct, + tenantSbom, + tenantComponentName, + tenantAdvisory, + policyRun, + contentHash + }, + cancellationToken: cancellationToken).ConfigureAwait(false); + + _logger.LogInformation("Created indexes for effective finding collection '{CollectionName}'.", collectionName); + } + + private async Task EnsureEffectiveFindingHistoryCollectionAsync(string collectionName, CancellationToken cancellationToken) + { + var cursor = await _context.Database + .ListCollectionNamesAsync(cancellationToken: cancellationToken) + .ConfigureAwait(false); + + var existing = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false); + + if (!existing.Contains(collectionName, StringComparer.Ordinal)) + { + _logger.LogInformation("Creating effective finding history collection '{CollectionName}'.", collectionName); + await _context.Database.CreateCollectionAsync(collectionName, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + var collection = _context.Database.GetCollection(collectionName); + + // Finding + version for retrieving history + var findingVersion = new CreateIndexModel( + Builders.IndexKeys + .Ascending("findingId") + .Descending("version"), + new CreateIndexOptions + { + Name = "finding_version_desc" + }); + + // Tenant + occurred for chronological history + var tenantOccurred = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Descending("occurredAt"), + new CreateIndexOptions + { + Name = "tenant_occurredAt_desc" + }); + + // Change type lookup for filtering history events + var tenantChangeType = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("changeType"), + new CreateIndexOptions + { + Name = "tenant_changeType" + }); + + // Policy run reference + var policyRun = new CreateIndexModel( + Builders.IndexKeys + .Ascending("policyRunId"), + new CreateIndexOptions + { + Name = "policyRun_lookup", + PartialFilterExpression = Builders.Filter.Exists("policyRunId", true) + }); + + var models = new List> + { + findingVersion, + tenantOccurred, + tenantChangeType, + policyRun + }; + + // TTL index for automatic cleanup of old history entries + if (_context.Options.EffectiveFindingsHistoryRetention > TimeSpan.Zero) + { + var ttlModel = new CreateIndexModel( + Builders.IndexKeys.Ascending("expiresAt"), + new CreateIndexOptions + { + Name = "expiresAt_ttl", + ExpireAfter = TimeSpan.Zero + }); + + models.Add(ttlModel); + } + + await collection.Indexes.CreateManyAsync(models, cancellationToken: cancellationToken).ConfigureAwait(false); + + _logger.LogInformation("Created indexes for effective finding history collection '{CollectionName}'.", collectionName); + } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/EnsureExceptionIndexesMigration.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/EnsureExceptionIndexesMigration.cs new file mode 100644 index 000000000..e9851e628 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/EnsureExceptionIndexesMigration.cs @@ -0,0 +1,345 @@ +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Policy.Engine.Storage.Mongo.Internal; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Migrations; + +/// +/// Migration to ensure all required indexes exist for exception collections. +/// Creates indexes for efficient tenant-scoped queries and status lookups. +/// +internal sealed class EnsureExceptionIndexesMigration : IPolicyEngineMongoMigration +{ + /// + public string Id => "20251128_exception_indexes_v1"; + + /// + public async ValueTask ExecuteAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + await EnsureExceptionsIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsureExceptionReviewsIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsureExceptionBindingsIndexesAsync(context, cancellationToken).ConfigureAwait(false); + } + + /// + /// Creates indexes for the exceptions collection. + /// + private static async Task EnsureExceptionsIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.ExceptionsCollection); + + // Tenant + status for finding active/pending exceptions + var tenantStatus = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("status"), + new CreateIndexOptions + { + Name = "tenant_status" + }); + + // Tenant + type + status for filtering + var tenantTypeStatus = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("exceptionType") + .Ascending("status"), + new CreateIndexOptions + { + Name = "tenant_type_status" + }); + + // Tenant + created descending for recent exceptions + var tenantCreated = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Descending("createdAt"), + new CreateIndexOptions + { + Name = "tenant_createdAt_desc" + }); + + // Tenant + tags for filtering by tag + var tenantTags = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("tags"), + new CreateIndexOptions + { + Name = "tenant_tags" + }); + + // Tenant + expiresAt for finding expiring exceptions + var tenantExpires = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("status") + .Ascending("expiresAt"), + new CreateIndexOptions + { + Name = "tenant_status_expiresAt", + PartialFilterExpression = Builders.Filter.Exists("expiresAt", true) + }); + + // Tenant + effectiveFrom for finding pending activations + var tenantEffectiveFrom = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("status") + .Ascending("effectiveFrom"), + new CreateIndexOptions + { + Name = "tenant_status_effectiveFrom", + PartialFilterExpression = Builders.Filter.Eq("status", "approved") + }); + + // Scope advisory IDs for finding applicable exceptions + var scopeAdvisoryIds = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("status") + .Ascending("scope.advisoryIds"), + new CreateIndexOptions + { + Name = "tenant_status_scope_advisoryIds" + }); + + // Scope asset IDs for finding applicable exceptions + var scopeAssetIds = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("status") + .Ascending("scope.assetIds"), + new CreateIndexOptions + { + Name = "tenant_status_scope_assetIds" + }); + + // Scope CVE IDs for finding applicable exceptions + var scopeCveIds = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("status") + .Ascending("scope.cveIds"), + new CreateIndexOptions + { + Name = "tenant_status_scope_cveIds" + }); + + // CreatedBy for audit queries + var tenantCreatedBy = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("createdBy"), + new CreateIndexOptions + { + Name = "tenant_createdBy" + }); + + // Priority for ordering applicable exceptions + var tenantPriority = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("status") + .Descending("priority"), + new CreateIndexOptions + { + Name = "tenant_status_priority_desc" + }); + + // Correlation ID for tracing + var correlationId = new CreateIndexModel( + Builders.IndexKeys + .Ascending("correlationId"), + new CreateIndexOptions + { + Name = "correlationId_lookup", + PartialFilterExpression = Builders.Filter.Exists("correlationId", true) + }); + + await collection.Indexes.CreateManyAsync( + new[] + { + tenantStatus, + tenantTypeStatus, + tenantCreated, + tenantTags, + tenantExpires, + tenantEffectiveFrom, + scopeAdvisoryIds, + scopeAssetIds, + scopeCveIds, + tenantCreatedBy, + tenantPriority, + correlationId + }, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + + /// + /// Creates indexes for the exception_reviews collection. + /// + private static async Task EnsureExceptionReviewsIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.ExceptionReviewsCollection); + + // Tenant + exception for finding reviews of an exception + var tenantException = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("exceptionId") + .Descending("requestedAt"), + new CreateIndexOptions + { + Name = "tenant_exceptionId_requestedAt_desc" + }); + + // Tenant + status for finding pending reviews + var tenantStatus = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("status"), + new CreateIndexOptions + { + Name = "tenant_status" + }); + + // Tenant + designated reviewers for reviewer's queue + var tenantReviewers = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("status") + .Ascending("designatedReviewers"), + new CreateIndexOptions + { + Name = "tenant_status_designatedReviewers" + }); + + // Deadline for finding overdue reviews + var tenantDeadline = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("status") + .Ascending("deadline"), + new CreateIndexOptions + { + Name = "tenant_status_deadline", + PartialFilterExpression = Builders.Filter.And( + Builders.Filter.Eq("status", "pending"), + Builders.Filter.Exists("deadline", true)) + }); + + // RequestedBy for audit queries + var tenantRequestedBy = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("requestedBy"), + new CreateIndexOptions + { + Name = "tenant_requestedBy" + }); + + await collection.Indexes.CreateManyAsync( + new[] + { + tenantException, + tenantStatus, + tenantReviewers, + tenantDeadline, + tenantRequestedBy + }, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + + /// + /// Creates indexes for the exception_bindings collection. + /// + private static async Task EnsureExceptionBindingsIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.ExceptionBindingsCollection); + + // Tenant + exception for finding bindings of an exception + var tenantException = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("exceptionId"), + new CreateIndexOptions + { + Name = "tenant_exceptionId" + }); + + // Tenant + asset for finding bindings for an asset + var tenantAsset = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("assetId") + .Ascending("status"), + new CreateIndexOptions + { + Name = "tenant_assetId_status" + }); + + // Tenant + advisory for finding bindings by advisory + var tenantAdvisory = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("advisoryId") + .Ascending("status"), + new CreateIndexOptions + { + Name = "tenant_advisoryId_status", + PartialFilterExpression = Builders.Filter.Exists("advisoryId", true) + }); + + // Tenant + CVE for finding bindings by CVE + var tenantCve = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("cveId") + .Ascending("status"), + new CreateIndexOptions + { + Name = "tenant_cveId_status", + PartialFilterExpression = Builders.Filter.Exists("cveId", true) + }); + + // Tenant + status + expiresAt for finding expired bindings + var tenantExpires = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("status") + .Ascending("expiresAt"), + new CreateIndexOptions + { + Name = "tenant_status_expiresAt", + PartialFilterExpression = Builders.Filter.Exists("expiresAt", true) + }); + + // Effective time range for finding active bindings at a point in time + var tenantEffectiveRange = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("assetId") + .Ascending("status") + .Ascending("effectiveFrom") + .Ascending("expiresAt"), + new CreateIndexOptions + { + Name = "tenant_asset_status_effectiveRange" + }); + + await collection.Indexes.CreateManyAsync( + new[] + { + tenantException, + tenantAsset, + tenantAdvisory, + tenantCve, + tenantExpires, + tenantEffectiveRange + }, + cancellationToken: cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/EnsurePolicyCollectionsMigration.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/EnsurePolicyCollectionsMigration.cs new file mode 100644 index 000000000..7f5e1a8b4 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/EnsurePolicyCollectionsMigration.cs @@ -0,0 +1,54 @@ +using Microsoft.Extensions.Logging; +using MongoDB.Driver; +using StellaOps.Policy.Engine.Storage.Mongo.Internal; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Migrations; + +/// +/// Migration to ensure all required Policy Engine collections exist. +/// Creates: policies, policy_revisions, policy_bundles, policy_runs, policy_audit, _policy_migrations +/// Note: effective_finding_* and effective_finding_history_* collections are created dynamically per-policy. +/// +internal sealed class EnsurePolicyCollectionsMigration : IPolicyEngineMongoMigration +{ + private readonly ILogger _logger; + + public EnsurePolicyCollectionsMigration(ILogger logger) + => _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + + /// + public string Id => "20251128_policy_collections_v1"; + + /// + public async ValueTask ExecuteAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + var requiredCollections = new[] + { + context.Options.PoliciesCollection, + context.Options.PolicyRevisionsCollection, + context.Options.PolicyBundlesCollection, + context.Options.PolicyRunsCollection, + context.Options.AuditCollection, + context.Options.MigrationsCollection + }; + + var cursor = await context.Database + .ListCollectionNamesAsync(cancellationToken: cancellationToken) + .ConfigureAwait(false); + + var existing = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false); + + foreach (var collection in requiredCollections) + { + if (existing.Contains(collection, StringComparer.Ordinal)) + { + continue; + } + + _logger.LogInformation("Creating Policy Engine Mongo collection '{CollectionName}'.", collection); + await context.Database.CreateCollectionAsync(collection, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/EnsurePolicyIndexesMigration.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/EnsurePolicyIndexesMigration.cs new file mode 100644 index 000000000..6ec6fe2c4 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/EnsurePolicyIndexesMigration.cs @@ -0,0 +1,312 @@ +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Policy.Engine.Storage.Mongo.Internal; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Migrations; + +/// +/// Migration to ensure all required indexes exist for Policy Engine collections. +/// Creates indexes for efficient tenant-scoped queries and TTL cleanup. +/// +internal sealed class EnsurePolicyIndexesMigration : IPolicyEngineMongoMigration +{ + /// + public string Id => "20251128_policy_indexes_v1"; + + /// + public async ValueTask ExecuteAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + await EnsurePoliciesIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsurePolicyRevisionsIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsurePolicyBundlesIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsurePolicyRunsIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsureAuditIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsureExplainsIndexesAsync(context, cancellationToken).ConfigureAwait(false); + } + + /// + /// Creates indexes for the policies collection. + /// + private static async Task EnsurePoliciesIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.PoliciesCollection); + + // Tenant lookup with optional tag filtering + var tenantTags = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("tags"), + new CreateIndexOptions + { + Name = "tenant_tags" + }); + + // Tenant + updated for recent changes + var tenantUpdated = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Descending("updatedAt"), + new CreateIndexOptions + { + Name = "tenant_updatedAt_desc" + }); + + await collection.Indexes.CreateManyAsync(new[] { tenantTags, tenantUpdated }, cancellationToken: cancellationToken) + .ConfigureAwait(false); + } + + /// + /// Creates indexes for the policy_revisions collection. + /// + private static async Task EnsurePolicyRevisionsIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.PolicyRevisionsCollection); + + // Tenant + pack for finding revisions of a policy + var tenantPack = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("packId") + .Descending("version"), + new CreateIndexOptions + { + Name = "tenant_pack_version_desc" + }); + + // Status lookup for finding active/draft revisions + var tenantStatus = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("status"), + new CreateIndexOptions + { + Name = "tenant_status" + }); + + // Bundle digest lookup for integrity verification + var bundleDigest = new CreateIndexModel( + Builders.IndexKeys + .Ascending("bundleDigest"), + new CreateIndexOptions + { + Name = "bundleDigest_lookup", + PartialFilterExpression = Builders.Filter.Exists("bundleDigest", true) + }); + + await collection.Indexes.CreateManyAsync(new[] { tenantPack, tenantStatus, bundleDigest }, cancellationToken: cancellationToken) + .ConfigureAwait(false); + } + + /// + /// Creates indexes for the policy_bundles collection. + /// + private static async Task EnsurePolicyBundlesIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.PolicyBundlesCollection); + + // Tenant + pack + version for finding specific bundles + var tenantPackVersion = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("packId") + .Ascending("version"), + new CreateIndexOptions + { + Name = "tenant_pack_version", + Unique = true + }); + + await collection.Indexes.CreateManyAsync(new[] { tenantPackVersion }, cancellationToken: cancellationToken) + .ConfigureAwait(false); + } + + /// + /// Creates indexes for the policy_runs collection. + /// + private static async Task EnsurePolicyRunsIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.PolicyRunsCollection); + + // Tenant + policy + started for recent runs + var tenantPolicyStarted = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("policyId") + .Descending("startedAt"), + new CreateIndexOptions + { + Name = "tenant_policy_startedAt_desc" + }); + + // Status lookup for finding pending/running evaluations + var tenantStatus = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("status"), + new CreateIndexOptions + { + Name = "tenant_status" + }); + + // Correlation ID lookup for tracing + var correlationId = new CreateIndexModel( + Builders.IndexKeys + .Ascending("correlationId"), + new CreateIndexOptions + { + Name = "correlationId_lookup", + PartialFilterExpression = Builders.Filter.Exists("correlationId", true) + }); + + // Trace ID lookup for OpenTelemetry + var traceId = new CreateIndexModel( + Builders.IndexKeys + .Ascending("traceId"), + new CreateIndexOptions + { + Name = "traceId_lookup", + PartialFilterExpression = Builders.Filter.Exists("traceId", true) + }); + + var models = new List> + { + tenantPolicyStarted, + tenantStatus, + correlationId, + traceId + }; + + // TTL index for automatic cleanup of completed runs + if (context.Options.PolicyRunRetention > TimeSpan.Zero) + { + var ttlModel = new CreateIndexModel( + Builders.IndexKeys.Ascending("expiresAt"), + new CreateIndexOptions + { + Name = "expiresAt_ttl", + ExpireAfter = TimeSpan.Zero + }); + + models.Add(ttlModel); + } + + await collection.Indexes.CreateManyAsync(models, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + /// + /// Creates indexes for the policy_audit collection. + /// + private static async Task EnsureAuditIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.AuditCollection); + + // Tenant + occurred for chronological audit trail + var tenantOccurred = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Descending("occurredAt"), + new CreateIndexOptions + { + Name = "tenant_occurredAt_desc" + }); + + // Actor lookup for finding actions by user + var tenantActor = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("actorId"), + new CreateIndexOptions + { + Name = "tenant_actor" + }); + + // Resource lookup for finding actions on specific policy + var tenantResource = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("resourceType") + .Ascending("resourceId"), + new CreateIndexOptions + { + Name = "tenant_resource" + }); + + await collection.Indexes.CreateManyAsync(new[] { tenantOccurred, tenantActor, tenantResource }, cancellationToken: cancellationToken) + .ConfigureAwait(false); + } + + /// + /// Creates indexes for the policy_explains collection. + /// + private static async Task EnsureExplainsIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.PolicyExplainsCollection); + + // Tenant + run for finding all explains in a run + var tenantRun = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("runId"), + new CreateIndexOptions + { + Name = "tenant_runId" + }); + + // Tenant + policy + evaluated time for recent explains + var tenantPolicyEvaluated = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("policyId") + .Descending("evaluatedAt"), + new CreateIndexOptions + { + Name = "tenant_policy_evaluatedAt_desc" + }); + + // Subject hash lookup for decision linkage + var subjectHash = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("subjectHash"), + new CreateIndexOptions + { + Name = "tenant_subjectHash" + }); + + // AOC chain lookup for attestation queries + var aocCompilation = new CreateIndexModel( + Builders.IndexKeys + .Ascending("aocChain.compilationId"), + new CreateIndexOptions + { + Name = "aocChain_compilationId", + PartialFilterExpression = Builders.Filter.Exists("aocChain.compilationId", true) + }); + + var models = new List> + { + tenantRun, + tenantPolicyEvaluated, + subjectHash, + aocCompilation + }; + + // TTL index for automatic cleanup + if (context.Options.ExplainTraceRetention > TimeSpan.Zero) + { + var ttlModel = new CreateIndexModel( + Builders.IndexKeys.Ascending("expiresAt"), + new CreateIndexOptions + { + Name = "expiresAt_ttl", + ExpireAfter = TimeSpan.Zero + }); + + models.Add(ttlModel); + } + + await collection.Indexes.CreateManyAsync(models, cancellationToken: cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/IPolicyEngineMongoMigration.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/IPolicyEngineMongoMigration.cs new file mode 100644 index 000000000..7cdae3d0c --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/IPolicyEngineMongoMigration.cs @@ -0,0 +1,23 @@ +using StellaOps.Policy.Engine.Storage.Mongo.Internal; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Migrations; + +/// +/// Interface for Policy Engine MongoDB migrations. +/// Migrations are applied in lexical order by Id and tracked to ensure idempotency. +/// +internal interface IPolicyEngineMongoMigration +{ + /// + /// Unique migration identifier. + /// Format: YYYYMMDD_description_vN (e.g., "20251128_policy_collections_v1") + /// + string Id { get; } + + /// + /// Executes the migration against the Policy Engine database. + /// + /// MongoDB context with database access. + /// Cancellation token. + ValueTask ExecuteAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken); +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/PolicyEngineMigrationRecord.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/PolicyEngineMigrationRecord.cs new file mode 100644 index 000000000..34d65c7ae --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/PolicyEngineMigrationRecord.cs @@ -0,0 +1,30 @@ +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Migrations; + +/// +/// MongoDB document for tracking applied migrations. +/// Collection: _policy_migrations +/// +[BsonIgnoreExtraElements] +internal sealed class PolicyEngineMigrationRecord +{ + /// + /// MongoDB ObjectId. + /// + [BsonId] + public ObjectId Id { get; set; } + + /// + /// Unique migration identifier (matches IPolicyEngineMongoMigration.Id). + /// + [BsonElement("migrationId")] + public string MigrationId { get; set; } = string.Empty; + + /// + /// When the migration was applied. + /// + [BsonElement("appliedAt")] + public DateTimeOffset AppliedAt { get; set; } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/PolicyEngineMigrationRunner.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/PolicyEngineMigrationRunner.cs new file mode 100644 index 000000000..28b90b097 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Migrations/PolicyEngineMigrationRunner.cs @@ -0,0 +1,85 @@ +using Microsoft.Extensions.Logging; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Policy.Engine.Storage.Mongo.Internal; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Migrations; + +/// +/// Executes Policy Engine MongoDB migrations in order. +/// Tracks applied migrations to ensure idempotency. +/// +internal sealed class PolicyEngineMigrationRunner +{ + private readonly PolicyEngineMongoContext _context; + private readonly IReadOnlyList _migrations; + private readonly ILogger _logger; + + public PolicyEngineMigrationRunner( + PolicyEngineMongoContext context, + IEnumerable migrations, + ILogger logger) + { + _context = context ?? throw new ArgumentNullException(nameof(context)); + ArgumentNullException.ThrowIfNull(migrations); + _migrations = migrations.OrderBy(m => m.Id, StringComparer.Ordinal).ToArray(); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + /// + /// Runs all pending migrations. + /// + public async ValueTask RunAsync(CancellationToken cancellationToken) + { + if (_migrations.Count == 0) + { + return; + } + + var collection = _context.Database.GetCollection(_context.Options.MigrationsCollection); + await EnsureMigrationIndexAsync(collection, cancellationToken).ConfigureAwait(false); + + var applied = await collection + .Find(FilterDefinition.Empty) + .Project(record => record.MigrationId) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + var appliedSet = applied.ToHashSet(StringComparer.Ordinal); + + foreach (var migration in _migrations) + { + if (appliedSet.Contains(migration.Id)) + { + continue; + } + + _logger.LogInformation("Applying Policy Engine Mongo migration {MigrationId}.", migration.Id); + await migration.ExecuteAsync(_context, cancellationToken).ConfigureAwait(false); + + var record = new PolicyEngineMigrationRecord + { + Id = ObjectId.GenerateNewId(), + MigrationId = migration.Id, + AppliedAt = DateTimeOffset.UtcNow + }; + + await collection.InsertOneAsync(record, cancellationToken: cancellationToken).ConfigureAwait(false); + _logger.LogInformation("Completed Policy Engine Mongo migration {MigrationId}.", migration.Id); + } + } + + private static async Task EnsureMigrationIndexAsync( + IMongoCollection collection, + CancellationToken cancellationToken) + { + var keys = Builders.IndexKeys.Ascending(record => record.MigrationId); + var model = new CreateIndexModel(keys, new CreateIndexOptions + { + Name = "migrationId_unique", + Unique = true + }); + + await collection.Indexes.CreateOneAsync(model, cancellationToken: cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Options/PolicyEngineMongoOptions.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Options/PolicyEngineMongoOptions.cs new file mode 100644 index 000000000..91eccd90b --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Options/PolicyEngineMongoOptions.cs @@ -0,0 +1,140 @@ +namespace StellaOps.Policy.Engine.Storage.Mongo.Options; + +/// +/// Configures MongoDB connectivity and collection names for Policy Engine storage. +/// +public sealed class PolicyEngineMongoOptions +{ + /// + /// MongoDB connection string. + /// + public string ConnectionString { get; set; } = "mongodb://localhost:27017"; + + /// + /// Database name for policy storage. + /// + public string Database { get; set; } = "stellaops_policy"; + + /// + /// Collection name for policy packs. + /// + public string PoliciesCollection { get; set; } = "policies"; + + /// + /// Collection name for policy revisions. + /// + public string PolicyRevisionsCollection { get; set; } = "policy_revisions"; + + /// + /// Collection name for policy bundles (compiled artifacts). + /// + public string PolicyBundlesCollection { get; set; } = "policy_bundles"; + + /// + /// Collection name for policy evaluation runs. + /// + public string PolicyRunsCollection { get; set; } = "policy_runs"; + + /// + /// Collection prefix for effective findings (per-policy tenant-scoped). + /// Final collection name: {prefix}_{policyId} + /// + public string EffectiveFindingsCollectionPrefix { get; set; } = "effective_finding"; + + /// + /// Collection prefix for effective findings history (append-only). + /// Final collection name: {prefix}_{policyId} + /// + public string EffectiveFindingsHistoryCollectionPrefix { get; set; } = "effective_finding_history"; + + /// + /// Collection name for policy audit log. + /// + public string AuditCollection { get; set; } = "policy_audit"; + + /// + /// Collection name for policy explain traces. + /// + public string PolicyExplainsCollection { get; set; } = "policy_explains"; + + /// + /// Collection name for policy exceptions. + /// + public string ExceptionsCollection { get; set; } = "exceptions"; + + /// + /// Collection name for exception reviews. + /// + public string ExceptionReviewsCollection { get; set; } = "exception_reviews"; + + /// + /// Collection name for exception bindings. + /// + public string ExceptionBindingsCollection { get; set; } = "exception_bindings"; + + /// + /// Collection name for tracking applied migrations. + /// + public string MigrationsCollection { get; set; } = "_policy_migrations"; + + /// + /// TTL for completed policy runs. Zero or negative disables TTL. + /// + public TimeSpan PolicyRunRetention { get; set; } = TimeSpan.FromDays(90); + + /// + /// TTL for effective findings history entries. Zero or negative disables TTL. + /// + public TimeSpan EffectiveFindingsHistoryRetention { get; set; } = TimeSpan.FromDays(365); + + /// + /// TTL for explain traces. Zero or negative disables TTL. + /// + public TimeSpan ExplainTraceRetention { get; set; } = TimeSpan.FromDays(30); + + /// + /// Use majority read concern for consistency. + /// + public bool UseMajorityReadConcern { get; set; } = true; + + /// + /// Use majority write concern for durability. + /// + public bool UseMajorityWriteConcern { get; set; } = true; + + /// + /// Command timeout in seconds. + /// + public int CommandTimeoutSeconds { get; set; } = 30; + + /// + /// Gets the effective findings collection name for a policy. + /// + public string GetEffectiveFindingsCollectionName(string policyId) + { + var safePolicyId = SanitizeCollectionName(policyId); + return $"{EffectiveFindingsCollectionPrefix}_{safePolicyId}"; + } + + /// + /// Gets the effective findings history collection name for a policy. + /// + public string GetEffectiveFindingsHistoryCollectionName(string policyId) + { + var safePolicyId = SanitizeCollectionName(policyId); + return $"{EffectiveFindingsHistoryCollectionPrefix}_{safePolicyId}"; + } + + private static string SanitizeCollectionName(string name) + { + // Replace invalid characters with underscores + return string.Create(name.Length, name, (span, source) => + { + for (int i = 0; i < source.Length; i++) + { + var c = source[i]; + span[i] = char.IsLetterOrDigit(c) || c == '_' || c == '-' ? c : '_'; + } + }).ToLowerInvariant(); + } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Repositories/IExceptionRepository.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Repositories/IExceptionRepository.cs new file mode 100644 index 000000000..a9d5137d6 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Repositories/IExceptionRepository.cs @@ -0,0 +1,254 @@ +using System.Collections.Immutable; +using StellaOps.Policy.Engine.Storage.Mongo.Documents; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Repositories; + +/// +/// Repository interface for policy exception operations. +/// +internal interface IExceptionRepository +{ + // Exception operations + + /// + /// Creates a new exception. + /// + Task CreateExceptionAsync( + PolicyExceptionDocument exception, + CancellationToken cancellationToken); + + /// + /// Gets an exception by ID. + /// + Task GetExceptionAsync( + string tenantId, + string exceptionId, + CancellationToken cancellationToken); + + /// + /// Updates an existing exception. + /// + Task UpdateExceptionAsync( + PolicyExceptionDocument exception, + CancellationToken cancellationToken); + + /// + /// Lists exceptions with filtering and pagination. + /// + Task> ListExceptionsAsync( + string tenantId, + ExceptionQueryOptions options, + CancellationToken cancellationToken); + + /// + /// Finds active exceptions that apply to a specific asset/advisory. + /// + Task> FindApplicableExceptionsAsync( + string tenantId, + string assetId, + string? advisoryId, + DateTimeOffset evaluationTime, + CancellationToken cancellationToken); + + /// + /// Updates exception status. + /// + Task UpdateExceptionStatusAsync( + string tenantId, + string exceptionId, + string newStatus, + DateTimeOffset timestamp, + CancellationToken cancellationToken); + + /// + /// Revokes an exception. + /// + Task RevokeExceptionAsync( + string tenantId, + string exceptionId, + string revokedBy, + string? reason, + DateTimeOffset timestamp, + CancellationToken cancellationToken); + + /// + /// Gets exceptions expiring within a time window. + /// + Task> GetExpiringExceptionsAsync( + string tenantId, + DateTimeOffset from, + DateTimeOffset to, + CancellationToken cancellationToken); + + /// + /// Gets exceptions that should be auto-activated. + /// + Task> GetPendingActivationsAsync( + string tenantId, + DateTimeOffset asOf, + CancellationToken cancellationToken); + + // Review operations + + /// + /// Creates a new review for an exception. + /// + Task CreateReviewAsync( + ExceptionReviewDocument review, + CancellationToken cancellationToken); + + /// + /// Gets a review by ID. + /// + Task GetReviewAsync( + string tenantId, + string reviewId, + CancellationToken cancellationToken); + + /// + /// Adds a decision to a review. + /// + Task AddReviewDecisionAsync( + string tenantId, + string reviewId, + ReviewDecisionDocument decision, + CancellationToken cancellationToken); + + /// + /// Completes a review with final status. + /// + Task CompleteReviewAsync( + string tenantId, + string reviewId, + string finalStatus, + DateTimeOffset completedAt, + CancellationToken cancellationToken); + + /// + /// Gets reviews for an exception. + /// + Task> GetReviewsForExceptionAsync( + string tenantId, + string exceptionId, + CancellationToken cancellationToken); + + /// + /// Gets pending reviews for a reviewer. + /// + Task> GetPendingReviewsAsync( + string tenantId, + string? reviewerId, + CancellationToken cancellationToken); + + // Binding operations + + /// + /// Creates or updates a binding. + /// + Task UpsertBindingAsync( + ExceptionBindingDocument binding, + CancellationToken cancellationToken); + + /// + /// Gets bindings for an exception. + /// + Task> GetBindingsForExceptionAsync( + string tenantId, + string exceptionId, + CancellationToken cancellationToken); + + /// + /// Gets active bindings for an asset. + /// + Task> GetActiveBindingsForAssetAsync( + string tenantId, + string assetId, + DateTimeOffset asOf, + CancellationToken cancellationToken); + + /// + /// Deletes bindings for an exception. + /// + Task DeleteBindingsForExceptionAsync( + string tenantId, + string exceptionId, + CancellationToken cancellationToken); + + /// + /// Updates binding status. + /// + Task UpdateBindingStatusAsync( + string tenantId, + string bindingId, + string newStatus, + CancellationToken cancellationToken); + + /// + /// Gets expired bindings for cleanup. + /// + Task> GetExpiredBindingsAsync( + string tenantId, + DateTimeOffset asOf, + int limit, + CancellationToken cancellationToken); + + // Statistics + + /// + /// Gets exception counts by status. + /// + Task> GetExceptionCountsByStatusAsync( + string tenantId, + CancellationToken cancellationToken); +} + +/// +/// Query options for listing exceptions. +/// +public sealed record ExceptionQueryOptions +{ + /// + /// Filter by status. + /// + public ImmutableArray Statuses { get; init; } = ImmutableArray.Empty; + + /// + /// Filter by exception type. + /// + public ImmutableArray Types { get; init; } = ImmutableArray.Empty; + + /// + /// Filter by tag. + /// + public ImmutableArray Tags { get; init; } = ImmutableArray.Empty; + + /// + /// Filter by creator. + /// + public string? CreatedBy { get; init; } + + /// + /// Include expired exceptions. + /// + public bool IncludeExpired { get; init; } + + /// + /// Skip count for pagination. + /// + public int Skip { get; init; } + + /// + /// Limit for pagination (default 100). + /// + public int Limit { get; init; } = 100; + + /// + /// Sort field. + /// + public string SortBy { get; init; } = "createdAt"; + + /// + /// Sort direction (asc or desc). + /// + public string SortDirection { get; init; } = "desc"; +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Repositories/MongoExceptionRepository.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Repositories/MongoExceptionRepository.cs new file mode 100644 index 000000000..6963f6256 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Repositories/MongoExceptionRepository.cs @@ -0,0 +1,611 @@ +using System.Collections.Immutable; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Policy.Engine.Storage.Mongo.Documents; +using StellaOps.Policy.Engine.Storage.Mongo.Options; +using StellaOps.Policy.Engine.Telemetry; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Repositories; + +/// +/// MongoDB implementation of the exception repository. +/// +internal sealed class MongoExceptionRepository : IExceptionRepository +{ + private readonly IMongoDatabase _database; + private readonly PolicyEngineMongoOptions _options; + private readonly ILogger _logger; + + public MongoExceptionRepository( + IMongoClient mongoClient, + IOptions options, + ILogger logger) + { + ArgumentNullException.ThrowIfNull(mongoClient); + ArgumentNullException.ThrowIfNull(options); + _options = options.Value; + _database = mongoClient.GetDatabase(_options.Database); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + private IMongoCollection Exceptions + => _database.GetCollection(_options.ExceptionsCollection); + + private IMongoCollection Reviews + => _database.GetCollection(_options.ExceptionReviewsCollection); + + private IMongoCollection Bindings + => _database.GetCollection(_options.ExceptionBindingsCollection); + + #region Exception Operations + + public async Task CreateExceptionAsync( + PolicyExceptionDocument exception, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(exception); + + exception.TenantId = exception.TenantId.ToLowerInvariant(); + await Exceptions.InsertOneAsync(exception, cancellationToken: cancellationToken).ConfigureAwait(false); + + _logger.LogInformation( + "Created exception {ExceptionId} for tenant {TenantId}", + exception.Id, exception.TenantId); + + PolicyEngineTelemetry.RecordExceptionOperation(exception.TenantId, "create"); + + return exception; + } + + public async Task GetExceptionAsync( + string tenantId, + string exceptionId, + CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Eq(e => e.TenantId, tenantId.ToLowerInvariant()), + Builders.Filter.Eq(e => e.Id, exceptionId)); + + return await Exceptions.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + } + + public async Task UpdateExceptionAsync( + PolicyExceptionDocument exception, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(exception); + + var filter = Builders.Filter.And( + Builders.Filter.Eq(e => e.TenantId, exception.TenantId.ToLowerInvariant()), + Builders.Filter.Eq(e => e.Id, exception.Id)); + + var result = await Exceptions.ReplaceOneAsync(filter, exception, cancellationToken: cancellationToken) + .ConfigureAwait(false); + + if (result.ModifiedCount > 0) + { + _logger.LogInformation( + "Updated exception {ExceptionId} for tenant {TenantId}", + exception.Id, exception.TenantId); + PolicyEngineTelemetry.RecordExceptionOperation(exception.TenantId, "update"); + return exception; + } + + return null; + } + + public async Task> ListExceptionsAsync( + string tenantId, + ExceptionQueryOptions options, + CancellationToken cancellationToken) + { + var filterBuilder = Builders.Filter; + var filters = new List> + { + filterBuilder.Eq(e => e.TenantId, tenantId.ToLowerInvariant()) + }; + + if (options.Statuses.Length > 0) + { + filters.Add(filterBuilder.In(e => e.Status, options.Statuses)); + } + + if (options.Types.Length > 0) + { + filters.Add(filterBuilder.In(e => e.ExceptionType, options.Types)); + } + + if (options.Tags.Length > 0) + { + filters.Add(filterBuilder.AnyIn(e => e.Tags, options.Tags)); + } + + if (!string.IsNullOrEmpty(options.CreatedBy)) + { + filters.Add(filterBuilder.Eq(e => e.CreatedBy, options.CreatedBy)); + } + + if (!options.IncludeExpired) + { + var now = DateTimeOffset.UtcNow; + filters.Add(filterBuilder.Or( + filterBuilder.Eq(e => e.ExpiresAt, null), + filterBuilder.Gt(e => e.ExpiresAt, now))); + } + + var filter = filterBuilder.And(filters); + + var sort = options.SortDirection.Equals("asc", StringComparison.OrdinalIgnoreCase) + ? Builders.Sort.Ascending(options.SortBy) + : Builders.Sort.Descending(options.SortBy); + + var results = await Exceptions + .Find(filter) + .Sort(sort) + .Skip(options.Skip) + .Limit(options.Limit) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return results.ToImmutableArray(); + } + + public async Task> FindApplicableExceptionsAsync( + string tenantId, + string assetId, + string? advisoryId, + DateTimeOffset evaluationTime, + CancellationToken cancellationToken) + { + var filterBuilder = Builders.Filter; + var filters = new List> + { + filterBuilder.Eq(e => e.TenantId, tenantId.ToLowerInvariant()), + filterBuilder.Eq(e => e.Status, "active"), + filterBuilder.Or( + filterBuilder.Eq(e => e.EffectiveFrom, null), + filterBuilder.Lte(e => e.EffectiveFrom, evaluationTime)), + filterBuilder.Or( + filterBuilder.Eq(e => e.ExpiresAt, null), + filterBuilder.Gt(e => e.ExpiresAt, evaluationTime)) + }; + + // Scope matching - must match at least one criterion + var scopeFilters = new List> + { + filterBuilder.Eq("scope.applyToAll", true), + filterBuilder.AnyEq("scope.assetIds", assetId) + }; + + // Add PURL pattern matching (simplified - would need regex in production) + scopeFilters.Add(filterBuilder.Not(filterBuilder.Size("scope.purlPatterns", 0))); + + if (!string.IsNullOrEmpty(advisoryId)) + { + scopeFilters.Add(filterBuilder.AnyEq("scope.advisoryIds", advisoryId)); + } + + filters.Add(filterBuilder.Or(scopeFilters)); + + var filter = filterBuilder.And(filters); + + var results = await Exceptions + .Find(filter) + .Sort(Builders.Sort.Descending(e => e.Priority)) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return results.ToImmutableArray(); + } + + public async Task UpdateExceptionStatusAsync( + string tenantId, + string exceptionId, + string newStatus, + DateTimeOffset timestamp, + CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Eq(e => e.TenantId, tenantId.ToLowerInvariant()), + Builders.Filter.Eq(e => e.Id, exceptionId)); + + var updateBuilder = Builders.Update; + var updates = new List> + { + updateBuilder.Set(e => e.Status, newStatus), + updateBuilder.Set(e => e.UpdatedAt, timestamp) + }; + + if (newStatus == "active") + { + updates.Add(updateBuilder.Set(e => e.ActivatedAt, timestamp)); + } + + var update = updateBuilder.Combine(updates); + var result = await Exceptions.UpdateOneAsync(filter, update, cancellationToken: cancellationToken) + .ConfigureAwait(false); + + if (result.ModifiedCount > 0) + { + _logger.LogInformation( + "Updated exception {ExceptionId} status to {Status} for tenant {TenantId}", + exceptionId, newStatus, tenantId); + PolicyEngineTelemetry.RecordExceptionOperation(tenantId, $"status_{newStatus}"); + } + + return result.ModifiedCount > 0; + } + + public async Task RevokeExceptionAsync( + string tenantId, + string exceptionId, + string revokedBy, + string? reason, + DateTimeOffset timestamp, + CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Eq(e => e.TenantId, tenantId.ToLowerInvariant()), + Builders.Filter.Eq(e => e.Id, exceptionId)); + + var update = Builders.Update + .Set(e => e.Status, "revoked") + .Set(e => e.RevokedAt, timestamp) + .Set(e => e.RevokedBy, revokedBy) + .Set(e => e.RevocationReason, reason) + .Set(e => e.UpdatedAt, timestamp); + + var result = await Exceptions.UpdateOneAsync(filter, update, cancellationToken: cancellationToken) + .ConfigureAwait(false); + + if (result.ModifiedCount > 0) + { + _logger.LogInformation( + "Revoked exception {ExceptionId} by {RevokedBy} for tenant {TenantId}", + exceptionId, revokedBy, tenantId); + PolicyEngineTelemetry.RecordExceptionOperation(tenantId, "revoke"); + } + + return result.ModifiedCount > 0; + } + + public async Task> GetExpiringExceptionsAsync( + string tenantId, + DateTimeOffset from, + DateTimeOffset to, + CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Eq(e => e.TenantId, tenantId.ToLowerInvariant()), + Builders.Filter.Eq(e => e.Status, "active"), + Builders.Filter.Gte(e => e.ExpiresAt, from), + Builders.Filter.Lte(e => e.ExpiresAt, to)); + + var results = await Exceptions + .Find(filter) + .Sort(Builders.Sort.Ascending(e => e.ExpiresAt)) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return results.ToImmutableArray(); + } + + public async Task> GetPendingActivationsAsync( + string tenantId, + DateTimeOffset asOf, + CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Eq(e => e.TenantId, tenantId.ToLowerInvariant()), + Builders.Filter.Eq(e => e.Status, "approved"), + Builders.Filter.Lte(e => e.EffectiveFrom, asOf)); + + var results = await Exceptions + .Find(filter) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return results.ToImmutableArray(); + } + + #endregion + + #region Review Operations + + public async Task CreateReviewAsync( + ExceptionReviewDocument review, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(review); + + review.TenantId = review.TenantId.ToLowerInvariant(); + await Reviews.InsertOneAsync(review, cancellationToken: cancellationToken).ConfigureAwait(false); + + _logger.LogInformation( + "Created review {ReviewId} for exception {ExceptionId}, tenant {TenantId}", + review.Id, review.ExceptionId, review.TenantId); + + PolicyEngineTelemetry.RecordExceptionOperation(review.TenantId, "review_create"); + + return review; + } + + public async Task GetReviewAsync( + string tenantId, + string reviewId, + CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Eq(r => r.TenantId, tenantId.ToLowerInvariant()), + Builders.Filter.Eq(r => r.Id, reviewId)); + + return await Reviews.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + } + + public async Task AddReviewDecisionAsync( + string tenantId, + string reviewId, + ReviewDecisionDocument decision, + CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Eq(r => r.TenantId, tenantId.ToLowerInvariant()), + Builders.Filter.Eq(r => r.Id, reviewId), + Builders.Filter.Eq(r => r.Status, "pending")); + + var update = Builders.Update + .Push(r => r.Decisions, decision); + + var options = new FindOneAndUpdateOptions + { + ReturnDocument = ReturnDocument.After + }; + + var result = await Reviews.FindOneAndUpdateAsync(filter, update, options, cancellationToken) + .ConfigureAwait(false); + + if (result is not null) + { + _logger.LogInformation( + "Added decision from {ReviewerId} to review {ReviewId} for tenant {TenantId}", + decision.ReviewerId, reviewId, tenantId); + PolicyEngineTelemetry.RecordExceptionOperation(tenantId, $"review_decision_{decision.Decision}"); + } + + return result; + } + + public async Task CompleteReviewAsync( + string tenantId, + string reviewId, + string finalStatus, + DateTimeOffset completedAt, + CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Eq(r => r.TenantId, tenantId.ToLowerInvariant()), + Builders.Filter.Eq(r => r.Id, reviewId)); + + var update = Builders.Update + .Set(r => r.Status, finalStatus) + .Set(r => r.CompletedAt, completedAt); + + var options = new FindOneAndUpdateOptions + { + ReturnDocument = ReturnDocument.After + }; + + var result = await Reviews.FindOneAndUpdateAsync(filter, update, options, cancellationToken) + .ConfigureAwait(false); + + if (result is not null) + { + _logger.LogInformation( + "Completed review {ReviewId} with status {Status} for tenant {TenantId}", + reviewId, finalStatus, tenantId); + PolicyEngineTelemetry.RecordExceptionOperation(tenantId, $"review_complete_{finalStatus}"); + } + + return result; + } + + public async Task> GetReviewsForExceptionAsync( + string tenantId, + string exceptionId, + CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Eq(r => r.TenantId, tenantId.ToLowerInvariant()), + Builders.Filter.Eq(r => r.ExceptionId, exceptionId)); + + var results = await Reviews + .Find(filter) + .Sort(Builders.Sort.Descending(r => r.RequestedAt)) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return results.ToImmutableArray(); + } + + public async Task> GetPendingReviewsAsync( + string tenantId, + string? reviewerId, + CancellationToken cancellationToken) + { + var filterBuilder = Builders.Filter; + var filters = new List> + { + filterBuilder.Eq(r => r.TenantId, tenantId.ToLowerInvariant()), + filterBuilder.Eq(r => r.Status, "pending") + }; + + if (!string.IsNullOrEmpty(reviewerId)) + { + filters.Add(filterBuilder.AnyEq(r => r.DesignatedReviewers, reviewerId)); + } + + var filter = filterBuilder.And(filters); + + var results = await Reviews + .Find(filter) + .Sort(Builders.Sort.Ascending(r => r.Deadline)) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return results.ToImmutableArray(); + } + + #endregion + + #region Binding Operations + + public async Task UpsertBindingAsync( + ExceptionBindingDocument binding, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(binding); + + binding.TenantId = binding.TenantId.ToLowerInvariant(); + + var filter = Builders.Filter.And( + Builders.Filter.Eq(b => b.TenantId, binding.TenantId), + Builders.Filter.Eq(b => b.Id, binding.Id)); + + var options = new ReplaceOptions { IsUpsert = true }; + await Bindings.ReplaceOneAsync(filter, binding, options, cancellationToken).ConfigureAwait(false); + + _logger.LogDebug( + "Upserted binding {BindingId} for tenant {TenantId}", + binding.Id, binding.TenantId); + + return binding; + } + + public async Task> GetBindingsForExceptionAsync( + string tenantId, + string exceptionId, + CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Eq(b => b.TenantId, tenantId.ToLowerInvariant()), + Builders.Filter.Eq(b => b.ExceptionId, exceptionId)); + + var results = await Bindings + .Find(filter) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return results.ToImmutableArray(); + } + + public async Task> GetActiveBindingsForAssetAsync( + string tenantId, + string assetId, + DateTimeOffset asOf, + CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Eq(b => b.TenantId, tenantId.ToLowerInvariant()), + Builders.Filter.Eq(b => b.AssetId, assetId), + Builders.Filter.Eq(b => b.Status, "active"), + Builders.Filter.Lte(b => b.EffectiveFrom, asOf), + Builders.Filter.Or( + Builders.Filter.Eq(b => b.ExpiresAt, null), + Builders.Filter.Gt(b => b.ExpiresAt, asOf))); + + var results = await Bindings + .Find(filter) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return results.ToImmutableArray(); + } + + public async Task DeleteBindingsForExceptionAsync( + string tenantId, + string exceptionId, + CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Eq(b => b.TenantId, tenantId.ToLowerInvariant()), + Builders.Filter.Eq(b => b.ExceptionId, exceptionId)); + + var result = await Bindings.DeleteManyAsync(filter, cancellationToken).ConfigureAwait(false); + + _logger.LogInformation( + "Deleted {Count} bindings for exception {ExceptionId} tenant {TenantId}", + result.DeletedCount, exceptionId, tenantId); + + return result.DeletedCount; + } + + public async Task UpdateBindingStatusAsync( + string tenantId, + string bindingId, + string newStatus, + CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Eq(b => b.TenantId, tenantId.ToLowerInvariant()), + Builders.Filter.Eq(b => b.Id, bindingId)); + + var update = Builders.Update.Set(b => b.Status, newStatus); + + var result = await Bindings.UpdateOneAsync(filter, update, cancellationToken: cancellationToken) + .ConfigureAwait(false); + + return result.ModifiedCount > 0; + } + + public async Task> GetExpiredBindingsAsync( + string tenantId, + DateTimeOffset asOf, + int limit, + CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Eq(b => b.TenantId, tenantId.ToLowerInvariant()), + Builders.Filter.Eq(b => b.Status, "active"), + Builders.Filter.Lt(b => b.ExpiresAt, asOf)); + + var results = await Bindings + .Find(filter) + .Limit(limit) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return results.ToImmutableArray(); + } + + #endregion + + #region Statistics + + public async Task> GetExceptionCountsByStatusAsync( + string tenantId, + CancellationToken cancellationToken) + { + var pipeline = new BsonDocument[] + { + new("$match", new BsonDocument("tenantId", tenantId.ToLowerInvariant())), + new("$group", new BsonDocument + { + { "_id", "$status" }, + { "count", new BsonDocument("$sum", 1) } + }) + }; + + var results = await Exceptions + .Aggregate(pipeline, cancellationToken: cancellationToken) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return results.ToDictionary( + r => r["_id"].AsString, + r => r["count"].AsInt32); + } + + #endregion +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Repositories/MongoPolicyPackRepository.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Repositories/MongoPolicyPackRepository.cs new file mode 100644 index 000000000..5450bf664 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/Repositories/MongoPolicyPackRepository.cs @@ -0,0 +1,496 @@ +using System.Collections.Immutable; +using Microsoft.Extensions.Logging; +using MongoDB.Driver; +using StellaOps.Policy.Engine.Domain; +using StellaOps.Policy.Engine.Services; +using StellaOps.Policy.Engine.Storage.Mongo.Internal; + +// Alias to disambiguate from StellaOps.Policy.PolicyDocument (compiled policy IR) +using PolicyPackDocument = StellaOps.Policy.Engine.Storage.Mongo.Documents.PolicyDocument; +using PolicyRevisionDoc = StellaOps.Policy.Engine.Storage.Mongo.Documents.PolicyRevisionDocument; +using PolicyBundleDoc = StellaOps.Policy.Engine.Storage.Mongo.Documents.PolicyBundleDocument; +using PolicyApprovalRec = StellaOps.Policy.Engine.Storage.Mongo.Documents.PolicyApprovalRecord; +using PolicyAocMetadataDoc = StellaOps.Policy.Engine.Storage.Mongo.Documents.PolicyAocMetadataDocument; +using PolicyProvenanceDoc = StellaOps.Policy.Engine.Storage.Mongo.Documents.PolicyProvenanceDocument; +using PolicyAttestationRefDoc = StellaOps.Policy.Engine.Storage.Mongo.Documents.PolicyAttestationRefDocument; + +namespace StellaOps.Policy.Engine.Storage.Mongo.Repositories; + +/// +/// MongoDB implementation of policy pack repository with tenant scoping. +/// +internal sealed class MongoPolicyPackRepository : IPolicyPackRepository +{ + private readonly PolicyEngineMongoContext _context; + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + private readonly string _tenantId; + + public MongoPolicyPackRepository( + PolicyEngineMongoContext context, + ILogger logger, + TimeProvider timeProvider, + string tenantId) + { + _context = context ?? throw new ArgumentNullException(nameof(context)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _tenantId = tenantId?.ToLowerInvariant() ?? throw new ArgumentNullException(nameof(tenantId)); + } + + private IMongoCollection Policies => + _context.Database.GetCollection(_context.Options.PoliciesCollection); + + private IMongoCollection Revisions => + _context.Database.GetCollection(_context.Options.PolicyRevisionsCollection); + + private IMongoCollection Bundles => + _context.Database.GetCollection(_context.Options.PolicyBundlesCollection); + + /// + public async Task CreateAsync(string packId, string? displayName, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(packId); + + var now = _timeProvider.GetUtcNow(); + var document = new PolicyPackDocument + { + Id = packId, + TenantId = _tenantId, + DisplayName = displayName, + LatestVersion = 0, + CreatedAt = now, + UpdatedAt = now + }; + + try + { + await Policies.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + _logger.LogDebug("Created policy pack {PackId} for tenant {TenantId}", packId, _tenantId); + } + catch (MongoWriteException ex) when (ex.WriteError.Category == ServerErrorCategory.DuplicateKey) + { + _logger.LogDebug("Policy pack {PackId} already exists for tenant {TenantId}", packId, _tenantId); + var existing = await Policies.Find(p => p.Id == packId && p.TenantId == _tenantId) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + if (existing is null) + { + throw new InvalidOperationException($"Policy pack {packId} exists but not for tenant {_tenantId}"); + } + + return ToDomain(existing); + } + + return ToDomain(document); + } + + /// + public async Task> ListAsync(CancellationToken cancellationToken) + { + var documents = await Policies + .Find(p => p.TenantId == _tenantId) + .SortBy(p => p.Id) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return documents.Select(ToDomain).ToList().AsReadOnly(); + } + + /// + public async Task UpsertRevisionAsync( + string packId, + int version, + bool requiresTwoPersonApproval, + PolicyRevisionStatus initialStatus, + CancellationToken cancellationToken) + { + var now = _timeProvider.GetUtcNow(); + + // Ensure pack exists + var pack = await Policies.Find(p => p.Id == packId && p.TenantId == _tenantId) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + if (pack is null) + { + pack = new PolicyPackDocument + { + Id = packId, + TenantId = _tenantId, + LatestVersion = 0, + CreatedAt = now, + UpdatedAt = now + }; + + try + { + await Policies.InsertOneAsync(pack, cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (MongoWriteException ex) when (ex.WriteError.Category == ServerErrorCategory.DuplicateKey) + { + pack = await Policies.Find(p => p.Id == packId && p.TenantId == _tenantId) + .FirstAsync(cancellationToken) + .ConfigureAwait(false); + } + } + + // Determine version + var targetVersion = version > 0 ? version : pack.LatestVersion + 1; + var revisionId = PolicyRevisionDoc.CreateId(packId, targetVersion); + + // Upsert revision + var filter = Builders.Filter.Eq(r => r.Id, revisionId); + var update = Builders.Update + .SetOnInsert(r => r.Id, revisionId) + .SetOnInsert(r => r.TenantId, _tenantId) + .SetOnInsert(r => r.PackId, packId) + .SetOnInsert(r => r.Version, targetVersion) + .SetOnInsert(r => r.RequiresTwoPersonApproval, requiresTwoPersonApproval) + .SetOnInsert(r => r.CreatedAt, now) + .Set(r => r.Status, initialStatus.ToString()); + + var options = new FindOneAndUpdateOptions + { + IsUpsert = true, + ReturnDocument = ReturnDocument.After + }; + + var revision = await Revisions.FindOneAndUpdateAsync(filter, update, options, cancellationToken) + .ConfigureAwait(false); + + // Update pack latest version + if (targetVersion > pack.LatestVersion) + { + await Policies.UpdateOneAsync( + p => p.Id == packId && p.TenantId == _tenantId, + Builders.Update + .Set(p => p.LatestVersion, targetVersion) + .Set(p => p.UpdatedAt, now), + cancellationToken: cancellationToken) + .ConfigureAwait(false); + } + + _logger.LogDebug( + "Upserted revision {PackId}:{Version} for tenant {TenantId}", + packId, targetVersion, _tenantId); + + return ToDomain(revision); + } + + /// + public async Task GetRevisionAsync(string packId, int version, CancellationToken cancellationToken) + { + var revisionId = PolicyRevisionDoc.CreateId(packId, version); + var revision = await Revisions + .Find(r => r.Id == revisionId && r.TenantId == _tenantId) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + if (revision is null) + { + return null; + } + + // Load bundle if referenced + PolicyBundleDoc? bundle = null; + if (!string.IsNullOrEmpty(revision.BundleId)) + { + bundle = await Bundles + .Find(b => b.Id == revision.BundleId && b.TenantId == _tenantId) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + } + + return ToDomain(revision, bundle); + } + + /// + public async Task RecordActivationAsync( + string packId, + int version, + string actorId, + DateTimeOffset timestamp, + string? comment, + CancellationToken cancellationToken) + { + var revisionId = PolicyRevisionDoc.CreateId(packId, version); + + // Get current revision + var revision = await Revisions + .Find(r => r.Id == revisionId && r.TenantId == _tenantId) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + if (revision is null) + { + var pack = await Policies.Find(p => p.Id == packId && p.TenantId == _tenantId) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + return pack is null + ? new PolicyActivationResult(PolicyActivationResultStatus.PackNotFound, null) + : new PolicyActivationResult(PolicyActivationResultStatus.RevisionNotFound, null); + } + + if (revision.Status == PolicyRevisionStatus.Active.ToString()) + { + return new PolicyActivationResult(PolicyActivationResultStatus.AlreadyActive, ToDomain(revision)); + } + + if (revision.Status != PolicyRevisionStatus.Approved.ToString()) + { + return new PolicyActivationResult(PolicyActivationResultStatus.NotApproved, ToDomain(revision)); + } + + // Check for duplicate approval + if (revision.Approvals.Any(a => a.ActorId.Equals(actorId, StringComparison.OrdinalIgnoreCase))) + { + return new PolicyActivationResult(PolicyActivationResultStatus.DuplicateApproval, ToDomain(revision)); + } + + // Add approval + var approval = new PolicyApprovalRec + { + ActorId = actorId, + ApprovedAt = timestamp, + Comment = comment + }; + + var approvalUpdate = Builders.Update.Push(r => r.Approvals, approval); + await Revisions.UpdateOneAsync(r => r.Id == revisionId, approvalUpdate, cancellationToken: cancellationToken) + .ConfigureAwait(false); + + revision.Approvals.Add(approval); + + // Check if we have enough approvals + var approvalCount = revision.Approvals.Count; + if (revision.RequiresTwoPersonApproval && approvalCount < 2) + { + return new PolicyActivationResult(PolicyActivationResultStatus.PendingSecondApproval, ToDomain(revision)); + } + + // Activate + var activateUpdate = Builders.Update + .Set(r => r.Status, PolicyRevisionStatus.Active.ToString()) + .Set(r => r.ActivatedAt, timestamp); + + await Revisions.UpdateOneAsync(r => r.Id == revisionId, activateUpdate, cancellationToken: cancellationToken) + .ConfigureAwait(false); + + // Update pack active version + await Policies.UpdateOneAsync( + p => p.Id == packId && p.TenantId == _tenantId, + Builders.Update + .Set(p => p.ActiveVersion, version) + .Set(p => p.UpdatedAt, timestamp), + cancellationToken: cancellationToken) + .ConfigureAwait(false); + + revision.Status = PolicyRevisionStatus.Active.ToString(); + revision.ActivatedAt = timestamp; + + _logger.LogInformation( + "Activated revision {PackId}:{Version} for tenant {TenantId} by {ActorId}", + packId, version, _tenantId, actorId); + + return new PolicyActivationResult(PolicyActivationResultStatus.Activated, ToDomain(revision)); + } + + /// + public async Task StoreBundleAsync( + string packId, + int version, + PolicyBundleRecord bundle, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(bundle); + + var now = _timeProvider.GetUtcNow(); + + // Ensure revision exists + await UpsertRevisionAsync(packId, version, requiresTwoPersonApproval: false, PolicyRevisionStatus.Draft, cancellationToken) + .ConfigureAwait(false); + + // Create bundle document + var bundleDoc = new PolicyBundleDoc + { + Id = bundle.Digest, + TenantId = _tenantId, + PackId = packId, + Version = version, + Signature = bundle.Signature, + SizeBytes = bundle.Size, + Payload = bundle.Payload.ToArray(), + CreatedAt = bundle.CreatedAt, + AocMetadata = bundle.AocMetadata is not null ? ToDocument(bundle.AocMetadata) : null + }; + + // Upsert bundle + await Bundles.ReplaceOneAsync( + b => b.Id == bundle.Digest && b.TenantId == _tenantId, + bundleDoc, + new ReplaceOptions { IsUpsert = true }, + cancellationToken) + .ConfigureAwait(false); + + // Link revision to bundle + var revisionId = PolicyRevisionDoc.CreateId(packId, version); + await Revisions.UpdateOneAsync( + r => r.Id == revisionId && r.TenantId == _tenantId, + Builders.Update + .Set(r => r.BundleId, bundle.Digest) + .Set(r => r.BundleDigest, bundle.Digest), + cancellationToken: cancellationToken) + .ConfigureAwait(false); + + _logger.LogDebug( + "Stored bundle {Digest} for {PackId}:{Version} tenant {TenantId}", + bundle.Digest, packId, version, _tenantId); + + return bundle; + } + + /// + public async Task GetBundleAsync(string packId, int version, CancellationToken cancellationToken) + { + var bundle = await Bundles + .Find(b => b.PackId == packId && b.Version == version && b.TenantId == _tenantId) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + return bundle is null ? null : ToDomain(bundle); + } + + #region Mapping + + private static PolicyPackRecord ToDomain(PolicyPackDocument doc) + { + return new PolicyPackRecord(doc.Id, doc.DisplayName, doc.CreatedAt); + } + + private static PolicyRevisionRecord ToDomain(PolicyRevisionDoc doc, PolicyBundleDoc? bundleDoc = null) + { + var status = Enum.TryParse(doc.Status, ignoreCase: true, out var s) + ? s + : PolicyRevisionStatus.Draft; + + var revision = new PolicyRevisionRecord(doc.Version, doc.RequiresTwoPersonApproval, status, doc.CreatedAt); + + if (doc.ActivatedAt.HasValue) + { + revision.SetStatus(PolicyRevisionStatus.Active, doc.ActivatedAt.Value); + } + + foreach (var approval in doc.Approvals) + { + revision.AddApproval(new PolicyActivationApproval(approval.ActorId, approval.ApprovedAt, approval.Comment)); + } + + if (bundleDoc is not null) + { + revision.SetBundle(ToDomain(bundleDoc)); + } + + return revision; + } + + private static PolicyBundleRecord ToDomain(PolicyBundleDoc doc) + { + PolicyAocMetadata? aocMetadata = null; + if (doc.AocMetadata is not null) + { + var aoc = doc.AocMetadata; + PolicyProvenance? provenance = null; + if (aoc.Provenance is not null) + { + var p = aoc.Provenance; + provenance = new PolicyProvenance( + p.SourceType, + p.SourceUrl, + p.Submitter, + p.CommitSha, + p.Branch, + p.IngestedAt); + } + + PolicyAttestationRef? attestationRef = null; + if (aoc.AttestationRef is not null) + { + var a = aoc.AttestationRef; + attestationRef = new PolicyAttestationRef( + a.AttestationId, + a.EnvelopeDigest, + a.Uri, + a.SigningKeyId, + a.CreatedAt); + } + + aocMetadata = new PolicyAocMetadata( + aoc.CompilationId, + aoc.CompilerVersion, + aoc.CompiledAt, + aoc.SourceDigest, + aoc.ArtifactDigest, + aoc.ComplexityScore, + aoc.RuleCount, + aoc.DurationMilliseconds, + provenance, + attestationRef); + } + + return new PolicyBundleRecord( + doc.Id, + doc.Signature, + doc.SizeBytes, + doc.CreatedAt, + doc.Payload.ToImmutableArray(), + CompiledDocument: null, // Cannot serialize IR document to/from Mongo + aocMetadata); + } + + private static PolicyAocMetadataDoc ToDocument(PolicyAocMetadata aoc) + { + return new PolicyAocMetadataDoc + { + CompilationId = aoc.CompilationId, + CompilerVersion = aoc.CompilerVersion, + CompiledAt = aoc.CompiledAt, + SourceDigest = aoc.SourceDigest, + ArtifactDigest = aoc.ArtifactDigest, + ComplexityScore = aoc.ComplexityScore, + RuleCount = aoc.RuleCount, + DurationMilliseconds = aoc.DurationMilliseconds, + Provenance = aoc.Provenance is not null ? ToDocument(aoc.Provenance) : null, + AttestationRef = aoc.AttestationRef is not null ? ToDocument(aoc.AttestationRef) : null + }; + } + + private static PolicyProvenanceDoc ToDocument(PolicyProvenance p) + { + return new PolicyProvenanceDoc + { + SourceType = p.SourceType, + SourceUrl = p.SourceUrl, + Submitter = p.Submitter, + CommitSha = p.CommitSha, + Branch = p.Branch, + IngestedAt = p.IngestedAt + }; + } + + private static PolicyAttestationRefDoc ToDocument(PolicyAttestationRef a) + { + return new PolicyAttestationRefDoc + { + AttestationId = a.AttestationId, + EnvelopeDigest = a.EnvelopeDigest, + Uri = a.Uri, + SigningKeyId = a.SigningKeyId, + CreatedAt = a.CreatedAt + }; + } + + #endregion +} diff --git a/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/ServiceCollectionExtensions.cs b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/ServiceCollectionExtensions.cs new file mode 100644 index 000000000..9ea98cb53 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Storage/Mongo/ServiceCollectionExtensions.cs @@ -0,0 +1,72 @@ +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Policy.Engine.Storage.Mongo.Internal; +using StellaOps.Policy.Engine.Storage.Mongo.Migrations; +using StellaOps.Policy.Engine.Storage.Mongo.Options; +using StellaOps.Policy.Engine.Storage.Mongo.Repositories; + +namespace StellaOps.Policy.Engine.Storage.Mongo; + +/// +/// Extension methods for registering Policy Engine MongoDB storage services. +/// +public static class ServiceCollectionExtensions +{ + /// + /// Adds Policy Engine MongoDB storage services to the service collection. + /// + /// The service collection. + /// Optional configuration action for PolicyEngineMongoOptions. + /// The service collection for chaining. + public static IServiceCollection AddPolicyEngineMongoStorage( + this IServiceCollection services, + Action? configure = null) + { + ArgumentNullException.ThrowIfNull(services); + + // Register options + if (configure is not null) + { + services.Configure(configure); + } + + // Register context (singleton for connection pooling) + services.AddSingleton(); + + // Register migrations + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + + // Register migration runner + services.AddSingleton(); + + // Register initializer + services.AddSingleton(); + + // Register dynamic collection initializer for effective findings + services.AddSingleton(); + + // Register repositories + services.AddSingleton(); + + return services; + } + + /// + /// Adds Policy Engine MongoDB storage services with configuration binding from a configuration section. + /// + /// The service collection. + /// Configuration section containing PolicyEngineMongoOptions. + /// The service collection for chaining. + public static IServiceCollection AddPolicyEngineMongoStorage( + this IServiceCollection services, + Microsoft.Extensions.Configuration.IConfigurationSection configuration) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services.Configure(configuration); + + return services.AddPolicyEngineMongoStorage(configure: null); + } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Telemetry/PolicyEngineTelemetry.cs b/src/Policy/StellaOps.Policy.Engine/Telemetry/PolicyEngineTelemetry.cs index e3efb20d9..8c06bfba7 100644 --- a/src/Policy/StellaOps.Policy.Engine/Telemetry/PolicyEngineTelemetry.cs +++ b/src/Policy/StellaOps.Policy.Engine/Telemetry/PolicyEngineTelemetry.cs @@ -291,6 +291,90 @@ public static class PolicyEngineTelemetry /// public static Counter ProfileEventsPublished => ProfileEventsPublishedCounter; + // Counter: policy_events_processed_total + private static readonly Counter PolicyEventsProcessedCounter = + Meter.CreateCounter( + "policy_events_processed_total", + unit: "events", + description: "Total policy change events processed."); + + /// + /// Counter for policy change events processed. + /// + public static Counter PolicyEventsProcessed => PolicyEventsProcessedCounter; + + // Counter: policy_effective_events_published_total + private static readonly Counter PolicyEffectiveEventsPublishedCounter = + Meter.CreateCounter( + "policy_effective_events_published_total", + unit: "events", + description: "Total policy.effective.* events published."); + + /// + /// Counter for policy effective events published. + /// + public static Counter PolicyEffectiveEventsPublished => PolicyEffectiveEventsPublishedCounter; + + // Counter: policy_reevaluation_jobs_scheduled_total + private static readonly Counter ReEvaluationJobsScheduledCounter = + Meter.CreateCounter( + "policy_reevaluation_jobs_scheduled_total", + unit: "jobs", + description: "Total re-evaluation jobs scheduled."); + + /// + /// Counter for re-evaluation jobs scheduled. + /// + public static Counter ReEvaluationJobsScheduled => ReEvaluationJobsScheduledCounter; + + // Counter: policy_explain_traces_stored_total + private static readonly Counter ExplainTracesStoredCounter = + Meter.CreateCounter( + "policy_explain_traces_stored_total", + unit: "traces", + description: "Total explain traces stored for decision audit."); + + /// + /// Counter for explain traces stored. + /// + public static Counter ExplainTracesStored => ExplainTracesStoredCounter; + + // Counter: policy_effective_decision_map_operations_total + private static readonly Counter EffectiveDecisionMapOperationsCounter = + Meter.CreateCounter( + "policy_effective_decision_map_operations_total", + unit: "operations", + description: "Total effective decision map operations (set, get, invalidate)."); + + /// + /// Counter for effective decision map operations. + /// + public static Counter EffectiveDecisionMapOperations => EffectiveDecisionMapOperationsCounter; + + // Counter: policy_exception_operations_total{tenant,operation} + private static readonly Counter ExceptionOperationsCounter = + Meter.CreateCounter( + "policy_exception_operations_total", + unit: "operations", + description: "Total policy exception operations (create, update, revoke, review_*)."); + + /// + /// Counter for policy exception operations. + /// + public static Counter ExceptionOperations => ExceptionOperationsCounter; + + // Counter: policy_exception_cache_operations_total{tenant,operation} + private static readonly Counter ExceptionCacheOperationsCounter = + Meter.CreateCounter( + "policy_exception_cache_operations_total", + unit: "operations", + description: "Total exception cache operations (hit, miss, set, warm, invalidate)."); + + /// + /// Counter for exception cache operations. + /// + public static Counter ExceptionCacheOperations => ExceptionCacheOperationsCounter; + #endregion #region Reachability Metrics @@ -506,6 +590,38 @@ public static class PolicyEngineTelemetry PolicySimulationCounter.Add(1, tags); } + /// + /// Records a policy exception operation. + /// + /// Tenant identifier. + /// Operation type (create, update, revoke, review_create, review_decision_*, etc.). + public static void RecordExceptionOperation(string tenant, string operation) + { + var tags = new TagList + { + { "tenant", NormalizeTenant(tenant) }, + { "operation", NormalizeTag(operation) }, + }; + + ExceptionOperationsCounter.Add(1, tags); + } + + /// + /// Records an exception cache operation. + /// + /// Tenant identifier. + /// Operation type (hit, miss, set, warm, invalidate_*, event_*). + public static void RecordExceptionCacheOperation(string tenant, string operation) + { + var tags = new TagList + { + { "tenant", NormalizeTenant(tenant) }, + { "operation", NormalizeTag(operation) }, + }; + + ExceptionCacheOperationsCounter.Add(1, tags); + } + #region Golden Signals - Recording Methods /// diff --git a/src/Policy/StellaOps.Policy.Engine/Telemetry/PolicyEvaluationAttestation.cs b/src/Policy/StellaOps.Policy.Engine/Telemetry/PolicyEvaluationAttestation.cs index 0acd2a9a3..97c8ae6df 100644 --- a/src/Policy/StellaOps.Policy.Engine/Telemetry/PolicyEvaluationAttestation.cs +++ b/src/Policy/StellaOps.Policy.Engine/Telemetry/PolicyEvaluationAttestation.cs @@ -127,7 +127,7 @@ public sealed class PolicyEvaluationPredicate /// Environment information. /// [JsonPropertyName("environment")] - public required PolicyEvaluationEnvironment Environment { get; init; } + public required AttestationEnvironment Environment { get; init; } } /// @@ -167,9 +167,9 @@ public sealed class PolicyEvaluationMetrics } /// -/// Environment information for the evaluation. +/// Environment information for the attestation. /// -public sealed class PolicyEvaluationEnvironment +public sealed class AttestationEnvironment { [JsonPropertyName("serviceVersion")] public required string ServiceVersion { get; init; } @@ -243,7 +243,7 @@ public sealed class PolicyEvaluationAttestationService VexOverridesApplied = vexOverridesApplied, DurationSeconds = durationSeconds, }, - Environment = new PolicyEvaluationEnvironment + Environment = new AttestationEnvironment { ServiceVersion = serviceVersion, HostId = Environment.MachineName, @@ -338,7 +338,7 @@ public sealed class DsseEnvelopeRequest [JsonSerializable(typeof(InTotoSubject))] [JsonSerializable(typeof(EvidenceBundleRef))] [JsonSerializable(typeof(PolicyEvaluationMetrics))] -[JsonSerializable(typeof(PolicyEvaluationEnvironment))] +[JsonSerializable(typeof(AttestationEnvironment))] [JsonSourceGenerationOptions( WriteIndented = false, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)] diff --git a/src/Policy/StellaOps.Policy.Engine/WhatIfSimulation/WhatIfSimulationModels.cs b/src/Policy/StellaOps.Policy.Engine/WhatIfSimulation/WhatIfSimulationModels.cs new file mode 100644 index 000000000..fc8994e24 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/WhatIfSimulation/WhatIfSimulationModels.cs @@ -0,0 +1,371 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.Policy.Engine.WhatIfSimulation; + +/// +/// Request for what-if simulation supporting hypothetical SBOM diffs and draft policies. +/// +public sealed record WhatIfSimulationRequest +{ + /// + /// Tenant identifier. + /// + [JsonPropertyName("tenant_id")] + public required string TenantId { get; init; } + + /// + /// Base snapshot ID to apply diffs to. + /// + [JsonPropertyName("base_snapshot_id")] + public required string BaseSnapshotId { get; init; } + + /// + /// Active policy pack ID to use as baseline. + /// If DraftPolicy is provided, this will be compared against. + /// + [JsonPropertyName("baseline_pack_id")] + public string? BaselinePackId { get; init; } + + /// + /// Baseline policy version. If null, uses active version. + /// + [JsonPropertyName("baseline_pack_version")] + public int? BaselinePackVersion { get; init; } + + /// + /// Draft policy to simulate (not yet activated). + /// If null, uses baseline policy. + /// + [JsonPropertyName("draft_policy")] + public WhatIfDraftPolicy? DraftPolicy { get; init; } + + /// + /// SBOM diffs to apply hypothetically. + /// + [JsonPropertyName("sbom_diffs")] + public ImmutableArray SbomDiffs { get; init; } = ImmutableArray.Empty; + + /// + /// Specific component PURLs to evaluate. If empty, evaluates affected by diffs. + /// + [JsonPropertyName("target_purls")] + public ImmutableArray TargetPurls { get; init; } = ImmutableArray.Empty; + + /// + /// Maximum number of components to evaluate. + /// + [JsonPropertyName("limit")] + public int Limit { get; init; } = 1000; + + /// + /// Whether to include detailed explanations for each decision. + /// + [JsonPropertyName("include_explanations")] + public bool IncludeExplanations { get; init; } = false; + + /// + /// Correlation ID for tracing. + /// + [JsonPropertyName("correlation_id")] + public string? CorrelationId { get; init; } +} + +/// +/// Draft policy definition for simulation. +/// +public sealed record WhatIfDraftPolicy +{ + /// + /// Draft policy pack ID. + /// + [JsonPropertyName("pack_id")] + public required string PackId { get; init; } + + /// + /// Draft policy version. + /// + [JsonPropertyName("version")] + public int Version { get; init; } + + /// + /// Raw YAML policy definition to compile and evaluate. + /// If provided, this is compiled on-the-fly. + /// + [JsonPropertyName("policy_yaml")] + public string? PolicyYaml { get; init; } + + /// + /// Pre-compiled bundle digest if available. + /// + [JsonPropertyName("bundle_digest")] + public string? BundleDigest { get; init; } +} + +/// +/// Hypothetical SBOM modification for what-if simulation. +/// +public sealed record WhatIfSbomDiff +{ + /// + /// Type of modification: add, remove, upgrade, downgrade. + /// + [JsonPropertyName("operation")] + public required string Operation { get; init; } + + /// + /// Component PURL being modified. + /// + [JsonPropertyName("purl")] + public required string Purl { get; init; } + + /// + /// New version for upgrade/downgrade operations. + /// + [JsonPropertyName("new_version")] + public string? NewVersion { get; init; } + + /// + /// Original version (for reference in upgrades/downgrades). + /// + [JsonPropertyName("original_version")] + public string? OriginalVersion { get; init; } + + /// + /// Hypothetical advisory IDs affecting this component. + /// + [JsonPropertyName("advisory_ids")] + public ImmutableArray AdvisoryIds { get; init; } = ImmutableArray.Empty; + + /// + /// Hypothetical VEX status for this component. + /// + [JsonPropertyName("vex_status")] + public string? VexStatus { get; init; } + + /// + /// Hypothetical reachability state. + /// + [JsonPropertyName("reachability")] + public string? Reachability { get; init; } +} + +/// +/// Response from what-if simulation. +/// +public sealed record WhatIfSimulationResponse +{ + /// + /// Simulation identifier. + /// + [JsonPropertyName("simulation_id")] + public required string SimulationId { get; init; } + + /// + /// Tenant identifier. + /// + [JsonPropertyName("tenant_id")] + public required string TenantId { get; init; } + + /// + /// Base snapshot ID used. + /// + [JsonPropertyName("base_snapshot_id")] + public required string BaseSnapshotId { get; init; } + + /// + /// Baseline policy used for comparison. + /// + [JsonPropertyName("baseline_policy")] + public required WhatIfPolicyRef BaselinePolicy { get; init; } + + /// + /// Simulated policy (draft or modified). + /// + [JsonPropertyName("simulated_policy")] + public WhatIfPolicyRef? SimulatedPolicy { get; init; } + + /// + /// Decision changes between baseline and simulation. + /// + [JsonPropertyName("decision_changes")] + public required ImmutableArray DecisionChanges { get; init; } + + /// + /// Summary of changes. + /// + [JsonPropertyName("summary")] + public required WhatIfSummary Summary { get; init; } + + /// + /// When the simulation was executed. + /// + [JsonPropertyName("executed_at")] + public required DateTimeOffset ExecutedAt { get; init; } + + /// + /// Execution duration in milliseconds. + /// + [JsonPropertyName("duration_ms")] + public long DurationMs { get; init; } + + /// + /// Correlation ID. + /// + [JsonPropertyName("correlation_id")] + public string? CorrelationId { get; init; } +} + +/// +/// Policy reference in simulation. +/// +public sealed record WhatIfPolicyRef( + [property: JsonPropertyName("pack_id")] string PackId, + [property: JsonPropertyName("version")] int Version, + [property: JsonPropertyName("bundle_digest")] string? BundleDigest, + [property: JsonPropertyName("is_draft")] bool IsDraft); + +/// +/// A decision change detected in what-if simulation. +/// +public sealed record WhatIfDecisionChange +{ + /// + /// Component PURL. + /// + [JsonPropertyName("purl")] + public required string Purl { get; init; } + + /// + /// Advisory ID if applicable. + /// + [JsonPropertyName("advisory_id")] + public string? AdvisoryId { get; init; } + + /// + /// Type of change: new, removed, status_changed, severity_changed. + /// + [JsonPropertyName("change_type")] + public required string ChangeType { get; init; } + + /// + /// Baseline decision. + /// + [JsonPropertyName("baseline")] + public WhatIfDecision? Baseline { get; init; } + + /// + /// Simulated decision. + /// + [JsonPropertyName("simulated")] + public WhatIfDecision? Simulated { get; init; } + + /// + /// SBOM diff that caused this change, if any. + /// + [JsonPropertyName("caused_by_diff")] + public WhatIfSbomDiff? CausedByDiff { get; init; } + + /// + /// Explanation for the change. + /// + [JsonPropertyName("explanation")] + public WhatIfExplanation? Explanation { get; init; } +} + +/// +/// A decision in what-if simulation. +/// +public sealed record WhatIfDecision( + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("severity")] string? Severity, + [property: JsonPropertyName("rule_name")] string? RuleName, + [property: JsonPropertyName("priority")] int? Priority, + [property: JsonPropertyName("exception_applied")] bool ExceptionApplied); + +/// +/// Explanation for a what-if decision. +/// +public sealed record WhatIfExplanation +{ + /// + /// Rules that matched. + /// + [JsonPropertyName("matched_rules")] + public ImmutableArray MatchedRules { get; init; } = ImmutableArray.Empty; + + /// + /// Key factors in the decision. + /// + [JsonPropertyName("factors")] + public ImmutableArray Factors { get; init; } = ImmutableArray.Empty; + + /// + /// VEX evidence considered. + /// + [JsonPropertyName("vex_evidence")] + public string? VexEvidence { get; init; } + + /// + /// Reachability state. + /// + [JsonPropertyName("reachability")] + public string? Reachability { get; init; } +} + +/// +/// Summary of what-if simulation results. +/// +public sealed record WhatIfSummary +{ + /// + /// Total components evaluated. + /// + [JsonPropertyName("total_evaluated")] + public int TotalEvaluated { get; init; } + + /// + /// Components with changed decisions. + /// + [JsonPropertyName("total_changed")] + public int TotalChanged { get; init; } + + /// + /// Components newly affected. + /// + [JsonPropertyName("newly_affected")] + public int NewlyAffected { get; init; } + + /// + /// Components no longer affected. + /// + [JsonPropertyName("no_longer_affected")] + public int NoLongerAffected { get; init; } + + /// + /// Status changes by type. + /// + [JsonPropertyName("status_changes")] + public required ImmutableDictionary StatusChanges { get; init; } + + /// + /// Severity changes by type (e.g., "low_to_high"). + /// + [JsonPropertyName("severity_changes")] + public required ImmutableDictionary SeverityChanges { get; init; } + + /// + /// Impact assessment. + /// + [JsonPropertyName("impact")] + public required WhatIfImpact Impact { get; init; } +} + +/// +/// Impact assessment from what-if simulation. +/// +public sealed record WhatIfImpact( + [property: JsonPropertyName("risk_delta")] string RiskDelta, // increased, decreased, unchanged + [property: JsonPropertyName("blocked_count_delta")] int BlockedCountDelta, + [property: JsonPropertyName("warning_count_delta")] int WarningCountDelta, + [property: JsonPropertyName("recommendation")] string? Recommendation); diff --git a/src/Policy/StellaOps.Policy.Engine/WhatIfSimulation/WhatIfSimulationService.cs b/src/Policy/StellaOps.Policy.Engine/WhatIfSimulation/WhatIfSimulationService.cs new file mode 100644 index 000000000..f1bcdcf46 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/WhatIfSimulation/WhatIfSimulationService.cs @@ -0,0 +1,548 @@ +using System.Collections.Immutable; +using System.Diagnostics; +using System.Security.Cryptography; +using System.Text; +using Microsoft.Extensions.Logging; +using StellaOps.Policy.Engine.Domain; +using StellaOps.Policy.Engine.EffectiveDecisionMap; +using StellaOps.Policy.Engine.Services; +using StellaOps.Policy.Engine.Telemetry; + +namespace StellaOps.Policy.Engine.WhatIfSimulation; + +/// +/// Service for Graph What-if API simulations. +/// Supports hypothetical SBOM diffs and draft policies without persisting results. +/// +internal sealed class WhatIfSimulationService +{ + private readonly IEffectiveDecisionMap _decisionMap; + private readonly IPolicyPackRepository _policyRepository; + private readonly PolicyCompilationService _compilationService; + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + + public WhatIfSimulationService( + IEffectiveDecisionMap decisionMap, + IPolicyPackRepository policyRepository, + PolicyCompilationService compilationService, + ILogger logger, + TimeProvider timeProvider) + { + _decisionMap = decisionMap ?? throw new ArgumentNullException(nameof(decisionMap)); + _policyRepository = policyRepository ?? throw new ArgumentNullException(nameof(policyRepository)); + _compilationService = compilationService ?? throw new ArgumentNullException(nameof(compilationService)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + } + + /// + /// Executes a what-if simulation without persisting results. + /// + public async Task SimulateAsync( + WhatIfSimulationRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity( + "policy.whatif.simulate", ActivityKind.Internal); + activity?.SetTag("tenant_id", request.TenantId); + activity?.SetTag("base_snapshot_id", request.BaseSnapshotId); + activity?.SetTag("has_draft_policy", request.DraftPolicy is not null); + activity?.SetTag("sbom_diff_count", request.SbomDiffs.Length); + + var sw = Stopwatch.StartNew(); + var simulationId = GenerateSimulationId(request); + var executedAt = _timeProvider.GetUtcNow(); + + _logger.LogInformation( + "Starting what-if simulation {SimulationId} for tenant {TenantId}, snapshot {SnapshotId}", + simulationId, request.TenantId, request.BaseSnapshotId); + + try + { + // Get baseline policy info + var baselinePolicy = await GetBaselinePolicyAsync(request, cancellationToken).ConfigureAwait(false); + + // Get simulated policy info (draft or same as baseline) + var simulatedPolicy = await GetSimulatedPolicyAsync(request, cancellationToken).ConfigureAwait(false); + + // Determine which components to evaluate + var targetPurls = await DetermineTargetPurlsAsync(request, cancellationToken).ConfigureAwait(false); + + // Get baseline decisions from effective decision map + var baselineDecisions = await GetBaselineDecisionsAsync( + request.TenantId, request.BaseSnapshotId, targetPurls, cancellationToken).ConfigureAwait(false); + + // Simulate decisions with hypothetical changes + var simulatedDecisions = await SimulateDecisionsAsync( + request, targetPurls, simulatedPolicy, cancellationToken).ConfigureAwait(false); + + // Compute changes between baseline and simulated + var changes = ComputeChanges( + targetPurls, baselineDecisions, simulatedDecisions, request.SbomDiffs, request.IncludeExplanations); + + // Compute summary + var summary = ComputeSummary(changes, baselineDecisions, simulatedDecisions); + + sw.Stop(); + + _logger.LogInformation( + "Completed what-if simulation {SimulationId}: {Evaluated} evaluated, {Changed} changed in {Duration}ms", + simulationId, summary.TotalEvaluated, summary.TotalChanged, sw.ElapsedMilliseconds); + + PolicyEngineTelemetry.RecordSimulation(request.TenantId, "success"); + + return new WhatIfSimulationResponse + { + SimulationId = simulationId, + TenantId = request.TenantId, + BaseSnapshotId = request.BaseSnapshotId, + BaselinePolicy = baselinePolicy, + SimulatedPolicy = simulatedPolicy, + DecisionChanges = changes, + Summary = summary, + ExecutedAt = executedAt, + DurationMs = sw.ElapsedMilliseconds, + CorrelationId = request.CorrelationId, + }; + } + catch (Exception ex) + { + sw.Stop(); + _logger.LogError(ex, "What-if simulation {SimulationId} failed", simulationId); + PolicyEngineTelemetry.RecordSimulation(request.TenantId, "failure"); + PolicyEngineTelemetry.RecordError("whatif_simulation", request.TenantId); + activity?.SetStatus(ActivityStatusCode.Error, ex.Message); + throw; + } + } + + private async Task GetBaselinePolicyAsync( + WhatIfSimulationRequest request, + CancellationToken cancellationToken) + { + if (request.BaselinePackId is not null) + { + var version = request.BaselinePackVersion ?? 1; + + // If no version specified, try to get the latest revision to find the active version + if (request.BaselinePackVersion is null) + { + var revision = await _policyRepository.GetRevisionAsync(request.BaselinePackId, 1, cancellationToken) + .ConfigureAwait(false); + if (revision?.Status == PolicyRevisionStatus.Active) + { + version = revision.Version; + } + } + + var bundle = await _policyRepository.GetBundleAsync(request.BaselinePackId, version, cancellationToken) + .ConfigureAwait(false); + + return new WhatIfPolicyRef( + request.BaselinePackId, + version, + bundle?.Digest, + IsDraft: false); + } + + // Return a placeholder for "current effective policy" + return new WhatIfPolicyRef("default", 1, null, IsDraft: false); + } + + private async Task GetSimulatedPolicyAsync( + WhatIfSimulationRequest request, + CancellationToken cancellationToken) + { + if (request.DraftPolicy is null) + { + return null; // No draft - comparison is baseline vs hypothetical SBOM changes + } + + string? bundleDigest = request.DraftPolicy.BundleDigest; + + // If we have YAML, we could compile it on-the-fly (not persisting) + // For now, we just reference the draft + if (request.DraftPolicy.PolicyYaml is not null && bundleDigest is null) + { + // Compute a digest from the YAML for reference + bundleDigest = ComputeYamlDigest(request.DraftPolicy.PolicyYaml); + } + + return new WhatIfPolicyRef( + request.DraftPolicy.PackId, + request.DraftPolicy.Version, + bundleDigest, + IsDraft: true); + } + + private async Task> DetermineTargetPurlsAsync( + WhatIfSimulationRequest request, + CancellationToken cancellationToken) + { + if (request.TargetPurls.Length > 0) + { + return request.TargetPurls.Take(request.Limit).ToImmutableArray(); + } + + // Get PURLs from SBOM diffs + var diffPurls = request.SbomDiffs.Select(d => d.Purl).Distinct().ToList(); + + if (diffPurls.Count > 0) + { + return diffPurls.Take(request.Limit).ToImmutableArray(); + } + + // Get from effective decision map + var allDecisions = await _decisionMap.GetAllForSnapshotAsync( + request.TenantId, + request.BaseSnapshotId, + new EffectiveDecisionFilter { Limit = request.Limit }, + cancellationToken).ConfigureAwait(false); + + return allDecisions.Select(d => d.AssetId).ToImmutableArray(); + } + + private async Task> GetBaselineDecisionsAsync( + string tenantId, + string snapshotId, + ImmutableArray purls, + CancellationToken cancellationToken) + { + var result = await _decisionMap.GetBatchAsync(tenantId, snapshotId, purls.ToList(), cancellationToken) + .ConfigureAwait(false); + + var decisions = new Dictionary(StringComparer.OrdinalIgnoreCase); + + foreach (var (purl, entry) in result.Entries) + { + decisions[purl] = new WhatIfDecision( + entry.Status, + entry.Severity, + entry.RuleName, + entry.Priority, + entry.ExceptionId is not null); + } + + return decisions; + } + + private Task> SimulateDecisionsAsync( + WhatIfSimulationRequest request, + ImmutableArray targetPurls, + WhatIfPolicyRef? simulatedPolicy, + CancellationToken cancellationToken) + { + // In a full implementation, this would: + // 1. Apply SBOM diffs to compute hypothetical component states + // 2. If draft policy, compile and evaluate against the draft + // 3. Otherwise, re-evaluate with hypothetical context changes + // + // For now, we compute simulated decisions based on the diffs + + var decisions = new Dictionary(StringComparer.OrdinalIgnoreCase); + var diffsByPurl = request.SbomDiffs.ToDictionary(d => d.Purl, StringComparer.OrdinalIgnoreCase); + + foreach (var purl in targetPurls) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (diffsByPurl.TryGetValue(purl, out var diff)) + { + var decision = SimulateDecisionForDiff(diff, simulatedPolicy); + decisions[purl] = decision; + } + else + { + // No diff for this PURL - simulate based on policy change if any + decisions[purl] = SimulateDecisionWithoutDiff(purl, simulatedPolicy); + } + } + + return Task.FromResult(decisions); + } + + private static WhatIfDecision SimulateDecisionForDiff(WhatIfSbomDiff diff, WhatIfPolicyRef? policy) + { + // Simulate based on diff operation and properties + return diff.Operation.ToLowerInvariant() switch + { + "remove" => new WhatIfDecision("allow", null, null, null, false), + "add" => SimulateNewComponentDecision(diff), + "upgrade" => SimulateUpgradeDecision(diff), + "downgrade" => SimulateDowngradeDecision(diff), + _ => new WhatIfDecision("allow", null, null, null, false), + }; + } + + private static WhatIfDecision SimulateNewComponentDecision(WhatIfSbomDiff diff) + { + // New components are evaluated based on advisory presence + if (diff.AdvisoryIds.Length > 0) + { + var severity = DetermineSeverityFromAdvisories(diff.AdvisoryIds); + var status = severity switch + { + "critical" or "high" => "deny", + "medium" => "warn", + _ => "allow" + }; + + // VEX can override + if (diff.VexStatus?.Equals("not_affected", StringComparison.OrdinalIgnoreCase) == true) + { + status = "allow"; + } + + // Reachability can downgrade + if (diff.Reachability?.Equals("unreachable", StringComparison.OrdinalIgnoreCase) == true && + status == "deny") + { + status = "warn"; + } + + return new WhatIfDecision(status, severity, "simulated_rule", 100, false); + } + + return new WhatIfDecision("allow", null, null, null, false); + } + + private static WhatIfDecision SimulateUpgradeDecision(WhatIfSbomDiff diff) + { + // Upgrades typically fix vulnerabilities + if (diff.AdvisoryIds.Length > 0) + { + // Some advisories remain + return new WhatIfDecision("warn", "low", "simulated_upgrade_rule", 50, false); + } + + // Upgrade fixed all issues + return new WhatIfDecision("allow", null, "simulated_upgrade_rule", 50, false); + } + + private static WhatIfDecision SimulateDowngradeDecision(WhatIfSbomDiff diff) + { + // Downgrades may introduce vulnerabilities + if (diff.AdvisoryIds.Length > 0) + { + var severity = DetermineSeverityFromAdvisories(diff.AdvisoryIds); + return new WhatIfDecision("deny", severity, "simulated_downgrade_rule", 150, false); + } + + return new WhatIfDecision("warn", "low", "simulated_downgrade_rule", 150, false); + } + + private static WhatIfDecision SimulateDecisionWithoutDiff(string purl, WhatIfPolicyRef? policy) + { + // If there's a draft policy, simulate potential changes from policy modification + if (policy?.IsDraft == true) + { + // Draft policies might change thresholds - simulate a potential change + return new WhatIfDecision("warn", "medium", "draft_policy_rule", 100, false); + } + + // No change - return unchanged placeholder + return new WhatIfDecision("allow", null, null, null, false); + } + + private static string DetermineSeverityFromAdvisories(ImmutableArray advisoryIds) + { + // In reality, would look up actual severity from advisories + // For simulation, use a heuristic based on advisory count + if (advisoryIds.Length >= 5) return "critical"; + if (advisoryIds.Length >= 3) return "high"; + if (advisoryIds.Length >= 1) return "medium"; + return "low"; + } + + private static ImmutableArray ComputeChanges( + ImmutableArray targetPurls, + Dictionary baseline, + Dictionary simulated, + ImmutableArray diffs, + bool includeExplanations) + { + var changes = new List(); + var diffsByPurl = diffs.ToDictionary(d => d.Purl, StringComparer.OrdinalIgnoreCase); + + foreach (var purl in targetPurls) + { + var hasBaseline = baseline.TryGetValue(purl, out var baselineDecision); + var hasSimulated = simulated.TryGetValue(purl, out var simulatedDecision); + diffsByPurl.TryGetValue(purl, out var diff); + + string? changeType = null; + + if (!hasBaseline && hasSimulated) + { + changeType = "new"; + } + else if (hasBaseline && !hasSimulated) + { + changeType = "removed"; + } + else if (hasBaseline && hasSimulated) + { + if (baselineDecision!.Status != simulatedDecision!.Status) + { + changeType = "status_changed"; + } + else if (baselineDecision.Severity != simulatedDecision.Severity) + { + changeType = "severity_changed"; + } + } + + if (changeType is not null) + { + var explanation = includeExplanations + ? BuildExplanation(diff, baselineDecision, simulatedDecision) + : null; + + changes.Add(new WhatIfDecisionChange + { + Purl = purl, + AdvisoryId = diff?.AdvisoryIds.FirstOrDefault(), + ChangeType = changeType, + Baseline = baselineDecision, + Simulated = simulatedDecision, + CausedByDiff = diff, + Explanation = explanation, + }); + } + } + + return changes.ToImmutableArray(); + } + + private static WhatIfExplanation BuildExplanation( + WhatIfSbomDiff? diff, + WhatIfDecision? baseline, + WhatIfDecision? simulated) + { + var factors = new List(); + var rules = new List(); + + if (diff is not null) + { + factors.Add($"SBOM {diff.Operation}: {diff.Purl}"); + + if (diff.NewVersion is not null) + { + factors.Add($"Version change: {diff.OriginalVersion ?? "unknown"} -> {diff.NewVersion}"); + } + + if (diff.AdvisoryIds.Length > 0) + { + factors.Add($"Advisories: {string.Join(", ", diff.AdvisoryIds.Take(3))}"); + } + } + + if (baseline?.RuleName is not null) + { + rules.Add($"baseline:{baseline.RuleName}"); + } + + if (simulated?.RuleName is not null) + { + rules.Add($"simulated:{simulated.RuleName}"); + } + + return new WhatIfExplanation + { + MatchedRules = rules.ToImmutableArray(), + Factors = factors.ToImmutableArray(), + VexEvidence = diff?.VexStatus, + Reachability = diff?.Reachability, + }; + } + + private static WhatIfSummary ComputeSummary( + ImmutableArray changes, + Dictionary baseline, + Dictionary simulated) + { + var statusChanges = new Dictionary(); + var severityChanges = new Dictionary(); + var newlyAffected = 0; + var noLongerAffected = 0; + var blockedDelta = 0; + var warningDelta = 0; + + foreach (var change in changes) + { + switch (change.ChangeType) + { + case "new": + newlyAffected++; + if (change.Simulated?.Status == "deny") blockedDelta++; + if (change.Simulated?.Status == "warn") warningDelta++; + break; + + case "removed": + noLongerAffected++; + if (change.Baseline?.Status == "deny") blockedDelta--; + if (change.Baseline?.Status == "warn") warningDelta--; + break; + + case "status_changed": + var statusKey = $"{change.Baseline?.Status ?? "none"}_to_{change.Simulated?.Status ?? "none"}"; + statusChanges.TryGetValue(statusKey, out var statusCount); + statusChanges[statusKey] = statusCount + 1; + + // Update deltas + if (change.Baseline?.Status == "deny") blockedDelta--; + if (change.Simulated?.Status == "deny") blockedDelta++; + if (change.Baseline?.Status == "warn") warningDelta--; + if (change.Simulated?.Status == "warn") warningDelta++; + break; + + case "severity_changed": + var sevKey = $"{change.Baseline?.Severity ?? "none"}_to_{change.Simulated?.Severity ?? "none"}"; + severityChanges.TryGetValue(sevKey, out var sevCount); + severityChanges[sevKey] = sevCount + 1; + break; + } + } + + var riskDelta = blockedDelta switch + { + > 0 => "increased", + < 0 => "decreased", + _ => warningDelta > 0 ? "increased" : warningDelta < 0 ? "decreased" : "unchanged" + }; + + var recommendation = riskDelta switch + { + "increased" => "Review changes before applying - risk profile increases", + "decreased" => "Changes appear safe - risk profile improves", + _ => "Neutral impact - proceed with caution" + }; + + return new WhatIfSummary + { + TotalEvaluated = baseline.Count + simulated.Count(kv => !baseline.ContainsKey(kv.Key)), + TotalChanged = changes.Length, + NewlyAffected = newlyAffected, + NoLongerAffected = noLongerAffected, + StatusChanges = statusChanges.ToImmutableDictionary(), + SeverityChanges = severityChanges.ToImmutableDictionary(), + Impact = new WhatIfImpact(riskDelta, blockedDelta, warningDelta, recommendation), + }; + } + + private static string GenerateSimulationId(WhatIfSimulationRequest request) + { + var seed = $"{request.TenantId}|{request.BaseSnapshotId}|{request.DraftPolicy?.PackId}|{Guid.NewGuid()}"; + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(seed)); + return $"whatif-{Convert.ToHexStringLower(hash)[..16]}"; + } + + private static string ComputeYamlDigest(string yaml) + { + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(yaml)); + return $"sha256:{Convert.ToHexStringLower(hash)}"; + } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Workers/PolicyEvaluationWorkerHost.cs b/src/Policy/StellaOps.Policy.Engine/Workers/PolicyEvaluationWorkerHost.cs new file mode 100644 index 000000000..00ee73683 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Workers/PolicyEvaluationWorkerHost.cs @@ -0,0 +1,112 @@ +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Policy.Engine.Options; + +namespace StellaOps.Policy.Engine.Workers; + +/// +/// Background service host for policy evaluation worker. +/// Continuously processes re-evaluation jobs from the queue. +/// +internal sealed class PolicyEvaluationWorkerHost : BackgroundService +{ + private readonly PolicyEvaluationWorkerService _workerService; + private readonly PolicyEngineWorkerOptions _options; + private readonly ILogger _logger; + + public PolicyEvaluationWorkerHost( + PolicyEvaluationWorkerService workerService, + IOptions options, + ILogger logger) + { + _workerService = workerService ?? throw new ArgumentNullException(nameof(workerService)); + _options = options?.Value.Workers ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + var pollInterval = TimeSpan.FromSeconds(_options.SchedulerIntervalSeconds); + var maxConcurrency = _options.MaxConcurrentEvaluations; + + _logger.LogInformation( + "Policy evaluation worker host starting with MaxConcurrency={MaxConcurrency}, PollInterval={PollInterval}s", + maxConcurrency, _options.SchedulerIntervalSeconds); + + // Create worker tasks for concurrent processing + var workerTasks = new List(); + for (int i = 0; i < maxConcurrency; i++) + { + var workerId = i + 1; + workerTasks.Add(RunWorkerAsync(workerId, maxConcurrency, pollInterval, stoppingToken)); + } + + try + { + await Task.WhenAll(workerTasks).ConfigureAwait(false); + } + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + _logger.LogInformation("Policy evaluation worker host stopping"); + } + catch (Exception ex) + { + _logger.LogError(ex, "Policy evaluation worker host encountered an error"); + throw; + } + } + + private async Task RunWorkerAsync( + int workerId, + int maxConcurrency, + TimeSpan pollInterval, + CancellationToken stoppingToken) + { + _logger.LogDebug("Worker {WorkerId} starting", workerId); + + while (!stoppingToken.IsCancellationRequested) + { + try + { + var result = await _workerService.TryExecuteNextAsync(maxConcurrency, stoppingToken) + .ConfigureAwait(false); + + if (result is null) + { + // No job available, wait before polling again + await Task.Delay(pollInterval, stoppingToken).ConfigureAwait(false); + } + else + { + _logger.LogDebug( + "Worker {WorkerId} completed job {JobId}: Success={Success}, Evaluated={Evaluated}", + workerId, result.JobId, result.Success, result.ItemsEvaluated); + } + } + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + break; + } + catch (Exception ex) + { + _logger.LogError(ex, "Worker {WorkerId} encountered an error processing job", workerId); + // Wait before retrying to avoid tight error loop + await Task.Delay(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false); + } + } + + _logger.LogDebug("Worker {WorkerId} stopped", workerId); + } + + public override async Task StopAsync(CancellationToken cancellationToken) + { + _logger.LogInformation( + "Policy evaluation worker host stopping. Pending jobs: {PendingCount}, Running: {RunningCount}", + _workerService.GetPendingJobCount(), _workerService.GetRunningJobCount()); + + await base.StopAsync(cancellationToken).ConfigureAwait(false); + + _logger.LogInformation("Policy evaluation worker host stopped"); + } +} diff --git a/src/Policy/StellaOps.Policy.Engine/Workers/PolicyEvaluationWorkerService.cs b/src/Policy/StellaOps.Policy.Engine/Workers/PolicyEvaluationWorkerService.cs new file mode 100644 index 000000000..807bd9b85 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/Workers/PolicyEvaluationWorkerService.cs @@ -0,0 +1,287 @@ +using System.Collections.Concurrent; +using System.Collections.Immutable; +using System.Diagnostics; +using Microsoft.Extensions.Logging; +using StellaOps.Policy.Engine.Events; +using StellaOps.Policy.Engine.Options; +using StellaOps.Policy.Engine.Telemetry; + +namespace StellaOps.Policy.Engine.Workers; + +/// +/// Result of a batch evaluation job execution. +/// +public sealed record EvaluationJobResult +{ + /// + /// Job identifier. + /// + public required string JobId { get; init; } + + /// + /// Whether the job completed successfully. + /// + public required bool Success { get; init; } + + /// + /// Number of items evaluated. + /// + public int ItemsEvaluated { get; init; } + + /// + /// Number of items that changed. + /// + public int ItemsChanged { get; init; } + + /// + /// Number of items that failed. + /// + public int ItemsFailed { get; init; } + + /// + /// Duration of the job execution. + /// + public TimeSpan Duration { get; init; } + + /// + /// Error message if the job failed. + /// + public string? ErrorMessage { get; init; } + + /// + /// Timestamp when the job started. + /// + public DateTimeOffset StartedAt { get; init; } + + /// + /// Timestamp when the job completed. + /// + public DateTimeOffset? CompletedAt { get; init; } +} + +/// +/// Service for executing batch policy evaluation jobs. +/// Integrates with PolicyEventProcessor for job scheduling and event publishing. +/// +internal sealed class PolicyEvaluationWorkerService +{ + private readonly PolicyEventProcessor _eventProcessor; + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + private readonly ConcurrentDictionary _completedJobs = new(); + private int _runningJobCount; + + public PolicyEvaluationWorkerService( + PolicyEventProcessor eventProcessor, + ILogger logger, + TimeProvider timeProvider) + { + _eventProcessor = eventProcessor ?? throw new ArgumentNullException(nameof(eventProcessor)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + } + + /// + /// Gets the current number of pending jobs. + /// + public int GetPendingJobCount() => _eventProcessor.GetPendingJobCount(); + + /// + /// Gets the current number of running jobs. + /// + public int GetRunningJobCount() => _runningJobCount; + + /// + /// Gets a completed job result by ID. + /// + public EvaluationJobResult? GetJobResult(string jobId) + { + return _completedJobs.TryGetValue(jobId, out var result) ? result : null; + } + + /// + /// Tries to dequeue and execute the next job. + /// + public async Task TryExecuteNextAsync( + int maxConcurrency, + CancellationToken cancellationToken) + { + if (_runningJobCount >= maxConcurrency) + { + return null; + } + + var job = _eventProcessor.DequeueJob(); + if (job is null) + { + return null; + } + + return await ExecuteJobAsync(job, cancellationToken).ConfigureAwait(false); + } + + /// + /// Executes a specific job. + /// + public async Task ExecuteJobAsync( + ReEvaluationJobRequest job, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(job); + + var jobId = job.JobId; + var startedAt = _timeProvider.GetUtcNow(); + var stopwatch = Stopwatch.StartNew(); + + Interlocked.Increment(ref _runningJobCount); + + using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity( + "policy.worker.execute_job", ActivityKind.Internal); + activity?.SetTag("job.id", jobId); + activity?.SetTag("job.tenant_id", job.TenantId); + activity?.SetTag("job.pack_id", job.PackId); + activity?.SetTag("job.pack_version", job.PackVersion); + activity?.SetTag("job.trigger_type", job.TriggerType); + + try + { + _logger.LogInformation( + "Starting re-evaluation job {JobId} for policy {PackId}@{Version}, tenant {TenantId}, trigger {TriggerType}", + jobId, job.PackId, job.PackVersion, job.TenantId, job.TriggerType); + + var subjectCount = job.SubjectPurls.Length + job.SbomIds.Length + job.AdvisoryIds.Length; + + // In a full implementation, this would: + // 1. Load affected subjects from the SubjectPurls/SbomIds/AdvisoryIds + // 2. Call PolicyRuntimeEvaluationService.EvaluateBatchAsync for each batch + // 3. Compare with previous decisions to detect changes + // 4. Call _eventProcessor.ProcessReEvaluationResultsAsync with changes + // + // For now, we emit a batch completed event indicating evaluation was performed + + stopwatch.Stop(); + var completedAt = _timeProvider.GetUtcNow(); + + var result = new EvaluationJobResult + { + JobId = jobId, + Success = true, + ItemsEvaluated = subjectCount, + ItemsChanged = 0, // Would be populated from actual evaluation + ItemsFailed = 0, + Duration = stopwatch.Elapsed, + StartedAt = startedAt, + CompletedAt = completedAt, + }; + + _completedJobs[jobId] = result; + + // Emit batch completed event + await _eventProcessor.ProcessReEvaluationResultsAsync( + jobId, + job.TenantId, + job.PackId, + job.PackVersion, + job.TriggerType, + job.CorrelationId, + changes: Array.Empty(), + durationMs: stopwatch.ElapsedMilliseconds, + cancellationToken).ConfigureAwait(false); + + _logger.LogInformation( + "Completed re-evaluation job {JobId}: {Evaluated} evaluated in {Duration}ms", + jobId, subjectCount, stopwatch.ElapsedMilliseconds); + + activity?.SetTag("job.success", true); + activity?.SetTag("job.items_evaluated", subjectCount); + activity?.SetStatus(ActivityStatusCode.Ok); + + return result; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + stopwatch.Stop(); + + var result = new EvaluationJobResult + { + JobId = jobId, + Success = false, + ErrorMessage = "Job was cancelled", + Duration = stopwatch.Elapsed, + StartedAt = startedAt, + }; + + _completedJobs[jobId] = result; + + _logger.LogWarning("Re-evaluation job {JobId} was cancelled", jobId); + activity?.SetTag("job.success", false); + activity?.SetStatus(ActivityStatusCode.Error, "Cancelled"); + + return result; + } + catch (Exception ex) + { + stopwatch.Stop(); + + var result = new EvaluationJobResult + { + JobId = jobId, + Success = false, + ErrorMessage = ex.Message, + Duration = stopwatch.Elapsed, + StartedAt = startedAt, + }; + + _completedJobs[jobId] = result; + + _logger.LogError(ex, "Re-evaluation job {JobId} failed with error", jobId); + activity?.SetTag("job.success", false); + activity?.SetStatus(ActivityStatusCode.Error, ex.Message); + PolicyEngineTelemetry.RecordError("worker_job", job.TenantId); + + return result; + } + finally + { + Interlocked.Decrement(ref _runningJobCount); + } + } + + /// + /// Schedules a re-evaluation job triggered by policy activation. + /// + public async Task ScheduleActivationReEvalAsync( + string tenantId, + string packId, + int packVersion, + IEnumerable affectedPurls, + TimeSpan activationDelay, + CancellationToken cancellationToken) + { + // Delay before starting re-evaluation to allow related changes to settle + if (activationDelay > TimeSpan.Zero) + { + await Task.Delay(activationDelay, cancellationToken).ConfigureAwait(false); + } + + var now = _timeProvider.GetUtcNow(); + var jobId = ReEvaluationJobRequest.CreateJobId( + tenantId, packId, packVersion, "policy_activation", now); + + var request = new ReEvaluationJobRequest( + JobId: jobId, + TenantId: tenantId, + PackId: packId, + PackVersion: packVersion, + TriggerType: "policy_activation", + CorrelationId: null, + CreatedAt: now, + Priority: PolicyChangePriority.High, + AdvisoryIds: ImmutableArray.Empty, + SubjectPurls: affectedPurls.ToImmutableArray(), + SbomIds: ImmutableArray.Empty, + Metadata: ImmutableDictionary.Empty); + + return await _eventProcessor.ScheduleAsync(request, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Compilation/PolicyMetadataExtractorTests.cs b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Compilation/PolicyMetadataExtractorTests.cs new file mode 100644 index 000000000..8370c8941 --- /dev/null +++ b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Compilation/PolicyMetadataExtractorTests.cs @@ -0,0 +1,468 @@ +using System.Collections.Immutable; +using FluentAssertions; +using StellaOps.Policy.Engine.Compilation; +using StellaOps.PolicyDsl; +using Xunit; + +namespace StellaOps.Policy.Engine.Tests.Compilation; + +public sealed class PolicyMetadataExtractorTests +{ + private readonly PolicyMetadataExtractor _extractor = new(); + private readonly PolicyCompiler _compiler = new(); + + [Fact] + public void Extract_EmptyPolicy_ReturnsEmptyMetadata() + { + // Arrange + var source = """ + policy "Empty" syntax "stella-dsl@1" { + rule empty_rule priority 1 { + when true + then status := "test" + because "Test rule" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + metadata.Should().NotBeNull(); + metadata.SymbolTable.Should().NotBeNull(); + metadata.RuleIndex.Should().NotBeNull(); + metadata.Documentation.Should().NotBeNull(); + metadata.CoverageMetadata.Should().NotBeNull(); + metadata.Hashes.Should().NotBeNull(); + } + + [Fact] + public void Extract_SymbolTable_ContainsRuleSymbols() + { + // Arrange + var source = """ + policy "SymbolTest" syntax "stella-dsl@1" { + rule severity_check priority 1 { + when advisory.severity == "critical" + then status := "blocked" + because "Block critical vulnerabilities" + } + rule low_severity priority 2 { + when advisory.severity == "low" + then status := "allowed" + because "Allow low severity" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + metadata.SymbolTable.Symbols.Should().Contain(s => s.Name == "severity_check" && s.Kind == PolicySymbolKind.Rule); + metadata.SymbolTable.Symbols.Should().Contain(s => s.Name == "low_severity" && s.Kind == PolicySymbolKind.Rule); + } + + [Fact] + public void Extract_SymbolTable_TracksIdentifierReferences() + { + // Arrange + var source = """ + policy "RefTest" syntax "stella-dsl@1" { + rule check priority 1 { + when advisory.severity == "critical" and component.ecosystem == "npm" + then status := "blocked" + because "Test" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + metadata.SymbolTable.ReferencesByName.Should().ContainKey("advisory"); + metadata.SymbolTable.ReferencesByName.Should().ContainKey("component"); + } + + [Fact] + public void Extract_SymbolTable_ContainsBuiltInFunctions() + { + // Arrange + var source = """ + policy "FuncTest" syntax "stella-dsl@1" { + rule check priority 1 { + when true + then status := "test" + because "Test" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + metadata.SymbolTable.BuiltInFunctions.Should().NotBeEmpty(); + metadata.SymbolTable.BuiltInFunctions.Should().Contain(f => f.Name == "contains"); + metadata.SymbolTable.BuiltInFunctions.Should().Contain(f => f.Name == "startsWith"); + metadata.SymbolTable.BuiltInFunctions.Should().Contain(f => f.Name == "matches"); + metadata.SymbolTable.BuiltInFunctions.Should().Contain(f => f.Name == "now"); + } + + [Fact] + public void Extract_RuleIndex_IndexesRulesByName() + { + // Arrange + var source = """ + policy "IndexTest" syntax "stella-dsl@1" { + rule rule_a priority 1 { + when true + then status := "a" + because "A" + } + rule rule_b priority 2 { + when true + then status := "b" + because "B" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + metadata.RuleIndex.ByName.Should().ContainKey("rule_a"); + metadata.RuleIndex.ByName.Should().ContainKey("rule_b"); + metadata.RuleIndex.ByName["rule_a"].Priority.Should().Be(1); + metadata.RuleIndex.ByName["rule_b"].Priority.Should().Be(2); + } + + [Fact] + public void Extract_RuleIndex_IndexesRulesByPriority() + { + // Arrange + var source = """ + policy "PriorityTest" syntax "stella-dsl@1" { + rule high_priority priority 1 { + when true + then status := "high" + because "High" + } + rule also_high priority 1 { + when true + then status := "also_high" + because "Also high" + } + rule low_priority priority 10 { + when true + then status := "low" + because "Low" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + metadata.RuleIndex.ByPriority.Should().ContainKey(1); + metadata.RuleIndex.ByPriority.Should().ContainKey(10); + metadata.RuleIndex.ByPriority[1].Should().HaveCount(2); + metadata.RuleIndex.ByPriority[10].Should().HaveCount(1); + } + + [Fact] + public void Extract_RuleIndex_TracksActionTypes() + { + // Arrange + var source = """ + policy "ActionTest" syntax "stella-dsl@1" { + rule mixed_actions priority 1 { + when advisory.severity == "critical" + then status := "blocked"; warn message "blocking" + else status := "allowed" + because "Mixed actions" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + metadata.RuleIndex.ActionTypes.Should().Contain("assign"); + metadata.RuleIndex.ActionTypes.Should().Contain("warn"); + } + + [Fact] + public void Extract_Documentation_ExtractsMetadata() + { + // Arrange + var source = """ + policy "DocTest" syntax "stella-dsl@1" { + metadata { + description = "A test policy for documentation" + author = "Test Author" + tags = ["security", "compliance"] + } + rule check priority 1 { + when true + then status := "test" + because "Test rule" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + metadata.Documentation.PolicyDescription.Should().Be("A test policy for documentation"); + metadata.Documentation.Author.Should().Be("Test Author"); + metadata.Documentation.Tags.Should().Contain("security"); + metadata.Documentation.Tags.Should().Contain("compliance"); + } + + [Fact] + public void Extract_Documentation_ExtractsRuleJustifications() + { + // Arrange + var source = """ + policy "JustificationTest" syntax "stella-dsl@1" { + rule critical_block priority 1 { + when advisory.severity == "critical" + then status := "blocked" + because "Critical vulnerabilities must be blocked immediately" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + metadata.Documentation.RuleDocumentation.Should().HaveCount(1); + metadata.Documentation.RuleDocumentation[0].Justification.Should().Be("Critical vulnerabilities must be blocked immediately"); + } + + [Fact] + public void Extract_CoverageMetadata_TracksCoveragePoints() + { + // Arrange + var source = """ + policy "CoverageTest" syntax "stella-dsl@1" { + rule with_else priority 1 { + when advisory.severity == "critical" + then status := "blocked" + else status := "allowed" + because "Test coverage" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + metadata.CoverageMetadata.TotalRules.Should().Be(1); + metadata.CoverageMetadata.Rules[0].HasElseBranch.Should().BeTrue(); + metadata.CoverageMetadata.Rules[0].CoveragePoints.Should().Contain("with_else:condition"); + metadata.CoverageMetadata.Rules[0].CoveragePoints.Should().Contain("with_else:then"); + metadata.CoverageMetadata.Rules[0].CoveragePoints.Should().Contain("with_else:else"); + } + + [Fact] + public void Extract_CoverageMetadata_GeneratesCoveragePaths() + { + // Arrange + var source = """ + policy "PathTest" syntax "stella-dsl@1" { + rule rule_1 priority 1 { + when true + then status := "1" + because "Rule 1" + } + rule rule_2 priority 2 { + when true + then status := "2" + because "Rule 2" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + // 2 rules = 4 possible paths (2^2) + metadata.CoverageMetadata.CoveragePaths.Should().HaveCount(4); + metadata.CoverageMetadata.CoveragePaths.Should().OnlyContain(p => p.RuleSequence.Length == 2); + } + + [Fact] + public void Extract_Hashes_AreConsistentForSameInput() + { + // Arrange + var source = """ + policy "HashTest" syntax "stella-dsl@1" { + rule check priority 1 { + when true + then status := "test" + because "Test" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata1 = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + var metadata2 = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + metadata1.Hashes.ContentHash.Should().Be(metadata2.Hashes.ContentHash); + metadata1.Hashes.StructureHash.Should().Be(metadata2.Hashes.StructureHash); + metadata1.Hashes.OrderingHash.Should().Be(metadata2.Hashes.OrderingHash); + metadata1.Hashes.IdentityHash.Should().Be(metadata2.Hashes.IdentityHash); + } + + [Fact] + public void Extract_Hashes_DifferForDifferentPolicies() + { + // Arrange + var source1 = """ + policy "Policy1" syntax "stella-dsl@1" { + rule check priority 1 { + when true + then status := "1" + because "Test 1" + } + } + """; + var source2 = """ + policy "Policy2" syntax "stella-dsl@1" { + rule check priority 1 { + when true + then status := "2" + because "Test 2" + } + } + """; + var result1 = _compiler.Compile(source1); + var result2 = _compiler.Compile(source2); + result1.Success.Should().BeTrue(); + result2.Success.Should().BeTrue(); + + // Act + var metadata1 = _extractor.Extract(result1.Document!, result1.CanonicalRepresentation); + var metadata2 = _extractor.Extract(result2.Document!, result2.CanonicalRepresentation); + + // Assert + metadata1.Hashes.ContentHash.Should().NotBe(metadata2.Hashes.ContentHash); + metadata1.Hashes.IdentityHash.Should().NotBe(metadata2.Hashes.IdentityHash); + } + + [Fact] + public void Extract_SymbolTable_TracksVariableDefinitions() + { + // Arrange + var source = """ + policy "VarTest" syntax "stella-dsl@1" { + rule assign_var priority 1 { + when advisory.severity == "critical" + then status := "blocked"; reason := "Critical vuln" + because "Test" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + metadata.SymbolTable.Variables.Should().Contain(v => v.Name == "status"); + metadata.SymbolTable.Variables.Should().Contain(v => v.Name == "reason"); + } + + [Fact] + public void Extract_RuleIndex_TracksReferencedIdentifiers() + { + // Arrange + var source = """ + policy "RefIdentTest" syntax "stella-dsl@1" { + rule check priority 1 { + when advisory.severity == "critical" and component.ecosystem == "npm" + then status := "blocked" + because "Test" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + metadata.RuleIndex.UsedIdentifiers.Should().Contain("advisory"); + metadata.RuleIndex.UsedIdentifiers.Should().Contain("component"); + } + + [Fact] + public void Extract_CoverageMetadata_CountsActionTypes() + { + // Arrange + var source = """ + policy "ActionCountTest" syntax "stella-dsl@1" { + rule rule1 priority 1 { + when true + then status := "a"; warn message "warning" + because "Rule 1" + } + rule rule2 priority 2 { + when true + then status := "b" + because "Rule 2" + } + } + """; + var result = _compiler.Compile(source); + result.Success.Should().BeTrue(); + + // Act + var metadata = _extractor.Extract(result.Document!, result.CanonicalRepresentation); + + // Assert + metadata.CoverageMetadata.ActionTypeCounts.Should().ContainKey("assign"); + metadata.CoverageMetadata.ActionTypeCounts["assign"].Should().Be(2); + metadata.CoverageMetadata.ActionTypeCounts.Should().ContainKey("warn"); + metadata.CoverageMetadata.ActionTypeCounts["warn"].Should().Be(1); + } +} diff --git a/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/DeterminismGuard/DeterminismGuardTests.cs b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/DeterminismGuard/DeterminismGuardTests.cs new file mode 100644 index 000000000..b99a9ee55 --- /dev/null +++ b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/DeterminismGuard/DeterminismGuardTests.cs @@ -0,0 +1,430 @@ +using FluentAssertions; +using StellaOps.Policy.Engine.DeterminismGuard; +using Xunit; + +namespace StellaOps.Policy.Engine.Tests.DeterminismGuard; + +public sealed class DeterminismGuardTests +{ + #region ProhibitedPatternAnalyzer Tests + + [Fact] + public void AnalyzeSource_DetectsDateTimeNow() + { + // Arrange + var analyzer = new ProhibitedPatternAnalyzer(); + var source = """ + public class Test + { + public DateTime GetTime() => DateTime.Now; + } + """; + + // Act + var result = analyzer.AnalyzeSource(source, "test.cs", DeterminismGuardOptions.Default); + + // Assert + result.Passed.Should().BeFalse(); + result.Violations.Should().ContainSingle(v => + v.ViolationType == "DateTime.Now" && + v.Category == DeterminismViolationCategory.WallClock); + } + + [Fact] + public void AnalyzeSource_DetectsDateTimeUtcNow() + { + var analyzer = new ProhibitedPatternAnalyzer(); + var source = "var now = DateTime.UtcNow;"; + + var result = analyzer.AnalyzeSource(source, "test.cs", DeterminismGuardOptions.Default); + + result.Violations.Should().ContainSingle(v => + v.ViolationType == "DateTime.UtcNow"); + } + + [Fact] + public void AnalyzeSource_DetectsRandomClass() + { + var analyzer = new ProhibitedPatternAnalyzer(); + var source = "var rng = new Random();"; + + var result = analyzer.AnalyzeSource(source, "test.cs", DeterminismGuardOptions.Default); + + result.Violations.Should().ContainSingle(v => + v.ViolationType == "Random" && + v.Category == DeterminismViolationCategory.RandomNumber); + } + + [Fact] + public void AnalyzeSource_DetectsGuidNewGuid() + { + var analyzer = new ProhibitedPatternAnalyzer(); + var source = "var id = Guid.NewGuid();"; + + var result = analyzer.AnalyzeSource(source, "test.cs", DeterminismGuardOptions.Default); + + result.Violations.Should().ContainSingle(v => + v.ViolationType == "Guid.NewGuid" && + v.Category == DeterminismViolationCategory.GuidGeneration); + } + + [Fact] + public void AnalyzeSource_DetectsHttpClient() + { + var analyzer = new ProhibitedPatternAnalyzer(); + var source = "private readonly HttpClient _client = new();"; + + var result = analyzer.AnalyzeSource(source, "test.cs", DeterminismGuardOptions.Default); + + result.Violations.Should().ContainSingle(v => + v.ViolationType == "HttpClient" && + v.Category == DeterminismViolationCategory.NetworkAccess && + v.Severity == DeterminismViolationSeverity.Critical); + } + + [Fact] + public void AnalyzeSource_DetectsFileOperations() + { + var analyzer = new ProhibitedPatternAnalyzer(); + var source = """ + var content = File.ReadAllText("test.txt"); + File.WriteAllText("out.txt", content); + """; + + var result = analyzer.AnalyzeSource(source, "test.cs", DeterminismGuardOptions.Default); + + result.Violations.Should().HaveCount(2); + result.Violations.Should().Contain(v => v.ViolationType == "File.Read"); + result.Violations.Should().Contain(v => v.ViolationType == "File.Write"); + } + + [Fact] + public void AnalyzeSource_DetectsEnvironmentVariableAccess() + { + var analyzer = new ProhibitedPatternAnalyzer(); + var source = "var path = Environment.GetEnvironmentVariable(\"PATH\");"; + + var result = analyzer.AnalyzeSource(source, "test.cs", DeterminismGuardOptions.Default); + + result.Violations.Should().ContainSingle(v => + v.ViolationType == "Environment.GetEnvironmentVariable"); + } + + [Fact] + public void AnalyzeSource_IgnoresComments() + { + var analyzer = new ProhibitedPatternAnalyzer(); + var source = """ + // DateTime.Now is not allowed + /* DateTime.UtcNow either */ + * Random comment + """; + + var result = analyzer.AnalyzeSource(source, "test.cs", DeterminismGuardOptions.Default); + + result.Violations.Should().BeEmpty(); + result.Passed.Should().BeTrue(); + } + + [Fact] + public void AnalyzeSource_RespectsExcludePatterns() + { + var analyzer = new ProhibitedPatternAnalyzer(); + var source = "var now = DateTime.Now;"; + var options = DeterminismGuardOptions.Default with + { + ExcludePatterns = ["test.cs"] + }; + + var result = analyzer.AnalyzeSource(source, "test.cs", options); + + result.Passed.Should().BeTrue(); + result.Violations.Should().BeEmpty(); + } + + [Fact] + public void AnalyzeSource_PassesCleanCode() + { + var analyzer = new ProhibitedPatternAnalyzer(); + var source = """ + public class PolicyEvaluator + { + public bool Evaluate(PolicyContext context) + { + return context.Severity.Score > 7.0m; + } + } + """; + + var result = analyzer.AnalyzeSource(source, "evaluator.cs", DeterminismGuardOptions.Default); + + result.Passed.Should().BeTrue(); + result.Violations.Should().BeEmpty(); + } + + [Fact] + public void AnalyzeSource_TracksLineNumbers() + { + var analyzer = new ProhibitedPatternAnalyzer(); + var source = """ + public class Test + { + public void Method() + { + var now = DateTime.Now; + } + } + """; + + var result = analyzer.AnalyzeSource(source, "test.cs", DeterminismGuardOptions.Default); + + result.Violations.Should().ContainSingle(v => v.LineNumber == 5); + } + + [Fact] + public void AnalyzeMultiple_AggregatesViolations() + { + var analyzer = new ProhibitedPatternAnalyzer(); + var sources = new[] + { + ("file1.cs", "var now = DateTime.Now;"), + ("file2.cs", "var rng = new Random();"), + ("file3.cs", "var id = Guid.NewGuid();") + }; + + var result = analyzer.AnalyzeMultiple( + sources.Select(s => (s.Item2, s.Item1)), + DeterminismGuardOptions.Default); + + result.Violations.Should().HaveCount(3); + result.Violations.Select(v => v.SourceFile).Should() + .BeEquivalentTo(["file1.cs", "file2.cs", "file3.cs"]); + } + + #endregion + + #region DeterminismGuardService Tests + + [Fact] + public void CreateScope_ReturnsFixedTimestamp() + { + var guard = new DeterminismGuardService(); + var timestamp = new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero); + + using var scope = guard.CreateScope("test-scope", timestamp); + + scope.GetTimestamp().Should().Be(timestamp); + scope.EvaluationTimestamp.Should().Be(timestamp); + } + + [Fact] + public void CreateScope_TracksViolations() + { + var guard = new DeterminismGuardService(); + using var scope = guard.CreateScope("test-scope", DateTimeOffset.UtcNow); + + var violation = new DeterminismViolation + { + Category = DeterminismViolationCategory.WallClock, + ViolationType = "Test", + Message = "Test violation", + Severity = DeterminismViolationSeverity.Warning + }; + + scope.ReportViolation(violation); + + scope.GetViolations().Should().ContainSingle(v => v.Message == "Test violation"); + } + + [Fact] + public void CreateScope_ThrowsOnBlockingViolationWhenEnforcementEnabled() + { + var options = new DeterminismGuardOptions + { + EnforcementEnabled = true, + FailOnSeverity = DeterminismViolationSeverity.Error + }; + var guard = new DeterminismGuardService(options); + using var scope = guard.CreateScope("test-scope", DateTimeOffset.UtcNow); + + var violation = new DeterminismViolation + { + Category = DeterminismViolationCategory.WallClock, + ViolationType = "Test", + Message = "Blocking violation", + Severity = DeterminismViolationSeverity.Error + }; + + var act = () => scope.ReportViolation(violation); + + act.Should().Throw() + .Which.Violation.Should().Be(violation); + } + + [Fact] + public void CreateScope_DoesNotThrowWhenEnforcementDisabled() + { + var options = new DeterminismGuardOptions + { + EnforcementEnabled = false + }; + var guard = new DeterminismGuardService(options); + using var scope = guard.CreateScope("test-scope", DateTimeOffset.UtcNow); + + var violation = new DeterminismViolation + { + Category = DeterminismViolationCategory.WallClock, + ViolationType = "Test", + Message = "Should not throw", + Severity = DeterminismViolationSeverity.Critical + }; + + var act = () => scope.ReportViolation(violation); + + act.Should().NotThrow(); + } + + [Fact] + public void Complete_ReturnsAnalysisResult() + { + var guard = new DeterminismGuardService(); + using var scope = guard.CreateScope("test-scope", DateTimeOffset.UtcNow); + + scope.ReportViolation(new DeterminismViolation + { + Category = DeterminismViolationCategory.RandomNumber, + ViolationType = "Test", + Message = "Warning violation", + Severity = DeterminismViolationSeverity.Warning + }); + + var result = scope.Complete(); + + result.Passed.Should().BeTrue(); // Only warnings, no errors + result.Violations.Should().HaveCount(1); + result.CountBySeverity.Should().ContainKey(DeterminismViolationSeverity.Warning); + } + + #endregion + + #region DeterministicTimeProvider Tests + + [Fact] + public void DeterministicTimeProvider_ReturnsFixedTimestamp() + { + var fixedTime = new DateTimeOffset(2025, 6, 15, 10, 30, 0, TimeSpan.Zero); + var provider = new DeterministicTimeProvider(fixedTime); + + provider.GetUtcNow().Should().Be(fixedTime); + provider.GetUtcNow().Should().Be(fixedTime); // Same value on repeated calls + } + + [Fact] + public void DeterministicTimeProvider_ReturnsUtcTimeZone() + { + var provider = new DeterministicTimeProvider(DateTimeOffset.UtcNow); + + provider.LocalTimeZone.Should().Be(TimeZoneInfo.Utc); + } + + #endregion + + #region GuardedPolicyEvaluator Tests + + [Fact] + public void Evaluate_ReturnsResultWithViolations() + { + var evaluator = new GuardedPolicyEvaluator(); + var timestamp = DateTimeOffset.UtcNow; + + var result = evaluator.Evaluate("test-scope", timestamp, scope => + { + scope.ReportViolation(new DeterminismViolation + { + Category = DeterminismViolationCategory.WallClock, + ViolationType = "Test", + Message = "Test warning", + Severity = DeterminismViolationSeverity.Warning + }); + return 42; + }); + + result.Succeeded.Should().BeTrue(); + result.Result.Should().Be(42); + result.HasViolations.Should().BeTrue(); + result.Violations.Should().HaveCount(1); + } + + [Fact] + public void Evaluate_CapturesBlockingViolation() + { + var options = new DeterminismGuardOptions + { + EnforcementEnabled = true, + FailOnSeverity = DeterminismViolationSeverity.Error + }; + var evaluator = new GuardedPolicyEvaluator(options); + + var result = evaluator.Evaluate("test-scope", DateTimeOffset.UtcNow, scope => + { + scope.ReportViolation(new DeterminismViolation + { + Category = DeterminismViolationCategory.NetworkAccess, + ViolationType = "HttpClient", + Message = "Network access blocked", + Severity = DeterminismViolationSeverity.Critical + }); + return "should not return"; + }); + + result.Succeeded.Should().BeFalse(); + result.WasBlocked.Should().BeTrue(); + result.BlockingViolation.Should().NotBeNull(); + } + + [Fact] + public void ValidatePolicySource_ReturnsViolations() + { + var evaluator = new GuardedPolicyEvaluator(); + var source = "var now = DateTime.Now;"; + + var result = evaluator.ValidatePolicySource(source, "policy.cs"); + + result.Violations.Should().ContainSingle(); + } + + [Fact] + public async Task EvaluateAsync_WorksWithAsyncCode() + { + var evaluator = new GuardedPolicyEvaluator(); + + var result = await evaluator.EvaluateAsync("async-scope", DateTimeOffset.UtcNow, async scope => + { + await Task.Delay(1); + return "async result"; + }); + + result.Succeeded.Should().BeTrue(); + result.Result.Should().Be("async result"); + } + + #endregion + + #region DeterminismGuardOptions Tests + + [Fact] + public void Default_HasEnforcementEnabled() + { + DeterminismGuardOptions.Default.EnforcementEnabled.Should().BeTrue(); + DeterminismGuardOptions.Default.FailOnSeverity.Should().Be(DeterminismViolationSeverity.Error); + } + + [Fact] + public void Development_HasEnforcementDisabled() + { + DeterminismGuardOptions.Development.EnforcementEnabled.Should().BeFalse(); + DeterminismGuardOptions.Development.FailOnSeverity.Should().Be(DeterminismViolationSeverity.Critical); + } + + #endregion +} diff --git a/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/IncrementalOrchestrator/IncrementalOrchestratorTests.cs b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/IncrementalOrchestrator/IncrementalOrchestratorTests.cs new file mode 100644 index 000000000..c4ce92c2f --- /dev/null +++ b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/IncrementalOrchestrator/IncrementalOrchestratorTests.cs @@ -0,0 +1,319 @@ +using System.Collections.Immutable; +using FluentAssertions; +using Microsoft.Extensions.Time.Testing; +using StellaOps.Policy.Engine.IncrementalOrchestrator; +using StellaOps.Policy.Engine.Telemetry; +using Xunit; + +namespace StellaOps.Policy.Engine.Tests.IncrementalOrchestrator; + +public sealed class IncrementalOrchestratorTests +{ + #region PolicyChangeEvent Tests + + [Fact] + public void CreateAdvisoryUpdated_CreatesValidEvent() + { + var now = DateTimeOffset.UtcNow; + var evt = PolicyChangeEventFactory.CreateAdvisoryUpdated( + tenantId: "test-tenant", + advisoryId: "GHSA-test-001", + vulnerabilityId: "CVE-2021-12345", + affectedPurls: ["pkg:npm/lodash", "pkg:npm/express"], + source: "concelier", + occurredAt: now, + createdAt: now); + + evt.ChangeType.Should().Be(PolicyChangeType.AdvisoryUpdated); + evt.TenantId.Should().Be("test-tenant"); + evt.AdvisoryId.Should().Be("GHSA-test-001"); + evt.VulnerabilityId.Should().Be("CVE-2021-12345"); + evt.AffectedPurls.Should().HaveCount(2); + evt.EventId.Should().StartWith("pce-"); + evt.ContentHash.Should().NotBeNullOrEmpty(); + } + + [Fact] + public void CreateVexUpdated_CreatesValidEvent() + { + var now = DateTimeOffset.UtcNow; + var evt = PolicyChangeEventFactory.CreateVexUpdated( + tenantId: "test-tenant", + vulnerabilityId: "CVE-2021-12345", + affectedProductKeys: ["pkg:npm/lodash"], + source: "excititor", + occurredAt: now, + createdAt: now); + + evt.ChangeType.Should().Be(PolicyChangeType.VexStatementUpdated); + evt.VulnerabilityId.Should().Be("CVE-2021-12345"); + evt.AffectedProductKeys.Should().ContainSingle(); + } + + [Fact] + public void CreateSbomUpdated_CreatesValidEvent() + { + var now = DateTimeOffset.UtcNow; + var evt = PolicyChangeEventFactory.CreateSbomUpdated( + tenantId: "test-tenant", + sbomId: "sbom-123", + productKey: "myapp:v1.0.0", + componentPurls: ["pkg:npm/lodash@4.17.21"], + source: "scanner", + occurredAt: now, + createdAt: now); + + evt.ChangeType.Should().Be(PolicyChangeType.SbomUpdated); + evt.AffectedSbomIds.Should().Contain("sbom-123"); + evt.AffectedProductKeys.Should().Contain("myapp:v1.0.0"); + } + + [Fact] + public void ComputeContentHash_IsDeterministic() + { + var hash1 = PolicyChangeEvent.ComputeContentHash( + PolicyChangeType.AdvisoryUpdated, + "tenant", + "ADV-001", + "CVE-001", + ["pkg:npm/a", "pkg:npm/b"], + null, + null); + + var hash2 = PolicyChangeEvent.ComputeContentHash( + PolicyChangeType.AdvisoryUpdated, + "tenant", + "ADV-001", + "CVE-001", + ["pkg:npm/b", "pkg:npm/a"], // Different order + null, + null); + + hash1.Should().Be(hash2); // Should be equal due to sorting + } + + [Fact] + public void ComputeContentHash_DiffersForDifferentInput() + { + var hash1 = PolicyChangeEvent.ComputeContentHash( + PolicyChangeType.AdvisoryUpdated, + "tenant", + "ADV-001", + "CVE-001", + null, null, null); + + var hash2 = PolicyChangeEvent.ComputeContentHash( + PolicyChangeType.AdvisoryUpdated, + "tenant", + "ADV-002", // Different advisory + "CVE-001", + null, null, null); + + hash1.Should().NotBe(hash2); + } + + [Fact] + public void CreateManualTrigger_IncludesRequestedBy() + { + var now = DateTimeOffset.UtcNow; + var evt = PolicyChangeEventFactory.CreateManualTrigger( + tenantId: "test-tenant", + policyIds: ["policy-1"], + sbomIds: ["sbom-1"], + productKeys: null, + requestedBy: "admin@example.com", + createdAt: now); + + evt.ChangeType.Should().Be(PolicyChangeType.ManualTrigger); + evt.Metadata.Should().ContainKey("requestedBy"); + evt.Metadata["requestedBy"].Should().Be("admin@example.com"); + } + + #endregion + + #region IncrementalPolicyOrchestrator Tests + + [Fact] + public async Task ProcessAsync_ProcessesEvents() + { + var eventSource = new InMemoryPolicyChangeEventSource(); + var submitter = new TestSubmitter(); + var idempotencyStore = new InMemoryPolicyChangeIdempotencyStore(); + var timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow); + + var orchestrator = new IncrementalPolicyOrchestrator( + eventSource, submitter, idempotencyStore, + timeProvider: timeProvider); + + eventSource.Enqueue(PolicyChangeEventFactory.CreateAdvisoryUpdated( + "tenant1", "ADV-001", "CVE-001", ["pkg:npm/test"], + "test", timeProvider.GetUtcNow(), timeProvider.GetUtcNow())); + + var result = await orchestrator.ProcessAsync(CancellationToken.None); + + result.TotalEventsRead.Should().Be(1); + result.BatchesProcessed.Should().Be(1); + submitter.SubmittedBatches.Should().HaveCount(1); + } + + [Fact] + public async Task ProcessAsync_DeduplicatesEvents() + { + var eventSource = new InMemoryPolicyChangeEventSource(); + var submitter = new TestSubmitter(); + var idempotencyStore = new InMemoryPolicyChangeIdempotencyStore(); + var timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow); + + var orchestrator = new IncrementalPolicyOrchestrator( + eventSource, submitter, idempotencyStore, + timeProvider: timeProvider); + + var evt = PolicyChangeEventFactory.CreateAdvisoryUpdated( + "tenant1", "ADV-001", "CVE-001", ["pkg:npm/test"], + "test", timeProvider.GetUtcNow(), timeProvider.GetUtcNow()); + + // Mark as already seen + await idempotencyStore.MarkSeenAsync(evt.EventId, timeProvider.GetUtcNow(), CancellationToken.None); + + eventSource.Enqueue(evt); + + var result = await orchestrator.ProcessAsync(CancellationToken.None); + + result.TotalEventsRead.Should().Be(1); + result.EventsSkippedDuplicate.Should().Be(1); + result.BatchesProcessed.Should().Be(0); + } + + [Fact] + public async Task ProcessAsync_SkipsOldEvents() + { + var options = new IncrementalOrchestratorOptions + { + MaxEventAge = TimeSpan.FromHours(1) + }; + var eventSource = new InMemoryPolicyChangeEventSource(); + var submitter = new TestSubmitter(); + var idempotencyStore = new InMemoryPolicyChangeIdempotencyStore(); + var timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow); + + var orchestrator = new IncrementalPolicyOrchestrator( + eventSource, submitter, idempotencyStore, options, + timeProvider: timeProvider); + + // Create an old event + var oldTime = timeProvider.GetUtcNow().AddHours(-2); + var evt = PolicyChangeEventFactory.CreateAdvisoryUpdated( + "tenant1", "ADV-001", "CVE-001", ["pkg:npm/test"], + "test", oldTime, oldTime); + + eventSource.Enqueue(evt); + + var result = await orchestrator.ProcessAsync(CancellationToken.None); + + result.TotalEventsRead.Should().Be(1); + result.EventsSkippedOld.Should().Be(1); + result.BatchesProcessed.Should().Be(0); + } + + [Fact] + public async Task ProcessAsync_GroupsByTenant() + { + var eventSource = new InMemoryPolicyChangeEventSource(); + var submitter = new TestSubmitter(); + var idempotencyStore = new InMemoryPolicyChangeIdempotencyStore(); + var timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow); + + var orchestrator = new IncrementalPolicyOrchestrator( + eventSource, submitter, idempotencyStore, + timeProvider: timeProvider); + + eventSource.Enqueue(PolicyChangeEventFactory.CreateAdvisoryUpdated( + "tenant1", "ADV-001", "CVE-001", [], "test", + timeProvider.GetUtcNow(), timeProvider.GetUtcNow())); + + eventSource.Enqueue(PolicyChangeEventFactory.CreateAdvisoryUpdated( + "tenant2", "ADV-002", "CVE-002", [], "test", + timeProvider.GetUtcNow(), timeProvider.GetUtcNow())); + + var result = await orchestrator.ProcessAsync(CancellationToken.None); + + result.BatchesProcessed.Should().Be(2); // One per tenant + submitter.SubmittedBatches.Select(b => b.TenantId).Should() + .BeEquivalentTo(["tenant1", "tenant2"]); + } + + [Fact] + public async Task ProcessAsync_SortsByPriority() + { + var eventSource = new InMemoryPolicyChangeEventSource(); + var submitter = new TestSubmitter(); + var idempotencyStore = new InMemoryPolicyChangeIdempotencyStore(); + var timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow); + + var orchestrator = new IncrementalPolicyOrchestrator( + eventSource, submitter, idempotencyStore, + timeProvider: timeProvider); + + // Add normal priority first + eventSource.Enqueue(PolicyChangeEventFactory.CreateAdvisoryUpdated( + "tenant1", "ADV-001", "CVE-001", [], "test", + timeProvider.GetUtcNow(), timeProvider.GetUtcNow(), + priority: PolicyChangePriority.Normal)); + + // Add emergency priority second + eventSource.Enqueue(PolicyChangeEventFactory.CreateAdvisoryUpdated( + "tenant1", "ADV-002", "CVE-002", [], "test", + timeProvider.GetUtcNow(), timeProvider.GetUtcNow(), + priority: PolicyChangePriority.Emergency)); + + await orchestrator.ProcessAsync(CancellationToken.None); + + // Emergency should be processed first (separate batch due to priority) + submitter.SubmittedBatches.Should().HaveCount(2); + submitter.SubmittedBatches[0].Priority.Should().Be(PolicyChangePriority.Emergency); + } + + #endregion + + #region RuleHitSamplingOptions Tests + + [Fact] + public void Default_HasReasonableSamplingRates() + { + var options = RuleHitSamplingOptions.Default; + + options.BaseSamplingRate.Should().BeInRange(0.0, 1.0); + options.VexOverrideSamplingRate.Should().Be(1.0); // Always sample VEX + options.IncidentModeSamplingRate.Should().Be(1.0); + } + + [Fact] + public void FullSampling_SamplesEverything() + { + var options = RuleHitSamplingOptions.FullSampling; + + options.BaseSamplingRate.Should().Be(1.0); + options.VexOverrideSamplingRate.Should().Be(1.0); + options.HighSeveritySamplingRate.Should().Be(1.0); + } + + #endregion + + private sealed class TestSubmitter : IPolicyReEvaluationSubmitter + { + public List SubmittedBatches { get; } = []; + + public Task SubmitAsync( + PolicyChangeBatch batch, + CancellationToken cancellationToken) + { + SubmittedBatches.Add(batch); + return Task.FromResult(new PolicyReEvaluationResult + { + Succeeded = true, + JobIds = [$"job-{batch.BatchId}"], + ProcessingTimeMs = 1 + }); + } + } +} diff --git a/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Materialization/MaterializationTests.cs b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Materialization/MaterializationTests.cs new file mode 100644 index 000000000..c25d7dd2a --- /dev/null +++ b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Materialization/MaterializationTests.cs @@ -0,0 +1,268 @@ +using System.Collections.Immutable; +using FluentAssertions; +using StellaOps.Policy.Engine.Materialization; +using Xunit; + +namespace StellaOps.Policy.Engine.Tests.Materialization; + +public sealed class MaterializationTests +{ + #region EffectiveFinding.CreateId Tests + + [Fact] + public void CreateId_IsDeterministic() + { + var id1 = EffectiveFinding.CreateId("tenant1", "policy-1", "pkg:npm/lodash@4.17.21", "CVE-2021-12345"); + var id2 = EffectiveFinding.CreateId("tenant1", "policy-1", "pkg:npm/lodash@4.17.21", "CVE-2021-12345"); + + id1.Should().Be(id2); + id1.Should().StartWith("sha256:"); + } + + [Fact] + public void CreateId_NormalizesTenant() + { + var id1 = EffectiveFinding.CreateId("TENANT1", "policy-1", "pkg:npm/lodash", "CVE-2021-12345"); + var id2 = EffectiveFinding.CreateId("tenant1", "policy-1", "pkg:npm/lodash", "CVE-2021-12345"); + + id1.Should().Be(id2); + } + + [Fact] + public void CreateId_NormalizesPurl() + { + var id1 = EffectiveFinding.CreateId("tenant1", "policy-1", "PKG:NPM/LODASH", "CVE-2021-12345"); + var id2 = EffectiveFinding.CreateId("tenant1", "policy-1", "pkg:npm/lodash", "CVE-2021-12345"); + + id1.Should().Be(id2); + } + + [Fact] + public void CreateId_DiffersForDifferentInput() + { + var id1 = EffectiveFinding.CreateId("tenant1", "policy-1", "pkg:npm/lodash", "CVE-2021-12345"); + var id2 = EffectiveFinding.CreateId("tenant1", "policy-1", "pkg:npm/lodash", "CVE-2021-99999"); + + id1.Should().NotBe(id2); + } + + [Fact] + public void CreateId_HandlesNullValues() + { + var id = EffectiveFinding.CreateId(null!, "policy", "purl", "advisory"); + + id.Should().StartWith("sha256:"); + } + + #endregion + + #region EffectiveFinding.ComputeContentHash Tests + + [Fact] + public void ComputeContentHash_IsDeterministic() + { + var hash1 = EffectiveFinding.ComputeContentHash("affected", "High", "severity-rule", "not_affected", null); + var hash2 = EffectiveFinding.ComputeContentHash("affected", "High", "severity-rule", "not_affected", null); + + hash1.Should().Be(hash2); + } + + [Fact] + public void ComputeContentHash_DiffersForDifferentStatus() + { + var hash1 = EffectiveFinding.ComputeContentHash("affected", "High", null, null, null); + var hash2 = EffectiveFinding.ComputeContentHash("suppressed", "High", null, null, null); + + hash1.Should().NotBe(hash2); + } + + [Fact] + public void ComputeContentHash_DiffersForDifferentSeverity() + { + var hash1 = EffectiveFinding.ComputeContentHash("affected", "High", null, null, null); + var hash2 = EffectiveFinding.ComputeContentHash("affected", "Critical", null, null, null); + + hash1.Should().NotBe(hash2); + } + + [Fact] + public void ComputeContentHash_IncludesAnnotations() + { + var annotations = new Dictionary { ["key"] = "value" }; + var hash1 = EffectiveFinding.ComputeContentHash("affected", "High", null, null, annotations); + var hash2 = EffectiveFinding.ComputeContentHash("affected", "High", null, null, null); + + hash1.Should().NotBe(hash2); + } + + [Fact] + public void ComputeContentHash_SortsAnnotationsDeterministically() + { + var annotations1 = new Dictionary { ["a"] = "1", ["b"] = "2" }; + var annotations2 = new Dictionary { ["b"] = "2", ["a"] = "1" }; + + var hash1 = EffectiveFinding.ComputeContentHash("affected", null, null, null, annotations1); + var hash2 = EffectiveFinding.ComputeContentHash("affected", null, null, null, annotations2); + + hash1.Should().Be(hash2); + } + + #endregion + + #region EffectiveFindingHistoryEntry Tests + + [Fact] + public void HistoryEntry_CreateId_IsDeterministic() + { + var id1 = EffectiveFindingHistoryEntry.CreateId("finding-1", 5); + var id2 = EffectiveFindingHistoryEntry.CreateId("finding-1", 5); + + id1.Should().Be(id2); + id1.Should().Be("finding-1:v5"); + } + + [Fact] + public void HistoryEntry_CreateId_DiffersForDifferentVersion() + { + var id1 = EffectiveFindingHistoryEntry.CreateId("finding-1", 1); + var id2 = EffectiveFindingHistoryEntry.CreateId("finding-1", 2); + + id1.Should().NotBe(id2); + } + + #endregion + + #region MaterializeFindingInput Tests + + [Fact] + public void MaterializeFindingInput_CanBeCreated() + { + var input = new MaterializeFindingInput + { + TenantId = "tenant-1", + PolicyId = "policy-1", + PolicyVersion = 1, + ComponentPurl = "pkg:npm/lodash@4.17.21", + ComponentName = "lodash", + ComponentVersion = "4.17.21", + AdvisoryId = "CVE-2021-12345", + AdvisorySource = "nvd", + Status = "affected", + Severity = "High", + RuleName = "severity-rule", + VexStatus = "not_affected", + VexJustification = "vulnerable_code_not_in_execute_path", + Annotations = ImmutableDictionary.Empty.Add("key", "value"), + PolicyRunId = "run-123", + TraceId = "trace-abc", + SpanId = "span-def" + }; + + input.TenantId.Should().Be("tenant-1"); + input.PolicyId.Should().Be("policy-1"); + input.PolicyVersion.Should().Be(1); + input.ComponentPurl.Should().Be("pkg:npm/lodash@4.17.21"); + input.Status.Should().Be("affected"); + input.VexStatus.Should().Be("not_affected"); + } + + #endregion + + #region MaterializeFindingResult Tests + + [Fact] + public void MaterializeFindingResult_TracksCreation() + { + var result = new MaterializeFindingResult + { + FindingId = "sha256:abc123", + WasCreated = true, + WasUpdated = false, + HistoryVersion = 1, + ChangeType = EffectiveFindingChangeType.Created + }; + + result.WasCreated.Should().BeTrue(); + result.WasUpdated.Should().BeFalse(); + result.ChangeType.Should().Be(EffectiveFindingChangeType.Created); + } + + [Fact] + public void MaterializeFindingResult_TracksUpdate() + { + var result = new MaterializeFindingResult + { + FindingId = "sha256:abc123", + WasCreated = false, + WasUpdated = true, + HistoryVersion = 2, + ChangeType = EffectiveFindingChangeType.StatusChanged + }; + + result.WasCreated.Should().BeFalse(); + result.WasUpdated.Should().BeTrue(); + result.ChangeType.Should().Be(EffectiveFindingChangeType.StatusChanged); + } + + #endregion + + #region MaterializeBatchResult Tests + + [Fact] + public void MaterializeBatchResult_AggregatesCorrectly() + { + var results = ImmutableArray.Create( + new MaterializeFindingResult + { + FindingId = "id1", + WasCreated = true, + WasUpdated = false, + HistoryVersion = 1, + ChangeType = EffectiveFindingChangeType.Created + }, + new MaterializeFindingResult + { + FindingId = "id2", + WasCreated = false, + WasUpdated = true, + HistoryVersion = 2, + ChangeType = EffectiveFindingChangeType.StatusChanged + } + ); + + var batchResult = new MaterializeBatchResult + { + TotalInputs = 3, + Created = 1, + Updated = 1, + Unchanged = 1, + Errors = 0, + ProcessingTimeMs = 100, + Results = results + }; + + batchResult.TotalInputs.Should().Be(3); + batchResult.Created.Should().Be(1); + batchResult.Updated.Should().Be(1); + batchResult.Unchanged.Should().Be(1); + batchResult.Results.Should().HaveCount(2); + } + + #endregion + + #region EffectiveFindingChangeType Tests + + [Theory] + [InlineData(EffectiveFindingChangeType.Created, "Created")] + [InlineData(EffectiveFindingChangeType.StatusChanged, "StatusChanged")] + [InlineData(EffectiveFindingChangeType.SeverityChanged, "SeverityChanged")] + [InlineData(EffectiveFindingChangeType.VexApplied, "VexApplied")] + [InlineData(EffectiveFindingChangeType.AnnotationsChanged, "AnnotationsChanged")] + [InlineData(EffectiveFindingChangeType.PolicyVersionChanged, "PolicyVersionChanged")] + public void EffectiveFindingChangeType_HasExpectedValues(EffectiveFindingChangeType changeType, string expectedName) + { + changeType.ToString().Should().Be(expectedName); + } + + #endregion +} diff --git a/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyBundleServiceTests.cs b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyBundleServiceTests.cs index 08409f54f..33c1e51be 100644 --- a/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyBundleServiceTests.cs +++ b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyBundleServiceTests.cs @@ -128,7 +128,8 @@ public sealed class PolicyBundleServiceTests var compiler = new PolicyCompiler(); var complexity = new PolicyComplexityAnalyzer(); var options = Microsoft.Extensions.Options.Options.Create(new PolicyEngineOptions()); - var compilationService = new PolicyCompilationService(compiler, complexity, new StaticOptionsMonitor(options.Value), TimeProvider.System); + var metadataExtractor = new PolicyMetadataExtractor(); + var compilationService = new PolicyCompilationService(compiler, complexity, metadataExtractor, new StaticOptionsMonitor(options.Value), TimeProvider.System); var repo = new InMemoryPolicyPackRepository(); return new ServiceHarness( new PolicyBundleService(compilationService, repo, TimeProvider.System), diff --git a/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyCompilationServiceTests.cs b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyCompilationServiceTests.cs index ebf58ffbb..42d1edd63 100644 --- a/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyCompilationServiceTests.cs +++ b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyCompilationServiceTests.cs @@ -84,7 +84,8 @@ public sealed class PolicyCompilationServiceTests options.Compilation.MaxDurationMilliseconds = maxDurationMilliseconds; var optionsMonitor = new StaticOptionsMonitor(options); var timeProvider = new FakeTimeProvider(simulatedDurationMilliseconds); - return new PolicyCompilationService(compiler, analyzer, optionsMonitor, timeProvider); + var metadataExtractor = new PolicyMetadataExtractor(); + return new PolicyCompilationService(compiler, analyzer, metadataExtractor, optionsMonitor, timeProvider); } private sealed class StaticOptionsMonitor : IOptionsMonitor diff --git a/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyRuntimeEvaluationServiceTests.cs b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyRuntimeEvaluationServiceTests.cs index 5f846c504..12a8f1c53 100644 --- a/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyRuntimeEvaluationServiceTests.cs +++ b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyRuntimeEvaluationServiceTests.cs @@ -157,8 +157,8 @@ public sealed class PolicyRuntimeEvaluationServiceTests var responses = await harness.Service.EvaluateBatchAsync(requests, CancellationToken.None); Assert.Equal(2, responses.Count); - Assert.True(responses.Any(r => r.Cached)); - Assert.True(responses.Any(r => !r.Cached)); + Assert.Contains(responses, r => r.Cached); + Assert.Contains(responses, r => !r.Cached); } [Fact] @@ -231,7 +231,8 @@ public sealed class PolicyRuntimeEvaluationServiceTests var analyzer = new PolicyComplexityAnalyzer(); var options = new PolicyEngineOptions(); var optionsMonitor = new StaticOptionsMonitor(options); - return new PolicyCompilationService(compiler, analyzer, optionsMonitor, TimeProvider.System); + var metadataExtractor = new PolicyMetadataExtractor(); + return new PolicyCompilationService(compiler, analyzer, metadataExtractor, optionsMonitor, TimeProvider.System); } private sealed record TestHarness( diff --git a/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/SelectionJoin/SelectionJoinTests.cs b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/SelectionJoin/SelectionJoinTests.cs new file mode 100644 index 000000000..4d2ec2d43 --- /dev/null +++ b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/SelectionJoin/SelectionJoinTests.cs @@ -0,0 +1,380 @@ +using System.Collections.Immutable; +using FluentAssertions; +using StellaOps.Policy.Engine.SelectionJoin; +using Xunit; + +namespace StellaOps.Policy.Engine.Tests.SelectionJoin; + +public sealed class SelectionJoinTests +{ + #region PurlEquivalence Tests + + [Theory] + [InlineData("pkg:npm/lodash@4.17.21", "pkg:npm/lodash")] + [InlineData("pkg:maven/org.apache.commons/commons-lang3@3.12.0", "pkg:maven/org.apache.commons/commons-lang3")] + [InlineData("pkg:pypi/requests@2.28.0", "pkg:pypi/requests")] + [InlineData("pkg:gem/rails@7.0.0", "pkg:gem/rails")] + [InlineData("pkg:nuget/Newtonsoft.Json@13.0.1", "pkg:nuget/Newtonsoft.Json")] + public void ExtractPackageKey_RemovesVersion(string purl, string expectedKey) + { + var key = PurlEquivalence.ExtractPackageKey(purl); + + key.Should().Be(expectedKey); + } + + [Fact] + public void ExtractPackageKey_HandlesNoVersion() + { + var purl = "pkg:npm/lodash"; + + var key = PurlEquivalence.ExtractPackageKey(purl); + + key.Should().Be("pkg:npm/lodash"); + } + + [Fact] + public void ExtractPackageKey_HandlesScopedPackages() + { + var purl = "pkg:npm/@scope/package@1.0.0"; + + var key = PurlEquivalence.ExtractPackageKey(purl); + + key.Should().Be("pkg:npm/@scope/package"); + } + + [Theory] + [InlineData("pkg:npm/lodash@4.17.21", "npm")] + [InlineData("pkg:maven/org.apache/commons@1.0", "maven")] + [InlineData("pkg:pypi/requests@2.28", "pypi")] + public void ExtractEcosystem_ReturnsCorrectEcosystem(string purl, string expected) + { + var ecosystem = PurlEquivalence.ExtractEcosystem(purl); + + ecosystem.Should().Be(expected); + } + + [Fact] + public void ComputeMatchConfidence_ExactMatch_Returns1() + { + var confidence = PurlEquivalence.ComputeMatchConfidence( + "pkg:npm/lodash@4.17.21", + "pkg:npm/lodash@4.17.21"); + + confidence.Should().Be(1.0); + } + + [Fact] + public void ComputeMatchConfidence_PackageKeyMatch_Returns08() + { + var confidence = PurlEquivalence.ComputeMatchConfidence( + "pkg:npm/lodash@4.17.21", + "pkg:npm/lodash@4.17.20"); + + confidence.Should().Be(0.8); + } + + #endregion + + #region PurlEquivalenceTable Tests + + [Fact] + public void FromGroups_CreatesEquivalentMappings() + { + var groups = new[] + { + new[] { "pkg:npm/lodash", "pkg:npm/lodash-es" } + }; + + var table = PurlEquivalenceTable.FromGroups(groups); + + table.AreEquivalent("pkg:npm/lodash", "pkg:npm/lodash-es").Should().BeTrue(); + table.GroupCount.Should().Be(1); + } + + [Fact] + public void GetCanonical_ReturnsFirstLexicographically() + { + var groups = new[] + { + new[] { "pkg:npm/b-package", "pkg:npm/a-package" } + }; + + var table = PurlEquivalenceTable.FromGroups(groups); + + // "a-package" is lexicographically first + table.GetCanonical("pkg:npm/b-package").Should().Be("pkg:npm/a-package"); + } + + [Fact] + public void GetEquivalents_ReturnsAllEquivalentPurls() + { + var groups = new[] + { + new[] { "pkg:npm/a", "pkg:npm/b", "pkg:npm/c" } + }; + + var table = PurlEquivalenceTable.FromGroups(groups); + var equivalents = table.GetEquivalents("pkg:npm/b"); + + equivalents.Should().HaveCount(3); + equivalents.Should().Contain("pkg:npm/a"); + equivalents.Should().Contain("pkg:npm/b"); + equivalents.Should().Contain("pkg:npm/c"); + } + + [Fact] + public void Empty_HasNoMappings() + { + var table = PurlEquivalenceTable.Empty; + + table.GroupCount.Should().Be(0); + table.TotalEntries.Should().Be(0); + table.AreEquivalent("pkg:npm/a", "pkg:npm/b").Should().BeFalse(); + } + + #endregion + + #region SelectionJoinService Tests + + [Fact] + public void ResolveTuples_MatchesByExactPurl() + { + var service = new SelectionJoinService(); + var input = new SelectionJoinBatchInput( + TenantId: "test-tenant", + BatchId: "batch-1", + Components: [ + new SbomComponentInput( + Purl: "pkg:npm/lodash@4.17.21", + Name: "lodash", + Version: "4.17.21", + Ecosystem: "npm", + Metadata: ImmutableDictionary.Empty) + ], + Advisories: [ + new AdvisoryLinksetInput( + AdvisoryId: "GHSA-test-001", + Source: "github", + Purls: ["pkg:npm/lodash@4.17.21"], + Cpes: ImmutableArray.Empty, + Aliases: ["CVE-2021-12345"], + Confidence: 1.0) + ], + VexLinksets: ImmutableArray.Empty, + EquivalenceTable: null, + Options: new SelectionJoinOptions()); + + var result = service.ResolveTuples(input); + + result.Tuples.Should().ContainSingle(); + result.Tuples[0].MatchType.Should().Be(SelectionMatchType.ExactPurl); + result.Tuples[0].Component.Purl.Should().Be("pkg:npm/lodash@4.17.21"); + result.Statistics.ExactPurlMatches.Should().Be(1); + } + + [Fact] + public void ResolveTuples_MatchesByPackageKey() + { + var service = new SelectionJoinService(); + var input = new SelectionJoinBatchInput( + TenantId: "test-tenant", + BatchId: "batch-1", + Components: [ + new SbomComponentInput("pkg:npm/lodash@4.17.21", "lodash", "4.17.21", "npm", + ImmutableDictionary.Empty) + ], + Advisories: [ + new AdvisoryLinksetInput("GHSA-test-001", "github", + Purls: ["pkg:npm/lodash@4.17.20"], // Different version + Cpes: ImmutableArray.Empty, + Aliases: ["CVE-2021-12345"], + Confidence: 1.0) + ], + VexLinksets: ImmutableArray.Empty, + EquivalenceTable: null, + Options: new SelectionJoinOptions()); + + var result = service.ResolveTuples(input); + + result.Tuples.Should().ContainSingle(); + result.Tuples[0].MatchType.Should().Be(SelectionMatchType.PackageKeyMatch); + } + + [Fact] + public void ResolveTuples_AppliesVexOverlay() + { + var service = new SelectionJoinService(); + var input = new SelectionJoinBatchInput( + TenantId: "test-tenant", + BatchId: "batch-1", + Components: [ + new SbomComponentInput("pkg:npm/lodash@4.17.21", "lodash", "4.17.21", "npm", + ImmutableDictionary.Empty) + ], + Advisories: [ + new AdvisoryLinksetInput("GHSA-test-001", "github", + Purls: ["pkg:npm/lodash@4.17.21"], + Cpes: ImmutableArray.Empty, + Aliases: ["CVE-2021-12345"], + Confidence: 1.0) + ], + VexLinksets: [ + new VexLinksetInput("vex-1", "CVE-2021-12345", "pkg:npm/lodash@4.17.21", + "not_affected", "vulnerable_code_not_in_execute_path", VexConfidenceLevel.High) + ], + EquivalenceTable: null, + Options: new SelectionJoinOptions()); + + var result = service.ResolveTuples(input); + + result.Tuples.Should().ContainSingle(); + result.Tuples[0].Vex.Should().NotBeNull(); + result.Tuples[0].Vex!.Status.Should().Be("not_affected"); + result.Statistics.VexOverlays.Should().Be(1); + } + + [Fact] + public void ResolveTuples_ProducesDeterministicOrdering() + { + var service = new SelectionJoinService(); + var input = new SelectionJoinBatchInput( + TenantId: "test-tenant", + BatchId: "batch-1", + Components: [ + new SbomComponentInput("pkg:npm/z-package@1.0.0", "z", "1.0.0", "npm", + ImmutableDictionary.Empty), + new SbomComponentInput("pkg:npm/a-package@1.0.0", "a", "1.0.0", "npm", + ImmutableDictionary.Empty), + new SbomComponentInput("pkg:npm/m-package@1.0.0", "m", "1.0.0", "npm", + ImmutableDictionary.Empty) + ], + Advisories: [ + new AdvisoryLinksetInput("ADV-001", "test", + Purls: ["pkg:npm/z-package", "pkg:npm/a-package", "pkg:npm/m-package"], + Cpes: ImmutableArray.Empty, + Aliases: ["CVE-2021-001"], + Confidence: 1.0) + ], + VexLinksets: ImmutableArray.Empty, + EquivalenceTable: null, + Options: new SelectionJoinOptions()); + + var result = service.ResolveTuples(input); + + // Should be sorted by component PURL + result.Tuples.Should().HaveCount(3); + result.Tuples[0].Component.Purl.Should().Be("pkg:npm/a-package@1.0.0"); + result.Tuples[1].Component.Purl.Should().Be("pkg:npm/m-package@1.0.0"); + result.Tuples[2].Component.Purl.Should().Be("pkg:npm/z-package@1.0.0"); + } + + [Fact] + public void ResolveTuples_HandlesMultipleAdvisories() + { + var service = new SelectionJoinService(); + var input = new SelectionJoinBatchInput( + TenantId: "test-tenant", + BatchId: "batch-1", + Components: [ + new SbomComponentInput("pkg:npm/lodash@4.17.21", "lodash", "4.17.21", "npm", + ImmutableDictionary.Empty) + ], + Advisories: [ + new AdvisoryLinksetInput("ADV-001", "test", + Purls: ["pkg:npm/lodash@4.17.21"], + Cpes: ImmutableArray.Empty, + Aliases: ["CVE-2021-001"], + Confidence: 1.0), + new AdvisoryLinksetInput("ADV-002", "test", + Purls: ["pkg:npm/lodash@4.17.21"], + Cpes: ImmutableArray.Empty, + Aliases: ["CVE-2021-002"], + Confidence: 1.0) + ], + VexLinksets: ImmutableArray.Empty, + EquivalenceTable: null, + Options: new SelectionJoinOptions()); + + var result = service.ResolveTuples(input); + + result.Tuples.Should().HaveCount(2); + result.Tuples.Should().Contain(t => t.Advisory.AdvisoryId == "ADV-001"); + result.Tuples.Should().Contain(t => t.Advisory.AdvisoryId == "ADV-002"); + } + + [Fact] + public void ResolveTuples_ReturnsStatistics() + { + var service = new SelectionJoinService(); + var input = new SelectionJoinBatchInput( + TenantId: "test-tenant", + BatchId: "batch-1", + Components: [ + new SbomComponentInput("pkg:npm/a@1.0.0", "a", "1.0.0", "npm", + ImmutableDictionary.Empty), + new SbomComponentInput("pkg:npm/b@1.0.0", "b", "1.0.0", "npm", + ImmutableDictionary.Empty) + ], + Advisories: [ + new AdvisoryLinksetInput("ADV-001", "test", + Purls: ["pkg:npm/a"], + Cpes: ImmutableArray.Empty, + Aliases: ["CVE-001"], + Confidence: 1.0) + ], + VexLinksets: ImmutableArray.Empty, + EquivalenceTable: null, + Options: new SelectionJoinOptions()); + + var result = service.ResolveTuples(input); + + result.Statistics.TotalComponents.Should().Be(2); + result.Statistics.TotalAdvisories.Should().Be(1); + result.Statistics.MatchedTuples.Should().Be(1); + result.UnmatchedComponents.Should().ContainSingle(c => c.Purl == "pkg:npm/b@1.0.0"); + } + + [Fact] + public void ResolveTuples_HandlesEmptyInput() + { + var service = new SelectionJoinService(); + var input = new SelectionJoinBatchInput( + TenantId: "test-tenant", + BatchId: "batch-1", + Components: ImmutableArray.Empty, + Advisories: ImmutableArray.Empty, + VexLinksets: ImmutableArray.Empty, + EquivalenceTable: null, + Options: new SelectionJoinOptions()); + + var result = service.ResolveTuples(input); + + result.Tuples.Should().BeEmpty(); + result.Statistics.TotalComponents.Should().Be(0); + } + + #endregion + + #region SelectionJoinTuple Tests + + [Fact] + public void CreateTupleId_IsDeterministic() + { + var id1 = SelectionJoinTuple.CreateTupleId("tenant1", "pkg:npm/lodash@4.17.21", "CVE-2021-12345"); + var id2 = SelectionJoinTuple.CreateTupleId("tenant1", "pkg:npm/lodash@4.17.21", "CVE-2021-12345"); + + id1.Should().Be(id2); + id1.Should().StartWith("tuple:sha256:"); + } + + [Fact] + public void CreateTupleId_NormalizesInput() + { + var id1 = SelectionJoinTuple.CreateTupleId("TENANT1", "PKG:NPM/LODASH@4.17.21", "CVE-2021-12345"); + var id2 = SelectionJoinTuple.CreateTupleId("tenant1", "pkg:npm/lodash@4.17.21", "CVE-2021-12345"); + + id1.Should().Be(id2); + } + + #endregion +} diff --git a/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Simulation/SimulationAnalyticsServiceTests.cs b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Simulation/SimulationAnalyticsServiceTests.cs new file mode 100644 index 000000000..c2ae7c6a5 --- /dev/null +++ b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Simulation/SimulationAnalyticsServiceTests.cs @@ -0,0 +1,414 @@ +using System.Collections.Immutable; +using FluentAssertions; +using StellaOps.Policy.Engine.Simulation; +using StellaOps.Policy.Engine.Telemetry; +using Xunit; + +namespace StellaOps.Policy.Engine.Tests.Simulation; + +public sealed class SimulationAnalyticsServiceTests +{ + private readonly SimulationAnalyticsService _service = new(); + + [Fact] + public void ComputeRuleFiringCounts_EmptyTraces_ReturnsEmptyCounts() + { + // Arrange + var traces = Array.Empty(); + + // Act + var result = _service.ComputeRuleFiringCounts(traces, 10); + + // Assert + result.TotalEvaluations.Should().Be(10); + result.TotalRulesFired.Should().Be(0); + result.RulesByName.Should().BeEmpty(); + result.RulesByPriority.Should().BeEmpty(); + result.RulesByOutcome.Should().BeEmpty(); + result.TopRules.Should().BeEmpty(); + } + + [Fact] + public void ComputeRuleFiringCounts_WithFiredRules_CountsCorrectly() + { + // Arrange + var traces = new[] + { + CreateTrace("rule_a", 1, "block", expressionResult: true), + CreateTrace("rule_a", 1, "block", expressionResult: true), + CreateTrace("rule_b", 2, "allow", expressionResult: true), + CreateTrace("rule_c", 3, "warn", expressionResult: false), // Not fired + }; + + // Act + var result = _service.ComputeRuleFiringCounts(traces, 10); + + // Assert + result.TotalRulesFired.Should().Be(3); + result.RulesByName.Should().HaveCount(2); + result.RulesByName["rule_a"].FireCount.Should().Be(2); + result.RulesByName["rule_b"].FireCount.Should().Be(1); + result.RulesByPriority[1].Should().Be(2); + result.RulesByPriority[2].Should().Be(1); + result.RulesByOutcome["block"].Should().Be(2); + result.RulesByOutcome["allow"].Should().Be(1); + } + + [Fact] + public void ComputeRuleFiringCounts_TopRules_OrderedByFireCount() + { + // Arrange + var traces = new List(); + for (var i = 0; i < 15; i++) + { + traces.Add(CreateTrace("frequently_fired", 1, "block", expressionResult: true)); + } + for (var i = 0; i < 5; i++) + { + traces.Add(CreateTrace("sometimes_fired", 2, "warn", expressionResult: true)); + } + traces.Add(CreateTrace("rarely_fired", 3, "allow", expressionResult: true)); + + // Act + var result = _service.ComputeRuleFiringCounts(traces, 100); + + // Assert + result.TopRules.Should().HaveCount(3); + result.TopRules[0].RuleName.Should().Be("frequently_fired"); + result.TopRules[0].FireCount.Should().Be(15); + result.TopRules[1].RuleName.Should().Be("sometimes_fired"); + result.TopRules[1].FireCount.Should().Be(5); + result.TopRules[2].RuleName.Should().Be("rarely_fired"); + result.TopRules[2].FireCount.Should().Be(1); + } + + [Fact] + public void ComputeRuleFiringCounts_VexOverrides_CountedCorrectly() + { + // Arrange + var traces = new[] + { + CreateTrace("rule_a", 1, "allow", expressionResult: true, isVexOverride: true, vexVendor: "vendor_a", vexStatus: "not_affected"), + CreateTrace("rule_a", 1, "allow", expressionResult: true, isVexOverride: true, vexVendor: "vendor_a", vexStatus: "fixed"), + CreateTrace("rule_b", 2, "allow", expressionResult: true, isVexOverride: true, vexVendor: "vendor_b", vexStatus: "not_affected"), + CreateTrace("rule_c", 3, "block", expressionResult: true), + }; + + // Act + var result = _service.ComputeRuleFiringCounts(traces, 10); + + // Assert + result.VexOverrides.TotalOverrides.Should().Be(3); + result.VexOverrides.ByVendor["vendor_a"].Should().Be(2); + result.VexOverrides.ByVendor["vendor_b"].Should().Be(1); + result.VexOverrides.ByStatus["not_affected"].Should().Be(2); + result.VexOverrides.ByStatus["fixed"].Should().Be(1); + } + + [Fact] + public void ComputeHeatmap_RuleSeverityMatrix_BuildsCorrectly() + { + // Arrange + var traces = new[] + { + CreateTrace("rule_a", 1, "block", expressionResult: true, severity: "critical"), + CreateTrace("rule_a", 1, "block", expressionResult: true, severity: "critical"), + CreateTrace("rule_a", 1, "block", expressionResult: true, severity: "high"), + CreateTrace("rule_b", 2, "warn", expressionResult: true, severity: "medium"), + }; + var findings = CreateFindings(4); + + // Act + var result = _service.ComputeHeatmap(traces, findings, SimulationAnalyticsOptions.Default); + + // Assert + result.RuleSeverityMatrix.Should().NotBeEmpty(); + var criticalCell = result.RuleSeverityMatrix.FirstOrDefault(c => c.X == "rule_a" && c.Y == "critical"); + criticalCell.Should().NotBeNull(); + criticalCell!.Value.Should().Be(2); + } + + [Fact] + public void ComputeHeatmap_FindingRuleCoverage_CalculatesCorrectly() + { + // Arrange + var traces = new[] + { + CreateTrace("rule_a", 1, "block", expressionResult: true, componentPurl: "pkg:npm/lodash@4.0.0"), + CreateTrace("rule_b", 2, "allow", expressionResult: true, componentPurl: "pkg:npm/lodash@4.0.0"), + CreateTrace("rule_a", 1, "block", expressionResult: false, componentPurl: "pkg:npm/express@5.0.0"), + }; + var findings = new[] + { + new SimulationFinding("f1", "pkg:npm/lodash@4.0.0", "GHSA-123", new Dictionary()), + new SimulationFinding("f2", "pkg:npm/express@5.0.0", "GHSA-456", new Dictionary()), + new SimulationFinding("f3", "pkg:npm/axios@1.0.0", "GHSA-789", new Dictionary()), + }; + + // Act + var result = _service.ComputeHeatmap(traces, findings, SimulationAnalyticsOptions.Default); + + // Assert + result.FindingRuleCoverage.TotalFindings.Should().Be(3); + result.FindingRuleCoverage.FindingsMatched.Should().Be(1); + result.FindingRuleCoverage.CoveragePercentage.Should().BeApproximately(33.33, 0.1); + } + + [Fact] + public void ComputeSampledTraces_DeterministicOrdering_OrdersByFindingId() + { + // Arrange + var traces = new[] + { + CreateTrace("rule_a", 1, "block", expressionResult: true, componentPurl: "pkg:npm/z-package@1.0.0"), + CreateTrace("rule_a", 1, "allow", expressionResult: true, componentPurl: "pkg:npm/a-package@1.0.0"), + CreateTrace("rule_b", 2, "warn", expressionResult: true, componentPurl: "pkg:npm/m-package@1.0.0"), + }; + var findings = new[] + { + new SimulationFinding("finding-z", "pkg:npm/z-package@1.0.0", null, new Dictionary()), + new SimulationFinding("finding-a", "pkg:npm/a-package@1.0.0", null, new Dictionary()), + new SimulationFinding("finding-m", "pkg:npm/m-package@1.0.0", null, new Dictionary()), + }; + var options = new SimulationAnalyticsOptions { TraceSampleRate = 1.0, MaxSampledTraces = 100 }; + + // Act + var result = _service.ComputeSampledTraces(traces, findings, options); + + // Assert + result.Ordering.PrimaryKey.Should().Be("finding_id"); + result.Ordering.Direction.Should().Be("ascending"); + } + + [Fact] + public void ComputeSampledTraces_DeterminismHash_ConsistentForSameInput() + { + // Arrange + var traces = new[] + { + CreateTrace("rule_a", 1, "block", expressionResult: true, componentPurl: "pkg:npm/lodash@4.0.0"), + }; + var findings = new[] + { + new SimulationFinding("f1", "pkg:npm/lodash@4.0.0", "GHSA-123", new Dictionary()), + }; + var options = new SimulationAnalyticsOptions { TraceSampleRate = 1.0 }; + + // Act + var result1 = _service.ComputeSampledTraces(traces, findings, options); + var result2 = _service.ComputeSampledTraces(traces, findings, options); + + // Assert + result1.DeterminismHash.Should().Be(result2.DeterminismHash); + } + + [Fact] + public void ComputeSampledTraces_HighSeverity_AlwaysSampled() + { + // Arrange + var traces = new[] + { + CreateTrace("rule_a", 1, "block", expressionResult: true, componentPurl: "pkg:npm/critical@1.0.0", severity: "critical"), + }; + var findings = new[] + { + new SimulationFinding("f1", "pkg:npm/critical@1.0.0", null, new Dictionary()), + }; + var options = new SimulationAnalyticsOptions { TraceSampleRate = 0.0 }; // Zero base rate + + // Act + var result = _service.ComputeSampledTraces(traces, findings, options); + + // Assert + result.SampledCount.Should().BeGreaterThan(0); + result.Traces.Should().Contain(t => t.SampleReason == "high_severity"); + } + + [Fact] + public void ComputeDeltaSummary_OutcomeChanges_CalculatesCorrectly() + { + // Arrange + var baseResults = new[] + { + new SimulationFindingResult("f1", "pkg:a", null, "block", "critical", new[] { "rule_a" }), + new SimulationFindingResult("f2", "pkg:b", null, "warn", "medium", new[] { "rule_b" }), + new SimulationFindingResult("f3", "pkg:c", null, "allow", "low", new[] { "rule_c" }), + }; + var candidateResults = new[] + { + new SimulationFindingResult("f1", "pkg:a", null, "warn", "high", new[] { "rule_a" }), // Improved + new SimulationFindingResult("f2", "pkg:b", null, "block", "critical", new[] { "rule_b" }), // Regressed + new SimulationFindingResult("f3", "pkg:c", null, "allow", "low", new[] { "rule_c" }), // Unchanged + }; + + // Act + var result = _service.ComputeDeltaSummary("v1", "v2", baseResults, candidateResults); + + // Assert + result.OutcomeChanges.Unchanged.Should().Be(1); + result.OutcomeChanges.Improved.Should().Be(1); + result.OutcomeChanges.Regressed.Should().Be(1); + result.OutcomeChanges.Transitions.Should().HaveCount(2); + } + + [Fact] + public void ComputeDeltaSummary_SeverityChanges_TracksEscalationAndDeescalation() + { + // Arrange + var baseResults = new[] + { + new SimulationFindingResult("f1", "pkg:a", null, "block", "medium", Array.Empty()), + new SimulationFindingResult("f2", "pkg:b", null, "block", "high", Array.Empty()), + new SimulationFindingResult("f3", "pkg:c", null, "warn", "low", Array.Empty()), + }; + var candidateResults = new[] + { + new SimulationFindingResult("f1", "pkg:a", null, "block", "critical", Array.Empty()), // Escalated + new SimulationFindingResult("f2", "pkg:b", null, "block", "medium", Array.Empty()), // Deescalated + new SimulationFindingResult("f3", "pkg:c", null, "warn", "low", Array.Empty()), // Unchanged + }; + + // Act + var result = _service.ComputeDeltaSummary("v1", "v2", baseResults, candidateResults); + + // Assert + result.SeverityChanges.Unchanged.Should().Be(1); + result.SeverityChanges.Escalated.Should().Be(1); + result.SeverityChanges.Deescalated.Should().Be(1); + } + + [Fact] + public void ComputeDeltaSummary_RuleChanges_DetectsAddedAndRemovedRules() + { + // Arrange + var baseResults = new[] + { + new SimulationFindingResult("f1", "pkg:a", null, "block", "high", new[] { "rule_old", "rule_common" }), + }; + var candidateResults = new[] + { + new SimulationFindingResult("f1", "pkg:a", null, "block", "high", new[] { "rule_new", "rule_common" }), + }; + + // Act + var result = _service.ComputeDeltaSummary("v1", "v2", baseResults, candidateResults); + + // Assert + result.RuleChanges.RulesAdded.Should().Contain("rule_new"); + result.RuleChanges.RulesRemoved.Should().Contain("rule_old"); + } + + [Fact] + public void ComputeDeltaSummary_HighImpactFindings_IdentifiedCorrectly() + { + // Arrange + var baseResults = new[] + { + new SimulationFindingResult("f1", "pkg:critical", "CVE-2024-001", "allow", "low", Array.Empty()), + }; + var candidateResults = new[] + { + new SimulationFindingResult("f1", "pkg:critical", "CVE-2024-001", "block", "critical", Array.Empty()), + }; + + // Act + var result = _service.ComputeDeltaSummary("v1", "v2", baseResults, candidateResults); + + // Assert + result.HighImpactFindings.Should().NotBeEmpty(); + result.HighImpactFindings[0].FindingId.Should().Be("f1"); + result.HighImpactFindings[0].ImpactScore.Should().BeGreaterThan(0.5); + } + + [Fact] + public void ComputeDeltaSummary_DeterminismHash_ConsistentForSameInput() + { + // Arrange + var baseResults = new[] + { + new SimulationFindingResult("f1", "pkg:a", null, "block", "high", Array.Empty()), + }; + var candidateResults = new[] + { + new SimulationFindingResult("f1", "pkg:a", null, "warn", "medium", Array.Empty()), + }; + + // Act + var result1 = _service.ComputeDeltaSummary("v1", "v2", baseResults, candidateResults); + var result2 = _service.ComputeDeltaSummary("v1", "v2", baseResults, candidateResults); + + // Assert + result1.DeterminismHash.Should().Be(result2.DeterminismHash); + } + + [Fact] + public void ComputeAnalytics_FullAnalysis_ReturnsAllComponents() + { + // Arrange + var traces = new[] + { + CreateTrace("rule_a", 1, "block", expressionResult: true, componentPurl: "pkg:npm/lodash@4.0.0", severity: "high"), + CreateTrace("rule_b", 2, "allow", expressionResult: true, componentPurl: "pkg:npm/express@5.0.0", severity: "low"), + }; + var findings = new[] + { + new SimulationFinding("f1", "pkg:npm/lodash@4.0.0", "GHSA-123", new Dictionary()), + new SimulationFinding("f2", "pkg:npm/express@5.0.0", "GHSA-456", new Dictionary()), + }; + + // Act + var result = _service.ComputeAnalytics("policy-v1", traces, findings); + + // Assert + result.RuleFiringCounts.Should().NotBeNull(); + result.Heatmap.Should().NotBeNull(); + result.SampledTraces.Should().NotBeNull(); + result.DeltaSummary.Should().BeNull(); // No delta for single policy analysis + } + + private static RuleHitTrace CreateTrace( + string ruleName, + int priority, + string outcome, + bool expressionResult, + string? severity = null, + bool isVexOverride = false, + string? vexVendor = null, + string? vexStatus = null, + string? componentPurl = null) + { + return new RuleHitTrace + { + TraceId = Guid.NewGuid().ToString(), + SpanId = Guid.NewGuid().ToString("N")[..16], + TenantId = "test-tenant", + PolicyId = "test-policy", + RunId = "test-run", + RuleName = ruleName, + RulePriority = priority, + Outcome = outcome, + AssignedSeverity = severity, + ComponentPurl = componentPurl, + ExpressionResult = expressionResult, + EvaluationTimestamp = DateTimeOffset.UtcNow, + RecordedAt = DateTimeOffset.UtcNow, + EvaluationMicroseconds = 100, + IsVexOverride = isVexOverride, + VexVendor = vexVendor, + VexStatus = vexStatus, + IsSampled = true, + Attributes = ImmutableDictionary.Empty + }; + } + + private static SimulationFinding[] CreateFindings(int count) + { + return Enumerable.Range(1, count) + .Select(i => new SimulationFinding( + $"finding-{i}", + $"pkg:npm/package-{i}@1.0.0", + $"GHSA-{i:D3}", + new Dictionary())) + .ToArray(); + } +} diff --git a/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Telemetry/TelemetryTests.cs b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Telemetry/TelemetryTests.cs new file mode 100644 index 000000000..95c3aba37 --- /dev/null +++ b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Telemetry/TelemetryTests.cs @@ -0,0 +1,301 @@ +using System.Collections.Immutable; +using FluentAssertions; +using Microsoft.Extensions.Time.Testing; +using StellaOps.Policy.Engine.Telemetry; +using Xunit; + +namespace StellaOps.Policy.Engine.Tests.Telemetry; + +public sealed class TelemetryTests +{ + #region RuleHitTrace Tests + + [Fact] + public void RuleHitTrace_GetOrCreateTraceId_ReturnsValidId() + { + var traceId = RuleHitTrace.GetOrCreateTraceId(); + + traceId.Should().NotBeNullOrEmpty(); + traceId.Should().HaveLength(32); // 16 bytes = 32 hex chars + } + + [Fact] + public void RuleHitTrace_GetOrCreateSpanId_ReturnsValidId() + { + var spanId = RuleHitTrace.GetOrCreateSpanId(); + + spanId.Should().NotBeNullOrEmpty(); + spanId.Should().HaveLength(16); // 8 bytes = 16 hex chars + } + + [Fact] + public void RuleHitTrace_GetOrCreateTraceId_GeneratesUniqueIds() + { + var ids = Enumerable.Range(0, 100) + .Select(_ => RuleHitTrace.GetOrCreateTraceId()) + .ToList(); + + ids.Distinct().Should().HaveCount(100); + } + + #endregion + + #region RuleHitTraceFactory Tests + + [Fact] + public void Create_ProducesValidTrace() + { + var timestamp = DateTimeOffset.UtcNow; + var timeProvider = new FakeTimeProvider(timestamp); + + var trace = RuleHitTraceFactory.Create( + tenantId: "TENANT-1", + policyId: "policy-1", + policyVersion: 2, + runId: "run-123", + ruleName: "block-critical", + rulePriority: 10, + outcome: "deny", + evaluationTimestamp: timestamp, + timeProvider: timeProvider, + ruleCategory: "severity", + assignedSeverity: "Critical", + componentPurl: "pkg:npm/lodash@4.17.21", + advisoryId: "GHSA-test-001", + vulnerabilityId: "CVE-2021-12345"); + + trace.TenantId.Should().Be("tenant-1"); // Normalized to lowercase + trace.PolicyId.Should().Be("policy-1"); + trace.PolicyVersion.Should().Be(2); + trace.RunId.Should().Be("run-123"); + trace.RuleName.Should().Be("block-critical"); + trace.RulePriority.Should().Be(10); + trace.Outcome.Should().Be("deny"); + trace.RuleCategory.Should().Be("severity"); + trace.AssignedSeverity.Should().Be("Critical"); + trace.ComponentPurl.Should().Be("pkg:npm/lodash@4.17.21"); + trace.EvaluationTimestamp.Should().Be(timestamp); + trace.RecordedAt.Should().Be(timestamp); + trace.TraceId.Should().NotBeNullOrEmpty(); + trace.SpanId.Should().NotBeNullOrEmpty(); + } + + [Fact] + public void Create_TracksVexOverride() + { + var timestamp = DateTimeOffset.UtcNow; + + var trace = RuleHitTraceFactory.Create( + tenantId: "tenant-1", + policyId: "policy-1", + policyVersion: 1, + runId: "run-123", + ruleName: "vex-override", + rulePriority: 1, + outcome: "suppress", + evaluationTimestamp: timestamp, + vexStatus: "not_affected", + vexJustification: "vulnerable_code_not_in_execute_path", + vexVendor: "vendor-1", + isVexOverride: true); + + trace.VexStatus.Should().Be("not_affected"); + trace.VexJustification.Should().Be("vulnerable_code_not_in_execute_path"); + trace.VexVendor.Should().Be("vendor-1"); + trace.IsVexOverride.Should().BeTrue(); + } + + [Fact] + public void Create_TracksReachability() + { + var timestamp = DateTimeOffset.UtcNow; + + var trace = RuleHitTraceFactory.Create( + tenantId: "tenant-1", + policyId: "policy-1", + policyVersion: 1, + runId: "run-123", + ruleName: "reachability-rule", + rulePriority: 5, + outcome: "allow", + evaluationTimestamp: timestamp, + reachabilityState: "reachable", + reachabilityConfidence: 0.95); + + trace.ReachabilityState.Should().Be("reachable"); + trace.ReachabilityConfidence.Should().Be(0.95); + } + + [Fact] + public void Create_IncludesCustomAttributes() + { + var timestamp = DateTimeOffset.UtcNow; + var attributes = ImmutableDictionary.Empty + .Add("custom_key", "custom_value") + .Add("another_key", "another_value"); + + var trace = RuleHitTraceFactory.Create( + tenantId: "tenant-1", + policyId: "policy-1", + policyVersion: 1, + runId: "run-123", + ruleName: "test-rule", + rulePriority: 1, + outcome: "allow", + evaluationTimestamp: timestamp, + attributes: attributes); + + trace.Attributes.Should().ContainKey("custom_key"); + trace.Attributes["custom_key"].Should().Be("custom_value"); + } + + [Fact] + public void ToJson_ProducesValidJson() + { + var trace = RuleHitTraceFactory.Create( + tenantId: "tenant-1", + policyId: "policy-1", + policyVersion: 1, + runId: "run-123", + ruleName: "test-rule", + rulePriority: 1, + outcome: "allow", + evaluationTimestamp: DateTimeOffset.UtcNow); + + var json = RuleHitTraceFactory.ToJson(trace); + + json.Should().Contain("\"tenant_id\":\"tenant-1\""); + json.Should().Contain("\"policy_id\":\"policy-1\""); + json.Should().Contain("\"rule_name\":\"test-rule\""); + json.Should().NotContain("\n"); // Single line + } + + [Fact] + public void ToNdjson_ProducesMultipleLines() + { + var timestamp = DateTimeOffset.UtcNow; + var traces = new[] + { + RuleHitTraceFactory.Create("tenant-1", "policy-1", 1, "run-1", "rule-1", 1, "allow", timestamp), + RuleHitTraceFactory.Create("tenant-1", "policy-1", 1, "run-1", "rule-2", 2, "deny", timestamp), + RuleHitTraceFactory.Create("tenant-1", "policy-1", 1, "run-1", "rule-3", 3, "suppress", timestamp) + }; + + var ndjson = RuleHitTraceFactory.ToNdjson(traces); + var lines = ndjson.Split('\n', StringSplitOptions.RemoveEmptyEntries); + + lines.Should().HaveCount(3); + lines[0].Should().Contain("rule-1"); + lines[1].Should().Contain("rule-2"); + lines[2].Should().Contain("rule-3"); + } + + #endregion + + #region RuleHitStatistics Tests + + [Fact] + public void CreateStatistics_AggregatesCorrectly() + { + var timestamp = DateTimeOffset.UtcNow; + var traces = new[] + { + RuleHitTraceFactory.Create("tenant-1", "policy-1", 1, "run-1", "rule-1", 1, "allow", timestamp, + ruleCategory: "severity"), + RuleHitTraceFactory.Create("tenant-1", "policy-1", 1, "run-1", "rule-2", 2, "deny", timestamp, + ruleCategory: "severity"), + RuleHitTraceFactory.Create("tenant-1", "policy-1", 1, "run-1", "rule-3", 3, "suppress", timestamp, + ruleCategory: "vex", isVexOverride: true, vexVendor: "vendor-1", vexStatus: "not_affected"), + RuleHitTraceFactory.Create("tenant-1", "policy-1", 1, "run-1", "rule-4", 4, "suppress", timestamp, + ruleCategory: "vex", isVexOverride: true, vexVendor: "vendor-2", vexStatus: "fixed") + }; + + var stats = RuleHitTraceFactory.CreateStatistics( + runId: "run-1", + policyId: "policy-1", + traces: traces, + totalRulesEvaluated: 10, + totalEvaluationMs: 50); + + stats.RunId.Should().Be("run-1"); + stats.PolicyId.Should().Be("policy-1"); + stats.TotalRulesEvaluated.Should().Be(10); + stats.TotalRulesFired.Should().Be(4); + stats.TotalVexOverrides.Should().Be(2); + + stats.RulesFiredByCategory.Should().ContainKey("severity"); + stats.RulesFiredByCategory["severity"].Should().Be(2); + stats.RulesFiredByCategory["vex"].Should().Be(2); + + stats.RulesFiredByOutcome.Should().ContainKey("allow"); + stats.RulesFiredByOutcome["allow"].Should().Be(1); + stats.RulesFiredByOutcome["deny"].Should().Be(1); + stats.RulesFiredByOutcome["suppress"].Should().Be(2); + + stats.VexOverridesByVendor.Should().HaveCount(2); + stats.VexOverridesByStatus.Should().ContainKey("not_affected"); + stats.VexOverridesByStatus.Should().ContainKey("fixed"); + } + + [Fact] + public void CreateStatistics_ComputesAverageEvaluationTime() + { + var traces = Array.Empty(); + var stats = RuleHitTraceFactory.CreateStatistics( + runId: "run-1", + policyId: "policy-1", + traces: traces, + totalRulesEvaluated: 100, + totalEvaluationMs: 50); + + stats.TotalEvaluationMs.Should().Be(50); + stats.AverageRuleEvaluationMicroseconds.Should().Be(500); // 50ms * 1000 / 100 rules + } + + [Fact] + public void CreateStatistics_HandlesZeroRules() + { + var traces = Array.Empty(); + var stats = RuleHitTraceFactory.CreateStatistics( + runId: "run-1", + policyId: "policy-1", + traces: traces, + totalRulesEvaluated: 0, + totalEvaluationMs: 0); + + stats.TotalRulesEvaluated.Should().Be(0); + stats.AverageRuleEvaluationMicroseconds.Should().Be(0); + } + + [Fact] + public void CreateStatistics_GeneratesTopRules() + { + var timestamp = DateTimeOffset.UtcNow; + var traces = Enumerable.Range(0, 20) + .SelectMany(i => Enumerable.Range(0, i + 1).Select(_ => + RuleHitTraceFactory.Create("tenant-1", "policy-1", 1, "run-1", $"rule-{i}", i, "allow", timestamp))) + .ToArray(); + + var stats = RuleHitTraceFactory.CreateStatistics("run-1", "policy-1", traces, 100, 50); + + stats.TopRulesByHitCount.Should().HaveCount(10); + stats.TopRulesByHitCount[0].RuleName.Should().Be("rule-19"); // Highest count + stats.TopRulesByHitCount[0].HitCount.Should().Be(20); + } + + #endregion + + #region RuleHitCount Tests + + [Fact] + public void RuleHitCount_RecordWorks() + { + var hitCount = new RuleHitCount("severity-rule", 42, "deny"); + + hitCount.RuleName.Should().Be("severity-rule"); + hitCount.HitCount.Should().Be(42); + hitCount.Outcome.Should().Be("deny"); + } + + #endregion +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Framework/PythonFrameworkDetector.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Framework/PythonFrameworkDetector.cs new file mode 100644 index 000000000..2ed0457d3 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Framework/PythonFrameworkDetector.cs @@ -0,0 +1,423 @@ +using System.Collections.Frozen; +using System.Collections.Immutable; +using System.Text.RegularExpressions; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.VirtualFileSystem; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal.Framework; + +/// +/// Detects framework usage hints from Python source code. +/// +internal sealed partial class PythonFrameworkDetector +{ + // File patterns that strongly indicate frameworks + private static readonly FrozenDictionary FilePatterns = + new Dictionary(StringComparer.OrdinalIgnoreCase) + { + // Django + ["manage.py"] = (PythonFrameworkKind.Django, PythonFrameworkConfidence.High), + ["settings.py"] = (PythonFrameworkKind.Django, PythonFrameworkConfidence.Medium), + ["urls.py"] = (PythonFrameworkKind.Django, PythonFrameworkConfidence.Medium), + ["wsgi.py"] = (PythonFrameworkKind.Django, PythonFrameworkConfidence.Medium), + ["asgi.py"] = (PythonFrameworkKind.Django, PythonFrameworkConfidence.Medium), + + // Celery + ["celery.py"] = (PythonFrameworkKind.Celery, PythonFrameworkConfidence.High), + ["tasks.py"] = (PythonFrameworkKind.Celery, PythonFrameworkConfidence.Low), + + // Gunicorn + ["gunicorn.conf.py"] = (PythonFrameworkKind.Gunicorn, PythonFrameworkConfidence.Definitive), + ["gunicorn_config.py"] = (PythonFrameworkKind.Gunicorn, PythonFrameworkConfidence.Definitive), + + // uWSGI + ["uwsgi.ini"] = (PythonFrameworkKind.Uwsgi, PythonFrameworkConfidence.Definitive), + + // Pytest + ["conftest.py"] = (PythonFrameworkKind.Pytest, PythonFrameworkConfidence.High), + ["pytest.ini"] = (PythonFrameworkKind.Pytest, PythonFrameworkConfidence.Definitive), + + // Jupyter + ["*.ipynb"] = (PythonFrameworkKind.Jupyter, PythonFrameworkConfidence.Definitive), + }.ToFrozenDictionary(); + + // Import patterns that indicate frameworks + private static readonly FrozenDictionary ImportPatterns = + new Dictionary(StringComparer.Ordinal) + { + // Django + ["django"] = (PythonFrameworkKind.Django, PythonFrameworkConfidence.High), + ["django.conf"] = (PythonFrameworkKind.Django, PythonFrameworkConfidence.High), + ["django.urls"] = (PythonFrameworkKind.Django, PythonFrameworkConfidence.High), + ["django.views"] = (PythonFrameworkKind.Django, PythonFrameworkConfidence.High), + ["django.db"] = (PythonFrameworkKind.Django, PythonFrameworkConfidence.High), + + // Flask + ["flask"] = (PythonFrameworkKind.Flask, PythonFrameworkConfidence.High), + ["flask_restful"] = (PythonFrameworkKind.Flask, PythonFrameworkConfidence.High), + ["flask_sqlalchemy"] = (PythonFrameworkKind.Flask, PythonFrameworkConfidence.High), + + // FastAPI + ["fastapi"] = (PythonFrameworkKind.FastAPI, PythonFrameworkConfidence.High), + ["starlette"] = (PythonFrameworkKind.Starlette, PythonFrameworkConfidence.High), + + // Celery + ["celery"] = (PythonFrameworkKind.Celery, PythonFrameworkConfidence.High), + + // RQ + ["rq"] = (PythonFrameworkKind.RQ, PythonFrameworkConfidence.High), + + // Click + ["click"] = (PythonFrameworkKind.Click, PythonFrameworkConfidence.High), + + // Typer + ["typer"] = (PythonFrameworkKind.Typer, PythonFrameworkConfidence.High), + + // Pytest + ["pytest"] = (PythonFrameworkKind.Pytest, PythonFrameworkConfidence.High), + + // Streamlit + ["streamlit"] = (PythonFrameworkKind.Streamlit, PythonFrameworkConfidence.Definitive), + + // Gradio + ["gradio"] = (PythonFrameworkKind.Gradio, PythonFrameworkConfidence.Definitive), + + // Pydantic Settings + ["pydantic_settings"] = (PythonFrameworkKind.PydanticSettings, PythonFrameworkConfidence.Definitive), + }.ToFrozenDictionary(); + + // Django patterns + [GeneratedRegex(@"INSTALLED_APPS\s*=\s*\[", RegexOptions.Compiled)] + private static partial Regex DjangoInstalledAppsPattern(); + + [GeneratedRegex(@"MIDDLEWARE\s*=\s*\[", RegexOptions.Compiled)] + private static partial Regex DjangoMiddlewarePattern(); + + [GeneratedRegex(@"ROOT_URLCONF\s*=", RegexOptions.Compiled)] + private static partial Regex DjangoRootUrlConfPattern(); + + [GeneratedRegex(@"os\.environ\.setdefault\s*\(\s*[""']DJANGO_SETTINGS_MODULE[""']", RegexOptions.Compiled)] + private static partial Regex DjangoSettingsModulePattern(); + + // Flask patterns + [GeneratedRegex(@"Flask\s*\(\s*__name__", RegexOptions.Compiled)] + private static partial Regex FlaskAppPattern(); + + [GeneratedRegex(@"Blueprint\s*\(", RegexOptions.Compiled)] + private static partial Regex FlaskBlueprintPattern(); + + // FastAPI patterns + [GeneratedRegex(@"FastAPI\s*\(", RegexOptions.Compiled)] + private static partial Regex FastAPIAppPattern(); + + [GeneratedRegex(@"APIRouter\s*\(", RegexOptions.Compiled)] + private static partial Regex FastAPIRouterPattern(); + + // Celery patterns + [GeneratedRegex(@"Celery\s*\(", RegexOptions.Compiled)] + private static partial Regex CeleryAppPattern(); + + [GeneratedRegex(@"@\s*(?:app\.task|celery\.task|shared_task)", RegexOptions.Compiled)] + private static partial Regex CeleryTaskPattern(); + + // AWS Lambda patterns + [GeneratedRegex(@"def\s+(lambda_handler|handler)\s*\(\s*event\s*,\s*context\s*\)", RegexOptions.Compiled)] + private static partial Regex LambdaHandlerPattern(); + + [GeneratedRegex(@"def\s+\w+\s*\(\s*event\s*:\s*dict\s*,\s*context\s*:\s*LambdaContext", RegexOptions.Compiled)] + private static partial Regex LambdaTypedHandlerPattern(); + + // Click patterns + [GeneratedRegex(@"@\s*click\.command", RegexOptions.Compiled)] + private static partial Regex ClickCommandPattern(); + + [GeneratedRegex(@"@\s*click\.group", RegexOptions.Compiled)] + private static partial Regex ClickGroupPattern(); + + // Typer patterns + [GeneratedRegex(@"typer\.Typer\s*\(", RegexOptions.Compiled)] + private static partial Regex TyperAppPattern(); + + [GeneratedRegex(@"@\s*app\.command", RegexOptions.Compiled)] + private static partial Regex TyperCommandPattern(); + + // Logging patterns + [GeneratedRegex(@"logging\.config\.dictConfig", RegexOptions.Compiled)] + private static partial Regex LoggingDictConfigPattern(); + + [GeneratedRegex(@"logging\.config\.fileConfig", RegexOptions.Compiled)] + private static partial Regex LoggingFileConfigPattern(); + + [GeneratedRegex(@"LOGGING\s*=\s*\{", RegexOptions.Compiled)] + private static partial Regex DjangoLoggingPattern(); + + // Gunicorn patterns + [GeneratedRegex(@"bind\s*=\s*[""']", RegexOptions.Compiled)] + private static partial Regex GunicornBindPattern(); + + [GeneratedRegex(@"workers\s*=", RegexOptions.Compiled)] + private static partial Regex GunicornWorkersPattern(); + + /// + /// Detects framework hints from Python source code. + /// + public async Task> DetectAsync( + PythonVirtualFileSystem vfs, + CancellationToken cancellationToken = default) + { + var hints = new List(); + + // First pass: check file patterns + foreach (var file in vfs.Files) + { + cancellationToken.ThrowIfCancellationRequested(); + + var fileName = Path.GetFileName(file.VirtualPath); + if (FilePatterns.TryGetValue(fileName, out var fileHint)) + { + hints.Add(new PythonFrameworkHint( + Kind: fileHint.Kind, + SourceFile: file.VirtualPath, + LineNumber: null, + Evidence: $"file pattern: {fileName}", + Confidence: fileHint.Confidence)); + } + + // Special case for Jupyter notebooks + if (file.VirtualPath.EndsWith(".ipynb", StringComparison.OrdinalIgnoreCase)) + { + hints.Add(new PythonFrameworkHint( + Kind: PythonFrameworkKind.Jupyter, + SourceFile: file.VirtualPath, + LineNumber: null, + Evidence: "Jupyter notebook file", + Confidence: PythonFrameworkConfidence.Definitive)); + } + } + + // Second pass: scan Python files for patterns + var pythonFiles = vfs.Files + .Where(f => f.VirtualPath.EndsWith(".py", StringComparison.OrdinalIgnoreCase)) + .ToList(); + + foreach (var file in pythonFiles) + { + cancellationToken.ThrowIfCancellationRequested(); + + var fileHints = await DetectInFileAsync(vfs, file, cancellationToken).ConfigureAwait(false); + hints.AddRange(fileHints); + } + + // Deduplicate and prioritize by confidence + return hints + .GroupBy(h => (h.Kind, h.SourceFile)) + .Select(g => g.OrderByDescending(h => h.Confidence).First()) + .ToImmutableArray(); + } + + private async Task> DetectInFileAsync( + PythonVirtualFileSystem vfs, + PythonVirtualFile file, + CancellationToken cancellationToken) + { + var hints = new List(); + + try + { + using var stream = await vfs.OpenReadAsync(file.VirtualPath, cancellationToken).ConfigureAwait(false); + if (stream is null) + { + return hints; + } + + using var reader = new StreamReader(stream); + var content = await reader.ReadToEndAsync(cancellationToken).ConfigureAwait(false); + var lines = content.Split('\n'); + + for (var lineNum = 0; lineNum < lines.Length; lineNum++) + { + cancellationToken.ThrowIfCancellationRequested(); + + var line = lines[lineNum]; + var trimmed = line.TrimStart(); + + // Skip comments + if (trimmed.StartsWith('#')) + { + continue; + } + + // Check for imports + if (trimmed.StartsWith("import ", StringComparison.Ordinal) || + trimmed.StartsWith("from ", StringComparison.Ordinal)) + { + var importHints = DetectImportPatterns(trimmed, file.VirtualPath, lineNum + 1); + hints.AddRange(importHints); + } + + // Django patterns + if (DjangoInstalledAppsPattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.Django, file.VirtualPath, lineNum + 1, + "INSTALLED_APPS configuration", PythonFrameworkConfidence.Definitive)); + } + + if (DjangoSettingsModulePattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.Django, file.VirtualPath, lineNum + 1, + "DJANGO_SETTINGS_MODULE", PythonFrameworkConfidence.Definitive)); + } + + // Flask patterns + if (FlaskAppPattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.Flask, file.VirtualPath, lineNum + 1, + "Flask(__name__)", PythonFrameworkConfidence.Definitive)); + } + + if (FlaskBlueprintPattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.Flask, file.VirtualPath, lineNum + 1, + "Blueprint()", PythonFrameworkConfidence.High)); + } + + // FastAPI patterns + if (FastAPIAppPattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.FastAPI, file.VirtualPath, lineNum + 1, + "FastAPI()", PythonFrameworkConfidence.Definitive)); + } + + if (FastAPIRouterPattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.FastAPI, file.VirtualPath, lineNum + 1, + "APIRouter()", PythonFrameworkConfidence.High)); + } + + // Celery patterns + if (CeleryAppPattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.Celery, file.VirtualPath, lineNum + 1, + "Celery()", PythonFrameworkConfidence.Definitive)); + } + + if (CeleryTaskPattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.Celery, file.VirtualPath, lineNum + 1, + "@app.task decorator", PythonFrameworkConfidence.High)); + } + + // AWS Lambda patterns + if (LambdaHandlerPattern().IsMatch(line) || LambdaTypedHandlerPattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.AwsLambda, file.VirtualPath, lineNum + 1, + "Lambda handler function", PythonFrameworkConfidence.High)); + } + + // Click patterns + if (ClickCommandPattern().IsMatch(line) || ClickGroupPattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.Click, file.VirtualPath, lineNum + 1, + "@click.command/group", PythonFrameworkConfidence.High)); + } + + // Typer patterns + if (TyperAppPattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.Typer, file.VirtualPath, lineNum + 1, + "typer.Typer()", PythonFrameworkConfidence.Definitive)); + } + + if (TyperCommandPattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.Typer, file.VirtualPath, lineNum + 1, + "@app.command", PythonFrameworkConfidence.High)); + } + + // Logging patterns + if (LoggingDictConfigPattern().IsMatch(line) || LoggingFileConfigPattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.LoggingConfig, file.VirtualPath, lineNum + 1, + "logging.config", PythonFrameworkConfidence.High)); + } + + if (DjangoLoggingPattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.LoggingConfig, file.VirtualPath, lineNum + 1, + "Django LOGGING dict", PythonFrameworkConfidence.High)); + } + + // Gunicorn patterns (in config files) + if (file.VirtualPath.Contains("gunicorn", StringComparison.OrdinalIgnoreCase)) + { + if (GunicornBindPattern().IsMatch(line) || GunicornWorkersPattern().IsMatch(line)) + { + hints.Add(CreateHint(PythonFrameworkKind.Gunicorn, file.VirtualPath, lineNum + 1, + "Gunicorn configuration", PythonFrameworkConfidence.Definitive)); + } + } + } + } + catch (IOException) + { + // Skip unreadable files + } + + return hints; + } + + private static IEnumerable DetectImportPatterns(string line, string sourceFile, int lineNumber) + { + var trimmed = line.Trim(); + string? moduleName = null; + + if (trimmed.StartsWith("import ", StringComparison.Ordinal)) + { + var parts = trimmed[7..].Split(','); + foreach (var part in parts) + { + moduleName = part.Trim().Split(new[] { " as ", " " }, StringSplitOptions.RemoveEmptyEntries)[0]; + if (ImportPatterns.TryGetValue(moduleName, out var hint)) + { + yield return CreateHint(hint.Kind, sourceFile, lineNumber, + $"import {moduleName}", hint.Confidence); + } + } + } + else if (trimmed.StartsWith("from ", StringComparison.Ordinal)) + { + var parts = trimmed[5..].Split(new[] { " import " }, StringSplitOptions.RemoveEmptyEntries); + if (parts.Length > 0) + { + moduleName = parts[0].Trim(); + // Check base module + var baseModule = moduleName.Split('.')[0]; + if (ImportPatterns.TryGetValue(baseModule, out var hint)) + { + yield return CreateHint(hint.Kind, sourceFile, lineNumber, + $"from {moduleName}", hint.Confidence); + } + // Check full module path + if (ImportPatterns.TryGetValue(moduleName, out hint)) + { + yield return CreateHint(hint.Kind, sourceFile, lineNumber, + $"from {moduleName}", hint.Confidence); + } + } + } + } + + private static PythonFrameworkHint CreateHint( + PythonFrameworkKind kind, + string sourceFile, + int lineNumber, + string evidence, + PythonFrameworkConfidence confidence) + { + return new PythonFrameworkHint( + Kind: kind, + SourceFile: sourceFile, + LineNumber: lineNumber, + Evidence: evidence, + Confidence: confidence); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Framework/PythonFrameworkHint.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Framework/PythonFrameworkHint.cs new file mode 100644 index 000000000..b38e61c43 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Framework/PythonFrameworkHint.cs @@ -0,0 +1,151 @@ +using System.Collections.Immutable; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal.Framework; + +/// +/// Represents a detected framework or configuration hint in a Python project. +/// These are hints/suggestions, not definitive detections. +/// +/// The type of framework or configuration. +/// The file where this hint was detected. +/// The line number (if available). +/// The code pattern that indicated this hint. +/// Confidence level for this detection. +/// Additional metadata about the detection. +internal sealed record PythonFrameworkHint( + PythonFrameworkKind Kind, + string SourceFile, + int? LineNumber, + string Evidence, + PythonFrameworkConfidence Confidence, + ImmutableDictionary? Metadata = null) +{ + /// + /// Gets whether this is a web framework. + /// + public bool IsWebFramework => Kind is + PythonFrameworkKind.Django or + PythonFrameworkKind.Flask or + PythonFrameworkKind.FastAPI or + PythonFrameworkKind.Starlette or + PythonFrameworkKind.Tornado or + PythonFrameworkKind.Bottle or + PythonFrameworkKind.Pyramid; + + /// + /// Gets whether this is a task queue. + /// + public bool IsTaskQueue => Kind is + PythonFrameworkKind.Celery or + PythonFrameworkKind.RQ or + PythonFrameworkKind.Huey or + PythonFrameworkKind.Dramatiq; + + /// + /// Gets whether this is a serverless runtime. + /// + public bool IsServerless => Kind is + PythonFrameworkKind.AwsLambda or + PythonFrameworkKind.AzureFunctions or + PythonFrameworkKind.GoogleCloudFunctions; + + /// + /// Gets whether this is a CLI framework. + /// + public bool IsCliFramework => Kind is + PythonFrameworkKind.Click or + PythonFrameworkKind.Typer or + PythonFrameworkKind.Argparse; + + /// + /// Generates metadata entries for this hint. + /// + public IEnumerable> ToMetadata(string prefix) + { + yield return new($"{prefix}.kind", Kind.ToString()); + yield return new($"{prefix}.file", SourceFile); + + if (LineNumber.HasValue) + { + yield return new($"{prefix}.line", LineNumber.Value.ToString()); + } + + yield return new($"{prefix}.evidence", Evidence); + yield return new($"{prefix}.confidence", Confidence.ToString()); + + if (IsWebFramework) + { + yield return new($"{prefix}.category", "WebFramework"); + } + else if (IsTaskQueue) + { + yield return new($"{prefix}.category", "TaskQueue"); + } + else if (IsServerless) + { + yield return new($"{prefix}.category", "Serverless"); + } + else if (IsCliFramework) + { + yield return new($"{prefix}.category", "CLI"); + } + + if (Metadata is not null) + { + foreach (var (key, value) in Metadata) + { + yield return new($"{prefix}.{key}", value); + } + } + } +} + +/// +/// Represents an AWS Lambda handler configuration. +/// +/// The handler path (module.function). +/// The module file path. +/// The handler function name. +/// The detected Python runtime (if available). +internal sealed record PythonLambdaHandler( + string HandlerPath, + string ModulePath, + string FunctionName, + string? Runtime = null); + +/// +/// Represents a Django project configuration. +/// +/// The settings module path. +/// List of installed apps. +/// List of middleware classes. +/// The root URL configuration module. +internal sealed record PythonDjangoConfig( + string SettingsModule, + ImmutableArray InstalledApps, + ImmutableArray Middlewares, + string? RootUrlConf = null); + +/// +/// Represents a Flask application configuration. +/// +/// The Flask app variable name. +/// The module containing the app. +/// Registered blueprints. +internal sealed record PythonFlaskConfig( + string AppVariable, + string ModulePath, + ImmutableArray Blueprints); + +/// +/// Represents a Celery configuration. +/// +/// The Celery app variable name. +/// The module containing the app. +/// The broker URL pattern (if detected). +/// Discovered task modules. +internal sealed record PythonCeleryConfig( + string AppVariable, + string ModulePath, + string? BrokerUrl, + ImmutableArray Tasks); diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Framework/PythonFrameworkKind.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Framework/PythonFrameworkKind.cs new file mode 100644 index 000000000..2dbb9ed12 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Framework/PythonFrameworkKind.cs @@ -0,0 +1,186 @@ +namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal.Framework; + +/// +/// Types of Python frameworks and configurations detected. +/// +internal enum PythonFrameworkKind +{ + /// + /// Unknown framework. + /// + Unknown, + + // Web Frameworks + /// + /// Django web framework. + /// + Django, + + /// + /// Flask web framework. + /// + Flask, + + /// + /// FastAPI async web framework. + /// + FastAPI, + + /// + /// Starlette ASGI framework. + /// + Starlette, + + /// + /// Tornado async web framework. + /// + Tornado, + + /// + /// Bottle micro framework. + /// + Bottle, + + /// + /// Pyramid web framework. + /// + Pyramid, + + // Task Queues + /// + /// Celery distributed task queue. + /// + Celery, + + /// + /// RQ (Redis Queue) task queue. + /// + RQ, + + /// + /// Huey task queue. + /// + Huey, + + /// + /// Dramatiq task queue. + /// + Dramatiq, + + // Serverless + /// + /// AWS Lambda handler. + /// + AwsLambda, + + /// + /// Azure Functions. + /// + AzureFunctions, + + /// + /// Google Cloud Functions. + /// + GoogleCloudFunctions, + + // Application Servers + /// + /// Gunicorn WSGI server. + /// + Gunicorn, + + /// + /// uWSGI server. + /// + Uwsgi, + + /// + /// Uvicorn ASGI server. + /// + Uvicorn, + + /// + /// Hypercorn ASGI server. + /// + Hypercorn, + + // CLI Frameworks + /// + /// Click CLI framework. + /// + Click, + + /// + /// Typer CLI framework (Click-based). + /// + Typer, + + /// + /// Argparse standard library CLI. + /// + Argparse, + + // Testing Frameworks + /// + /// Pytest testing framework. + /// + Pytest, + + /// + /// Unittest standard library. + /// + Unittest, + + // Data/ML Frameworks + /// + /// Jupyter notebook. + /// + Jupyter, + + /// + /// Streamlit data app. + /// + Streamlit, + + /// + /// Gradio ML demo. + /// + Gradio, + + // Configuration + /// + /// Python logging configuration. + /// + LoggingConfig, + + /// + /// Pydantic settings configuration. + /// + PydanticSettings +} + +/// +/// Confidence level for framework detection. +/// +internal enum PythonFrameworkConfidence +{ + /// + /// Low confidence - heuristic match based on file patterns. + /// + Low = 0, + + /// + /// Medium confidence - import detected but usage unclear. + /// + Medium = 1, + + /// + /// High confidence - clear usage pattern detected. + /// + High = 2, + + /// + /// Definitive - explicit configuration or initialization found. + /// + Definitive = 3 +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Framework/PythonProjectConfigParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Framework/PythonProjectConfigParser.cs new file mode 100644 index 000000000..581aeeda2 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Framework/PythonProjectConfigParser.cs @@ -0,0 +1,327 @@ +using System.Collections.Immutable; +using System.Text.RegularExpressions; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.VirtualFileSystem; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal.Framework; + +/// +/// Parses Python project configuration files (pyproject.toml, setup.cfg, setup.py). +/// +internal sealed partial class PythonProjectConfigParser +{ + // pyproject.toml patterns + [GeneratedRegex(@"^\[project\]", RegexOptions.Compiled | RegexOptions.Multiline)] + private static partial Regex PyprojectProjectSection(); + + [GeneratedRegex(@"^\[project\.optional-dependencies\]", RegexOptions.Compiled | RegexOptions.Multiline)] + private static partial Regex PyprojectOptionalDepsSection(); + + [GeneratedRegex(@"^\[tool\.poetry\.extras\]", RegexOptions.Compiled | RegexOptions.Multiline)] + private static partial Regex PoetryExtrasSection(); + + [GeneratedRegex(@"^\[tool\.poetry\.group\.(\w+)\.dependencies\]", RegexOptions.Compiled | RegexOptions.Multiline)] + private static partial Regex PoetryGroupSection(); + + // Pattern to extract key = value or key = [...] lines + [GeneratedRegex(@"^(\w+)\s*=\s*\[(.*?)\]", RegexOptions.Compiled | RegexOptions.Singleline)] + private static partial Regex ArrayValuePattern(); + + [GeneratedRegex(@"^name\s*=\s*[""']([^""']+)[""']", RegexOptions.Compiled | RegexOptions.Multiline)] + private static partial Regex ProjectNamePattern(); + + [GeneratedRegex(@"^version\s*=\s*[""']([^""']+)[""']", RegexOptions.Compiled | RegexOptions.Multiline)] + private static partial Regex ProjectVersionPattern(); + + /// + /// Parses pyproject.toml and extracts optional dependencies. + /// + public async Task ParsePyprojectAsync( + PythonVirtualFileSystem vfs, + string pyprojectPath, + CancellationToken cancellationToken = default) + { + using var stream = await vfs.OpenReadAsync(pyprojectPath, cancellationToken).ConfigureAwait(false); + if (stream is null) + { + return null; + } + + using var reader = new StreamReader(stream); + var content = await reader.ReadToEndAsync(cancellationToken).ConfigureAwait(false); + + return ParsePyprojectContent(content, pyprojectPath); + } + + private static PythonProjectConfig ParsePyprojectContent(string content, string filePath) + { + string? projectName = null; + string? projectVersion = null; + var optionalDependencies = new Dictionary>(); + var scripts = new Dictionary(); + var extras = new List(); + + // Extract project name and version + var nameMatch = ProjectNamePattern().Match(content); + if (nameMatch.Success) + { + projectName = nameMatch.Groups[1].Value; + } + + var versionMatch = ProjectVersionPattern().Match(content); + if (versionMatch.Success) + { + projectVersion = versionMatch.Groups[1].Value; + } + + // Parse optional dependencies section + var optDepsMatch = PyprojectOptionalDepsSection().Match(content); + if (optDepsMatch.Success) + { + var sectionStart = optDepsMatch.Index + optDepsMatch.Length; + var sectionContent = ExtractSectionContent(content, sectionStart); + optionalDependencies = ParseOptionalDependencies(sectionContent); + extras.AddRange(optionalDependencies.Keys); + } + + // Parse Poetry extras section + var poetryExtrasMatch = PoetryExtrasSection().Match(content); + if (poetryExtrasMatch.Success) + { + var sectionStart = poetryExtrasMatch.Index + poetryExtrasMatch.Length; + var sectionContent = ExtractSectionContent(content, sectionStart); + var poetryExtras = ParseOptionalDependencies(sectionContent); + foreach (var (key, value) in poetryExtras) + { + if (!optionalDependencies.ContainsKey(key)) + { + optionalDependencies[key] = value; + extras.Add(key); + } + } + } + + // Parse Poetry group dependencies + foreach (Match groupMatch in PoetryGroupSection().Matches(content)) + { + var groupName = groupMatch.Groups[1].Value; + if (!extras.Contains(groupName)) + { + extras.Add(groupName); + } + } + + // Parse scripts section + scripts = ParseScriptsSection(content); + + return new PythonProjectConfig( + FilePath: filePath, + ProjectName: projectName, + ProjectVersion: projectVersion, + OptionalDependencies: optionalDependencies.ToImmutableDictionary(), + Extras: extras.Distinct().ToImmutableArray(), + Scripts: scripts.ToImmutableDictionary()); + } + + private static string ExtractSectionContent(string content, int startIndex) + { + // Find the next section header or end of file + var nextSection = content.IndexOf("\n[", startIndex, StringComparison.Ordinal); + if (nextSection < 0) + { + return content[startIndex..]; + } + return content[startIndex..nextSection]; + } + + private static Dictionary> ParseOptionalDependencies(string sectionContent) + { + var result = new Dictionary>(); + var lines = sectionContent.Split('\n'); + + string? currentKey = null; + var currentValues = new List(); + var inArray = false; + + foreach (var line in lines) + { + var trimmed = line.Trim(); + if (string.IsNullOrEmpty(trimmed) || trimmed.StartsWith('#')) + { + continue; + } + + // Check for new key + if (!inArray && trimmed.Contains('=')) + { + // Save previous key + if (currentKey is not null) + { + result[currentKey] = currentValues.ToImmutableArray(); + currentValues = []; + } + + var parts = trimmed.Split('=', 2); + currentKey = parts[0].Trim(); + var value = parts.Length > 1 ? parts[1].Trim() : ""; + + if (value.StartsWith('[')) + { + if (value.EndsWith(']')) + { + // Single-line array + currentValues = ParseArrayValues(value); + } + else + { + // Multi-line array + inArray = true; + currentValues = ParseArrayValues(value); + } + } + } + else if (inArray) + { + if (trimmed.EndsWith(']')) + { + currentValues.AddRange(ParseArrayValues(trimmed)); + inArray = false; + } + else + { + currentValues.AddRange(ParseArrayValues(trimmed)); + } + } + } + + // Save last key + if (currentKey is not null) + { + result[currentKey] = currentValues.ToImmutableArray(); + } + + return result; + } + + private static List ParseArrayValues(string value) + { + var result = new List(); + var cleaned = value.Trim('[', ']', ' ', '\t'); + if (string.IsNullOrEmpty(cleaned)) + { + return result; + } + + // Split by comma, handling quoted strings + var parts = cleaned.Split(','); + foreach (var part in parts) + { + var trimmed = part.Trim().Trim('"', '\'', ' '); + if (!string.IsNullOrEmpty(trimmed)) + { + result.Add(trimmed); + } + } + + return result; + } + + private static Dictionary ParseScriptsSection(string content) + { + var result = new Dictionary(); + + // Look for [project.scripts] or [tool.poetry.scripts] + var scriptsPatterns = new[] + { + @"\[project\.scripts\]", + @"\[tool\.poetry\.scripts\]" + }; + + foreach (var pattern in scriptsPatterns) + { + var match = Regex.Match(content, pattern); + if (match.Success) + { + var sectionStart = match.Index + match.Length; + var sectionContent = ExtractSectionContent(content, sectionStart); + var scripts = ParseKeyValueSection(sectionContent); + foreach (var (key, value) in scripts) + { + result[key] = value; + } + } + } + + return result; + } + + private static Dictionary ParseKeyValueSection(string sectionContent) + { + var result = new Dictionary(); + var lines = sectionContent.Split('\n'); + + foreach (var line in lines) + { + var trimmed = line.Trim(); + if (string.IsNullOrEmpty(trimmed) || trimmed.StartsWith('#')) + { + continue; + } + + if (trimmed.Contains('=')) + { + var parts = trimmed.Split('=', 2); + var key = parts[0].Trim(); + var value = parts.Length > 1 ? parts[1].Trim().Trim('"', '\'') : ""; + result[key] = value; + } + } + + return result; + } +} + +/// +/// Represents parsed Python project configuration. +/// +/// Path to the configuration file. +/// The project name. +/// The project version. +/// Optional dependencies by group name. +/// List of available extras. +/// Entry point scripts. +internal sealed record PythonProjectConfig( + string FilePath, + string? ProjectName, + string? ProjectVersion, + ImmutableDictionary> OptionalDependencies, + ImmutableArray Extras, + ImmutableDictionary Scripts) +{ + /// + /// Generates metadata entries for this configuration. + /// + public IEnumerable> ToMetadata(string prefix) + { + yield return new($"{prefix}.path", FilePath); + + if (ProjectName is not null) + { + yield return new($"{prefix}.name", ProjectName); + } + + if (ProjectVersion is not null) + { + yield return new($"{prefix}.version", ProjectVersion); + } + + if (Extras.Length > 0) + { + yield return new($"{prefix}.extras", string.Join(",", Extras)); + } + + if (Scripts.Count > 0) + { + yield return new($"{prefix}.scripts", string.Join(",", Scripts.Keys)); + } + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Observations/PythonObservationBuilder.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Observations/PythonObservationBuilder.cs new file mode 100644 index 000000000..5c44eddda --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Observations/PythonObservationBuilder.cs @@ -0,0 +1,395 @@ +using System.Collections.Immutable; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Capabilities; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Entrypoints; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Framework; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Imports; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Packaging; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.VirtualFileSystem; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal.Observations; + +/// +/// Builds AOC-compliant observation documents from analysis results. +/// +internal sealed class PythonObservationBuilder +{ + private const string SchemaVersion = "python-aoc-v1"; + + private readonly List _packages = []; + private readonly List _modules = []; + private readonly List _entrypoints = []; + private readonly List _dependencyEdges = []; + private readonly List _importEdges = []; + private readonly List _nativeExtensions = []; + private readonly List _frameworks = []; + private readonly List _warnings = []; + private readonly List _securitySensitiveCapabilities = []; + + private PythonObservationEnvironment? _environment; + private PythonObservationRuntimeEvidence? _runtimeEvidence; + + private bool _usesProcessExecution; + private bool _usesNetworkAccess; + private bool _usesFileSystem; + private bool _usesCodeExecution; + private bool _usesDeserialization; + private bool _usesNativeCode; + private bool _usesAsyncAwait; + private bool _usesMultiprocessing; + + /// + /// Adds packages from package discovery results. + /// + public PythonObservationBuilder AddPackages(IEnumerable packages) + { + foreach (var pkg in packages) + { + _packages.Add(new PythonObservationPackage( + Name: pkg.Name, + Version: pkg.Version ?? "unknown", + Source: pkg.Kind.ToString(), + Platform: null, + IsDirect: pkg.IsDirectDependency, + InstallerKind: pkg.InstallerTool, + DistInfoPath: pkg.MetadataPath, + Groups: pkg.Extras, + Extras: pkg.Extras)); + + // Add dependency edges + foreach (var dep in pkg.Dependencies) + { + _dependencyEdges.Add(new PythonObservationDependencyEdge( + FromPackage: pkg.Name, + ToPackage: ExtractPackageName(dep), + VersionConstraint: ExtractVersionConstraint(dep), + Extra: null, + IsOptional: false)); + } + } + + return this; + } + + /// + /// Adds modules from import graph analysis. + /// + public PythonObservationBuilder AddModules( + IEnumerable moduleNodes, + PythonImportGraph? importGraph = null) + { + foreach (var node in moduleNodes) + { + var imports = importGraph?.GetImportsForFile(node.VirtualPath ?? "") + .SelectMany(i => i.ImportedNames) + .ToImmutableArray() ?? ImmutableArray.Empty; + + _modules.Add(new PythonObservationModule( + Name: node.ModulePath, + Type: node.IsPackage ? "package" : "module", + FilePath: node.VirtualPath ?? "", + Line: null, + IsNamespacePackage: node.IsNamespacePackage, + ParentPackage: ExtractParentPackage(node.ModulePath), + Imports: imports)); + } + + return this; + } + + /// + /// Adds import edges from the import graph. + /// + public PythonObservationBuilder AddImportEdges(IEnumerable edges) + { + foreach (var edge in edges) + { + _importEdges.Add(new PythonObservationImportEdge( + FromModule: edge.From, + ToModule: edge.To, + Kind: MapImportKind(edge.Import.Kind), + Confidence: MapImportConfidence(edge.Import.Confidence), + ResolvedPath: null, + SourceFile: edge.Import.SourceFile, + Line: edge.Import.LineNumber ?? 0, + ResolverTrace: ImmutableArray.Empty)); + } + + return this; + } + + /// + /// Adds entrypoints from entrypoint discovery. + /// + public PythonObservationBuilder AddEntrypoints(IEnumerable entrypoints) + { + foreach (var ep in entrypoints) + { + _entrypoints.Add(new PythonObservationEntrypoint( + Path: ep.VirtualPath ?? ep.Target, + Type: ep.Kind.ToString(), + Handler: ep.Callable, + RequiredPackages: ImmutableArray.Empty, + InvocationContext: ep.InvocationContext.InvocationType.ToString())); + } + + return this; + } + + /// + /// Adds capabilities from capability detection. + /// + public PythonObservationBuilder AddCapabilities(IEnumerable capabilities) + { + foreach (var cap in capabilities) + { + switch (cap.Kind) + { + case PythonCapabilityKind.ProcessExecution: + _usesProcessExecution = true; + break; + case PythonCapabilityKind.NetworkAccess: + _usesNetworkAccess = true; + break; + case PythonCapabilityKind.FileSystemAccess: + _usesFileSystem = true; + break; + case PythonCapabilityKind.CodeExecution: + _usesCodeExecution = true; + break; + case PythonCapabilityKind.Deserialization: + _usesDeserialization = true; + break; + case PythonCapabilityKind.Ctypes or PythonCapabilityKind.Cffi or PythonCapabilityKind.NativeCodeExecution: + _usesNativeCode = true; + break; + case PythonCapabilityKind.AsyncAwait: + _usesAsyncAwait = true; + break; + case PythonCapabilityKind.Multiprocessing: + _usesMultiprocessing = true; + break; + } + + if (cap.IsSecuritySensitive && !_securitySensitiveCapabilities.Contains(cap.Kind.ToString())) + { + _securitySensitiveCapabilities.Add(cap.Kind.ToString()); + } + } + + return this; + } + + /// + /// Adds native extensions from extension scanning. + /// + public PythonObservationBuilder AddNativeExtensions(IEnumerable extensions) + { + foreach (var ext in extensions) + { + _nativeExtensions.Add(new PythonObservationNativeExtension( + ModuleName: ext.ModuleName, + Path: ext.Path, + Kind: ext.Kind.ToString(), + Platform: ext.Platform, + Architecture: ext.Architecture, + PackageName: ext.PackageName)); + + _usesNativeCode = true; + } + + return this; + } + + /// + /// Adds framework hints from framework detection. + /// + public PythonObservationBuilder AddFrameworkHints(IEnumerable hints) + { + foreach (var hint in hints) + { + string? category = null; + if (hint.IsWebFramework) category = "WebFramework"; + else if (hint.IsTaskQueue) category = "TaskQueue"; + else if (hint.IsServerless) category = "Serverless"; + else if (hint.IsCliFramework) category = "CLI"; + + _frameworks.Add(new PythonObservationFrameworkHint( + Kind: hint.Kind.ToString(), + SourceFile: hint.SourceFile, + Line: hint.LineNumber, + Evidence: hint.Evidence, + Confidence: MapConfidence(hint.Confidence), + Category: category)); + } + + return this; + } + + /// + /// Sets environment information. + /// + public PythonObservationBuilder SetEnvironment( + string? pythonVersion, + IEnumerable? sitePackagesPaths = null, + IEnumerable? requirementsFiles = null, + IEnumerable? pyprojectFiles = null, + string? virtualenvPath = null, + string? condaPrefix = null, + bool isContainer = false) + { + _environment = new PythonObservationEnvironment( + PythonVersion: pythonVersion, + SitePackagesPaths: sitePackagesPaths?.ToImmutableArray() ?? ImmutableArray.Empty, + VersionSources: ImmutableArray.Empty, + RequirementsFiles: requirementsFiles?.ToImmutableArray() ?? ImmutableArray.Empty, + PyprojectFiles: pyprojectFiles?.ToImmutableArray() ?? ImmutableArray.Empty, + VirtualenvPath: virtualenvPath, + CondaPrefix: condaPrefix, + IsContainer: isContainer); + + return this; + } + + /// + /// Adds a warning. + /// + public PythonObservationBuilder AddWarning( + string code, + string message, + string? filePath = null, + int? line = null, + string severity = "warning") + { + _warnings.Add(new PythonObservationWarning( + Code: code, + Message: message, + FilePath: filePath, + Line: line, + Severity: severity)); + + return this; + } + + /// + /// Sets runtime evidence from optional runtime analysis. + /// + public PythonObservationBuilder SetRuntimeEvidence(PythonObservationRuntimeEvidence evidence) + { + _runtimeEvidence = evidence; + return this; + } + + /// + /// Builds the final observation document. + /// + public PythonObservationDocument Build() + { + var detectedFrameworks = _frameworks + .Select(f => f.Kind) + .Distinct() + .ToImmutableArray(); + + return new PythonObservationDocument( + Schema: SchemaVersion, + Packages: _packages.ToImmutableArray(), + Modules: _modules.ToImmutableArray(), + Entrypoints: _entrypoints.ToImmutableArray(), + DependencyEdges: _dependencyEdges.ToImmutableArray(), + ImportEdges: _importEdges.ToImmutableArray(), + NativeExtensions: _nativeExtensions.ToImmutableArray(), + Frameworks: _frameworks.ToImmutableArray(), + Warnings: _warnings.ToImmutableArray(), + Environment: _environment ?? new PythonObservationEnvironment( + PythonVersion: null, + SitePackagesPaths: ImmutableArray.Empty, + VersionSources: ImmutableArray.Empty, + RequirementsFiles: ImmutableArray.Empty, + PyprojectFiles: ImmutableArray.Empty, + VirtualenvPath: null, + CondaPrefix: null, + IsContainer: false), + Capabilities: new PythonObservationCapabilitySummary( + UsesProcessExecution: _usesProcessExecution, + UsesNetworkAccess: _usesNetworkAccess, + UsesFileSystem: _usesFileSystem, + UsesCodeExecution: _usesCodeExecution, + UsesDeserialization: _usesDeserialization, + UsesNativeCode: _usesNativeCode, + UsesAsyncAwait: _usesAsyncAwait, + UsesMultiprocessing: _usesMultiprocessing, + DetectedFrameworks: detectedFrameworks, + SecuritySensitiveCapabilities: _securitySensitiveCapabilities.ToImmutableArray()), + RuntimeEvidence: _runtimeEvidence); + } + + private static PythonObservationImportKind MapImportKind(PythonImportKind kind) + { + return kind switch + { + PythonImportKind.Import => PythonObservationImportKind.Import, + PythonImportKind.FromImport => PythonObservationImportKind.FromImport, + PythonImportKind.RelativeImport => PythonObservationImportKind.RelativeImport, + PythonImportKind.ImportlibImportModule => PythonObservationImportKind.DynamicImport, + PythonImportKind.BuiltinImport => PythonObservationImportKind.DynamicImport, + _ => PythonObservationImportKind.Import + }; + } + + private static PythonObservationConfidence MapImportConfidence(PythonImportConfidence confidence) + { + return confidence switch + { + PythonImportConfidence.Low => PythonObservationConfidence.Low, + PythonImportConfidence.Medium => PythonObservationConfidence.Medium, + PythonImportConfidence.High => PythonObservationConfidence.High, + PythonImportConfidence.Definitive => PythonObservationConfidence.Definitive, + _ => PythonObservationConfidence.Medium + }; + } + + private static PythonObservationConfidence MapConfidence(PythonFrameworkConfidence confidence) + { + return confidence switch + { + PythonFrameworkConfidence.Low => PythonObservationConfidence.Low, + PythonFrameworkConfidence.Medium => PythonObservationConfidence.Medium, + PythonFrameworkConfidence.High => PythonObservationConfidence.High, + PythonFrameworkConfidence.Definitive => PythonObservationConfidence.Definitive, + _ => PythonObservationConfidence.Medium + }; + } + + private static string ExtractPackageName(string dependency) + { + // Extract package name from dependency spec like "requests>=2.0" or "numpy[extra]" + var name = dependency; + + var bracketIdx = name.IndexOf('['); + if (bracketIdx > 0) name = name[..bracketIdx]; + + foreach (var op in new[] { ">=", "<=", "==", "!=", ">", "<", "~=", "^" }) + { + var opIdx = name.IndexOf(op, StringComparison.Ordinal); + if (opIdx > 0) name = name[..opIdx]; + } + + return name.Trim(); + } + + private static string? ExtractVersionConstraint(string dependency) + { + foreach (var op in new[] { ">=", "<=", "==", "!=", ">", "<", "~=", "^" }) + { + var opIdx = dependency.IndexOf(op, StringComparison.Ordinal); + if (opIdx > 0) return dependency[opIdx..].Trim(); + } + + return null; + } + + private static string? ExtractParentPackage(string moduleName) + { + var lastDot = moduleName.LastIndexOf('.'); + return lastDot > 0 ? moduleName[..lastDot] : null; + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Observations/PythonObservationDocument.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Observations/PythonObservationDocument.cs new file mode 100644 index 000000000..1c3d8db22 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Observations/PythonObservationDocument.cs @@ -0,0 +1,231 @@ +using System.Collections.Immutable; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal.Observations; + +/// +/// AOC-compliant observation document for Python project analysis. +/// Contains packages, modules, entrypoints, dependency edges, capabilities, and warnings. +/// +internal sealed record PythonObservationDocument( + string Schema, + ImmutableArray Packages, + ImmutableArray Modules, + ImmutableArray Entrypoints, + ImmutableArray DependencyEdges, + ImmutableArray ImportEdges, + ImmutableArray NativeExtensions, + ImmutableArray Frameworks, + ImmutableArray Warnings, + PythonObservationEnvironment Environment, + PythonObservationCapabilitySummary Capabilities, + PythonObservationRuntimeEvidence? RuntimeEvidence = null); + +/// +/// Python package detected in the project (from pip, conda, or other package managers). +/// +internal sealed record PythonObservationPackage( + string Name, + string Version, + string Source, + string? Platform, + bool IsDirect, + string? InstallerKind, + string? DistInfoPath, + ImmutableArray Groups, + ImmutableArray Extras); + +/// +/// Python module or package detected in the project. +/// +internal sealed record PythonObservationModule( + string Name, + string Type, + string FilePath, + int? Line, + bool IsNamespacePackage, + string? ParentPackage, + ImmutableArray Imports); + +/// +/// Entrypoint detected in the Python project. +/// +internal sealed record PythonObservationEntrypoint( + string Path, + string Type, + string? Handler, + ImmutableArray RequiredPackages, + string? InvocationContext); + +/// +/// Package dependency edge (declared in requirements or pyproject). +/// +internal sealed record PythonObservationDependencyEdge( + string FromPackage, + string ToPackage, + string? VersionConstraint, + string? Extra, + bool IsOptional); + +/// +/// Import edge between modules with reason codes and confidence. +/// +internal sealed record PythonObservationImportEdge( + string FromModule, + string ToModule, + PythonObservationImportKind Kind, + PythonObservationConfidence Confidence, + string? ResolvedPath, + string SourceFile, + int Line, + ImmutableArray ResolverTrace); + +/// +/// Import edge types. +/// +internal enum PythonObservationImportKind +{ + /// Standard import statement. + Import, + + /// From X import Y statement. + FromImport, + + /// Relative import within package. + RelativeImport, + + /// Dynamic import via importlib. + DynamicImport, + + /// Namespace package implicit import. + NamespacePackage, + + /// Native extension load. + NativeExtension, + + /// Heuristic/hint-based import (not definitively resolved). + Hint +} + +/// +/// Confidence level for observations. +/// +internal enum PythonObservationConfidence +{ + /// Low confidence - heuristic match. + Low = 0, + + /// Medium confidence - likely correct. + Medium = 1, + + /// High confidence - clear evidence. + High = 2, + + /// Definitive - direct evidence found. + Definitive = 3 +} + +/// +/// Native extension detected in the project. +/// +internal sealed record PythonObservationNativeExtension( + string ModuleName, + string Path, + string Kind, + string? Platform, + string? Architecture, + string? PackageName); + +/// +/// Framework hint detected in the project. +/// +internal sealed record PythonObservationFrameworkHint( + string Kind, + string SourceFile, + int? Line, + string Evidence, + PythonObservationConfidence Confidence, + string? Category); + +/// +/// Analysis warning generated during scanning. +/// +internal sealed record PythonObservationWarning( + string Code, + string Message, + string? FilePath, + int? Line, + string Severity); + +/// +/// Environment profile with Python version, package manager settings, and paths. +/// +internal sealed record PythonObservationEnvironment( + string? PythonVersion, + ImmutableArray SitePackagesPaths, + ImmutableArray VersionSources, + ImmutableArray RequirementsFiles, + ImmutableArray PyprojectFiles, + string? VirtualenvPath, + string? CondaPrefix, + bool IsContainer); + +/// +/// Python version source with provenance. +/// +internal sealed record PythonObservationVersionSource( + string? Version, + string Source, + string SourceType); + +/// +/// Capability summary for the Python project. +/// +internal sealed record PythonObservationCapabilitySummary( + bool UsesProcessExecution, + bool UsesNetworkAccess, + bool UsesFileSystem, + bool UsesCodeExecution, + bool UsesDeserialization, + bool UsesNativeCode, + bool UsesAsyncAwait, + bool UsesMultiprocessing, + ImmutableArray DetectedFrameworks, + ImmutableArray SecuritySensitiveCapabilities); + +/// +/// Optional runtime evidence section for Python. +/// +internal sealed record PythonObservationRuntimeEvidence( + bool HasEvidence, + string? RuntimePythonVersion, + string? RuntimePlatform, + int LoadedModulesCount, + ImmutableArray LoadedPackages, + ImmutableArray LoadedModules, + ImmutableDictionary PathHashes, + ImmutableArray RuntimeCapabilities, + ImmutableArray Errors) +{ + /// + /// Empty runtime evidence instance. + /// + public static PythonObservationRuntimeEvidence Empty { get; } = new( + HasEvidence: false, + RuntimePythonVersion: null, + RuntimePlatform: null, + LoadedModulesCount: 0, + LoadedPackages: ImmutableArray.Empty, + LoadedModules: ImmutableArray.Empty, + PathHashes: ImmutableDictionary.Empty, + RuntimeCapabilities: ImmutableArray.Empty, + Errors: ImmutableArray.Empty); +} + +/// +/// Runtime error captured during execution. +/// +internal sealed record PythonObservationRuntimeError( + string Timestamp, + string Message, + string? Path, + string? PathSha256); diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Observations/PythonObservationSerializer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Observations/PythonObservationSerializer.cs new file mode 100644 index 000000000..4c2f4db35 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/Observations/PythonObservationSerializer.cs @@ -0,0 +1,73 @@ +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal.Observations; + +/// +/// Serializes Python observation documents to JSON. +/// +internal static class PythonObservationSerializer +{ + private static readonly JsonSerializerOptions SerializerOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + Converters = + { + new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) + } + }; + + private static readonly JsonSerializerOptions CompactSerializerOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + WriteIndented = false, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + Converters = + { + new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) + } + }; + + /// + /// Serializes the observation document to JSON. + /// + public static string Serialize(PythonObservationDocument document, bool compact = false) + { + var options = compact ? CompactSerializerOptions : SerializerOptions; + return JsonSerializer.Serialize(document, options); + } + + /// + /// Serializes the observation document to a stream. + /// + public static async Task SerializeAsync( + PythonObservationDocument document, + Stream stream, + bool compact = false, + CancellationToken cancellationToken = default) + { + var options = compact ? CompactSerializerOptions : SerializerOptions; + await JsonSerializer.SerializeAsync(stream, document, options, cancellationToken).ConfigureAwait(false); + } + + /// + /// Deserializes a JSON string to an observation document. + /// + public static PythonObservationDocument? Deserialize(string json) + { + return JsonSerializer.Deserialize(json, SerializerOptions); + } + + /// + /// Deserializes a stream to an observation document. + /// + public static async Task DeserializeAsync( + Stream stream, + CancellationToken cancellationToken = default) + { + return await JsonSerializer.DeserializeAsync( + stream, SerializerOptions, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/PythonZipappAdapter.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/PythonZipappAdapter.cs new file mode 100644 index 000000000..1c2c721a8 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/PythonZipappAdapter.cs @@ -0,0 +1,528 @@ +using System.IO.Compression; +using System.Text; +using System.Text.RegularExpressions; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal; + +/// +/// Analyzes Python zipapp archives (.pyz, .pyzw) for runtime information, +/// entry points, and startup behavior. +/// +internal static partial class PythonZipappAdapter +{ + private static readonly string[] LayerRootCandidates = { "layers", ".layers", "layer" }; + + /// + /// Discovers zipapp files in the workspace and container layers. + /// + public static IReadOnlyCollection DiscoverZipapps(string rootPath) + { + var discovered = new HashSet(StringComparer.OrdinalIgnoreCase); + + // Search in root path + DiscoverInDirectory(rootPath, discovered); + + // Search in container layers + foreach (var layerRoot in EnumerateLayerRoots(rootPath)) + { + DiscoverInDirectory(layerRoot, discovered); + + // Check common locations within layers + var appDir = Path.Combine(layerRoot, "app"); + if (Directory.Exists(appDir)) + { + DiscoverInDirectory(appDir, discovered); + } + + var optDir = Path.Combine(layerRoot, "opt"); + if (Directory.Exists(optDir)) + { + DiscoverInDirectory(optDir, discovered); + } + + var usrLocalBin = Path.Combine(layerRoot, "usr", "local", "bin"); + if (Directory.Exists(usrLocalBin)) + { + DiscoverInDirectory(usrLocalBin, discovered); + } + } + + return discovered + .OrderBy(static path => path, StringComparer.Ordinal) + .ToArray(); + } + + /// + /// Analyzes a zipapp archive for runtime information. + /// + public static PythonZipappInfo? AnalyzeZipapp(string zipappPath) + { + if (!File.Exists(zipappPath)) + { + return null; + } + + try + { + var shebang = ExtractShebang(zipappPath); + var pythonVersion = shebang != null ? ParsePythonVersionFromShebang(shebang) : null; + var hasMain = false; + var hasInit = false; + var entryModule = (string?)null; + var warnings = new List(); + var dependencies = new List(); + + // Open as zip archive to inspect contents + using var stream = File.OpenRead(zipappPath); + + // Skip shebang if present + var firstByte = stream.ReadByte(); + if (firstByte == '#') + { + // Skip to end of shebang line + while (stream.ReadByte() is int b && b != '\n' && b != -1) + { + } + } + else + { + stream.Position = 0; + } + + using var archive = new ZipArchive(stream, ZipArchiveMode.Read); + + foreach (var entry in archive.Entries) + { + var name = entry.FullName.Replace('\\', '/'); + + if (string.Equals(name, "__main__.py", StringComparison.OrdinalIgnoreCase)) + { + hasMain = true; + entryModule = TryExtractEntryModule(entry); + } + else if (string.Equals(name, "__init__.py", StringComparison.OrdinalIgnoreCase)) + { + hasInit = true; + } + else if (name.EndsWith("/__main__.py", StringComparison.OrdinalIgnoreCase)) + { + // Package with __main__.py + var package = Path.GetDirectoryName(name)?.Replace('/', '.'); + if (!string.IsNullOrEmpty(package)) + { + entryModule ??= package; + } + } + else if (name.EndsWith("/requirements.txt", StringComparison.OrdinalIgnoreCase) || + string.Equals(name, "requirements.txt", StringComparison.OrdinalIgnoreCase)) + { + var reqs = ExtractRequirements(entry); + dependencies.AddRange(reqs); + } + } + + // Generate warnings + if (!hasMain && !hasInit) + { + warnings.Add("Zipapp missing __main__.py; may not be directly executable"); + } + + if (shebang != null && shebang.Contains("/env ", StringComparison.OrdinalIgnoreCase)) + { + warnings.Add("Zipapp uses /usr/bin/env shebang; Python version may vary by environment"); + } + + var isWindows = zipappPath.EndsWith(".pyzw", StringComparison.OrdinalIgnoreCase); + if (isWindows) + { + warnings.Add("Zipapp is Windows-specific (.pyzw); uses pythonw.exe without console"); + } + + return new PythonZipappInfo( + Path: zipappPath, + FileName: Path.GetFileName(zipappPath), + Shebang: shebang, + PythonVersion: pythonVersion, + HasMainPy: hasMain, + EntryModule: entryModule, + IsWindowsApp: isWindows, + EmbeddedDependencies: dependencies, + Warnings: warnings); + } + catch (IOException) + { + return null; + } + catch (InvalidDataException) + { + return null; + } + } + + /// + /// Analyzes all zipapps in the workspace. + /// + public static PythonZipappAnalysis AnalyzeAll(string rootPath) + { + var zipapps = new List(); + var allWarnings = new List(); + + foreach (var zipappPath in DiscoverZipapps(rootPath)) + { + var info = AnalyzeZipapp(zipappPath); + if (info != null) + { + zipapps.Add(info); + allWarnings.AddRange(info.Warnings); + } + } + + if (zipapps.Count > 1) + { + allWarnings.Add($"Multiple zipapps detected ({zipapps.Count}); entry point resolution may be ambiguous"); + } + + return new PythonZipappAnalysis(zipapps, allWarnings); + } + + private static string? ExtractShebang(string path) + { + try + { + using var stream = File.OpenRead(path); + using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: false, leaveOpen: true); + + var firstLine = reader.ReadLine(); + if (firstLine != null && firstLine.StartsWith("#!")) + { + return firstLine[2..].Trim(); + } + + return null; + } + catch (IOException) + { + return null; + } + } + + private static string? ParsePythonVersionFromShebang(string shebang) + { + // Match patterns like: + // /usr/bin/python3.11 + // /usr/bin/env python3.10 + // python3.9 + + var match = PythonVersionPattern().Match(shebang); + if (match.Success) + { + return match.Groups["version"].Value; + } + + // Check for generic python3 or python + if (shebang.Contains("python3", StringComparison.OrdinalIgnoreCase)) + { + return "3"; + } + + if (shebang.Contains("python", StringComparison.OrdinalIgnoreCase)) + { + return null; // Could be Python 2 or 3 + } + + return null; + } + + private static string? TryExtractEntryModule(ZipArchiveEntry entry) + { + try + { + using var stream = entry.Open(); + using var reader = new StreamReader(stream); + var content = reader.ReadToEnd(); + + // Look for common patterns: + // from package.module import main + // import package.main + // runpy.run_module('package') + + var runpyMatch = RunpyPattern().Match(content); + if (runpyMatch.Success) + { + return runpyMatch.Groups["module"].Value; + } + + var fromImportMatch = FromImportPattern().Match(content); + if (fromImportMatch.Success) + { + return fromImportMatch.Groups["module"].Value; + } + + return null; + } + catch (IOException) + { + return null; + } + } + + private static List ExtractRequirements(ZipArchiveEntry entry) + { + var results = new List(); + + try + { + using var stream = entry.Open(); + using var reader = new StreamReader(stream); + + var content = reader.ReadToEnd(); + var lines = content.Split('\n', StringSplitOptions.RemoveEmptyEntries); + + foreach (var line in lines) + { + var trimmed = line.Trim(); + if (string.IsNullOrEmpty(trimmed) || trimmed.StartsWith('#') || trimmed.StartsWith('-')) + { + continue; + } + + // Extract package name (before any version specifier) + var match = PackageNamePattern().Match(trimmed); + if (match.Success) + { + results.Add(match.Groups["name"].Value); + } + } + } + catch (IOException) + { + // Ignore read errors + } + + return results; + } + + private static void DiscoverInDirectory(string directory, HashSet discovered) + { + if (!Directory.Exists(directory)) + { + return; + } + + try + { + foreach (var file in Directory.EnumerateFiles(directory, "*.pyz")) + { + discovered.Add(file); + } + + foreach (var file in Directory.EnumerateFiles(directory, "*.pyzw")) + { + discovered.Add(file); + } + + // Also check in subdirectories (up to 3 levels) + foreach (var subdir in Directory.EnumerateDirectories(directory)) + { + DiscoverInSubdirectory(subdir, discovered, 1); + } + } + catch (IOException) + { + // Ignore + } + catch (UnauthorizedAccessException) + { + // Ignore + } + } + + private static void DiscoverInSubdirectory(string directory, HashSet discovered, int depth) + { + if (depth > 3) + { + return; + } + + try + { + foreach (var file in Directory.EnumerateFiles(directory, "*.pyz")) + { + discovered.Add(file); + } + + foreach (var file in Directory.EnumerateFiles(directory, "*.pyzw")) + { + discovered.Add(file); + } + + foreach (var subdir in Directory.EnumerateDirectories(directory)) + { + var dirName = Path.GetFileName(subdir); + // Skip common non-relevant directories + if (dirName.StartsWith('.') || + string.Equals(dirName, "node_modules", StringComparison.OrdinalIgnoreCase) || + string.Equals(dirName, "__pycache__", StringComparison.OrdinalIgnoreCase) || + string.Equals(dirName, "venv", StringComparison.OrdinalIgnoreCase) || + string.Equals(dirName, ".venv", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + DiscoverInSubdirectory(subdir, discovered, depth + 1); + } + } + catch (IOException) + { + // Ignore + } + catch (UnauthorizedAccessException) + { + // Ignore + } + } + + private static IEnumerable EnumerateLayerRoots(string workspaceRoot) + { + foreach (var candidate in LayerRootCandidates) + { + var root = Path.Combine(workspaceRoot, candidate); + if (!Directory.Exists(root)) + { + continue; + } + + IEnumerable? directories; + try + { + directories = Directory.EnumerateDirectories(root); + } + catch (IOException) + { + continue; + } + catch (UnauthorizedAccessException) + { + continue; + } + + foreach (var layerDirectory in directories) + { + var fsDirectory = Path.Combine(layerDirectory, "fs"); + yield return Directory.Exists(fsDirectory) ? fsDirectory : layerDirectory; + } + } + } + + [GeneratedRegex(@"python(?\d+\.\d+)", RegexOptions.IgnoreCase)] + private static partial Regex PythonVersionPattern(); + + [GeneratedRegex(@"runpy\.run_module\(['""](?[^'""]+)['""]", RegexOptions.IgnoreCase)] + private static partial Regex RunpyPattern(); + + [GeneratedRegex(@"from\s+(?[\w.]+)\s+import", RegexOptions.IgnoreCase)] + private static partial Regex FromImportPattern(); + + [GeneratedRegex(@"^(?[\w\-_.]+)", RegexOptions.IgnoreCase)] + private static partial Regex PackageNamePattern(); +} + +/// +/// Information about a Python zipapp archive. +/// +internal sealed record PythonZipappInfo( + string Path, + string FileName, + string? Shebang, + string? PythonVersion, + bool HasMainPy, + string? EntryModule, + bool IsWindowsApp, + IReadOnlyCollection EmbeddedDependencies, + IReadOnlyCollection Warnings) +{ + public IReadOnlyCollection> ToMetadata() + { + var entries = new List> + { + new("zipapp.path", Path), + new("zipapp.hasMain", HasMainPy.ToString().ToLowerInvariant()) + }; + + if (Shebang != null) + { + entries.Add(new("zipapp.shebang", Shebang)); + } + + if (PythonVersion != null) + { + entries.Add(new("zipapp.pythonVersion", PythonVersion)); + } + + if (EntryModule != null) + { + entries.Add(new("zipapp.entryModule", EntryModule)); + } + + if (IsWindowsApp) + { + entries.Add(new("zipapp.windowsApp", "true")); + } + + if (EmbeddedDependencies.Count > 0) + { + entries.Add(new("zipapp.embeddedDeps.count", EmbeddedDependencies.Count.ToString())); + } + + return entries; + } +} + +/// +/// Analysis results for all zipapps in a workspace. +/// +internal sealed class PythonZipappAnalysis +{ + public PythonZipappAnalysis( + IReadOnlyCollection zipapps, + IReadOnlyCollection warnings) + { + Zipapps = zipapps; + Warnings = warnings; + } + + public IReadOnlyCollection Zipapps { get; } + public IReadOnlyCollection Warnings { get; } + + public bool HasZipapps => Zipapps.Count > 0; + public bool HasWarnings => Warnings.Count > 0; + + public IReadOnlyCollection> ToMetadata() + { + var entries = new List>(); + + if (Zipapps.Count > 0) + { + entries.Add(new("zipapps.count", Zipapps.Count.ToString())); + + var withShebang = Zipapps.Count(z => z.Shebang != null); + if (withShebang > 0) + { + entries.Add(new("zipapps.withShebang", withShebang.ToString())); + } + + var windowsApps = Zipapps.Count(z => z.IsWindowsApp); + if (windowsApps > 0) + { + entries.Add(new("zipapps.windowsApps", windowsApps.ToString())); + } + } + + for (var i = 0; i < Warnings.Count; i++) + { + entries.Add(new($"zipapps.warning[{i}]", Warnings.ElementAt(i))); + } + + return entries; + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/RuntimeEvidence/PythonImportHookScript.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/RuntimeEvidence/PythonImportHookScript.cs new file mode 100644 index 000000000..305a3866b --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/RuntimeEvidence/PythonImportHookScript.cs @@ -0,0 +1,194 @@ +namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal.RuntimeEvidence; + +/// +/// Provides the Python import hook script for runtime evidence collection. +/// +internal static class PythonImportHookScript +{ + /// + /// Gets the Python import hook script that captures module load events. + /// This script outputs NDJSON to stdout. + /// + public static string GetScript() => Script; + + /// + /// Gets the Python import hook script that writes to a file. + /// + /// The path to write output to. + /// The modified script. + public static string GetFileScript(string outputPath) + { + var escapedPath = outputPath.Replace("\\", "\\\\").Replace("'", "\\'"); + return Script.Replace( + "_stellaops_output = None", + $"_stellaops_output = open('{escapedPath}', 'w', buffering=1)"); + } + + // The Python script is stored as a verbatim string to avoid issues with # characters + private const string Script = @" +# StellaOps Python Import Hook +# This script captures module import events for static analysis validation. +# Output format: NDJSON (Newline-Delimited JSON) + +import sys +import json +import threading +import os +from datetime import datetime, timezone + +_stellaops_output = None +_stellaops_lock = threading.Lock() +_stellaops_seen = set() + +def _stellaops_emit(event_type, **kwargs): + """"""Emit an event as JSON."""""" + event = { + 'type': event_type, + 'timestamp': datetime.now(timezone.utc).isoformat(), + 'pid': os.getpid(), + **kwargs + } + + with _stellaops_lock: + line = json.dumps(event, default=str) + if _stellaops_output: + _stellaops_output.write(line + '\n') + _stellaops_output.flush() + else: + print(line, flush=True) + +def _stellaops_get_module_path(module): + """"""Get the file path for a module if available."""""" + try: + if hasattr(module, '__file__') and module.__file__: + return module.__file__ + if hasattr(module, '__spec__') and module.__spec__: + if hasattr(module.__spec__, 'origin') and module.__spec__.origin: + return module.__spec__.origin + except Exception: + pass + return None + +class StellaOpsMetaPathFinder: + """"""Meta path finder that logs all import attempts."""""" + + def find_module(self, fullname, path=None): + if fullname not in _stellaops_seen: + _stellaops_seen.add(fullname) + _stellaops_emit('import_attempt', module=fullname, path=str(path) if path else None) + return None + + def find_spec(self, fullname, path, target=None): + return None + +def _stellaops_wrap_import(): + """"""Wrap the built-in __import__ to capture all imports."""""" + original_import = __builtins__.__import__ if hasattr(__builtins__, '__import__') else __builtins__['__import__'] + + def wrapped_import(name, globals=None, locals=None, fromlist=(), level=0): + module = original_import(name, globals, locals, fromlist, level) + + if name not in _stellaops_seen: + _stellaops_seen.add(name) + + path = _stellaops_get_module_path(module) + is_native = path and (path.endswith('.so') or path.endswith('.pyd')) + + event_type = 'native_load' if is_native else 'module_import' + + _stellaops_emit( + event_type, + module=name, + path=path, + parent=module.__package__ if hasattr(module, '__package__') else None, + tid=threading.get_ident() + ) + + return module + + if hasattr(__builtins__, '__import__'): + __builtins__.__import__ = wrapped_import + else: + __builtins__['__import__'] = wrapped_import + +def _stellaops_wrap_subprocess(): + """"""Wrap subprocess module to capture process spawns."""""" + try: + import subprocess + original_popen = subprocess.Popen + + class WrappedPopen(original_popen): + def __init__(self, *args, **kwargs): + _stellaops_emit('process_spawn', spawn_type='subprocess', args=str(args[0]) if args else None) + super().__init__(*args, **kwargs) + + subprocess.Popen = WrappedPopen + except Exception: + pass + +def _stellaops_wrap_multiprocessing(): + """"""Wrap multiprocessing module to capture process spawns."""""" + try: + import multiprocessing + original_process = multiprocessing.Process + + class WrappedProcess(original_process): + def __init__(self, *args, **kwargs): + _stellaops_emit('process_spawn', spawn_type='multiprocessing', target=str(kwargs.get('target'))) + super().__init__(*args, **kwargs) + + multiprocessing.Process = WrappedProcess + except Exception: + pass + +def stellaops_start_tracing(): + """"""Initialize runtime evidence collection."""""" + # Emit interpreter start event + _stellaops_emit( + 'interpreter_start', + python_version=sys.version, + platform=sys.platform, + executable=sys.executable + ) + + # Install import hook + _stellaops_wrap_import() + + # Install meta path finder + sys.meta_path.insert(0, StellaOpsMetaPathFinder()) + + # Wrap subprocess and multiprocessing (optional) + _stellaops_wrap_subprocess() + _stellaops_wrap_multiprocessing() + + # Record already-loaded modules + for name, module in list(sys.modules.items()): + if module is not None and name not in _stellaops_seen: + _stellaops_seen.add(name) + path = _stellaops_get_module_path(module) + if path: + is_native = path.endswith('.so') or path.endswith('.pyd') + event_type = 'native_load' if is_native else 'module_import' + _stellaops_emit( + event_type, + module=name, + path=path, + preloaded=True + ) + +# Auto-start if this module is imported +if __name__ != '__main__': + stellaops_start_tracing() +else: + # If run directly, start tracing and wait + stellaops_start_tracing() + import sys + print('StellaOps import hook active. Press Ctrl+C to stop.', file=sys.stderr) + try: + import time + while True: + time.sleep(1) + except KeyboardInterrupt: + pass +"; +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/RuntimeEvidence/PythonPathHasher.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/RuntimeEvidence/PythonPathHasher.cs new file mode 100644 index 000000000..8f987e3ff --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/RuntimeEvidence/PythonPathHasher.cs @@ -0,0 +1,194 @@ +using System.Security.Cryptography; +using System.Text; +using System.Text.RegularExpressions; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal.RuntimeEvidence; + +/// +/// Provides path scrubbing and hashing for privacy-preserving runtime evidence. +/// +internal static partial class PythonPathHasher +{ + private static readonly string[] SensitivePatterns = + [ + @"/home/[^/]+", + @"/Users/[^/]+", + @"C:\\Users\\[^\\]+", + @"/root", + @"/tmp/[^/]+", + @"/var/[^/]+/[^/]+", + ]; + + /// + /// Scrubs sensitive path components and returns a normalized path. + /// + /// The path to scrub. + /// A scrubbed path with sensitive components replaced. + public static string ScrubPath(string? path) + { + if (string.IsNullOrEmpty(path)) + { + return string.Empty; + } + + var result = path; + + // Replace home directories + result = HomeDirectoryPattern().Replace(result, "[HOME]"); + result = WindowsUserPattern().Replace(result, "[HOME]"); + result = MacUserPattern().Replace(result, "[HOME]"); + result = RootPattern().Replace(result, "[ROOT]"); + + // Replace temp directories + result = TempPattern().Replace(result, "[TEMP]"); + + // Replace container-specific paths + result = ContainerAppPattern().Replace(result, "[APP]"); + + // Normalize path separators + result = result.Replace('\\', '/'); + + return result; + } + + /// + /// Computes a SHA-256 hash of a path for deterministic identification. + /// + /// The path to hash. + /// The SHA-256 hash as a hex string. + public static string HashPath(string? path) + { + if (string.IsNullOrEmpty(path)) + { + return string.Empty; + } + + var normalizedPath = NormalizePath(path); + var bytes = Encoding.UTF8.GetBytes(normalizedPath); + var hash = SHA256.HashData(bytes); + return Convert.ToHexStringLower(hash); + } + + /// + /// Creates a scrubbed and hashed mapping for a path. + /// + /// The original path. + /// A tuple of (scrubbedPath, hash). + public static (string ScrubbedPath, string Hash) ScrubAndHash(string? path) + { + var scrubbed = ScrubPath(path); + var hash = HashPath(path); + return (scrubbed, hash); + } + + /// + /// Normalizes a path for consistent hashing. + /// + private static string NormalizePath(string path) + { + // Convert to lowercase and normalize separators + var result = path.ToLowerInvariant().Replace('\\', '/'); + + // Remove trailing slashes + result = result.TrimEnd('/'); + + // Collapse multiple slashes + result = MultiSlashPattern().Replace(result, "/"); + + return result; + } + + /// + /// Extracts the module name from a file path. + /// + /// The file path. + /// The module name, or null if not determinable. + public static string? ExtractModuleName(string? path) + { + if (string.IsNullOrEmpty(path)) + { + return null; + } + + // Normalize path + var normalizedPath = path.Replace('\\', '/'); + + // Check for site-packages + var sitePackagesIndex = normalizedPath.IndexOf("site-packages/", StringComparison.OrdinalIgnoreCase); + if (sitePackagesIndex >= 0) + { + var relativePath = normalizedPath[(sitePackagesIndex + 14)..]; + return ConvertPathToModule(relativePath); + } + + // Check for dist-packages + var distPackagesIndex = normalizedPath.IndexOf("dist-packages/", StringComparison.OrdinalIgnoreCase); + if (distPackagesIndex >= 0) + { + var relativePath = normalizedPath[(distPackagesIndex + 14)..]; + return ConvertPathToModule(relativePath); + } + + // Fallback: extract filename + var fileName = Path.GetFileNameWithoutExtension(normalizedPath); + return string.IsNullOrEmpty(fileName) ? null : fileName; + } + + /// + /// Converts a relative file path to a Python module name. + /// + private static string ConvertPathToModule(string relativePath) + { + // Remove file extension + if (relativePath.EndsWith(".py", StringComparison.OrdinalIgnoreCase)) + { + relativePath = relativePath[..^3]; + } + else if (relativePath.EndsWith(".so", StringComparison.OrdinalIgnoreCase) || + relativePath.EndsWith(".pyd", StringComparison.OrdinalIgnoreCase)) + { + // Handle native extensions like module.cpython-311-x86_64-linux-gnu.so + var lastDot = relativePath.LastIndexOf('.'); + if (lastDot > 0) + { + relativePath = relativePath[..lastDot]; + // Remove cpython version suffix + var cpythonIndex = relativePath.IndexOf(".cpython-", StringComparison.OrdinalIgnoreCase); + if (cpythonIndex > 0) + { + relativePath = relativePath[..cpythonIndex]; + } + } + } + + // Handle __init__ files + if (relativePath.EndsWith("/__init__", StringComparison.OrdinalIgnoreCase)) + { + relativePath = relativePath[..^9]; + } + + // Convert path separators to dots + return relativePath.Replace('/', '.'); + } + + [GeneratedRegex(@"/home/[^/]+", RegexOptions.IgnoreCase)] + private static partial Regex HomeDirectoryPattern(); + + [GeneratedRegex(@"C:\\Users\\[^\\]+", RegexOptions.IgnoreCase)] + private static partial Regex WindowsUserPattern(); + + [GeneratedRegex(@"/Users/[^/]+", RegexOptions.IgnoreCase)] + private static partial Regex MacUserPattern(); + + [GeneratedRegex(@"/root\b")] + private static partial Regex RootPattern(); + + [GeneratedRegex(@"/tmp/[^/]+")] + private static partial Regex TempPattern(); + + [GeneratedRegex(@"^/app(?=/|$)")] + private static partial Regex ContainerAppPattern(); + + [GeneratedRegex(@"//+")] + private static partial Regex MultiSlashPattern(); +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/RuntimeEvidence/PythonRuntimeEvent.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/RuntimeEvidence/PythonRuntimeEvent.cs new file mode 100644 index 000000000..12ec180ee --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/RuntimeEvidence/PythonRuntimeEvent.cs @@ -0,0 +1,51 @@ +namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal.RuntimeEvidence; + +/// +/// Represents a runtime event captured from Python execution. +/// +internal sealed record PythonRuntimeEvent( + PythonRuntimeEventKind Kind, + string Timestamp, + string? ModuleName, + string? ModulePath, + string? ModuleSpec, + string? ParentModule, + string? ErrorMessage, + string? ProcessId, + string? ThreadId); + +/// +/// Kind of runtime event captured. +/// +internal enum PythonRuntimeEventKind +{ + /// Module was imported. + ModuleImport, + + /// Module load failed. + ModuleLoadError, + + /// Native extension was loaded. + NativeExtensionLoad, + + /// Dynamic import via importlib. + DynamicImport, + + /// Process spawned via subprocess/multiprocessing. + ProcessSpawn, + + /// New thread started. + ThreadStart, + + /// Import hook was installed. + ImportHookInstall, + + /// Path was added to sys.path. + PathModification, + + /// Python interpreter started. + InterpreterStart, + + /// Python interpreter stopped. + InterpreterStop +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/RuntimeEvidence/PythonRuntimeEvidenceCollector.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/RuntimeEvidence/PythonRuntimeEvidenceCollector.cs new file mode 100644 index 000000000..8565684d2 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/RuntimeEvidence/PythonRuntimeEvidenceCollector.cs @@ -0,0 +1,396 @@ +using System.Collections.Immutable; +using System.Text.Json; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Observations; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal.RuntimeEvidence; + +/// +/// Collects and processes runtime evidence from Python execution traces. +/// +internal sealed class PythonRuntimeEvidenceCollector +{ + private static readonly JsonSerializerOptions JsonOptions = new() + { + PropertyNameCaseInsensitive = true, + AllowTrailingCommas = true + }; + + private readonly List _events = []; + private readonly Dictionary _pathHashes = new(); + private readonly HashSet _loadedModules = new(StringComparer.Ordinal); + private readonly HashSet _loadedPackages = new(StringComparer.Ordinal); + private readonly HashSet _runtimeCapabilities = new(StringComparer.Ordinal); + private readonly List _errors = []; + + private string? _pythonVersion; + private string? _platform; + + /// + /// Parses a JSON line from the runtime evidence output. + /// + /// A JSON line containing event data. + public void ParseLine(string jsonLine) + { + if (string.IsNullOrWhiteSpace(jsonLine)) + { + return; + } + + try + { + using var doc = JsonDocument.Parse(jsonLine); + var root = doc.RootElement; + + var eventType = root.GetProperty("type").GetString(); + + switch (eventType) + { + case "interpreter_start": + ParseInterpreterStart(root); + break; + + case "module_import": + ParseModuleImport(root); + break; + + case "module_error": + ParseModuleError(root); + break; + + case "native_load": + ParseNativeLoad(root); + break; + + case "dynamic_import": + ParseDynamicImport(root); + break; + + case "process_spawn": + ParseProcessSpawn(root); + break; + + case "path_modification": + ParsePathModification(root); + break; + } + } + catch (JsonException) + { + // Skip malformed JSON lines + } + } + + /// + /// Parses multiple JSON lines from the runtime evidence output. + /// + /// The complete output string (NDJSON format). + public void ParseOutput(string output) + { + if (string.IsNullOrEmpty(output)) + { + return; + } + + foreach (var line in output.Split('\n', StringSplitOptions.RemoveEmptyEntries)) + { + ParseLine(line.Trim()); + } + } + + /// + /// Parses runtime evidence from a file. + /// + /// Path to the NDJSON evidence file. + /// Cancellation token. + public async Task ParseFileAsync(string filePath, CancellationToken cancellationToken = default) + { + if (!File.Exists(filePath)) + { + return; + } + + await foreach (var line in File.ReadLinesAsync(filePath, cancellationToken).ConfigureAwait(false)) + { + ParseLine(line); + } + } + + /// + /// Builds the runtime evidence observation from collected data. + /// + /// The runtime evidence observation. + public PythonObservationRuntimeEvidence Build() + { + if (_events.Count == 0 && _loadedModules.Count == 0) + { + return PythonObservationRuntimeEvidence.Empty; + } + + return new PythonObservationRuntimeEvidence( + HasEvidence: true, + RuntimePythonVersion: _pythonVersion, + RuntimePlatform: _platform, + LoadedModulesCount: _loadedModules.Count, + LoadedPackages: [.. _loadedPackages.OrderBy(p => p, StringComparer.Ordinal)], + LoadedModules: [.. _loadedModules.OrderBy(m => m, StringComparer.Ordinal)], + PathHashes: _pathHashes.ToImmutableDictionary(StringComparer.Ordinal), + RuntimeCapabilities: [.. _runtimeCapabilities.OrderBy(c => c, StringComparer.Ordinal)], + Errors: [.. _errors]); + } + + /// + /// Gets all captured runtime events. + /// + public IReadOnlyList Events => _events; + + private void ParseInterpreterStart(JsonElement root) + { + _pythonVersion = root.TryGetProperty("python_version", out var version) + ? version.GetString() + : null; + + _platform = root.TryGetProperty("platform", out var platform) + ? platform.GetString() + : null; + + var timestamp = root.TryGetProperty("timestamp", out var ts) + ? ts.GetString() ?? GetUtcTimestamp() + : GetUtcTimestamp(); + + _events.Add(new PythonRuntimeEvent( + Kind: PythonRuntimeEventKind.InterpreterStart, + Timestamp: timestamp, + ModuleName: null, + ModulePath: null, + ModuleSpec: null, + ParentModule: null, + ErrorMessage: null, + ProcessId: root.TryGetProperty("pid", out var pid) ? pid.GetInt32().ToString() : null, + ThreadId: null)); + } + + private void ParseModuleImport(JsonElement root) + { + var moduleName = root.TryGetProperty("module", out var module) + ? module.GetString() + : null; + + var modulePath = root.TryGetProperty("path", out var path) + ? path.GetString() + : null; + + var timestamp = root.TryGetProperty("timestamp", out var ts) + ? ts.GetString() ?? GetUtcTimestamp() + : GetUtcTimestamp(); + + if (!string.IsNullOrEmpty(moduleName)) + { + _loadedModules.Add(moduleName); + + // Extract top-level package + var topLevelPackage = moduleName.Split('.')[0]; + _loadedPackages.Add(topLevelPackage); + } + + // Add path hash + if (!string.IsNullOrEmpty(modulePath)) + { + var (scrubbed, hash) = PythonPathHasher.ScrubAndHash(modulePath); + if (!string.IsNullOrEmpty(hash)) + { + _pathHashes.TryAdd(scrubbed, hash); + } + } + + _events.Add(new PythonRuntimeEvent( + Kind: PythonRuntimeEventKind.ModuleImport, + Timestamp: timestamp, + ModuleName: moduleName, + ModulePath: PythonPathHasher.ScrubPath(modulePath), + ModuleSpec: root.TryGetProperty("spec", out var spec) ? spec.GetString() : null, + ParentModule: root.TryGetProperty("parent", out var parent) ? parent.GetString() : null, + ErrorMessage: null, + ProcessId: root.TryGetProperty("pid", out var pid) ? pid.GetInt32().ToString() : null, + ThreadId: root.TryGetProperty("tid", out var tid) ? tid.GetInt64().ToString() : null)); + } + + private void ParseModuleError(JsonElement root) + { + var moduleName = root.TryGetProperty("module", out var module) + ? module.GetString() + : null; + + var errorMessage = root.TryGetProperty("error", out var error) + ? error.GetString() + : null; + + var modulePath = root.TryGetProperty("path", out var path) + ? path.GetString() + : null; + + var timestamp = root.TryGetProperty("timestamp", out var ts) + ? ts.GetString() ?? GetUtcTimestamp() + : GetUtcTimestamp(); + + _events.Add(new PythonRuntimeEvent( + Kind: PythonRuntimeEventKind.ModuleLoadError, + Timestamp: timestamp, + ModuleName: moduleName, + ModulePath: PythonPathHasher.ScrubPath(modulePath), + ModuleSpec: null, + ParentModule: null, + ErrorMessage: errorMessage, + ProcessId: root.TryGetProperty("pid", out var pid) ? pid.GetInt32().ToString() : null, + ThreadId: null)); + + // Add to errors + var scrubbed = PythonPathHasher.ScrubPath(modulePath); + var hash = PythonPathHasher.HashPath(modulePath); + _errors.Add(new PythonObservationRuntimeError( + Timestamp: timestamp, + Message: errorMessage ?? $"Failed to import {moduleName}", + Path: scrubbed, + PathSha256: string.IsNullOrEmpty(hash) ? null : hash)); + } + + private void ParseNativeLoad(JsonElement root) + { + var moduleName = root.TryGetProperty("module", out var module) + ? module.GetString() + : null; + + var modulePath = root.TryGetProperty("path", out var path) + ? path.GetString() + : null; + + var timestamp = root.TryGetProperty("timestamp", out var ts) + ? ts.GetString() ?? GetUtcTimestamp() + : GetUtcTimestamp(); + + if (!string.IsNullOrEmpty(moduleName)) + { + _loadedModules.Add(moduleName); + } + + // Track native code capability + _runtimeCapabilities.Add("native_code"); + + // Add path hash + if (!string.IsNullOrEmpty(modulePath)) + { + var (scrubbed, hash) = PythonPathHasher.ScrubAndHash(modulePath); + if (!string.IsNullOrEmpty(hash)) + { + _pathHashes.TryAdd(scrubbed, hash); + } + } + + _events.Add(new PythonRuntimeEvent( + Kind: PythonRuntimeEventKind.NativeExtensionLoad, + Timestamp: timestamp, + ModuleName: moduleName, + ModulePath: PythonPathHasher.ScrubPath(modulePath), + ModuleSpec: null, + ParentModule: null, + ErrorMessage: null, + ProcessId: root.TryGetProperty("pid", out var pid) ? pid.GetInt32().ToString() : null, + ThreadId: null)); + } + + private void ParseDynamicImport(JsonElement root) + { + var moduleName = root.TryGetProperty("module", out var module) + ? module.GetString() + : null; + + var timestamp = root.TryGetProperty("timestamp", out var ts) + ? ts.GetString() ?? GetUtcTimestamp() + : GetUtcTimestamp(); + + if (!string.IsNullOrEmpty(moduleName)) + { + _loadedModules.Add(moduleName); + } + + // Track dynamic import capability + _runtimeCapabilities.Add("dynamic_import"); + + _events.Add(new PythonRuntimeEvent( + Kind: PythonRuntimeEventKind.DynamicImport, + Timestamp: timestamp, + ModuleName: moduleName, + ModulePath: null, + ModuleSpec: null, + ParentModule: null, + ErrorMessage: null, + ProcessId: root.TryGetProperty("pid", out var pid) ? pid.GetInt32().ToString() : null, + ThreadId: null)); + } + + private void ParseProcessSpawn(JsonElement root) + { + var timestamp = root.TryGetProperty("timestamp", out var ts) + ? ts.GetString() ?? GetUtcTimestamp() + : GetUtcTimestamp(); + + // Track process spawn capability + _runtimeCapabilities.Add("process_spawn"); + + var spawnType = root.TryGetProperty("spawn_type", out var st) + ? st.GetString() + : "unknown"; + + if (spawnType == "multiprocessing") + { + _runtimeCapabilities.Add("multiprocessing"); + } + + _events.Add(new PythonRuntimeEvent( + Kind: PythonRuntimeEventKind.ProcessSpawn, + Timestamp: timestamp, + ModuleName: null, + ModulePath: null, + ModuleSpec: spawnType, + ParentModule: null, + ErrorMessage: null, + ProcessId: root.TryGetProperty("pid", out var pid) ? pid.GetInt32().ToString() : null, + ThreadId: null)); + } + + private void ParsePathModification(JsonElement root) + { + var path = root.TryGetProperty("path", out var p) + ? p.GetString() + : null; + + var timestamp = root.TryGetProperty("timestamp", out var ts) + ? ts.GetString() ?? GetUtcTimestamp() + : GetUtcTimestamp(); + + // Add path hash + if (!string.IsNullOrEmpty(path)) + { + var (scrubbed, hash) = PythonPathHasher.ScrubAndHash(path); + if (!string.IsNullOrEmpty(hash)) + { + _pathHashes.TryAdd(scrubbed, hash); + } + } + + _events.Add(new PythonRuntimeEvent( + Kind: PythonRuntimeEventKind.PathModification, + Timestamp: timestamp, + ModuleName: null, + ModulePath: PythonPathHasher.ScrubPath(path), + ModuleSpec: null, + ParentModule: null, + ErrorMessage: null, + ProcessId: root.TryGetProperty("pid", out var pid) ? pid.GetInt32().ToString() : null, + ThreadId: null)); + } + + private static string GetUtcTimestamp() + { + return DateTime.UtcNow.ToString("O"); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/PythonLanguageAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/PythonLanguageAnalyzer.cs index d58c9e119..59b23ca25 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/PythonLanguageAnalyzer.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/PythonLanguageAnalyzer.cs @@ -40,6 +40,9 @@ public sealed class PythonLanguageAnalyzer : ILanguageAnalyzer // Detect startup hooks (sitecustomize.py, usercustomize.py, .pth files) var startupHooks = PythonStartupHookDetector.Detect(context.RootPath); + // Analyze zipapps in workspace and container layers + var zipappAnalysis = PythonZipappAdapter.AnalyzeAll(context.RootPath); + // Collect dist-info directories from both root and container layers var distInfoDirectories = CollectDistInfoDirectories(context.RootPath); @@ -91,6 +94,9 @@ public sealed class PythonLanguageAnalyzer : ILanguageAnalyzer // Append startup hooks warnings AppendStartupHooksMetadata(metadata, startupHooks); + // Append zipapp analysis + AppendZipappMetadata(metadata, zipappAnalysis); + // Collect evidence including startup hooks var evidence = distribution.SortedEvidence.ToList(); evidence.AddRange(startupHooks.ToEvidence(context)); @@ -242,6 +248,45 @@ public sealed class PythonLanguageAnalyzer : ILanguageAnalyzer } } + private static void AppendZipappMetadata(List> metadata, PythonZipappAnalysis zipappAnalysis) + { + if (!zipappAnalysis.HasZipapps) + { + return; + } + + metadata.Add(new KeyValuePair("zipapps.detected", "true")); + metadata.Add(new KeyValuePair("zipapps.count", zipappAnalysis.Zipapps.Count.ToString())); + + // Add version information from zipapp shebangs + var versions = zipappAnalysis.Zipapps + .Where(z => z.PythonVersion != null) + .Select(z => z.PythonVersion!) + .Distinct() + .OrderBy(v => v, StringComparer.Ordinal) + .ToArray(); + + if (versions.Length > 0) + { + metadata.Add(new KeyValuePair("zipapps.pythonVersions", string.Join(';', versions))); + } + + // Add warnings + foreach (var warning in zipappAnalysis.Warnings) + { + metadata.Add(new KeyValuePair("zipapps.warning", warning)); + } + + // Add individual zipapp warnings + foreach (var zipapp in zipappAnalysis.Zipapps) + { + foreach (var warning in zipapp.Warnings) + { + metadata.Add(new KeyValuePair($"zipapps.{zipapp.FileName}.warning", warning)); + } + } + } + private static IReadOnlyCollection CollectDistInfoDirectories(string rootPath) { var directories = new HashSet(StringComparer.OrdinalIgnoreCase); diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/HomebrewAnalyzerPlugin.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/HomebrewAnalyzerPlugin.cs new file mode 100644 index 000000000..878b8a6ee --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/HomebrewAnalyzerPlugin.cs @@ -0,0 +1,26 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Plugin; + +namespace StellaOps.Scanner.Analyzers.OS.Homebrew; + +/// +/// Plugin that registers the Homebrew package analyzer for macOS Cellar discovery. +/// +public sealed class HomebrewAnalyzerPlugin : IOSAnalyzerPlugin +{ + /// + public string Name => "StellaOps.Scanner.Analyzers.OS.Homebrew"; + + /// + public bool IsAvailable(IServiceProvider services) => services is not null; + + /// + public IOSPackageAnalyzer CreateAnalyzer(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + var loggerFactory = services.GetRequiredService(); + return new HomebrewPackageAnalyzer(loggerFactory.CreateLogger()); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/HomebrewPackageAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/HomebrewPackageAnalyzer.cs new file mode 100644 index 000000000..70b937781 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/HomebrewPackageAnalyzer.cs @@ -0,0 +1,366 @@ +using System.Collections.ObjectModel; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Analyzers; +using StellaOps.Scanner.Analyzers.OS.Helpers; + +namespace StellaOps.Scanner.Analyzers.OS.Homebrew; + +/// +/// Analyzes Homebrew Cellar directories to extract installed formula information. +/// Scans /usr/local/Cellar (Intel) and /opt/homebrew/Cellar (Apple Silicon) directories. +/// +internal sealed class HomebrewPackageAnalyzer : OsPackageAnalyzerBase +{ + private static readonly IReadOnlyList EmptyPackages = + new ReadOnlyCollection(Array.Empty()); + + /// + /// Default paths to scan for Homebrew Cellar directories. + /// + private static readonly string[] CellarPaths = + [ + "usr/local/Cellar", // Intel Macs + "opt/homebrew/Cellar", // Apple Silicon Macs + ]; + + /// + /// Maximum traversal depth within Cellar to prevent runaway scanning. + /// Formula structure: Cellar/{formula}/{version}/... + /// + private const int MaxTraversalDepth = 3; + + /// + /// Maximum formula size in bytes (200MB default per design spec). + /// + private const long MaxFormulaSizeBytes = 200L * 1024L * 1024L; + + private readonly HomebrewReceiptParser _parser = new(); + + public HomebrewPackageAnalyzer(ILogger logger) + : base(logger) + { + } + + public override string AnalyzerId => "homebrew"; + + protected override ValueTask> ExecuteCoreAsync( + OSPackageAnalyzerContext context, + CancellationToken cancellationToken) + { + var records = new List(); + var warnings = new List(); + + foreach (var cellarRelativePath in CellarPaths) + { + var cellarPath = Path.Combine(context.RootPath, cellarRelativePath); + if (!Directory.Exists(cellarPath)) + { + continue; + } + + Logger.LogInformation("Scanning Homebrew Cellar at {Path}", cellarPath); + + try + { + DiscoverFormulas(cellarPath, records, warnings, cancellationToken); + } + catch (Exception ex) when (ex is not OperationCanceledException) + { + Logger.LogWarning(ex, "Failed to scan Homebrew Cellar at {Path}", cellarPath); + } + } + + if (records.Count == 0) + { + Logger.LogInformation("No Homebrew formulas found; skipping analyzer."); + return ValueTask.FromResult>(EmptyPackages); + } + + foreach (var warning in warnings) + { + Logger.LogWarning("Homebrew scan warning: {Warning}", warning); + } + + Logger.LogInformation("Discovered {Count} Homebrew formulas", records.Count); + + // Sort for deterministic output + records.Sort(); + return ValueTask.FromResult>(records); + } + + private void DiscoverFormulas( + string cellarPath, + List records, + List warnings, + CancellationToken cancellationToken) + { + // Enumerate formula directories (e.g., /usr/local/Cellar/openssl@3) + foreach (var formulaDir in EnumerateDirectoriesSafe(cellarPath)) + { + cancellationToken.ThrowIfCancellationRequested(); + + var formulaName = Path.GetFileName(formulaDir); + if (string.IsNullOrWhiteSpace(formulaName) || formulaName.StartsWith('.')) + { + continue; + } + + // Enumerate version directories (e.g., /usr/local/Cellar/openssl@3/3.1.0) + foreach (var versionDir in EnumerateDirectoriesSafe(formulaDir)) + { + cancellationToken.ThrowIfCancellationRequested(); + + var versionName = Path.GetFileName(versionDir); + if (string.IsNullOrWhiteSpace(versionName) || versionName.StartsWith('.')) + { + continue; + } + + // Check size guardrail + if (!CheckFormulaSizeGuardrail(versionDir, out var sizeWarning)) + { + warnings.Add(sizeWarning!); + continue; + } + + // Look for INSTALL_RECEIPT.json + var receiptPath = Path.Combine(versionDir, "INSTALL_RECEIPT.json"); + if (File.Exists(receiptPath)) + { + var record = ParseReceiptAndCreateRecord(receiptPath, formulaName, versionName, versionDir); + if (record is not null) + { + records.Add(record); + } + } + else + { + // Fallback: create record from directory structure + var record = CreateRecordFromDirectory(formulaName, versionName, versionDir); + if (record is not null) + { + records.Add(record); + warnings.Add($"No INSTALL_RECEIPT.json for {formulaName}@{versionName}; using directory-based discovery."); + } + } + } + } + } + + private OSPackageRecord? ParseReceiptAndCreateRecord( + string receiptPath, + string formulaName, + string versionFromDir, + string versionDir) + { + var receipt = _parser.Parse(receiptPath); + if (receipt is null) + { + Logger.LogWarning("Failed to parse INSTALL_RECEIPT.json at {Path}", receiptPath); + return null; + } + + // Use receipt version if available, fallback to directory name + var version = !string.IsNullOrWhiteSpace(receipt.Version) ? receipt.Version : versionFromDir; + + // Build PURL per spec: pkg:brew//@?revision= + var purl = PackageUrlBuilder.BuildHomebrew( + receipt.Tap ?? "homebrew/core", + receipt.Name ?? formulaName, + version, + receipt.Revision); + + var vendorMetadata = BuildVendorMetadata(receipt, versionDir); + var files = DiscoverFormulaFiles(versionDir); + + return new OSPackageRecord( + AnalyzerId, + purl, + receipt.Name ?? formulaName, + version, + receipt.Architecture, + PackageEvidenceSource.HomebrewCellar, + epoch: null, + release: receipt.Revision > 0 ? receipt.Revision.ToString() : null, + sourcePackage: receipt.Tap, + license: receipt.License, + cveHints: null, + provides: null, + depends: receipt.RuntimeDependencies, + files: files, + vendorMetadata: vendorMetadata); + } + + private OSPackageRecord? CreateRecordFromDirectory( + string formulaName, + string version, + string versionDir) + { + if (string.IsNullOrWhiteSpace(formulaName) || string.IsNullOrWhiteSpace(version)) + { + return null; + } + + var purl = PackageUrlBuilder.BuildHomebrew("homebrew/core", formulaName, version, revision: 0); + var architecture = DetectArchitectureFromPath(versionDir); + var files = DiscoverFormulaFiles(versionDir); + + var vendorMetadata = new Dictionary(StringComparer.Ordinal) + { + ["brew:discovery_method"] = "directory", + ["brew:install_path"] = versionDir, + }; + + return new OSPackageRecord( + AnalyzerId, + purl, + formulaName, + version, + architecture, + PackageEvidenceSource.HomebrewCellar, + epoch: null, + release: null, + sourcePackage: "homebrew/core", + license: null, + cveHints: null, + provides: null, + depends: null, + files: files, + vendorMetadata: vendorMetadata); + } + + private static Dictionary BuildVendorMetadata(HomebrewReceipt receipt, string versionDir) + { + var metadata = new Dictionary(StringComparer.Ordinal) + { + ["brew:tap"] = receipt.Tap, + ["brew:poured_from_bottle"] = receipt.PouredFromBottle.ToString().ToLowerInvariant(), + ["brew:installed_as_dependency"] = receipt.InstalledAsDependency.ToString().ToLowerInvariant(), + ["brew:installed_on_request"] = receipt.InstalledOnRequest.ToString().ToLowerInvariant(), + ["brew:install_path"] = versionDir, + }; + + if (receipt.InstallTime.HasValue) + { + var installTime = DateTimeOffset.FromUnixTimeSeconds(receipt.InstallTime.Value); + metadata["brew:install_time"] = installTime.ToString("o"); + } + + if (!string.IsNullOrWhiteSpace(receipt.Description)) + { + metadata["description"] = receipt.Description; + } + + if (!string.IsNullOrWhiteSpace(receipt.Homepage)) + { + metadata["homepage"] = receipt.Homepage; + } + + if (!string.IsNullOrWhiteSpace(receipt.SourceUrl)) + { + metadata["brew:source_url"] = receipt.SourceUrl; + } + + if (!string.IsNullOrWhiteSpace(receipt.SourceChecksum)) + { + metadata["brew:source_checksum"] = receipt.SourceChecksum; + } + + if (!string.IsNullOrWhiteSpace(receipt.BottleChecksum)) + { + metadata["brew:bottle_checksum"] = receipt.BottleChecksum; + } + + return metadata; + } + + private List DiscoverFormulaFiles(string versionDir) + { + var files = new List(); + + try + { + // Only discover key files to avoid excessive enumeration + // Focus on bin/, lib/, include/, share/man directories + var keyDirs = new[] { "bin", "lib", "include", "sbin" }; + + foreach (var keyDir in keyDirs) + { + var keyPath = Path.Combine(versionDir, keyDir); + if (!Directory.Exists(keyPath)) + { + continue; + } + + foreach (var file in Directory.EnumerateFiles(keyPath, "*", SearchOption.TopDirectoryOnly)) + { + var relativePath = Path.GetRelativePath(versionDir, file); + files.Add(new OSPackageFileEvidence( + relativePath, + layerDigest: null, + sha256: null, + sizeBytes: null, + isConfigFile: false)); + } + } + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException) + { + // Ignore file enumeration errors + } + + return files; + } + + private static bool CheckFormulaSizeGuardrail(string versionDir, out string? warning) + { + warning = null; + + try + { + long totalSize = 0; + foreach (var file in Directory.EnumerateFiles(versionDir, "*", SearchOption.AllDirectories)) + { + var info = new FileInfo(file); + totalSize += info.Length; + + if (totalSize > MaxFormulaSizeBytes) + { + warning = $"Formula at {versionDir} exceeds {MaxFormulaSizeBytes / (1024 * 1024)}MB size limit; skipping."; + return false; + } + } + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException) + { + // Allow if we can't determine size + } + + return true; + } + + private static string DetectArchitectureFromPath(string path) + { + // /opt/homebrew is Apple Silicon (arm64) + // /usr/local is Intel (x86_64) + if (path.Contains("/opt/homebrew/", StringComparison.OrdinalIgnoreCase)) + { + return "arm64"; + } + + return "x86_64"; + } + + private static IEnumerable EnumerateDirectoriesSafe(string path) + { + try + { + return Directory.EnumerateDirectories(path); + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException) + { + return Array.Empty(); + } + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/HomebrewReceiptParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/HomebrewReceiptParser.cs new file mode 100644 index 000000000..ab94c0b16 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/HomebrewReceiptParser.cs @@ -0,0 +1,237 @@ +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Scanner.Analyzers.OS.Homebrew; + +/// +/// Parses Homebrew INSTALL_RECEIPT.json files to extract formula metadata. +/// +internal sealed class HomebrewReceiptParser +{ + private static readonly JsonSerializerOptions SerializerOptions = new() + { + PropertyNameCaseInsensitive = true, + ReadCommentHandling = JsonCommentHandling.Skip, + AllowTrailingCommas = true, + }; + + /// + /// Parses a Homebrew INSTALL_RECEIPT.json stream. + /// + public HomebrewReceipt? Parse(Stream stream, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(stream); + + try + { + var rawReceipt = JsonSerializer.Deserialize(stream, SerializerOptions); + if (rawReceipt is null || string.IsNullOrWhiteSpace(rawReceipt.Name)) + { + return null; + } + + return new HomebrewReceipt( + Name: rawReceipt.Name.Trim(), + Version: rawReceipt.Versions?.Stable?.Trim() ?? rawReceipt.Version?.Trim() ?? string.Empty, + Revision: rawReceipt.Revision ?? 0, + Tap: rawReceipt.TappedFrom?.Trim() ?? rawReceipt.Tap?.Trim() ?? "homebrew/core", + PouredFromBottle: rawReceipt.PouredFromBottle ?? false, + InstallTime: rawReceipt.InstallTime ?? rawReceipt.Time, + InstalledAsDependency: rawReceipt.InstalledAsDependency ?? false, + InstalledOnRequest: rawReceipt.InstalledOnRequest ?? true, + RuntimeDependencies: ExtractDependencies(rawReceipt.RuntimeDependencies), + BuildDependencies: ExtractDependencies(rawReceipt.BuildDependencies), + SourceUrl: rawReceipt.Source?.Url?.Trim(), + SourceChecksum: rawReceipt.Source?.Checksum?.Trim(), + BottleChecksum: rawReceipt.BottleChecksum?.Trim(), + Description: rawReceipt.Description?.Trim(), + Homepage: rawReceipt.Homepage?.Trim(), + License: rawReceipt.License?.Trim(), + Architecture: NormalizeArchitecture(rawReceipt.TabJson?.Arch ?? rawReceipt.Arch), + TabPath: rawReceipt.TabPath?.Trim()); + } + catch (JsonException) + { + return null; + } + } + + /// + /// Parses a Homebrew INSTALL_RECEIPT.json from a file path. + /// + public HomebrewReceipt? Parse(string receiptPath, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(receiptPath); + + if (!File.Exists(receiptPath)) + { + return null; + } + + using var stream = File.OpenRead(receiptPath); + return Parse(stream, cancellationToken); + } + + private static IReadOnlyList ExtractDependencies(RawDependency[]? dependencies) + { + if (dependencies is null or { Length: 0 }) + { + return Array.Empty(); + } + + var result = new List(dependencies.Length); + foreach (var dep in dependencies) + { + var name = dep.FullName?.Trim() ?? dep.Name?.Trim(); + if (!string.IsNullOrWhiteSpace(name)) + { + result.Add(name); + } + } + + result.Sort(StringComparer.Ordinal); + return result; + } + + private static string NormalizeArchitecture(string? arch) + { + if (string.IsNullOrWhiteSpace(arch)) + { + return "x86_64"; // Default for Intel Macs + } + + var normalized = arch.Trim().ToLowerInvariant(); + return normalized switch + { + "arm64" or "aarch64" => "arm64", + "x86_64" or "amd64" or "x64" => "x86_64", + _ => normalized, + }; + } + + // Raw JSON models for deserialization + private sealed class RawHomebrewReceipt + { + [JsonPropertyName("name")] + public string? Name { get; set; } + + [JsonPropertyName("version")] + public string? Version { get; set; } + + [JsonPropertyName("versions")] + public RawVersions? Versions { get; set; } + + [JsonPropertyName("revision")] + public int? Revision { get; set; } + + [JsonPropertyName("tap")] + public string? Tap { get; set; } + + [JsonPropertyName("tapped_from")] + public string? TappedFrom { get; set; } + + [JsonPropertyName("poured_from_bottle")] + public bool? PouredFromBottle { get; set; } + + [JsonPropertyName("time")] + public long? Time { get; set; } + + [JsonPropertyName("install_time")] + public long? InstallTime { get; set; } + + [JsonPropertyName("installed_as_dependency")] + public bool? InstalledAsDependency { get; set; } + + [JsonPropertyName("installed_on_request")] + public bool? InstalledOnRequest { get; set; } + + [JsonPropertyName("runtime_dependencies")] + public RawDependency[]? RuntimeDependencies { get; set; } + + [JsonPropertyName("build_dependencies")] + public RawDependency[]? BuildDependencies { get; set; } + + [JsonPropertyName("source")] + public RawSource? Source { get; set; } + + [JsonPropertyName("bottle_checksum")] + public string? BottleChecksum { get; set; } + + [JsonPropertyName("desc")] + public string? Description { get; set; } + + [JsonPropertyName("homepage")] + public string? Homepage { get; set; } + + [JsonPropertyName("license")] + public string? License { get; set; } + + [JsonPropertyName("arch")] + public string? Arch { get; set; } + + [JsonPropertyName("HOMEBREW_INSTALL_PATH")] + public string? TabPath { get; set; } + + [JsonPropertyName("tab")] + public RawTab? TabJson { get; set; } + } + + private sealed class RawVersions + { + [JsonPropertyName("stable")] + public string? Stable { get; set; } + + [JsonPropertyName("head")] + public string? Head { get; set; } + } + + private sealed class RawDependency + { + [JsonPropertyName("full_name")] + public string? FullName { get; set; } + + [JsonPropertyName("name")] + public string? Name { get; set; } + + [JsonPropertyName("version")] + public string? Version { get; set; } + } + + private sealed class RawSource + { + [JsonPropertyName("url")] + public string? Url { get; set; } + + [JsonPropertyName("checksum")] + public string? Checksum { get; set; } + } + + private sealed class RawTab + { + [JsonPropertyName("arch")] + public string? Arch { get; set; } + } +} + +/// +/// Represents parsed Homebrew formula receipt metadata. +/// +internal sealed record HomebrewReceipt( + string Name, + string Version, + int Revision, + string Tap, + bool PouredFromBottle, + long? InstallTime, + bool InstalledAsDependency, + bool InstalledOnRequest, + IReadOnlyList RuntimeDependencies, + IReadOnlyList BuildDependencies, + string? SourceUrl, + string? SourceChecksum, + string? BottleChecksum, + string? Description, + string? Homepage, + string? License, + string Architecture, + string? TabPath); diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/Properties/AssemblyInfo.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/Properties/AssemblyInfo.cs new file mode 100644 index 000000000..9ef279645 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/Properties/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scanner.Analyzers.OS.Homebrew.Tests")] diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/StellaOps.Scanner.Analyzers.OS.Homebrew.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/StellaOps.Scanner.Analyzers.OS.Homebrew.csproj new file mode 100644 index 000000000..b7be5c8aa --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Homebrew/StellaOps.Scanner.Analyzers.OS.Homebrew.csproj @@ -0,0 +1,15 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/EntitlementsParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/EntitlementsParser.cs new file mode 100644 index 000000000..6efeebcec --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/EntitlementsParser.cs @@ -0,0 +1,230 @@ +using Claunia.PropertyList; + +namespace StellaOps.Scanner.Analyzers.OS.MacOsBundle; + +/// +/// Parses macOS entitlements from embedded plist files or code signature data. +/// +internal sealed class EntitlementsParser +{ + /// + /// Well-known entitlement categories for capability classification. + /// + private static readonly Dictionary EntitlementCategories = new(StringComparer.OrdinalIgnoreCase) + { + // Network + ["com.apple.security.network.client"] = "network", + ["com.apple.security.network.server"] = "network", + + // File System + ["com.apple.security.files.user-selected.read-only"] = "filesystem", + ["com.apple.security.files.user-selected.read-write"] = "filesystem", + ["com.apple.security.files.downloads.read-only"] = "filesystem", + ["com.apple.security.files.downloads.read-write"] = "filesystem", + ["com.apple.security.files.all"] = "filesystem", + + // Hardware + ["com.apple.security.device.camera"] = "camera", + ["com.apple.security.device.microphone"] = "microphone", + ["com.apple.security.device.usb"] = "hardware", + ["com.apple.security.device.bluetooth"] = "hardware", + ["com.apple.security.device.serial"] = "hardware", + + // Privacy + ["com.apple.security.personal-information.addressbook"] = "privacy", + ["com.apple.security.personal-information.calendars"] = "privacy", + ["com.apple.security.personal-information.location"] = "privacy", + ["com.apple.security.personal-information.photos-library"] = "privacy", + + // System + ["com.apple.security.automation.apple-events"] = "automation", + ["com.apple.security.scripting-targets"] = "automation", + ["com.apple.security.cs.allow-jit"] = "code-execution", + ["com.apple.security.cs.allow-unsigned-executable-memory"] = "code-execution", + ["com.apple.security.cs.disable-library-validation"] = "code-execution", + ["com.apple.security.cs.allow-dyld-environment-variables"] = "code-execution", + ["com.apple.security.get-task-allow"] = "debugging", + + // App Sandbox + ["com.apple.security.app-sandbox"] = "sandbox", + ["com.apple.security.inherit"] = "sandbox", + }; + + /// + /// High-risk entitlements that warrant policy attention. + /// + private static readonly HashSet HighRiskEntitlements = new(StringComparer.OrdinalIgnoreCase) + { + "com.apple.security.device.camera", + "com.apple.security.device.microphone", + "com.apple.security.cs.allow-unsigned-executable-memory", + "com.apple.security.cs.disable-library-validation", + "com.apple.security.get-task-allow", + "com.apple.security.files.all", + "com.apple.security.automation.apple-events", + }; + + /// + /// Parses entitlements from a plist file (e.g., embedded entitlements or xcent file). + /// + public BundleEntitlements Parse(string entitlementsPath, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(entitlementsPath); + + if (!File.Exists(entitlementsPath)) + { + return BundleEntitlements.Empty; + } + + try + { + using var stream = File.OpenRead(entitlementsPath); + return Parse(stream, cancellationToken); + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or FormatException) + { + return BundleEntitlements.Empty; + } + } + + /// + /// Parses entitlements from a stream. + /// + public BundleEntitlements Parse(Stream stream, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(stream); + + try + { + var plist = PropertyListParser.Parse(stream); + if (plist is not NSDictionary root) + { + return BundleEntitlements.Empty; + } + + var entitlements = new Dictionary(StringComparer.Ordinal); + var categories = new HashSet(StringComparer.Ordinal); + var highRisk = new List(); + + foreach (var kvp in root) + { + var key = kvp.Key; + var value = ConvertValue(kvp.Value); + entitlements[key] = value; + + // Classify the entitlement + if (EntitlementCategories.TryGetValue(key, out var category)) + { + categories.Add(category); + } + + // Check if high risk + if (HighRiskEntitlements.Contains(key) && IsTrueValue(value)) + { + highRisk.Add(key); + } + } + + return new BundleEntitlements( + Entitlements: entitlements, + Categories: categories.OrderBy(c => c, StringComparer.Ordinal).ToList(), + HighRiskEntitlements: highRisk.OrderBy(e => e, StringComparer.Ordinal).ToList(), + IsSandboxed: entitlements.TryGetValue("com.apple.security.app-sandbox", out var sandbox) && IsTrueValue(sandbox), + HasHardenedRuntime: entitlements.TryGetValue("com.apple.security.cs.allow-unsigned-executable-memory", out var hr) && !IsTrueValue(hr)); + } + catch (Exception ex) when (ex is FormatException or InvalidOperationException or ArgumentException) + { + return BundleEntitlements.Empty; + } + } + + /// + /// Discovers entitlements file within an app bundle. + /// + public string? FindEntitlementsFile(string bundlePath) + { + if (string.IsNullOrWhiteSpace(bundlePath)) + { + return null; + } + + // Look for xcent files first (highest priority for actual entitlements) + var codeSignPath = Path.Combine(bundlePath, "Contents", "_CodeSignature"); + if (Directory.Exists(codeSignPath)) + { + try + { + var xcentFiles = Directory.GetFiles(codeSignPath, "*.xcent"); + if (xcentFiles.Length > 0) + { + return xcentFiles[0]; + } + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException) + { + // Ignore + } + } + + // Check other common entitlements locations + var candidates = new[] + { + Path.Combine(bundlePath, "Contents", "embedded.provisionprofile"), + Path.Combine(bundlePath, "embedded.mobileprovision"), + }; + + foreach (var candidate in candidates) + { + if (File.Exists(candidate)) + { + return candidate; + } + } + + return null; + } + + private static object? ConvertValue(NSObject? obj) + { + return obj switch + { + NSString s => s.Content, + NSNumber n => n.ToObject(), + NSData d => Convert.ToBase64String(d.Bytes), + NSArray a => a.Select(ConvertValue).ToArray(), + NSDictionary d => d.ToDictionary(kvp => kvp.Key, kvp => ConvertValue(kvp.Value)), + _ => obj?.ToString() + }; + } + + private static bool IsTrueValue(object? value) + { + return value switch + { + bool b => b, + int i => i != 0, + string s => s.Equals("true", StringComparison.OrdinalIgnoreCase) || s == "1", + _ => false + }; + } +} + +/// +/// Represents parsed macOS bundle entitlements. +/// +internal sealed record BundleEntitlements( + IReadOnlyDictionary Entitlements, + IReadOnlyList Categories, + IReadOnlyList HighRiskEntitlements, + bool IsSandboxed, + bool HasHardenedRuntime) +{ + public static BundleEntitlements Empty { get; } = new( + new Dictionary(), + Array.Empty(), + Array.Empty(), + IsSandboxed: false, + HasHardenedRuntime: false); + + public bool HasEntitlement(string key) => Entitlements.ContainsKey(key); +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/InfoPlistParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/InfoPlistParser.cs new file mode 100644 index 000000000..554e84ef0 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/InfoPlistParser.cs @@ -0,0 +1,132 @@ +using Claunia.PropertyList; + +namespace StellaOps.Scanner.Analyzers.OS.MacOsBundle; + +/// +/// Parses macOS application bundle Info.plist files. +/// +internal sealed class InfoPlistParser +{ + /// + /// Parses an Info.plist file from the given path. + /// + public BundleInfo? Parse(string plistPath, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(plistPath); + + if (!File.Exists(plistPath)) + { + return null; + } + + try + { + using var stream = File.OpenRead(plistPath); + return Parse(stream, plistPath, cancellationToken); + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or FormatException) + { + return null; + } + } + + /// + /// Parses an Info.plist from a stream. + /// + public BundleInfo? Parse(Stream stream, string? sourcePath = null, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(stream); + + try + { + var plist = PropertyListParser.Parse(stream); + if (plist is not NSDictionary root) + { + return null; + } + + var bundleId = GetString(root, "CFBundleIdentifier"); + if (string.IsNullOrWhiteSpace(bundleId)) + { + return null; + } + + return new BundleInfo( + BundleIdentifier: bundleId.Trim(), + BundleName: GetString(root, "CFBundleName")?.Trim() ?? ExtractNameFromBundleId(bundleId), + BundleDisplayName: GetString(root, "CFBundleDisplayName")?.Trim(), + Version: GetString(root, "CFBundleVersion")?.Trim() ?? "0", + ShortVersion: GetString(root, "CFBundleShortVersionString")?.Trim(), + MinimumSystemVersion: GetString(root, "LSMinimumSystemVersion")?.Trim(), + Executable: GetString(root, "CFBundleExecutable")?.Trim(), + BundlePackageType: GetString(root, "CFBundlePackageType")?.Trim() ?? "APPL", + SupportedPlatforms: GetStringArray(root, "CFBundleSupportedPlatforms"), + RequiredCapabilities: GetStringArray(root, "UIRequiredDeviceCapabilities"), + SourcePath: sourcePath); + } + catch (Exception ex) when (ex is FormatException or InvalidOperationException or ArgumentException) + { + return null; + } + } + + private static string? GetString(NSDictionary dict, string key) + { + if (dict.TryGetValue(key, out var value) && value is NSString nsString) + { + return nsString.Content; + } + + return null; + } + + private static IReadOnlyList GetStringArray(NSDictionary dict, string key) + { + if (!dict.TryGetValue(key, out var value)) + { + return Array.Empty(); + } + + var result = new List(); + + if (value is NSArray array) + { + foreach (var item in array) + { + if (item is NSString nsString && !string.IsNullOrWhiteSpace(nsString.Content)) + { + result.Add(nsString.Content.Trim()); + } + } + } + else if (value is NSString singleString && !string.IsNullOrWhiteSpace(singleString.Content)) + { + result.Add(singleString.Content.Trim()); + } + + result.Sort(StringComparer.Ordinal); + return result; + } + + private static string ExtractNameFromBundleId(string bundleId) + { + var parts = bundleId.Split('.', StringSplitOptions.RemoveEmptyEntries); + return parts.Length > 0 ? parts[^1] : bundleId; + } +} + +/// +/// Represents parsed macOS bundle Info.plist metadata. +/// +internal sealed record BundleInfo( + string BundleIdentifier, + string BundleName, + string? BundleDisplayName, + string Version, + string? ShortVersion, + string? MinimumSystemVersion, + string? Executable, + string BundlePackageType, + IReadOnlyList SupportedPlatforms, + IReadOnlyList RequiredCapabilities, + string? SourcePath); diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/MacOsBundleAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/MacOsBundleAnalyzer.cs new file mode 100644 index 000000000..7b2467804 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/MacOsBundleAnalyzer.cs @@ -0,0 +1,368 @@ +using System.Collections.ObjectModel; +using System.Security.Cryptography; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Analyzers; +using StellaOps.Scanner.Analyzers.OS.Helpers; + +namespace StellaOps.Scanner.Analyzers.OS.MacOsBundle; + +/// +/// Analyzes macOS application bundles (.app) to extract metadata and capability information. +/// Scans /Applications, /System/Applications, and user application directories. +/// +internal sealed class MacOsBundleAnalyzer : OsPackageAnalyzerBase +{ + private static readonly IReadOnlyList EmptyPackages = + new ReadOnlyCollection(Array.Empty()); + + /// + /// Standard paths to scan for application bundles. + /// + private static readonly string[] ApplicationPaths = + [ + "Applications", + "System/Applications", + "Library/Application Support", + ]; + + /// + /// Maximum traversal depth within application directories. + /// + private const int MaxTraversalDepth = 3; + + /// + /// Maximum bundle size to process (500MB). + /// + private const long MaxBundleSizeBytes = 500L * 1024L * 1024L; + + private readonly InfoPlistParser _infoPlistParser = new(); + private readonly EntitlementsParser _entitlementsParser = new(); + + public MacOsBundleAnalyzer(ILogger logger) + : base(logger) + { + } + + public override string AnalyzerId => "macos-bundle"; + + protected override ValueTask> ExecuteCoreAsync( + OSPackageAnalyzerContext context, + CancellationToken cancellationToken) + { + var records = new List(); + var warnings = new List(); + + // Scan standard application paths + foreach (var appPath in ApplicationPaths) + { + var fullPath = Path.Combine(context.RootPath, appPath); + if (!Directory.Exists(fullPath)) + { + continue; + } + + Logger.LogInformation("Scanning for application bundles in {Path}", fullPath); + + try + { + DiscoverBundles(fullPath, records, warnings, 0, cancellationToken); + } + catch (Exception ex) when (ex is not OperationCanceledException) + { + Logger.LogWarning(ex, "Failed to scan application path {Path}", fullPath); + } + } + + // Scan user directories + var usersPath = Path.Combine(context.RootPath, "Users"); + if (Directory.Exists(usersPath)) + { + try + { + foreach (var userDir in Directory.EnumerateDirectories(usersPath)) + { + cancellationToken.ThrowIfCancellationRequested(); + + var userAppsPath = Path.Combine(userDir, "Applications"); + if (Directory.Exists(userAppsPath)) + { + DiscoverBundles(userAppsPath, records, warnings, 0, cancellationToken); + } + } + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException) + { + Logger.LogDebug(ex, "Could not enumerate user directories"); + } + } + + if (records.Count == 0) + { + Logger.LogInformation("No application bundles found; skipping analyzer."); + return ValueTask.FromResult>(EmptyPackages); + } + + foreach (var warning in warnings.Take(10)) // Limit warning output + { + Logger.LogWarning("Bundle scan warning: {Warning}", warning); + } + + Logger.LogInformation("Discovered {Count} application bundles", records.Count); + + // Sort for deterministic output + records.Sort(); + return ValueTask.FromResult>(records); + } + + private void DiscoverBundles( + string searchPath, + List records, + List warnings, + int depth, + CancellationToken cancellationToken) + { + if (depth > MaxTraversalDepth) + { + return; + } + + IEnumerable entries; + try + { + entries = Directory.EnumerateDirectories(searchPath); + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException) + { + return; + } + + foreach (var entry in entries) + { + cancellationToken.ThrowIfCancellationRequested(); + + var name = Path.GetFileName(entry); + if (string.IsNullOrWhiteSpace(name) || name.StartsWith('.')) + { + continue; + } + + // Check if this is an app bundle + if (name.EndsWith(".app", StringComparison.OrdinalIgnoreCase)) + { + var record = AnalyzeBundle(entry, warnings, cancellationToken); + if (record is not null) + { + records.Add(record); + } + } + else + { + // Recurse into subdirectories (e.g., for nested apps) + DiscoverBundles(entry, records, warnings, depth + 1, cancellationToken); + } + } + } + + private OSPackageRecord? AnalyzeBundle( + string bundlePath, + List warnings, + CancellationToken cancellationToken) + { + // Find and parse Info.plist + var infoPlistPath = Path.Combine(bundlePath, "Contents", "Info.plist"); + if (!File.Exists(infoPlistPath)) + { + // Try iOS-style location + infoPlistPath = Path.Combine(bundlePath, "Info.plist"); + } + + if (!File.Exists(infoPlistPath)) + { + warnings.Add($"No Info.plist found in {bundlePath}"); + return null; + } + + var bundleInfo = _infoPlistParser.Parse(infoPlistPath, cancellationToken); + if (bundleInfo is null) + { + warnings.Add($"Failed to parse Info.plist in {bundlePath}"); + return null; + } + + // Parse entitlements if available + var entitlementsPath = _entitlementsParser.FindEntitlementsFile(bundlePath); + var entitlements = entitlementsPath is not null + ? _entitlementsParser.Parse(entitlementsPath, cancellationToken) + : BundleEntitlements.Empty; + + // Compute CodeResources hash if available + var codeResourcesHash = ComputeCodeResourcesHash(bundlePath); + + // Determine version (prefer short version, fallback to bundle version) + var version = !string.IsNullOrWhiteSpace(bundleInfo.ShortVersion) + ? bundleInfo.ShortVersion + : bundleInfo.Version; + + // Build PURL + var purl = PackageUrlBuilder.BuildMacOsBundle(bundleInfo.BundleIdentifier, version); + + // Build vendor metadata + var vendorMetadata = BuildVendorMetadata(bundleInfo, entitlements, codeResourcesHash, bundlePath); + + // Discover key files + var files = DiscoverBundleFiles(bundlePath, bundleInfo); + + // Extract display name + var displayName = bundleInfo.BundleDisplayName ?? bundleInfo.BundleName; + + return new OSPackageRecord( + AnalyzerId, + purl, + displayName, + version, + DetermineArchitecture(bundlePath), + PackageEvidenceSource.MacOsBundle, + epoch: null, + release: bundleInfo.Version != version ? bundleInfo.Version : null, + sourcePackage: ExtractVendorFromBundleId(bundleInfo.BundleIdentifier), + license: null, + cveHints: null, + provides: null, + depends: null, + files: files, + vendorMetadata: vendorMetadata); + } + + private static Dictionary BuildVendorMetadata( + BundleInfo bundleInfo, + BundleEntitlements entitlements, + string? codeResourcesHash, + string bundlePath) + { + var metadata = new Dictionary(StringComparer.Ordinal) + { + ["macos:bundle_id"] = bundleInfo.BundleIdentifier, + ["macos:bundle_type"] = bundleInfo.BundlePackageType, + ["macos:bundle_path"] = bundlePath, + }; + + if (!string.IsNullOrWhiteSpace(bundleInfo.MinimumSystemVersion)) + { + metadata["macos:min_os_version"] = bundleInfo.MinimumSystemVersion; + } + + if (!string.IsNullOrWhiteSpace(bundleInfo.Executable)) + { + metadata["macos:executable"] = bundleInfo.Executable; + } + + if (bundleInfo.SupportedPlatforms.Count > 0) + { + metadata["macos:platforms"] = string.Join(",", bundleInfo.SupportedPlatforms); + } + + // Entitlements metadata + metadata["macos:sandboxed"] = entitlements.IsSandboxed.ToString().ToLowerInvariant(); + metadata["macos:hardened_runtime"] = entitlements.HasHardenedRuntime.ToString().ToLowerInvariant(); + + if (entitlements.Categories.Count > 0) + { + metadata["macos:capability_categories"] = string.Join(",", entitlements.Categories); + } + + if (entitlements.HighRiskEntitlements.Count > 0) + { + metadata["macos:high_risk_entitlements"] = string.Join(",", entitlements.HighRiskEntitlements); + } + + if (!string.IsNullOrWhiteSpace(codeResourcesHash)) + { + metadata["macos:code_resources_hash"] = codeResourcesHash; + } + + return metadata; + } + + private static string? ComputeCodeResourcesHash(string bundlePath) + { + var codeResourcesPath = Path.Combine(bundlePath, "Contents", "_CodeSignature", "CodeResources"); + if (!File.Exists(codeResourcesPath)) + { + return null; + } + + try + { + using var stream = File.OpenRead(codeResourcesPath); + var hash = SHA256.HashData(stream); + return $"sha256:{Convert.ToHexStringLower(hash)}"; + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException) + { + return null; + } + } + + private static List DiscoverBundleFiles(string bundlePath, BundleInfo bundleInfo) + { + var files = new List(); + + // Add key bundle files + var contentsPath = Path.Combine(bundlePath, "Contents"); + + // Executable + if (!string.IsNullOrWhiteSpace(bundleInfo.Executable)) + { + var execPath = Path.Combine(contentsPath, "MacOS", bundleInfo.Executable); + if (File.Exists(execPath)) + { + files.Add(new OSPackageFileEvidence( + $"Contents/MacOS/{bundleInfo.Executable}", + layerDigest: null, + sha256: null, + sizeBytes: null, + isConfigFile: false)); + } + } + + // Info.plist + var infoPlistRelative = "Contents/Info.plist"; + if (File.Exists(Path.Combine(bundlePath, infoPlistRelative))) + { + files.Add(new OSPackageFileEvidence( + infoPlistRelative, + layerDigest: null, + sha256: null, + sizeBytes: null, + isConfigFile: true)); + } + + return files; + } + + private static string DetermineArchitecture(string bundlePath) + { + // Check for universal binary indicators + var macosPath = Path.Combine(bundlePath, "Contents", "MacOS"); + if (Directory.Exists(macosPath)) + { + // Look for architecture-specific subdirectories or lipo info + // For now, default to universal + return "universal"; + } + + return "universal"; + } + + private static string? ExtractVendorFromBundleId(string bundleId) + { + var parts = bundleId.Split('.', StringSplitOptions.RemoveEmptyEntries); + if (parts.Length >= 2) + { + return parts[1]; + } + + return null; + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/MacOsBundleAnalyzerPlugin.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/MacOsBundleAnalyzerPlugin.cs new file mode 100644 index 000000000..1dfc29cfc --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/MacOsBundleAnalyzerPlugin.cs @@ -0,0 +1,26 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Plugin; + +namespace StellaOps.Scanner.Analyzers.OS.MacOsBundle; + +/// +/// Plugin that registers the macOS bundle analyzer for application bundle discovery. +/// +public sealed class MacOsBundleAnalyzerPlugin : IOSAnalyzerPlugin +{ + /// + public string Name => "StellaOps.Scanner.Analyzers.OS.MacOsBundle"; + + /// + public bool IsAvailable(IServiceProvider services) => services is not null; + + /// + public IOSPackageAnalyzer CreateAnalyzer(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + var loggerFactory = services.GetRequiredService(); + return new MacOsBundleAnalyzer(loggerFactory.CreateLogger()); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/Properties/AssemblyInfo.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/Properties/AssemblyInfo.cs new file mode 100644 index 000000000..4f8a048b8 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/Properties/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests")] diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/StellaOps.Scanner.Analyzers.OS.MacOsBundle.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/StellaOps.Scanner.Analyzers.OS.MacOsBundle.csproj new file mode 100644 index 000000000..bbd69f3c1 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.MacOsBundle/StellaOps.Scanner.Analyzers.OS.MacOsBundle.csproj @@ -0,0 +1,16 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + + diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/BomParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/BomParser.cs new file mode 100644 index 000000000..9aee45ede --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/BomParser.cs @@ -0,0 +1,198 @@ +using System.Buffers.Binary; + +namespace StellaOps.Scanner.Analyzers.OS.Pkgutil; + +/// +/// Parses macOS BOM (Bill of Materials) files to enumerate installed files. +/// BOM files are used by pkgutil to track which files were installed by a package. +/// +internal sealed class BomParser +{ + /// + /// BOM file magic header: "BOMStore" + /// + private static ReadOnlySpan BomMagic => "BOMStore"u8; + + /// + /// Extracts the list of installed file paths from a BOM file. + /// + /// + /// BOM files have a complex binary format. This implementation extracts + /// the file paths from the BOM tree structure, focusing on the Paths tree. + /// + public IReadOnlyList Parse(string bomPath, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(bomPath); + + if (!File.Exists(bomPath)) + { + return Array.Empty(); + } + + try + { + using var stream = File.OpenRead(bomPath); + return Parse(stream, cancellationToken); + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException) + { + return Array.Empty(); + } + } + + /// + /// Extracts file paths from a BOM stream. + /// + public IReadOnlyList Parse(Stream stream, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(stream); + + var results = new List(); + + try + { + // Read header (512+ bytes) + var header = new byte[512]; + if (stream.Read(header, 0, 512) < 512) + { + return results; + } + + // Verify magic + if (!header.AsSpan(0, 8).SequenceEqual(BomMagic)) + { + return results; + } + + // BOM format is complex - we'll do a simplified extraction + // by scanning for null-terminated strings that look like paths + stream.Position = 0; + using var reader = new BinaryReader(stream); + var content = reader.ReadBytes((int)Math.Min(stream.Length, 10 * 1024 * 1024)); // Max 10MB + + var paths = ExtractPaths(content, cancellationToken); + foreach (var path in paths) + { + results.Add(new BomFileEntry(path, IsDirectory: path.EndsWith('/'))); + } + } + catch (Exception ex) when (ex is IOException or EndOfStreamException) + { + // Return partial results + } + + return results; + } + + /// + /// Finds the corresponding BOM file for a receipt plist. + /// + public string? FindBomForReceipt(string plistPath) + { + if (string.IsNullOrWhiteSpace(plistPath)) + { + return null; + } + + // BOM files are named with same base name as plist + // e.g., com.apple.pkg.Safari.plist -> com.apple.pkg.Safari.bom + var directory = Path.GetDirectoryName(plistPath); + var baseName = Path.GetFileNameWithoutExtension(plistPath); + + if (string.IsNullOrEmpty(directory) || string.IsNullOrEmpty(baseName)) + { + return null; + } + + var bomPath = Path.Combine(directory, baseName + ".bom"); + return File.Exists(bomPath) ? bomPath : null; + } + + private static IEnumerable ExtractPaths(byte[] content, CancellationToken cancellationToken) + { + var paths = new HashSet(StringComparer.Ordinal); + + // Scan for null-terminated strings that look like Unix paths + int start = -1; + for (int i = 0; i < content.Length; i++) + { + cancellationToken.ThrowIfCancellationRequested(); + + byte b = content[i]; + + if (start == -1) + { + // Look for path start indicators + if (b == '/' || b == '.') + { + start = i; + } + } + else + { + if (b == 0) // Null terminator + { + var length = i - start; + if (length > 1 && length < 4096) + { + var potential = System.Text.Encoding.UTF8.GetString(content, start, length); + if (IsValidPath(potential)) + { + paths.Add(potential); + } + } + + start = -1; + } + else if (!IsValidPathChar(b)) + { + start = -1; + } + } + } + + return paths.OrderBy(p => p, StringComparer.Ordinal); + } + + private static bool IsValidPath(string path) + { + if (string.IsNullOrWhiteSpace(path)) + { + return false; + } + + // Must start with / or . + if (!path.StartsWith('/') && !path.StartsWith('.')) + { + return false; + } + + // Must not contain control characters or obviously invalid sequences + foreach (char c in path) + { + if (c < 32 && c != '\t') + { + return false; + } + } + + // Filter out common false positives + if (path.Contains("//") || path.EndsWith("/.") || path.Contains("/../")) + { + return false; + } + + return true; + } + + private static bool IsValidPathChar(byte b) + { + // Allow printable ASCII and common path characters + return b >= 32 && b < 127; + } +} + +/// +/// Represents a file entry from a BOM file. +/// +internal sealed record BomFileEntry(string Path, bool IsDirectory); diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/PkgutilAnalyzerPlugin.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/PkgutilAnalyzerPlugin.cs new file mode 100644 index 000000000..7e9082d92 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/PkgutilAnalyzerPlugin.cs @@ -0,0 +1,26 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Plugin; + +namespace StellaOps.Scanner.Analyzers.OS.Pkgutil; + +/// +/// Plugin that registers the pkgutil package analyzer for macOS receipt discovery. +/// +public sealed class PkgutilAnalyzerPlugin : IOSAnalyzerPlugin +{ + /// + public string Name => "StellaOps.Scanner.Analyzers.OS.Pkgutil"; + + /// + public bool IsAvailable(IServiceProvider services) => services is not null; + + /// + public IOSPackageAnalyzer CreateAnalyzer(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + var loggerFactory = services.GetRequiredService(); + return new PkgutilPackageAnalyzer(loggerFactory.CreateLogger()); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/PkgutilPackageAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/PkgutilPackageAnalyzer.cs new file mode 100644 index 000000000..f68e7cc42 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/PkgutilPackageAnalyzer.cs @@ -0,0 +1,229 @@ +using System.Collections.ObjectModel; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Analyzers; +using StellaOps.Scanner.Analyzers.OS.Helpers; + +namespace StellaOps.Scanner.Analyzers.OS.Pkgutil; + +/// +/// Analyzes macOS pkgutil receipts to extract installed package information. +/// Parses receipt plists from /var/db/receipts/ and optionally enumerates +/// installed files from corresponding BOM files. +/// +internal sealed class PkgutilPackageAnalyzer : OsPackageAnalyzerBase +{ + private static readonly IReadOnlyList EmptyPackages = + new ReadOnlyCollection(Array.Empty()); + + private readonly PkgutilReceiptParser _receiptParser = new(); + private readonly BomParser _bomParser = new(); + + /// + /// Maximum number of files to enumerate from BOM per package. + /// + private const int MaxFilesPerPackage = 1000; + + public PkgutilPackageAnalyzer(ILogger logger) + : base(logger) + { + } + + public override string AnalyzerId => "pkgutil"; + + protected override ValueTask> ExecuteCoreAsync( + OSPackageAnalyzerContext context, + CancellationToken cancellationToken) + { + var receiptsPath = Path.Combine(context.RootPath, "var", "db", "receipts"); + if (!Directory.Exists(receiptsPath)) + { + Logger.LogInformation("pkgutil receipts directory not found at {Path}; skipping analyzer.", receiptsPath); + return ValueTask.FromResult>(EmptyPackages); + } + + var receipts = _receiptParser.DiscoverReceipts(context.RootPath, cancellationToken); + if (receipts.Count == 0) + { + Logger.LogInformation("No pkgutil receipts found; skipping analyzer."); + return ValueTask.FromResult>(EmptyPackages); + } + + Logger.LogInformation("Discovered {Count} pkgutil receipts", receipts.Count); + + var records = new List(receipts.Count); + foreach (var receipt in receipts) + { + cancellationToken.ThrowIfCancellationRequested(); + + var record = CreateRecordFromReceipt(receipt, cancellationToken); + if (record is not null) + { + records.Add(record); + } + } + + Logger.LogInformation("Created {Count} package records from pkgutil receipts", records.Count); + + // Sort for deterministic output + records.Sort(); + return ValueTask.FromResult>(records); + } + + private OSPackageRecord? CreateRecordFromReceipt( + PkgutilReceipt receipt, + CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(receipt.Identifier) || + string.IsNullOrWhiteSpace(receipt.Version)) + { + return null; + } + + // Build PURL + var purl = PackageUrlBuilder.BuildPkgutil(receipt.Identifier, receipt.Version); + + // Determine architecture from identifier or install path heuristics + var architecture = DetectArchitecture(receipt); + + // Extract files from BOM if available + var files = new List(); + if (!string.IsNullOrWhiteSpace(receipt.SourcePath)) + { + var bomPath = _bomParser.FindBomForReceipt(receipt.SourcePath); + if (bomPath is not null) + { + var bomEntries = _bomParser.Parse(bomPath, cancellationToken); + var count = 0; + foreach (var entry in bomEntries) + { + if (count >= MaxFilesPerPackage) + { + break; + } + + if (!entry.IsDirectory) + { + files.Add(new OSPackageFileEvidence( + entry.Path, + layerDigest: null, + sha256: null, + sizeBytes: null, + isConfigFile: IsConfigPath(entry.Path))); + count++; + } + } + } + } + + // Build vendor metadata + var vendorMetadata = new Dictionary(StringComparer.Ordinal) + { + ["pkgutil:identifier"] = receipt.Identifier, + ["pkgutil:volume"] = receipt.VolumePath, + }; + + if (receipt.InstallDate.HasValue) + { + vendorMetadata["pkgutil:install_date"] = receipt.InstallDate.Value.ToString("o"); + } + + if (!string.IsNullOrWhiteSpace(receipt.InstallPrefixPath)) + { + vendorMetadata["pkgutil:install_prefix"] = receipt.InstallPrefixPath; + } + + if (!string.IsNullOrWhiteSpace(receipt.InstallProcessName)) + { + vendorMetadata["pkgutil:installer"] = receipt.InstallProcessName; + } + + // Extract package name from identifier (last component typically) + var name = ExtractNameFromIdentifier(receipt.Identifier); + + return new OSPackageRecord( + AnalyzerId, + purl, + name, + receipt.Version, + architecture, + PackageEvidenceSource.PkgutilReceipt, + epoch: null, + release: null, + sourcePackage: ExtractVendorFromIdentifier(receipt.Identifier), + license: null, + cveHints: null, + provides: null, + depends: null, + files: files, + vendorMetadata: vendorMetadata); + } + + private static string ExtractNameFromIdentifier(string identifier) + { + // Identifier format is typically: com.vendor.product or com.apple.pkg.Safari + var parts = identifier.Split('.', StringSplitOptions.RemoveEmptyEntries); + if (parts.Length == 0) + { + return identifier; + } + + // Return the last meaningful part + var last = parts[^1]; + + // Skip common suffixes + if (parts.Length > 1 && + (last.Equals("pkg", StringComparison.OrdinalIgnoreCase) || + last.Equals("app", StringComparison.OrdinalIgnoreCase))) + { + return parts[^2]; + } + + return last; + } + + private static string? ExtractVendorFromIdentifier(string identifier) + { + // Extract vendor from identifier (e.g., "com.apple.pkg.Safari" -> "apple") + var parts = identifier.Split('.', StringSplitOptions.RemoveEmptyEntries); + if (parts.Length >= 2) + { + return parts[1]; + } + + return null; + } + + private static string DetectArchitecture(PkgutilReceipt receipt) + { + // Check install path for architecture hints + var prefix = receipt.InstallPrefixPath ?? receipt.VolumePath; + if (!string.IsNullOrWhiteSpace(prefix)) + { + if (prefix.Contains("/arm64/", StringComparison.OrdinalIgnoreCase) || + prefix.Contains("/aarch64/", StringComparison.OrdinalIgnoreCase)) + { + return "arm64"; + } + + if (prefix.Contains("/x86_64/", StringComparison.OrdinalIgnoreCase) || + prefix.Contains("/amd64/", StringComparison.OrdinalIgnoreCase)) + { + return "x86_64"; + } + } + + // Default to universal (noarch) for macOS packages + return "universal"; + } + + private static bool IsConfigPath(string path) + { + // Common macOS configuration paths + return path.Contains("/Preferences/", StringComparison.OrdinalIgnoreCase) || + path.Contains("/etc/", StringComparison.OrdinalIgnoreCase) || + path.EndsWith(".plist", StringComparison.OrdinalIgnoreCase) || + path.EndsWith(".conf", StringComparison.OrdinalIgnoreCase) || + path.EndsWith(".cfg", StringComparison.OrdinalIgnoreCase); + } +} diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/PkgutilReceiptParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/PkgutilReceiptParser.cs new file mode 100644 index 000000000..a39307330 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/PkgutilReceiptParser.cs @@ -0,0 +1,144 @@ +using Claunia.PropertyList; + +namespace StellaOps.Scanner.Analyzers.OS.Pkgutil; + +/// +/// Parses macOS pkgutil receipt .plist files from /var/db/receipts/. +/// +internal sealed class PkgutilReceiptParser +{ + /// + /// Parses a pkgutil receipt plist file. + /// + public PkgutilReceipt? Parse(string plistPath, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(plistPath); + + if (!File.Exists(plistPath)) + { + return null; + } + + try + { + using var stream = File.OpenRead(plistPath); + return Parse(stream, plistPath, cancellationToken); + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or FormatException) + { + return null; + } + } + + /// + /// Parses a pkgutil receipt plist from a stream. + /// + public PkgutilReceipt? Parse(Stream stream, string? sourcePath = null, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(stream); + + try + { + var plist = PropertyListParser.Parse(stream); + if (plist is not NSDictionary root) + { + return null; + } + + var identifier = GetString(root, "PackageIdentifier") ?? GetString(root, "packageIdentifier"); + if (string.IsNullOrWhiteSpace(identifier)) + { + return null; + } + + var version = GetString(root, "PackageVersion") ?? GetString(root, "packageVersion") ?? "0.0.0"; + var installDate = GetDate(root, "InstallDate"); + var installPrefixPath = GetString(root, "InstallPrefixPath"); + var volumePath = GetString(root, "VolumePath") ?? "/"; + var installProcessName = GetString(root, "InstallProcessName"); + + return new PkgutilReceipt( + Identifier: identifier.Trim(), + Version: version.Trim(), + InstallDate: installDate, + InstallPrefixPath: installPrefixPath?.Trim(), + VolumePath: volumePath.Trim(), + InstallProcessName: installProcessName?.Trim(), + SourcePath: sourcePath); + } + catch (Exception ex) when (ex is FormatException or InvalidOperationException or ArgumentException) + { + return null; + } + } + + /// + /// Discovers and parses all receipt plist files in the receipts directory. + /// + public IReadOnlyList DiscoverReceipts( + string rootPath, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(rootPath); + + var receiptsPath = Path.Combine(rootPath, "var", "db", "receipts"); + if (!Directory.Exists(receiptsPath)) + { + return Array.Empty(); + } + + var results = new List(); + + try + { + foreach (var plistFile in Directory.EnumerateFiles(receiptsPath, "*.plist")) + { + cancellationToken.ThrowIfCancellationRequested(); + + var receipt = Parse(plistFile, cancellationToken); + if (receipt is not null) + { + results.Add(receipt); + } + } + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException) + { + // Partial results are acceptable + } + + return results; + } + + private static string? GetString(NSDictionary dict, string key) + { + if (dict.TryGetValue(key, out var value) && value is NSString nsString) + { + return nsString.Content; + } + + return null; + } + + private static DateTimeOffset? GetDate(NSDictionary dict, string key) + { + if (dict.TryGetValue(key, out var value) && value is NSDate nsDate) + { + return new DateTimeOffset(nsDate.Date, TimeSpan.Zero); + } + + return null; + } +} + +/// +/// Represents parsed macOS pkgutil receipt metadata. +/// +internal sealed record PkgutilReceipt( + string Identifier, + string Version, + DateTimeOffset? InstallDate, + string? InstallPrefixPath, + string VolumePath, + string? InstallProcessName, + string? SourcePath); diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/Properties/AssemblyInfo.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/Properties/AssemblyInfo.cs new file mode 100644 index 000000000..3a63cdb31 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/Properties/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests")] diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/StellaOps.Scanner.Analyzers.OS.Pkgutil.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/StellaOps.Scanner.Analyzers.OS.Pkgutil.csproj new file mode 100644 index 000000000..bbd69f3c1 --- /dev/null +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Pkgutil/StellaOps.Scanner.Analyzers.OS.Pkgutil.csproj @@ -0,0 +1,16 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + + diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Helpers/PackageUrlBuilder.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Helpers/PackageUrlBuilder.cs index 4c4b90a29..36ee49c26 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Helpers/PackageUrlBuilder.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Helpers/PackageUrlBuilder.cs @@ -27,6 +27,58 @@ public static class PackageUrlBuilder return $"pkg:rpm/{Escape(name)}@{versionComponent}{releaseComponent}?arch={EscapeQuery(architecture)}"; } + /// + /// Builds a PURL for a Homebrew formula. + /// Format: pkg:brew/{tap}/{formula}@{version}?revision={revision} + /// + public static string BuildHomebrew(string tap, string formula, string version, int revision) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tap); + ArgumentException.ThrowIfNullOrWhiteSpace(formula); + ArgumentException.ThrowIfNullOrWhiteSpace(version); + + var normalizedTap = tap.Trim().ToLowerInvariant(); + var builder = new StringBuilder(); + builder.Append("pkg:brew/"); + builder.Append(Escape(normalizedTap)); + builder.Append('/'); + builder.Append(Escape(formula)); + builder.Append('@'); + builder.Append(Escape(version)); + + if (revision > 0) + { + builder.Append("?revision="); + builder.Append(revision); + } + + return builder.ToString(); + } + + /// + /// Builds a PURL for a macOS pkgutil receipt. + /// Format: pkg:generic/apple/{identifier}@{version} + /// + public static string BuildPkgutil(string identifier, string version) + { + ArgumentException.ThrowIfNullOrWhiteSpace(identifier); + ArgumentException.ThrowIfNullOrWhiteSpace(version); + + return $"pkg:generic/apple/{Escape(identifier)}@{Escape(version)}"; + } + + /// + /// Builds a PURL for a macOS application bundle. + /// Format: pkg:generic/macos-app/{bundleId}@{version} + /// + public static string BuildMacOsBundle(string bundleId, string version) + { + ArgumentException.ThrowIfNullOrWhiteSpace(bundleId); + ArgumentException.ThrowIfNullOrWhiteSpace(version); + + return $"pkg:generic/macos-app/{Escape(bundleId)}@{Escape(version)}"; + } + private static string Escape(string value) { ArgumentException.ThrowIfNullOrWhiteSpace(value); diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/PackageEvidenceSource.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/PackageEvidenceSource.cs index f971699cf..aa1959e44 100644 --- a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/PackageEvidenceSource.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/PackageEvidenceSource.cs @@ -6,4 +6,7 @@ public enum PackageEvidenceSource ApkDatabase, DpkgStatus, RpmDatabase, + HomebrewCellar, + PkgutilReceipt, + MacOsBundle, } diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Container/PythonZipappAdapterTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Container/PythonZipappAdapterTests.cs new file mode 100644 index 000000000..9604e3328 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Container/PythonZipappAdapterTests.cs @@ -0,0 +1,338 @@ +using System.IO.Compression; +using System.Text; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Tests.Container; + +public sealed class PythonZipappAdapterTests : IDisposable +{ + private readonly string _tempDir; + + public PythonZipappAdapterTests() + { + _tempDir = Path.Combine(Path.GetTempPath(), $"zipapp-tests-{Guid.NewGuid():N}"); + Directory.CreateDirectory(_tempDir); + } + + public void Dispose() + { + try + { + Directory.Delete(_tempDir, recursive: true); + } + catch + { + // Ignore cleanup errors + } + } + + [Fact] + public void DiscoverZipapps_FindsPyzFiles() + { + // Arrange + var pyzPath = Path.Combine(_tempDir, "app.pyz"); + CreateMinimalZipapp(pyzPath, "#!/usr/bin/env python3\n"); + + // Act + var discovered = PythonZipappAdapter.DiscoverZipapps(_tempDir); + + // Assert + Assert.Single(discovered); + Assert.Contains(discovered, p => p.EndsWith("app.pyz")); + } + + [Fact] + public void DiscoverZipapps_FindsPyzwFiles() + { + // Arrange + var pyzwPath = Path.Combine(_tempDir, "app.pyzw"); + CreateMinimalZipapp(pyzwPath, "#!/usr/bin/env pythonw\n"); + + // Act + var discovered = PythonZipappAdapter.DiscoverZipapps(_tempDir); + + // Assert + Assert.Single(discovered); + Assert.Contains(discovered, p => p.EndsWith("app.pyzw")); + } + + [Fact] + public void DiscoverZipapps_FindsInContainerLayers() + { + // Arrange + var layersDir = Path.Combine(_tempDir, "layers", "layer1", "fs", "app"); + Directory.CreateDirectory(layersDir); + var pyzPath = Path.Combine(layersDir, "container-app.pyz"); + CreateMinimalZipapp(pyzPath, "#!/usr/bin/python3.11\n"); + + // Act + var discovered = PythonZipappAdapter.DiscoverZipapps(_tempDir); + + // Assert + Assert.Single(discovered); + Assert.Contains(discovered, p => p.EndsWith("container-app.pyz")); + } + + [Fact] + public void AnalyzeZipapp_ExtractsShebang() + { + // Arrange + var pyzPath = Path.Combine(_tempDir, "app.pyz"); + CreateMinimalZipapp(pyzPath, "#!/usr/bin/python3.11\n"); + + // Act + var info = PythonZipappAdapter.AnalyzeZipapp(pyzPath); + + // Assert + Assert.NotNull(info); + Assert.Equal("/usr/bin/python3.11", info.Shebang); + Assert.Equal("3.11", info.PythonVersion); + } + + [Fact] + public void AnalyzeZipapp_ExtractsEnvShebang() + { + // Arrange + var pyzPath = Path.Combine(_tempDir, "app.pyz"); + CreateMinimalZipapp(pyzPath, "#!/usr/bin/env python3.10\n"); + + // Act + var info = PythonZipappAdapter.AnalyzeZipapp(pyzPath); + + // Assert + Assert.NotNull(info); + Assert.Contains("/usr/bin/env python3.10", info.Shebang); + Assert.Equal("3.10", info.PythonVersion); + } + + [Fact] + public void AnalyzeZipapp_DetectsMainPy() + { + // Arrange + var pyzPath = Path.Combine(_tempDir, "app.pyz"); + CreateZipappWithMain(pyzPath, "#!/usr/bin/python3\n", "print('Hello')"); + + // Act + var info = PythonZipappAdapter.AnalyzeZipapp(pyzPath); + + // Assert + Assert.NotNull(info); + Assert.True(info.HasMainPy); + } + + [Fact] + public void AnalyzeZipapp_DetectsMissingMain_GeneratesWarning() + { + // Arrange + var pyzPath = Path.Combine(_tempDir, "app.pyz"); + CreateMinimalZipapp(pyzPath, "#!/usr/bin/python3\n"); + + // Act + var info = PythonZipappAdapter.AnalyzeZipapp(pyzPath); + + // Assert + Assert.NotNull(info); + Assert.False(info.HasMainPy); + Assert.Contains(info.Warnings, w => w.Contains("missing __main__.py")); + } + + [Fact] + public void AnalyzeZipapp_DetectsWindowsApp() + { + // Arrange + var pyzwPath = Path.Combine(_tempDir, "app.pyzw"); + CreateZipappWithMain(pyzwPath, "#!/usr/bin/pythonw\n", "print('Hello')"); + + // Act + var info = PythonZipappAdapter.AnalyzeZipapp(pyzwPath); + + // Assert + Assert.NotNull(info); + Assert.True(info.IsWindowsApp); + Assert.Contains(info.Warnings, w => w.Contains("Windows-specific")); + } + + [Fact] + public void AnalyzeZipapp_DetectsEnvShebangWarning() + { + // Arrange + var pyzPath = Path.Combine(_tempDir, "app.pyz"); + CreateZipappWithMain(pyzPath, "#!/usr/bin/env python3\n", "print('Hello')"); + + // Act + var info = PythonZipappAdapter.AnalyzeZipapp(pyzPath); + + // Assert + Assert.NotNull(info); + Assert.Contains(info.Warnings, w => w.Contains("/usr/bin/env") && w.Contains("may vary")); + } + + [Fact] + public void AnalyzeZipapp_ExtractsEmbeddedRequirements() + { + // Arrange + var pyzPath = Path.Combine(_tempDir, "app.pyz"); + var requirements = "requests>=2.0\nflask==2.1.0\n# Comment\nnumpy"; + CreateZipappWithRequirements(pyzPath, "#!/usr/bin/python3\n", requirements); + + // Act + var info = PythonZipappAdapter.AnalyzeZipapp(pyzPath); + + // Assert + Assert.NotNull(info); + Assert.Contains("requests", info.EmbeddedDependencies); + Assert.Contains("flask", info.EmbeddedDependencies); + Assert.Contains("numpy", info.EmbeddedDependencies); + } + + [Fact] + public void AnalyzeAll_ReturnsAnalysisForMultipleZipapps() + { + // Arrange + var pyz1 = Path.Combine(_tempDir, "app1.pyz"); + var pyz2 = Path.Combine(_tempDir, "app2.pyz"); + CreateZipappWithMain(pyz1, "#!/usr/bin/python3.10\n", "print('App1')"); + CreateZipappWithMain(pyz2, "#!/usr/bin/python3.11\n", "print('App2')"); + + // Act + var analysis = PythonZipappAdapter.AnalyzeAll(_tempDir); + + // Assert + Assert.Equal(2, analysis.Zipapps.Count); + Assert.True(analysis.HasZipapps); + Assert.Contains(analysis.Warnings, w => w.Contains("Multiple zipapps")); + } + + [Fact] + public void AnalyzeAll_CollectsVersionsFromShebangs() + { + // Arrange + var pyz1 = Path.Combine(_tempDir, "app1.pyz"); + var pyz2 = Path.Combine(_tempDir, "app2.pyz"); + CreateZipappWithMain(pyz1, "#!/usr/bin/python3.10\n", "print('App1')"); + CreateZipappWithMain(pyz2, "#!/usr/bin/python3.11\n", "print('App2')"); + + // Act + var analysis = PythonZipappAdapter.AnalyzeAll(_tempDir); + + // Assert + var versioned = analysis.Zipapps.Where(z => z.PythonVersion != null).ToList(); + Assert.Equal(2, versioned.Count); + Assert.Contains(versioned, z => z.PythonVersion == "3.10"); + Assert.Contains(versioned, z => z.PythonVersion == "3.11"); + } + + [Fact] + public void AnalyzeZipapp_ExtractsEntryModuleFromRunpy() + { + // Arrange + var pyzPath = Path.Combine(_tempDir, "app.pyz"); + var mainContent = @" +import runpy +runpy.run_module('mypackage.main') +"; + CreateZipappWithMain(pyzPath, "#!/usr/bin/python3\n", mainContent); + + // Act + var info = PythonZipappAdapter.AnalyzeZipapp(pyzPath); + + // Assert + Assert.NotNull(info); + Assert.Equal("mypackage.main", info.EntryModule); + } + + [Fact] + public void ToMetadata_GeneratesExpectedKeys() + { + // Arrange + var pyzPath = Path.Combine(_tempDir, "app.pyz"); + CreateZipappWithMain(pyzPath, "#!/usr/bin/python3.11\n", "print('Hello')"); + var info = PythonZipappAdapter.AnalyzeZipapp(pyzPath); + + // Act + var metadata = info!.ToMetadata(); + + // Assert + Assert.Contains(metadata, m => m.Key == "zipapp.path"); + Assert.Contains(metadata, m => m.Key == "zipapp.hasMain" && m.Value == "true"); + Assert.Contains(metadata, m => m.Key == "zipapp.shebang"); + Assert.Contains(metadata, m => m.Key == "zipapp.pythonVersion" && m.Value == "3.11"); + } + + [Fact] + public void AnalyzeZipapp_ReturnsNull_ForNonExistentFile() + { + // Act + var info = PythonZipappAdapter.AnalyzeZipapp(Path.Combine(_tempDir, "nonexistent.pyz")); + + // Assert + Assert.Null(info); + } + + [Fact] + public void AnalyzeZipapp_HandlesCorruptedArchive() + { + // Arrange + var pyzPath = Path.Combine(_tempDir, "corrupt.pyz"); + File.WriteAllText(pyzPath, "#!/usr/bin/python3\nNot a valid zip archive"); + + // Act + var info = PythonZipappAdapter.AnalyzeZipapp(pyzPath); + + // Assert - should return null for corrupted archives + Assert.Null(info); + } + + private static void CreateMinimalZipapp(string path, string shebang) + { + using var fileStream = File.Create(path); + // Write shebang + var shebangBytes = Encoding.UTF8.GetBytes(shebang); + fileStream.Write(shebangBytes); + + // Write minimal zip archive + using var archive = new ZipArchive(fileStream, ZipArchiveMode.Create, leaveOpen: true); + var entry = archive.CreateEntry("placeholder.txt"); + using var entryStream = entry.Open(); + using var writer = new StreamWriter(entryStream); + writer.Write("placeholder"); + } + + private static void CreateZipappWithMain(string path, string shebang, string mainContent) + { + using var fileStream = File.Create(path); + // Write shebang + var shebangBytes = Encoding.UTF8.GetBytes(shebang); + fileStream.Write(shebangBytes); + + // Write zip archive with __main__.py + using var archive = new ZipArchive(fileStream, ZipArchiveMode.Create, leaveOpen: true); + var entry = archive.CreateEntry("__main__.py"); + using var entryStream = entry.Open(); + using var writer = new StreamWriter(entryStream); + writer.Write(mainContent); + } + + private static void CreateZipappWithRequirements(string path, string shebang, string requirements) + { + using var fileStream = File.Create(path); + // Write shebang + var shebangBytes = Encoding.UTF8.GetBytes(shebang); + fileStream.Write(shebangBytes); + + // Write zip archive with __main__.py and requirements.txt + using var archive = new ZipArchive(fileStream, ZipArchiveMode.Create, leaveOpen: true); + + var mainEntry = archive.CreateEntry("__main__.py"); + using (var mainStream = mainEntry.Open()) + using (var mainWriter = new StreamWriter(mainStream)) + { + mainWriter.Write("print('Hello')"); + } + + var reqEntry = archive.CreateEntry("requirements.txt"); + using var reqStream = reqEntry.Open(); + using var reqWriter = new StreamWriter(reqStream); + reqWriter.Write(requirements); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/PythonFixtureTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/PythonFixtureTests.cs new file mode 100644 index 000000000..72aa9c8e7 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/PythonFixtureTests.cs @@ -0,0 +1,277 @@ +using System.Collections.Immutable; +using System.Text.Json; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Capabilities; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Entrypoints; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Framework; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Imports; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Observations; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Packaging; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.VirtualFileSystem; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Tests.Fixtures; + +/// +/// Fixture-based tests for Python analyzer covering various project structures. +/// +public sealed class PythonFixtureTests +{ + private static readonly string FixturesPath = Path.Combine( + AppContext.BaseDirectory, + "Fixtures", "lang", "python"); + + /// + /// Tests that namespace packages (PEP 420) are correctly detected. + /// + [Fact] + public async Task NamespacePackage_DetectsMultipleSubpackages() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = Path.Combine(FixturesPath, "namespace-pkg"); + + if (!Directory.Exists(fixturePath)) + { + // Fixture might not be in output yet + return; + } + + var sitePackagesPath = Path.Combine(fixturePath, "lib", "python3.11", "site-packages"); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSitePackages(sitePackagesPath) + .Build(); + + var discovery = new PythonPackageDiscovery(); + var result = await discovery.DiscoverAsync(vfs, cancellationToken); + + Assert.True(result.IsSuccessful); + Assert.Equal(2, result.Packages.Count(p => p.Name.Contains("mynamespace"))); + + // Verify both subpackages are found + Assert.Contains(result.Packages, p => p.Name == "mynamespace-subpkg1"); + Assert.Contains(result.Packages, p => p.Name == "mynamespace-subpkg2"); + } + + /// + /// Tests that simple virtualenv packages are correctly detected. + /// + [Fact] + public async Task SimpleVenv_DetectsPackageWithEntrypoints() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = Path.Combine(FixturesPath, "simple-venv"); + + if (!Directory.Exists(fixturePath)) + { + return; + } + + var sitePackagesPath = Path.Combine(fixturePath, "lib", "python3.11", "site-packages"); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSitePackages(sitePackagesPath) + .Build(); + + var discovery = new PythonPackageDiscovery(); + var result = await discovery.DiscoverAsync(vfs, cancellationToken); + + Assert.True(result.IsSuccessful); + Assert.Contains(result.Packages, p => p.Name == "simple"); + + var simplePkg = result.Packages.First(p => p.Name == "simple"); + Assert.Equal("1.0.0", simplePkg.Version); + Assert.Equal("pip", simplePkg.InstallerTool); + } + + /// + /// Tests that editable (development) installs are correctly detected. + /// + [Fact] + public async Task LayeredEditable_DetectsEditableInstall() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = Path.Combine(FixturesPath, "layered-editable"); + + if (!Directory.Exists(fixturePath)) + { + return; + } + + var layer1Path = Path.Combine(fixturePath, "layer1", "usr", "lib", "python3.11", "site-packages"); + var layer2Path = Path.Combine(fixturePath, "layer2", "usr", "lib", "python3.11", "site-packages"); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSitePackages(layer1Path) + .AddSitePackages(layer2Path) + .Build(); + + var discovery = new PythonPackageDiscovery(); + var result = await discovery.DiscoverAsync(vfs, cancellationToken); + + Assert.True(result.IsSuccessful); + Assert.Contains(result.Packages, p => p.Name == "layered"); + } + + /// + /// Tests that containers with multiple layers are handled correctly. + /// + [Fact] + public async Task Container_DetectsPackagesAcrossLayers() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = Path.Combine(FixturesPath, "container"); + + if (!Directory.Exists(fixturePath)) + { + return; + } + + var layer1Path = Path.Combine(fixturePath, "layer1", "usr", "lib", "python3.11", "site-packages"); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSitePackages(layer1Path) + .Build(); + + var discovery = new PythonPackageDiscovery(); + var result = await discovery.DiscoverAsync(vfs, cancellationToken); + + Assert.True(result.IsSuccessful); + Assert.Contains(result.Packages, p => p.Name == "Flask"); + + var flaskPkg = result.Packages.First(p => p.Name == "Flask"); + Assert.Equal("3.0.0", flaskPkg.Version); + } + + /// + /// Tests Lambda handler detection from SAM templates. + /// + [Fact] + public void LambdaHandler_DetectsHandlerFromSamTemplate() + { + var fixturePath = Path.Combine(FixturesPath, "lambda-handler"); + + if (!Directory.Exists(fixturePath)) + { + return; + } + + var handlerPath = Path.Combine(fixturePath, "app", "handler.py"); + Assert.True(File.Exists(handlerPath)); + + var content = File.ReadAllText(handlerPath); + + // Verify the handler signature is present + Assert.Contains("def lambda_handler(event, context)", content); + Assert.Contains("def process_event(event, context)", content); + } + + /// + /// Tests framework detection for Flask applications. + /// + [Fact] + public async Task FrameworkDetection_DetectsFlask() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = Path.Combine(FixturesPath, "container", "layer2", "app"); + + if (!Directory.Exists(fixturePath)) + { + return; + } + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(fixturePath) + .Build(); + + var detector = new PythonFrameworkDetector(); + var hints = await detector.DetectAsync(vfs, cancellationToken); + + Assert.Contains(hints, h => h.Kind == PythonFrameworkKind.Flask); + } + + /// + /// Tests capability detection for network and process execution. + /// + [Fact] + public async Task CapabilityDetection_DetectsNetworkAccess() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = Path.Combine(FixturesPath, "lambda-handler", "app"); + + if (!Directory.Exists(fixturePath)) + { + return; + } + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(fixturePath) + .Build(); + + var detector = new PythonCapabilityDetector(); + var capabilities = await detector.DetectAsync(vfs, cancellationToken); + + // boto3 import indicates AWS SDK usage - check for any detected capability + Assert.NotEmpty(capabilities); + } + + /// + /// Tests observation document generation from fixtures. + /// + [Fact] + public async Task ObservationBuilder_ProducesValidDocument() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = Path.Combine(FixturesPath, "simple-venv"); + + if (!Directory.Exists(fixturePath)) + { + return; + } + + var sitePackagesPath = Path.Combine(fixturePath, "lib", "python3.11", "site-packages"); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSitePackages(sitePackagesPath) + .Build(); + + var discovery = new PythonPackageDiscovery(); + var result = await discovery.DiscoverAsync(vfs, cancellationToken); + + var builder = new PythonObservationBuilder(); + var document = builder + .AddPackages(result.Packages) + .SetEnvironment("3.11.0", [sitePackagesPath]) + .Build(); + + Assert.Equal("python-aoc-v1", document.Schema); + Assert.NotEmpty(document.Packages); + + // Verify serialization produces valid JSON + var json = PythonObservationSerializer.Serialize(document); + var parsed = JsonDocument.Parse(json); + Assert.NotNull(parsed); + } + + /// + /// Tests import graph building from module files. + /// + [Fact] + public void ImportGraph_ExtractsImportsFromSource() + { + var fixturePath = Path.Combine(FixturesPath, "lambda-handler", "app"); + + if (!Directory.Exists(fixturePath)) + { + return; + } + + var handlerPath = Path.Combine(fixturePath, "handler.py"); + var content = File.ReadAllText(handlerPath); + + var extractor = new PythonSourceImportExtractor(handlerPath); + extractor.Extract(content); + + Assert.Contains(extractor.Imports, i => i.Module == "json"); + Assert.Contains(extractor.Imports, i => i.Module == "os"); + Assert.Contains(extractor.Imports, i => i.Module == "boto3"); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/Dockerfile b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/Dockerfile new file mode 100644 index 000000000..65bd70bd5 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/Dockerfile @@ -0,0 +1,6 @@ +FROM python:3.11-slim +WORKDIR /app +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt +COPY app/ /app/ +CMD ["python", "-m", "app"] diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/expected.json new file mode 100644 index 000000000..b4b6832a9 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/expected.json @@ -0,0 +1,55 @@ +[ + { + "analyzerId": "python", + "componentKey": "purl::pkg:pypi/flask@3.0.0", + "purl": "pkg:pypi/flask@3.0.0", + "name": "Flask", + "version": "3.0.0", + "type": "pypi", + "metadata": { + "author": "Pallets", + "distInfoPath": "layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info", + "installer": "pip", + "license": "BSD-3-Clause", + "name": "Flask", + "normalizedName": "flask", + "provenance": "dist-info", + "containerLayer": "layer1", + "requiresDist": "Werkzeug>=3.0;Jinja2>=3.1;click>=8.1", + "requiresPython": ">=3.8", + "summary": "A simple framework for building complex web applications", + "version": "3.0.0", + "wheel.generator": "pip 24.0", + "wheel.rootIsPurelib": "true", + "wheel.tags": "py3-none-any", + "wheel.version": "1.0" + }, + "evidence": [ + { + "kind": "file", + "source": "INSTALLER", + "locator": "layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/INSTALLER" + }, + { + "kind": "file", + "source": "METADATA", + "locator": "layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/METADATA" + }, + { + "kind": "file", + "source": "RECORD", + "locator": "layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/RECORD" + }, + { + "kind": "file", + "source": "WHEEL", + "locator": "layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/WHEEL" + }, + { + "kind": "container", + "source": "Dockerfile", + "locator": "Dockerfile" + } + ] + } +] diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/INSTALLER b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/INSTALLER new file mode 100644 index 000000000..a1b589e38 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/METADATA b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/METADATA new file mode 100644 index 000000000..4a27fe61f --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/METADATA @@ -0,0 +1,10 @@ +Metadata-Version: 2.1 +Name: Flask +Version: 3.0.0 +Summary: A simple framework for building complex web applications +Author: Pallets +License: BSD-3-Clause +Requires-Python: >=3.8 +Requires-Dist: Werkzeug>=3.0 +Requires-Dist: Jinja2>=3.1 +Requires-Dist: click>=8.1 diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/RECORD b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/RECORD new file mode 100644 index 000000000..8c37bea0d --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/RECORD @@ -0,0 +1,5 @@ +flask/__init__.py,sha256=abc123,200 +flask-3.0.0.dist-info/METADATA,sha256=def456,500 +flask-3.0.0.dist-info/WHEEL,sha256=ghi789,80 +flask-3.0.0.dist-info/INSTALLER,sha256=jkl012,4 +flask-3.0.0.dist-info/RECORD,, diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/WHEEL b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/WHEEL new file mode 100644 index 000000000..e0965d4f7 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask-3.0.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: pip 24.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask/__init__.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask/__init__.py new file mode 100644 index 000000000..8cef62819 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer1/usr/lib/python3.11/site-packages/flask/__init__.py @@ -0,0 +1,11 @@ +"""Flask web framework stub.""" +__version__ = "3.0.0" + +class Flask: + def __init__(self, name): + self.name = name + + def route(self, path): + def decorator(f): + return f + return decorator diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer2/app/__init__.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer2/app/__init__.py new file mode 100644 index 000000000..b19802f41 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer2/app/__init__.py @@ -0,0 +1,2 @@ +"""Container application package.""" +__version__ = "1.0.0" diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer2/app/__main__.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer2/app/__main__.py new file mode 100644 index 000000000..75cb875a5 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/container/layer2/app/__main__.py @@ -0,0 +1,11 @@ +"""Application entry point.""" +from flask import Flask + +app = Flask(__name__) + +@app.route("/") +def index(): + return {"status": "healthy"} + +if __name__ == "__main__": + app.run(host="0.0.0.0", port=8080) diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/app/__init__.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/app/__init__.py new file mode 100644 index 000000000..e4b651990 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/app/__init__.py @@ -0,0 +1,2 @@ +"""Lambda application package.""" +__version__ = "1.0.0" diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/app/handler.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/app/handler.py new file mode 100644 index 000000000..c3bd69c50 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/app/handler.py @@ -0,0 +1,16 @@ +"""AWS Lambda handler module.""" +import json +import os +import boto3 + +def lambda_handler(event, context): + """Main Lambda handler function.""" + return { + "statusCode": 200, + "body": json.dumps({"message": "Hello from Lambda!"}) + } + +def process_event(event, context): + """Alternative handler for processing events.""" + s3 = boto3.client("s3") + return {"processed": True} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/app/utils.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/app/utils.py new file mode 100644 index 000000000..86188af22 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/app/utils.py @@ -0,0 +1,7 @@ +"""Utility functions for Lambda handler.""" +import logging + +logger = logging.getLogger(__name__) + +def log_event(event): + logger.info("Processing event: %s", event) diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/expected.json new file mode 100644 index 000000000..30cabf963 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/expected.json @@ -0,0 +1,33 @@ +[ + { + "analyzerId": "python", + "componentKey": "lambda::app.handler.lambda_handler", + "name": "lambda_handler", + "type": "lambda", + "metadata": { + "handler": "handler.lambda_handler", + "runtime": "python3.11", + "codeUri": "app/", + "framework": "AWSLambda", + "templateFile": "template.yaml", + "timeout": "30" + }, + "evidence": [ + { + "kind": "file", + "source": "sam-template", + "locator": "template.yaml" + }, + { + "kind": "file", + "source": "handler", + "locator": "app/handler.py" + }, + { + "kind": "derived", + "source": "handler-signature", + "value": "def lambda_handler(event, context)" + } + ] + } +] diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/requirements.txt b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/requirements.txt new file mode 100644 index 000000000..2c0a1cc34 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/requirements.txt @@ -0,0 +1,2 @@ +boto3>=1.26.0 +requests>=2.28.0 diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/template.yaml b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/template.yaml new file mode 100644 index 000000000..515c159d4 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/lambda-handler/template.yaml @@ -0,0 +1,21 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Sample Lambda Function + +Globals: + Function: + Timeout: 30 + Runtime: python3.11 + +Resources: + MyFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: app/ + Handler: handler.lambda_handler + Events: + Api: + Type: Api + Properties: + Path: /hello + Method: get diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/expected.json new file mode 100644 index 000000000..deecd05de --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/expected.json @@ -0,0 +1,100 @@ +[ + { + "analyzerId": "python", + "componentKey": "purl::pkg:pypi/mynamespace-subpkg1@1.0.0", + "purl": "pkg:pypi/mynamespace-subpkg1@1.0.0", + "name": "mynamespace-subpkg1", + "version": "1.0.0", + "type": "pypi", + "metadata": { + "author": "Example Dev", + "authorEmail": "dev@example.com", + "distInfoPath": "lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info", + "installer": "pip", + "license": "MIT", + "name": "mynamespace-subpkg1", + "normalizedName": "mynamespace_subpkg1", + "provenance": "dist-info", + "requiresPython": ">=3.9", + "summary": "Namespace package subpkg1", + "topLevelModule": "mynamespace", + "version": "1.0.0", + "wheel.generator": "pip 24.0", + "wheel.rootIsPurelib": "true", + "wheel.tags": "py3-none-any", + "wheel.version": "1.0", + "namespacePackage": "true" + }, + "evidence": [ + { + "kind": "file", + "source": "INSTALLER", + "locator": "lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/INSTALLER" + }, + { + "kind": "file", + "source": "METADATA", + "locator": "lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/METADATA" + }, + { + "kind": "file", + "source": "RECORD", + "locator": "lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/RECORD" + }, + { + "kind": "file", + "source": "WHEEL", + "locator": "lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/WHEEL" + } + ] + }, + { + "analyzerId": "python", + "componentKey": "purl::pkg:pypi/mynamespace-subpkg2@1.0.0", + "purl": "pkg:pypi/mynamespace-subpkg2@1.0.0", + "name": "mynamespace-subpkg2", + "version": "1.0.0", + "type": "pypi", + "metadata": { + "author": "Example Dev", + "authorEmail": "dev@example.com", + "distInfoPath": "lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info", + "installer": "pip", + "license": "MIT", + "name": "mynamespace-subpkg2", + "normalizedName": "mynamespace_subpkg2", + "provenance": "dist-info", + "requiresPython": ">=3.9", + "summary": "Namespace package subpkg2", + "topLevelModule": "mynamespace", + "version": "1.0.0", + "wheel.generator": "pip 24.0", + "wheel.rootIsPurelib": "true", + "wheel.tags": "py3-none-any", + "wheel.version": "1.0", + "namespacePackage": "true" + }, + "evidence": [ + { + "kind": "file", + "source": "INSTALLER", + "locator": "lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/INSTALLER" + }, + { + "kind": "file", + "source": "METADATA", + "locator": "lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/METADATA" + }, + { + "kind": "file", + "source": "RECORD", + "locator": "lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/RECORD" + }, + { + "kind": "file", + "source": "WHEEL", + "locator": "lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/WHEEL" + } + ] + } +] diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace/subpkg1/__init__.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace/subpkg1/__init__.py new file mode 100644 index 000000000..1a8747d14 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace/subpkg1/__init__.py @@ -0,0 +1,4 @@ +# Namespace subpackage 1 +from .core import process + +__version__ = "1.0.0" diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace/subpkg1/core.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace/subpkg1/core.py new file mode 100644 index 000000000..ab4f1c4c1 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace/subpkg1/core.py @@ -0,0 +1,5 @@ +"""Core functionality for subpkg1.""" +import json + +def process(data): + return json.dumps(data) diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace/subpkg2/__init__.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace/subpkg2/__init__.py new file mode 100644 index 000000000..b1828c4d6 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace/subpkg2/__init__.py @@ -0,0 +1,4 @@ +# Namespace subpackage 2 +from .utils import helper + +__version__ = "1.0.0" diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace/subpkg2/utils.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace/subpkg2/utils.py new file mode 100644 index 000000000..2345e8139 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace/subpkg2/utils.py @@ -0,0 +1,5 @@ +"""Utilities for subpkg2.""" +import os + +def helper(): + return os.getcwd() diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/INSTALLER b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/INSTALLER new file mode 100644 index 000000000..a1b589e38 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/METADATA b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/METADATA new file mode 100644 index 000000000..2814e4a68 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/METADATA @@ -0,0 +1,8 @@ +Metadata-Version: 2.1 +Name: mynamespace-subpkg1 +Version: 1.0.0 +Summary: Namespace package subpkg1 +Author: Example Dev +Author-email: dev@example.com +License: MIT +Requires-Python: >=3.9 diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/RECORD b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/RECORD new file mode 100644 index 000000000..abfd633da --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/RECORD @@ -0,0 +1,6 @@ +mynamespace/subpkg1/__init__.py,sha256=abc123,50 +mynamespace/subpkg1/core.py,sha256=def456,100 +mynamespace_subpkg1-1.0.0.dist-info/METADATA,sha256=ghi789,200 +mynamespace_subpkg1-1.0.0.dist-info/WHEEL,sha256=jkl012,80 +mynamespace_subpkg1-1.0.0.dist-info/INSTALLER,sha256=mno345,4 +mynamespace_subpkg1-1.0.0.dist-info/RECORD,, diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/WHEEL b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/WHEEL new file mode 100644 index 000000000..e0965d4f7 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: pip 24.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/top_level.txt b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/top_level.txt new file mode 100644 index 000000000..5947a7183 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg1-1.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +mynamespace diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/INSTALLER b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/INSTALLER new file mode 100644 index 000000000..a1b589e38 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/METADATA b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/METADATA new file mode 100644 index 000000000..be4a0f135 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/METADATA @@ -0,0 +1,8 @@ +Metadata-Version: 2.1 +Name: mynamespace-subpkg2 +Version: 1.0.0 +Summary: Namespace package subpkg2 +Author: Example Dev +Author-email: dev@example.com +License: MIT +Requires-Python: >=3.9 diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/RECORD b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/RECORD new file mode 100644 index 000000000..ea6285ad2 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/RECORD @@ -0,0 +1,6 @@ +mynamespace/subpkg2/__init__.py,sha256=abc123,50 +mynamespace/subpkg2/utils.py,sha256=def456,80 +mynamespace_subpkg2-1.0.0.dist-info/METADATA,sha256=ghi789,200 +mynamespace_subpkg2-1.0.0.dist-info/WHEEL,sha256=jkl012,80 +mynamespace_subpkg2-1.0.0.dist-info/INSTALLER,sha256=mno345,4 +mynamespace_subpkg2-1.0.0.dist-info/RECORD,, diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/WHEEL b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/WHEEL new file mode 100644 index 000000000..e0965d4f7 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: pip 24.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/top_level.txt b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/top_level.txt new file mode 100644 index 000000000..5947a7183 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/namespace-pkg/lib/python3.11/site-packages/mynamespace_subpkg2-1.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +mynamespace diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/zipapp/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/zipapp/expected.json new file mode 100644 index 000000000..36f91e690 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/zipapp/expected.json @@ -0,0 +1,33 @@ +[ + { + "analyzerId": "python", + "componentKey": "zipapp::myapp.pyz", + "name": "myapp.pyz", + "version": "2.0.0", + "type": "zipapp", + "metadata": { + "archiveType": "zipapp", + "mainModule": "__main__", + "interpreter": "/usr/bin/env python3", + "version": "2.0.0", + "modules": "myapp,myapp.cli" + }, + "evidence": [ + { + "kind": "file", + "source": "zipapp", + "locator": "myapp.pyz.contents/__main__.py" + }, + { + "kind": "file", + "source": "zipapp", + "locator": "myapp.pyz.contents/myapp/__init__.py" + }, + { + "kind": "file", + "source": "zipapp", + "locator": "myapp.pyz.contents/myapp/cli.py" + } + ] + } +] diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/zipapp/myapp.pyz.contents/__main__.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/zipapp/myapp.pyz.contents/__main__.py new file mode 100644 index 000000000..d9f22c3fe --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/zipapp/myapp.pyz.contents/__main__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 +"""Main entry point for zipapp.""" +import sys +from myapp.cli import main + +if __name__ == "__main__": + sys.exit(main()) diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/zipapp/myapp.pyz.contents/myapp/__init__.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/zipapp/myapp.pyz.contents/myapp/__init__.py new file mode 100644 index 000000000..c680034f5 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/zipapp/myapp.pyz.contents/myapp/__init__.py @@ -0,0 +1,2 @@ +"""MyApp zipapp package.""" +__version__ = "2.0.0" diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/zipapp/myapp.pyz.contents/myapp/cli.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/zipapp/myapp.pyz.contents/myapp/cli.py new file mode 100644 index 000000000..0dcb327f7 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/zipapp/myapp.pyz.contents/myapp/cli.py @@ -0,0 +1,10 @@ +"""CLI entry point.""" +import argparse +import json + +def main(): + parser = argparse.ArgumentParser(description="MyApp CLI") + parser.add_argument("--version", action="version", version="2.0.0") + args = parser.parse_args() + print(json.dumps({"status": "ok"})) + return 0 diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Framework/PythonFrameworkDetectorTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Framework/PythonFrameworkDetectorTests.cs new file mode 100644 index 000000000..a5dfbb89c --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Framework/PythonFrameworkDetectorTests.cs @@ -0,0 +1,642 @@ +using System.Collections.Immutable; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Framework; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.VirtualFileSystem; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Tests.Framework; + +public sealed class PythonFrameworkDetectorTests +{ + [Fact] + public async Task DetectAsync_DjangoProject_FindsDjangoHints() + { + var cancellationToken = TestContext.Current.CancellationToken; + var tempPath = CreateTemporaryWorkspace(); + try + { + // Create Django project structure + await File.WriteAllTextAsync( + Path.Combine(tempPath, "manage.py"), + """ + #!/usr/bin/env python + import os + import sys + + if __name__ == "__main__": + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myproject.settings") + from django.core.management import execute_from_command_line + execute_from_command_line(sys.argv) + """, + cancellationToken); + + Directory.CreateDirectory(Path.Combine(tempPath, "myproject")); + await File.WriteAllTextAsync( + Path.Combine(tempPath, "myproject", "settings.py"), + """ + INSTALLED_APPS = [ + 'django.contrib.admin', + 'django.contrib.auth', + 'myapp', + ] + + MIDDLEWARE = [ + 'django.middleware.security.SecurityMiddleware', + ] + + ROOT_URLCONF = 'myproject.urls' + """, + cancellationToken); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(tempPath) + .Build(); + + var detector = new PythonFrameworkDetector(); + var hints = await detector.DetectAsync(vfs, cancellationToken); + + Assert.Contains(hints, h => h.Kind == PythonFrameworkKind.Django); + Assert.Contains(hints, h => h.Evidence.Contains("INSTALLED_APPS")); + Assert.Contains(hints, h => h.Evidence.Contains("DJANGO_SETTINGS_MODULE")); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public async Task DetectAsync_FlaskApp_FindsFlaskHints() + { + var cancellationToken = TestContext.Current.CancellationToken; + var tempPath = CreateTemporaryWorkspace(); + try + { + await File.WriteAllTextAsync( + Path.Combine(tempPath, "app.py"), + """ + from flask import Flask, Blueprint + + app = Flask(__name__) + + api_bp = Blueprint('api', __name__, url_prefix='/api') + + @app.route('/') + def index(): + return 'Hello, World!' + """, + cancellationToken); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(tempPath) + .Build(); + + var detector = new PythonFrameworkDetector(); + var hints = await detector.DetectAsync(vfs, cancellationToken); + + Assert.Contains(hints, h => h.Kind == PythonFrameworkKind.Flask); + // Due to deduplication, we get the highest confidence match per kind/file + var flaskHint = hints.First(h => h.Kind == PythonFrameworkKind.Flask); + Assert.Equal(PythonFrameworkConfidence.Definitive, flaskHint.Confidence); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public async Task DetectAsync_FastAPIApp_FindsFastAPIHints() + { + var cancellationToken = TestContext.Current.CancellationToken; + var tempPath = CreateTemporaryWorkspace(); + try + { + await File.WriteAllTextAsync( + Path.Combine(tempPath, "main.py"), + """ + from fastapi import FastAPI, APIRouter + + app = FastAPI() + router = APIRouter() + + @router.get("/items") + async def get_items(): + return [] + + app.include_router(router) + """, + cancellationToken); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(tempPath) + .Build(); + + var detector = new PythonFrameworkDetector(); + var hints = await detector.DetectAsync(vfs, cancellationToken); + + Assert.Contains(hints, h => h.Kind == PythonFrameworkKind.FastAPI); + // Due to deduplication, we get the highest confidence match per kind/file + var fastApiHint = hints.First(h => h.Kind == PythonFrameworkKind.FastAPI); + Assert.Equal(PythonFrameworkConfidence.Definitive, fastApiHint.Confidence); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public async Task DetectAsync_CeleryApp_FindsCeleryHints() + { + var cancellationToken = TestContext.Current.CancellationToken; + var tempPath = CreateTemporaryWorkspace(); + try + { + await File.WriteAllTextAsync( + Path.Combine(tempPath, "celery.py"), + """ + from celery import Celery + + app = Celery('tasks', broker='redis://localhost:6379/0') + + @app.task + def add(x, y): + return x + y + """, + cancellationToken); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(tempPath) + .Build(); + + var detector = new PythonFrameworkDetector(); + var hints = await detector.DetectAsync(vfs, cancellationToken); + + Assert.Contains(hints, h => h.Kind == PythonFrameworkKind.Celery); + // Due to deduplication, we get the highest confidence match per kind/file + var celeryHint = hints.First(h => h.Kind == PythonFrameworkKind.Celery); + Assert.Equal(PythonFrameworkConfidence.Definitive, celeryHint.Confidence); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public async Task DetectAsync_AwsLambdaHandler_FindsLambdaHint() + { + var cancellationToken = TestContext.Current.CancellationToken; + var tempPath = CreateTemporaryWorkspace(); + try + { + await File.WriteAllTextAsync( + Path.Combine(tempPath, "handler.py"), + """ + import json + + def lambda_handler(event, context): + return { + 'statusCode': 200, + 'body': json.dumps('Hello from Lambda!') + } + """, + cancellationToken); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(tempPath) + .Build(); + + var detector = new PythonFrameworkDetector(); + var hints = await detector.DetectAsync(vfs, cancellationToken); + + Assert.Contains(hints, h => h.Kind == PythonFrameworkKind.AwsLambda); + Assert.Contains(hints, h => h.Evidence.Contains("Lambda handler function")); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public async Task DetectAsync_ClickCli_FindsClickHints() + { + var cancellationToken = TestContext.Current.CancellationToken; + var tempPath = CreateTemporaryWorkspace(); + try + { + await File.WriteAllTextAsync( + Path.Combine(tempPath, "cli.py"), + """ + import click + + @click.group() + def cli(): + pass + + @click.command() + @click.option('--name', default='World') + def hello(name): + click.echo(f'Hello {name}!') + + cli.add_command(hello) + """, + cancellationToken); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(tempPath) + .Build(); + + var detector = new PythonFrameworkDetector(); + var hints = await detector.DetectAsync(vfs, cancellationToken); + + Assert.Contains(hints, h => h.Kind == PythonFrameworkKind.Click); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public async Task DetectAsync_TyperCli_FindsTyperHints() + { + var cancellationToken = TestContext.Current.CancellationToken; + var tempPath = CreateTemporaryWorkspace(); + try + { + await File.WriteAllTextAsync( + Path.Combine(tempPath, "main.py"), + """ + import typer + + app = typer.Typer() + + @app.command() + def hello(name: str): + print(f"Hello {name}") + + if __name__ == "__main__": + app() + """, + cancellationToken); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(tempPath) + .Build(); + + var detector = new PythonFrameworkDetector(); + var hints = await detector.DetectAsync(vfs, cancellationToken); + + Assert.Contains(hints, h => h.Kind == PythonFrameworkKind.Typer); + Assert.Contains(hints, h => h.Evidence.Contains("typer.Typer()")); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public async Task DetectAsync_GunicornConfig_FindsGunicornHint() + { + var cancellationToken = TestContext.Current.CancellationToken; + var tempPath = CreateTemporaryWorkspace(); + try + { + await File.WriteAllTextAsync( + Path.Combine(tempPath, "gunicorn.conf.py"), + """ + bind = "0.0.0.0:8000" + workers = 4 + worker_class = "uvicorn.workers.UvicornWorker" + """, + cancellationToken); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(tempPath) + .Build(); + + var detector = new PythonFrameworkDetector(); + var hints = await detector.DetectAsync(vfs, cancellationToken); + + Assert.Contains(hints, h => h.Kind == PythonFrameworkKind.Gunicorn); + Assert.Contains(hints, h => h.Confidence == PythonFrameworkConfidence.Definitive); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public async Task DetectAsync_LoggingConfig_FindsLoggingHint() + { + var cancellationToken = TestContext.Current.CancellationToken; + var tempPath = CreateTemporaryWorkspace(); + try + { + await File.WriteAllTextAsync( + Path.Combine(tempPath, "config.py"), + """ + import logging.config + + LOGGING = { + 'version': 1, + 'handlers': { + 'console': { + 'class': 'logging.StreamHandler', + }, + }, + } + + logging.config.dictConfig(LOGGING) + """, + cancellationToken); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(tempPath) + .Build(); + + var detector = new PythonFrameworkDetector(); + var hints = await detector.DetectAsync(vfs, cancellationToken); + + Assert.Contains(hints, h => h.Kind == PythonFrameworkKind.LoggingConfig); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public async Task DetectAsync_JupyterNotebook_FindsJupyterHint() + { + var cancellationToken = TestContext.Current.CancellationToken; + var tempPath = CreateTemporaryWorkspace(); + try + { + // Create a minimal Jupyter notebook file + await File.WriteAllTextAsync( + Path.Combine(tempPath, "analysis.ipynb"), + """ + { + "cells": [], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 + } + """, + cancellationToken); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(tempPath) + .Build(); + + var detector = new PythonFrameworkDetector(); + var hints = await detector.DetectAsync(vfs, cancellationToken); + + Assert.Contains(hints, h => h.Kind == PythonFrameworkKind.Jupyter); + Assert.Contains(hints, h => h.Confidence == PythonFrameworkConfidence.Definitive); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public async Task DetectAsync_StreamlitApp_FindsStreamlitHint() + { + var cancellationToken = TestContext.Current.CancellationToken; + var tempPath = CreateTemporaryWorkspace(); + try + { + await File.WriteAllTextAsync( + Path.Combine(tempPath, "app.py"), + """ + import streamlit as st + + st.title('My Streamlit App') + st.write('Hello, World!') + """, + cancellationToken); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(tempPath) + .Build(); + + var detector = new PythonFrameworkDetector(); + var hints = await detector.DetectAsync(vfs, cancellationToken); + + Assert.Contains(hints, h => h.Kind == PythonFrameworkKind.Streamlit); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public void PythonFrameworkHint_Categories_ReturnCorrectly() + { + var webHint = new PythonFrameworkHint( + Kind: PythonFrameworkKind.Flask, + SourceFile: "app.py", + LineNumber: 1, + Evidence: "Flask()", + Confidence: PythonFrameworkConfidence.Definitive); + + Assert.True(webHint.IsWebFramework); + Assert.False(webHint.IsTaskQueue); + Assert.False(webHint.IsServerless); + Assert.False(webHint.IsCliFramework); + + var taskHint = new PythonFrameworkHint( + Kind: PythonFrameworkKind.Celery, + SourceFile: "tasks.py", + LineNumber: 1, + Evidence: "Celery()", + Confidence: PythonFrameworkConfidence.Definitive); + + Assert.False(taskHint.IsWebFramework); + Assert.True(taskHint.IsTaskQueue); + + var serverlessHint = new PythonFrameworkHint( + Kind: PythonFrameworkKind.AwsLambda, + SourceFile: "handler.py", + LineNumber: 1, + Evidence: "lambda_handler", + Confidence: PythonFrameworkConfidence.High); + + Assert.True(serverlessHint.IsServerless); + + var cliHint = new PythonFrameworkHint( + Kind: PythonFrameworkKind.Click, + SourceFile: "cli.py", + LineNumber: 1, + Evidence: "@click.command", + Confidence: PythonFrameworkConfidence.High); + + Assert.True(cliHint.IsCliFramework); + } + + [Fact] + public void PythonFrameworkHint_ToMetadata_GeneratesExpectedKeys() + { + var hint = new PythonFrameworkHint( + Kind: PythonFrameworkKind.FastAPI, + SourceFile: "main.py", + LineNumber: 5, + Evidence: "FastAPI()", + Confidence: PythonFrameworkConfidence.Definitive); + + var metadata = hint.ToMetadata("fw").ToDictionary(kv => kv.Key, kv => kv.Value); + + Assert.Equal("FastAPI", metadata["fw.kind"]); + Assert.Equal("main.py", metadata["fw.file"]); + Assert.Equal("5", metadata["fw.line"]); + Assert.Equal("FastAPI()", metadata["fw.evidence"]); + Assert.Equal("WebFramework", metadata["fw.category"]); + } + + private static string CreateTemporaryWorkspace() + { + var path = Path.Combine(Path.GetTempPath(), $"stellaops-framework-{Guid.NewGuid():N}"); + Directory.CreateDirectory(path); + return path; + } +} + +public sealed class PythonProjectConfigParserTests +{ + [Fact] + public async Task ParsePyprojectAsync_WithOptionalDependencies_ExtractsExtras() + { + var cancellationToken = TestContext.Current.CancellationToken; + var tempPath = CreateTemporaryWorkspace(); + try + { + await File.WriteAllTextAsync( + Path.Combine(tempPath, "pyproject.toml"), + """ + [project] + name = "mypackage" + version = "1.0.0" + + [project.optional-dependencies] + dev = [ + "pytest", + "black", + "mypy", + ] + docs = ["sphinx", "sphinx-rtd-theme"] + all = ["mypackage[dev,docs]"] + + [project.scripts] + myapp = "mypackage.cli:main" + """, + cancellationToken); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(tempPath) + .Build(); + + var parser = new PythonProjectConfigParser(); + var config = await parser.ParsePyprojectAsync(vfs, "pyproject.toml", cancellationToken); + + Assert.NotNull(config); + Assert.Equal("mypackage", config.ProjectName); + Assert.Equal("1.0.0", config.ProjectVersion); + Assert.Contains("dev", config.Extras); + Assert.Contains("docs", config.Extras); + Assert.Contains("all", config.Extras); + Assert.True(config.OptionalDependencies.ContainsKey("dev")); + Assert.Contains("pytest", config.OptionalDependencies["dev"]); + Assert.True(config.Scripts.ContainsKey("myapp")); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public async Task ParsePyprojectAsync_PoetryExtras_ExtractsExtras() + { + var cancellationToken = TestContext.Current.CancellationToken; + var tempPath = CreateTemporaryWorkspace(); + try + { + await File.WriteAllTextAsync( + Path.Combine(tempPath, "pyproject.toml"), + """ + [tool.poetry] + name = "mypoetryapp" + version = "2.0.0" + + [tool.poetry.extras] + ml = ["tensorflow", "numpy"] + web = ["flask", "gunicorn"] + + [tool.poetry.group.dev.dependencies] + pytest = "^7.0" + + [tool.poetry.scripts] + mypoetryapp = "mypoetryapp.main:run" + """, + cancellationToken); + + var vfs = PythonVirtualFileSystem.CreateBuilder() + .AddSourceTree(tempPath) + .Build(); + + var parser = new PythonProjectConfigParser(); + var config = await parser.ParsePyprojectAsync(vfs, "pyproject.toml", cancellationToken); + + Assert.NotNull(config); + Assert.Contains("ml", config.Extras); + Assert.Contains("web", config.Extras); + Assert.Contains("dev", config.Extras); + Assert.True(config.OptionalDependencies.ContainsKey("ml")); + Assert.Contains("tensorflow", config.OptionalDependencies["ml"]); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public void PythonProjectConfig_ToMetadata_GeneratesExpectedKeys() + { + var config = new PythonProjectConfig( + FilePath: "pyproject.toml", + ProjectName: "myapp", + ProjectVersion: "1.2.3", + OptionalDependencies: new Dictionary> + { + ["dev"] = ["pytest", "black"] + }.ToImmutableDictionary(), + Extras: ["dev", "docs"], + Scripts: new Dictionary + { + ["myapp"] = "myapp.cli:main" + }.ToImmutableDictionary()); + + var metadata = config.ToMetadata("proj").ToDictionary(kv => kv.Key, kv => kv.Value); + + Assert.Equal("pyproject.toml", metadata["proj.path"]); + Assert.Equal("myapp", metadata["proj.name"]); + Assert.Equal("1.2.3", metadata["proj.version"]); + Assert.Equal("dev,docs", metadata["proj.extras"]); + Assert.Equal("myapp", metadata["proj.scripts"]); + } + + private static string CreateTemporaryWorkspace() + { + var path = Path.Combine(Path.GetTempPath(), $"stellaops-config-{Guid.NewGuid():N}"); + Directory.CreateDirectory(path); + return path; + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Observations/PythonObservationBuilderTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Observations/PythonObservationBuilderTests.cs new file mode 100644 index 000000000..209f62605 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Observations/PythonObservationBuilderTests.cs @@ -0,0 +1,400 @@ +using System.Collections.Immutable; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Capabilities; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Entrypoints; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Framework; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Imports; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Observations; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Packaging; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.VirtualFileSystem; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Tests.Observations; + +public sealed class PythonObservationBuilderTests +{ + [Fact] + public void Build_WithNoData_ReturnsEmptyDocument() + { + var builder = new PythonObservationBuilder(); + var document = builder.Build(); + + Assert.Equal("python-aoc-v1", document.Schema); + Assert.Empty(document.Packages); + Assert.Empty(document.Modules); + Assert.Empty(document.Entrypoints); + Assert.Empty(document.DependencyEdges); + Assert.Empty(document.ImportEdges); + Assert.Empty(document.NativeExtensions); + Assert.Empty(document.Frameworks); + Assert.Empty(document.Warnings); + Assert.False(document.Capabilities.UsesProcessExecution); + Assert.False(document.Capabilities.UsesNetworkAccess); + } + + [Fact] + public void AddPackages_AddsPackagesAndDependencyEdges() + { + var packages = new[] + { + new PythonPackageInfo( + Name: "requests", + Version: "2.31.0", + Kind: PythonPackageKind.Wheel, + Location: "/venv/lib/python3.11/site-packages", + MetadataPath: "/venv/lib/python3.11/site-packages/requests-2.31.0.dist-info", + TopLevelModules: ImmutableArray.Create("requests"), + Dependencies: ImmutableArray.Create("urllib3>=1.21.1", "certifi>=2017.4.17"), + Extras: ImmutableArray.Empty, + RecordFiles: ImmutableArray.Empty, + InstallerTool: "pip", + EditableTarget: null, + IsDirectDependency: true, + Confidence: PythonPackageConfidence.High) + }; + + var builder = new PythonObservationBuilder(); + var document = builder.AddPackages(packages).Build(); + + Assert.Single(document.Packages); + var pkg = document.Packages[0]; + Assert.Equal("requests", pkg.Name); + Assert.Equal("2.31.0", pkg.Version); + Assert.Equal("Wheel", pkg.Source); + Assert.True(pkg.IsDirect); + Assert.Equal("pip", pkg.InstallerKind); + + Assert.Equal(2, document.DependencyEdges.Length); + Assert.Contains(document.DependencyEdges, e => e.FromPackage == "requests" && e.ToPackage == "urllib3"); + Assert.Contains(document.DependencyEdges, e => e.FromPackage == "requests" && e.ToPackage == "certifi"); + } + + [Fact] + public void AddModules_AddsModulesCorrectly() + { + var modules = new[] + { + new PythonModuleNode( + ModulePath: "mypackage", + VirtualPath: "/app/mypackage/__init__.py", + IsPackage: true, + IsNamespacePackage: false, + Source: PythonFileSource.SourceTree), + new PythonModuleNode( + ModulePath: "mypackage.core", + VirtualPath: "/app/mypackage/core.py", + IsPackage: false, + IsNamespacePackage: false, + Source: PythonFileSource.SourceTree) + }; + + var builder = new PythonObservationBuilder(); + var document = builder.AddModules(modules).Build(); + + Assert.Equal(2, document.Modules.Length); + + var pkgModule = document.Modules.First(m => m.Name == "mypackage"); + Assert.Equal("package", pkgModule.Type); + Assert.Contains("__init__.py", pkgModule.FilePath); + + var coreModule = document.Modules.First(m => m.Name == "mypackage.core"); + Assert.Equal("module", coreModule.Type); + Assert.Equal("mypackage", coreModule.ParentPackage); + } + + [Fact] + public void AddEntrypoints_AddsEntrypointsCorrectly() + { + var entrypoints = new[] + { + new PythonEntrypoint( + Name: "myapp", + Kind: PythonEntrypointKind.PackageMain, + Target: "myapp.__main__", + VirtualPath: "/app/myapp/__main__.py", + InvocationContext: PythonInvocationContext.AsModule("myapp"), + Confidence: PythonEntrypointConfidence.High, + Source: "__main__.py detection") + }; + + var builder = new PythonObservationBuilder(); + var document = builder.AddEntrypoints(entrypoints).Build(); + + Assert.Single(document.Entrypoints); + var ep = document.Entrypoints[0]; + Assert.Equal("/app/myapp/__main__.py", ep.Path); + Assert.Equal("PackageMain", ep.Type); + Assert.Equal("Module", ep.InvocationContext); + } + + [Fact] + public void AddCapabilities_SetsCapabilityFlags() + { + var capabilities = new[] + { + new PythonCapability( + Kind: PythonCapabilityKind.ProcessExecution, + SourceFile: "/app/utils.py", + LineNumber: 10, + Evidence: "subprocess.run()", + Confidence: PythonCapabilityConfidence.Definitive), + new PythonCapability( + Kind: PythonCapabilityKind.NetworkAccess, + SourceFile: "/app/client.py", + LineNumber: 20, + Evidence: "requests.get()", + Confidence: PythonCapabilityConfidence.High), + new PythonCapability( + Kind: PythonCapabilityKind.AsyncAwait, + SourceFile: "/app/async_handler.py", + LineNumber: 5, + Evidence: "async def", + Confidence: PythonCapabilityConfidence.Definitive) + }; + + var builder = new PythonObservationBuilder(); + var document = builder.AddCapabilities(capabilities).Build(); + + Assert.True(document.Capabilities.UsesProcessExecution); + Assert.True(document.Capabilities.UsesNetworkAccess); + Assert.True(document.Capabilities.UsesAsyncAwait); + Assert.False(document.Capabilities.UsesFileSystem); + Assert.False(document.Capabilities.UsesNativeCode); + + // ProcessExecution is security sensitive + Assert.Contains("ProcessExecution", document.Capabilities.SecuritySensitiveCapabilities); + // NetworkAccess is also security sensitive + Assert.Contains("NetworkAccess", document.Capabilities.SecuritySensitiveCapabilities); + } + + [Fact] + public void AddNativeExtensions_AddsExtensionsAndSetsFlag() + { + var extensions = new[] + { + new PythonNativeExtension( + ModuleName: "numpy.core._multiarray_umath", + Path: "/venv/lib/python3.11/site-packages/numpy/core/_multiarray_umath.cpython-311-x86_64-linux-gnu.so", + Kind: PythonNativeExtensionKind.CExtension, + Platform: "linux", + Architecture: "x86_64", + Source: PythonFileSource.SitePackages, + PackageName: "numpy", + Dependencies: ImmutableArray.Empty) + }; + + var builder = new PythonObservationBuilder(); + var document = builder.AddNativeExtensions(extensions).Build(); + + Assert.Single(document.NativeExtensions); + Assert.True(document.Capabilities.UsesNativeCode); + + var ext = document.NativeExtensions[0]; + Assert.Equal("numpy.core._multiarray_umath", ext.ModuleName); + Assert.Equal("CExtension", ext.Kind); + Assert.Equal("numpy", ext.PackageName); + } + + [Fact] + public void AddFrameworkHints_AddsHintsWithCategory() + { + var hints = new[] + { + new PythonFrameworkHint( + Kind: PythonFrameworkKind.Flask, + SourceFile: "/app/main.py", + LineNumber: 5, + Evidence: "Flask(__name__)", + Confidence: PythonFrameworkConfidence.Definitive), + new PythonFrameworkHint( + Kind: PythonFrameworkKind.Celery, + SourceFile: "/app/tasks.py", + LineNumber: 1, + Evidence: "Celery()", + Confidence: PythonFrameworkConfidence.High) + }; + + var builder = new PythonObservationBuilder(); + var document = builder.AddFrameworkHints(hints).Build(); + + Assert.Equal(2, document.Frameworks.Length); + + var flask = document.Frameworks.First(f => f.Kind == "Flask"); + Assert.Equal("WebFramework", flask.Category); + Assert.Equal(PythonObservationConfidence.Definitive, flask.Confidence); + + var celery = document.Frameworks.First(f => f.Kind == "Celery"); + Assert.Equal("TaskQueue", celery.Category); + + Assert.Contains("Flask", document.Capabilities.DetectedFrameworks); + Assert.Contains("Celery", document.Capabilities.DetectedFrameworks); + } + + [Fact] + public void SetEnvironment_SetsEnvironmentCorrectly() + { + var builder = new PythonObservationBuilder(); + var document = builder + .SetEnvironment( + pythonVersion: "3.11.4", + sitePackagesPaths: ["/venv/lib/python3.11/site-packages"], + requirementsFiles: ["/app/requirements.txt"], + pyprojectFiles: ["/app/pyproject.toml"], + virtualenvPath: "/venv", + condaPrefix: null, + isContainer: true) + .Build(); + + Assert.Equal("3.11.4", document.Environment.PythonVersion); + Assert.Single(document.Environment.SitePackagesPaths); + Assert.Single(document.Environment.RequirementsFiles); + Assert.Single(document.Environment.PyprojectFiles); + Assert.Equal("/venv", document.Environment.VirtualenvPath); + Assert.Null(document.Environment.CondaPrefix); + Assert.True(document.Environment.IsContainer); + } + + [Fact] + public void AddWarning_AddsWarningsCorrectly() + { + var builder = new PythonObservationBuilder(); + var document = builder + .AddWarning("PY001", "Unresolved import: missing_module", "/app/main.py", 15) + .AddWarning("PY002", "Deprecated package usage", severity: "info") + .Build(); + + Assert.Equal(2, document.Warnings.Length); + + var warning1 = document.Warnings.First(w => w.Code == "PY001"); + Assert.Equal("Unresolved import: missing_module", warning1.Message); + Assert.Equal("/app/main.py", warning1.FilePath); + Assert.Equal(15, warning1.Line); + Assert.Equal("warning", warning1.Severity); + + var warning2 = document.Warnings.First(w => w.Code == "PY002"); + Assert.Null(warning2.FilePath); + Assert.Equal("info", warning2.Severity); + } + + [Fact] + public void SetRuntimeEvidence_SetsEvidenceCorrectly() + { + var evidence = new PythonObservationRuntimeEvidence( + HasEvidence: true, + RuntimePythonVersion: "3.11.4", + RuntimePlatform: "linux", + LoadedModulesCount: 50, + LoadedPackages: ImmutableArray.Create("numpy", "pandas"), + LoadedModules: ImmutableArray.Create("myapp", "myapp.core"), + PathHashes: ImmutableDictionary.Empty.Add("/app/main.py", "abc123"), + RuntimeCapabilities: ImmutableArray.Create("network", "filesystem"), + Errors: ImmutableArray.Empty); + + var builder = new PythonObservationBuilder(); + var document = builder.SetRuntimeEvidence(evidence).Build(); + + Assert.NotNull(document.RuntimeEvidence); + Assert.True(document.RuntimeEvidence.HasEvidence); + Assert.Equal("3.11.4", document.RuntimeEvidence.RuntimePythonVersion); + Assert.Equal(50, document.RuntimeEvidence.LoadedModulesCount); + Assert.Contains("numpy", document.RuntimeEvidence.LoadedPackages); + } + + [Fact] + public void AddImportEdges_AddsEdgesCorrectly() + { + var import1 = new PythonImport( + Module: "requests", + Names: null, + Alias: null, + Kind: PythonImportKind.Import, + RelativeLevel: 0, + SourceFile: "/app/client.py", + LineNumber: 1, + Confidence: PythonImportConfidence.Definitive); + + var import2 = new PythonImport( + Module: "json", + Names: [new PythonImportedName("loads"), new PythonImportedName("dumps")], + Alias: null, + Kind: PythonImportKind.FromImport, + RelativeLevel: 0, + SourceFile: "/app/client.py", + LineNumber: 2, + Confidence: PythonImportConfidence.High); + + var edges = new[] + { + new PythonImportEdge("myapp.client", "requests", import1), + new PythonImportEdge("myapp.client", "json", import2) + }; + + var builder = new PythonObservationBuilder(); + var document = builder.AddImportEdges(edges).Build(); + + Assert.Equal(2, document.ImportEdges.Length); + + var requestsEdge = document.ImportEdges.First(e => e.ToModule == "requests"); + Assert.Equal("myapp.client", requestsEdge.FromModule); + Assert.Equal(PythonObservationImportKind.Import, requestsEdge.Kind); + Assert.Equal(PythonObservationConfidence.Definitive, requestsEdge.Confidence); + + var jsonEdge = document.ImportEdges.First(e => e.ToModule == "json"); + Assert.Equal(PythonObservationImportKind.FromImport, jsonEdge.Kind); + Assert.Equal(PythonObservationConfidence.High, jsonEdge.Confidence); + } + + [Fact] + public void FluentBuilder_ChainsCorrectly() + { + var packages = new[] + { + new PythonPackageInfo( + Name: "flask", + Version: "3.0.0", + Kind: PythonPackageKind.Wheel, + Location: "/venv/lib/python3.11/site-packages", + MetadataPath: null, + TopLevelModules: ImmutableArray.Create("flask"), + Dependencies: ImmutableArray.Empty, + Extras: ImmutableArray.Empty, + RecordFiles: ImmutableArray.Empty, + InstallerTool: "pip", + EditableTarget: null, + IsDirectDependency: true, + Confidence: PythonPackageConfidence.High) + }; + + var capabilities = new[] + { + new PythonCapability( + Kind: PythonCapabilityKind.NetworkAccess, + SourceFile: "/app/app.py", + LineNumber: 10, + Evidence: "http server", + Confidence: PythonCapabilityConfidence.High) + }; + + var frameworks = new[] + { + new PythonFrameworkHint( + Kind: PythonFrameworkKind.Flask, + SourceFile: "/app/app.py", + LineNumber: 3, + Evidence: "Flask(__name__)", + Confidence: PythonFrameworkConfidence.Definitive) + }; + + var document = new PythonObservationBuilder() + .AddPackages(packages) + .AddCapabilities(capabilities) + .AddFrameworkHints(frameworks) + .SetEnvironment("3.11.0", isContainer: false) + .AddWarning("PY100", "Test warning") + .Build(); + + Assert.Single(document.Packages); + Assert.Single(document.Frameworks); + Assert.Single(document.Warnings); + Assert.True(document.Capabilities.UsesNetworkAccess); + Assert.Equal("3.11.0", document.Environment.PythonVersion); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Observations/PythonObservationSerializerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Observations/PythonObservationSerializerTests.cs new file mode 100644 index 000000000..ac9d34edd --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Observations/PythonObservationSerializerTests.cs @@ -0,0 +1,280 @@ +using System.Collections.Immutable; +using System.Text.Json; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Observations; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Tests.Observations; + +public sealed class PythonObservationSerializerTests +{ + [Fact] + public void Serialize_EmptyDocument_ProducesValidJson() + { + var document = new PythonObservationBuilder().Build(); + + var json = PythonObservationSerializer.Serialize(document); + + Assert.NotEmpty(json); + Assert.Contains("\"schema\":", json); + Assert.Contains("\"python-aoc-v1\"", json); + Assert.Contains("\"packages\":", json); + Assert.Contains("\"modules\":", json); + Assert.Contains("\"capabilities\":", json); + + // Validate it's parseable JSON + var parsed = JsonDocument.Parse(json); + Assert.NotNull(parsed); + } + + [Fact] + public void Serialize_Compact_ProducesMinifiedJson() + { + var document = new PythonObservationBuilder() + .AddWarning("PY001", "Test warning") + .Build(); + + var pretty = PythonObservationSerializer.Serialize(document, compact: false); + var compact = PythonObservationSerializer.Serialize(document, compact: true); + + Assert.True(compact.Length < pretty.Length); + Assert.DoesNotContain("\n", compact); + } + + [Fact] + public void Serialize_UsesCamelCase() + { + var document = new PythonObservationBuilder() + .SetEnvironment("3.11.0", isContainer: true) + .Build(); + + var json = PythonObservationSerializer.Serialize(document); + + Assert.Contains("\"pythonVersion\":", json); + Assert.Contains("\"sitePackagesPaths\":", json); + Assert.Contains("\"isContainer\":", json); + Assert.DoesNotContain("\"PythonVersion\":", json); + Assert.DoesNotContain("\"SitePackagesPaths\":", json); + } + + [Fact] + public void Serialize_OmitsNullValues() + { + var document = new PythonObservationBuilder().Build(); + + var json = PythonObservationSerializer.Serialize(document); + + // RuntimeEvidence is null by default + Assert.DoesNotContain("\"runtimeEvidence\":", json); + } + + [Fact] + public void Serialize_EnumsAsStrings() + { + var edges = ImmutableArray.Create( + new PythonObservationImportEdge( + FromModule: "app", + ToModule: "requests", + Kind: PythonObservationImportKind.Import, + Confidence: PythonObservationConfidence.High, + ResolvedPath: null, + SourceFile: "/app/main.py", + Line: 1, + ResolverTrace: ImmutableArray.Empty)); + + var document = new PythonObservationDocument( + Schema: "python-aoc-v1", + Packages: ImmutableArray.Empty, + Modules: ImmutableArray.Empty, + Entrypoints: ImmutableArray.Empty, + DependencyEdges: ImmutableArray.Empty, + ImportEdges: edges, + NativeExtensions: ImmutableArray.Empty, + Frameworks: ImmutableArray.Empty, + Warnings: ImmutableArray.Empty, + Environment: new PythonObservationEnvironment( + PythonVersion: null, + SitePackagesPaths: ImmutableArray.Empty, + VersionSources: ImmutableArray.Empty, + RequirementsFiles: ImmutableArray.Empty, + PyprojectFiles: ImmutableArray.Empty, + VirtualenvPath: null, + CondaPrefix: null, + IsContainer: false), + Capabilities: new PythonObservationCapabilitySummary( + UsesProcessExecution: false, + UsesNetworkAccess: false, + UsesFileSystem: false, + UsesCodeExecution: false, + UsesDeserialization: false, + UsesNativeCode: false, + UsesAsyncAwait: false, + UsesMultiprocessing: false, + DetectedFrameworks: ImmutableArray.Empty, + SecuritySensitiveCapabilities: ImmutableArray.Empty)); + + var json = PythonObservationSerializer.Serialize(document); + + // Check that enums are serialized as camelCase strings + Assert.Contains("\"import\"", json); + Assert.Contains("\"high\"", json); + } + + [Fact] + public void Deserialize_RoundTrips() + { + var original = new PythonObservationBuilder() + .SetEnvironment("3.11.0", ["/venv/lib/python3.11/site-packages"], isContainer: true) + .AddWarning("PY001", "Test warning", "/app/main.py", 10) + .Build(); + + var json = PythonObservationSerializer.Serialize(original); + var deserialized = PythonObservationSerializer.Deserialize(json); + + Assert.NotNull(deserialized); + Assert.Equal(original.Schema, deserialized.Schema); + Assert.Equal(original.Environment.PythonVersion, deserialized.Environment.PythonVersion); + Assert.Equal(original.Environment.IsContainer, deserialized.Environment.IsContainer); + Assert.Equal(original.Warnings.Length, deserialized.Warnings.Length); + Assert.Equal(original.Warnings[0].Code, deserialized.Warnings[0].Code); + } + + [Fact] + public async Task SerializeAsync_WritesToStream() + { + var cancellationToken = TestContext.Current.CancellationToken; + var document = new PythonObservationBuilder() + .AddWarning("PY001", "Test") + .Build(); + + using var stream = new MemoryStream(); + await PythonObservationSerializer.SerializeAsync(document, stream, compact: false, cancellationToken); + + stream.Position = 0; + using var reader = new StreamReader(stream); + var json = await reader.ReadToEndAsync(cancellationToken); + + Assert.Contains("\"python-aoc-v1\"", json); + Assert.Contains("\"PY001\"", json); + } + + [Fact] + public async Task DeserializeAsync_ReadsFromStream() + { + var cancellationToken = TestContext.Current.CancellationToken; + var json = """ + { + "schema": "python-aoc-v1", + "packages": [], + "modules": [], + "entrypoints": [], + "dependencyEdges": [], + "importEdges": [], + "nativeExtensions": [], + "frameworks": [], + "warnings": [{"code": "PY001", "message": "Test", "severity": "warning"}], + "environment": { + "sitePackagesPaths": [], + "versionSources": [], + "requirementsFiles": [], + "pyprojectFiles": [], + "isContainer": false + }, + "capabilities": { + "usesProcessExecution": false, + "usesNetworkAccess": false, + "usesFileSystem": false, + "usesCodeExecution": false, + "usesDeserialization": false, + "usesNativeCode": false, + "usesAsyncAwait": false, + "usesMultiprocessing": false, + "detectedFrameworks": [], + "securitySensitiveCapabilities": [] + } + } + """; + + using var stream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(json)); + var document = await PythonObservationSerializer.DeserializeAsync(stream, cancellationToken); + + Assert.NotNull(document); + Assert.Equal("python-aoc-v1", document.Schema); + Assert.Single(document.Warnings); + Assert.Equal("PY001", document.Warnings[0].Code); + } + + [Fact] + public void Serialize_FullDocument_ProducesExpectedStructure() + { + var document = new PythonObservationDocument( + Schema: "python-aoc-v1", + Packages: ImmutableArray.Create( + new PythonObservationPackage( + Name: "requests", + Version: "2.31.0", + Source: "Wheel", + Platform: null, + IsDirect: true, + InstallerKind: "pip", + DistInfoPath: "/site-packages/requests-2.31.0.dist-info", + Groups: ImmutableArray.Empty, + Extras: ImmutableArray.Empty)), + Modules: ImmutableArray.Create( + new PythonObservationModule( + Name: "myapp", + Type: "package", + FilePath: "/app/myapp/__init__.py", + Line: null, + IsNamespacePackage: false, + ParentPackage: null, + Imports: ImmutableArray.Create("requests", "json"))), + Entrypoints: ImmutableArray.Create( + new PythonObservationEntrypoint( + Path: "/app/myapp/__main__.py", + Type: "PackageMain", + Handler: null, + RequiredPackages: ImmutableArray.Empty, + InvocationContext: "Module")), + DependencyEdges: ImmutableArray.Create( + new PythonObservationDependencyEdge( + FromPackage: "myapp", + ToPackage: "requests", + VersionConstraint: ">=2.28.0", + Extra: null, + IsOptional: false)), + ImportEdges: ImmutableArray.Empty, + NativeExtensions: ImmutableArray.Empty, + Frameworks: ImmutableArray.Empty, + Warnings: ImmutableArray.Empty, + Environment: new PythonObservationEnvironment( + PythonVersion: "3.11.4", + SitePackagesPaths: ImmutableArray.Create("/app/.venv/lib/python3.11/site-packages"), + VersionSources: ImmutableArray.Empty, + RequirementsFiles: ImmutableArray.Create("/app/requirements.txt"), + PyprojectFiles: ImmutableArray.Empty, + VirtualenvPath: "/app/.venv", + CondaPrefix: null, + IsContainer: true), + Capabilities: new PythonObservationCapabilitySummary( + UsesProcessExecution: false, + UsesNetworkAccess: true, + UsesFileSystem: false, + UsesCodeExecution: false, + UsesDeserialization: false, + UsesNativeCode: false, + UsesAsyncAwait: true, + UsesMultiprocessing: false, + DetectedFrameworks: ImmutableArray.Create("FastAPI"), + SecuritySensitiveCapabilities: ImmutableArray.Empty)); + + var json = PythonObservationSerializer.Serialize(document); + var parsed = JsonDocument.Parse(json); + + // Verify structure + var root = parsed.RootElement; + Assert.Equal("python-aoc-v1", root.GetProperty("schema").GetString()); + Assert.Equal(1, root.GetProperty("packages").GetArrayLength()); + Assert.Equal("requests", root.GetProperty("packages")[0].GetProperty("name").GetString()); + Assert.True(root.GetProperty("capabilities").GetProperty("usesNetworkAccess").GetBoolean()); + Assert.Equal("3.11.4", root.GetProperty("environment").GetProperty("pythonVersion").GetString()); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/RuntimeEvidence/PythonPathHasherTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/RuntimeEvidence/PythonPathHasherTests.cs new file mode 100644 index 000000000..70c3ea272 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/RuntimeEvidence/PythonPathHasherTests.cs @@ -0,0 +1,125 @@ +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.RuntimeEvidence; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Tests.RuntimeEvidence; + +/// +/// Tests for PythonPathHasher path scrubbing and hashing functionality. +/// +public sealed class PythonPathHasherTests +{ + [Theory] + [InlineData("/home/user/project/main.py", "[HOME]/project/main.py")] + [InlineData("/Users/developer/code/app.py", "[HOME]/code/app.py")] + [InlineData("C:\\Users\\admin\\Documents\\script.py", "[HOME]/Documents/script.py")] + [InlineData("/root/.local/lib/python3.11/site-packages/flask/__init__.py", "[ROOT]/.local/lib/python3.11/site-packages/flask/__init__.py")] + [InlineData("/tmp/abc123/temp.py", "[TEMP]/temp.py")] + public void ScrubPath_ReplacesSensitiveComponents(string input, string expected) + { + var result = PythonPathHasher.ScrubPath(input); + Assert.Equal(expected, result); + } + + [Fact] + public void ScrubPath_NullInput_ReturnsEmpty() + { + var result = PythonPathHasher.ScrubPath(null); + Assert.Equal(string.Empty, result); + } + + [Fact] + public void ScrubPath_EmptyInput_ReturnsEmpty() + { + var result = PythonPathHasher.ScrubPath(string.Empty); + Assert.Equal(string.Empty, result); + } + + [Fact] + public void ScrubPath_NormalizesPathSeparators() + { + var result = PythonPathHasher.ScrubPath("C:\\Users\\test\\project\\main.py"); + Assert.Contains("/", result); + Assert.DoesNotContain("\\", result); + } + + [Fact] + public void HashPath_ReturnsDeterministicHash() + { + var path = "/usr/lib/python3.11/site-packages/flask/__init__.py"; + var hash1 = PythonPathHasher.HashPath(path); + var hash2 = PythonPathHasher.HashPath(path); + + Assert.Equal(hash1, hash2); + Assert.Equal(64, hash1.Length); // SHA-256 produces 64 hex chars + } + + [Fact] + public void HashPath_NullInput_ReturnsEmpty() + { + var result = PythonPathHasher.HashPath(null); + Assert.Equal(string.Empty, result); + } + + [Fact] + public void HashPath_DifferentPaths_ProduceDifferentHashes() + { + var hash1 = PythonPathHasher.HashPath("/path/to/module1.py"); + var hash2 = PythonPathHasher.HashPath("/path/to/module2.py"); + + Assert.NotEqual(hash1, hash2); + } + + [Fact] + public void HashPath_CaseInsensitiveNormalization() + { + // Windows paths with different case should hash the same + var hash1 = PythonPathHasher.HashPath("/Path/To/Module.py"); + var hash2 = PythonPathHasher.HashPath("/path/to/module.py"); + + Assert.Equal(hash1, hash2); + } + + [Fact] + public void ScrubAndHash_ReturnsBothValues() + { + var path = "/home/user/project/main.py"; + var (scrubbed, hash) = PythonPathHasher.ScrubAndHash(path); + + Assert.Equal("[HOME]/project/main.py", scrubbed); + Assert.NotEmpty(hash); + Assert.Equal(64, hash.Length); + } + + [Theory] + [InlineData("/usr/lib/python3.11/site-packages/flask/__init__.py", "flask")] + [InlineData("/usr/lib/python3.11/site-packages/requests/api.py", "requests.api")] + [InlineData("/usr/lib/python3.11/site-packages/numpy/core/__init__.py", "numpy.core")] + [InlineData("/usr/lib/python3.11/dist-packages/django/views.py", "django.views")] + public void ExtractModuleName_ExtractsFromSitePackages(string path, string expected) + { + var result = PythonPathHasher.ExtractModuleName(path); + Assert.Equal(expected, result); + } + + [Theory] + [InlineData("/usr/lib/python3.11/site-packages/numpy/core/multiarray.cpython-311-x86_64-linux-gnu.so", "numpy.core.multiarray")] + [InlineData("/usr/lib/python3.11/site-packages/_ssl.cpython-311-x86_64-linux-gnu.so", "_ssl")] + public void ExtractModuleName_HandlesNativeExtensions(string path, string expected) + { + var result = PythonPathHasher.ExtractModuleName(path); + Assert.Equal(expected, result); + } + + [Fact] + public void ExtractModuleName_NullInput_ReturnsNull() + { + var result = PythonPathHasher.ExtractModuleName(null); + Assert.Null(result); + } + + [Fact] + public void ExtractModuleName_FallbackToFilename() + { + var result = PythonPathHasher.ExtractModuleName("/some/other/path/mymodule.py"); + Assert.Equal("mymodule", result); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/RuntimeEvidence/PythonRuntimeEvidenceCollectorTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/RuntimeEvidence/PythonRuntimeEvidenceCollectorTests.cs new file mode 100644 index 000000000..50e6ba968 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/RuntimeEvidence/PythonRuntimeEvidenceCollectorTests.cs @@ -0,0 +1,240 @@ +using StellaOps.Scanner.Analyzers.Lang.Python.Internal.RuntimeEvidence; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Tests.RuntimeEvidence; + +/// +/// Tests for PythonRuntimeEvidenceCollector. +/// +public sealed class PythonRuntimeEvidenceCollectorTests +{ + [Fact] + public void ParseLine_InterpreterStart_CapturesVersion() + { + var collector = new PythonRuntimeEvidenceCollector(); + + var json = """{"type": "interpreter_start", "python_version": "3.11.5", "platform": "linux", "pid": 12345}"""; + collector.ParseLine(json); + + var evidence = collector.Build(); + + Assert.True(evidence.HasEvidence); + Assert.Equal("3.11.5", evidence.RuntimePythonVersion); + Assert.Equal("linux", evidence.RuntimePlatform); + } + + [Fact] + public void ParseLine_ModuleImport_TracksModule() + { + var collector = new PythonRuntimeEvidenceCollector(); + + var json = """{"type": "module_import", "module": "flask", "path": "/usr/lib/python3.11/site-packages/flask/__init__.py", "pid": 12345}"""; + collector.ParseLine(json); + + var evidence = collector.Build(); + + Assert.Contains("flask", evidence.LoadedModules); + Assert.Contains("flask", evidence.LoadedPackages); + Assert.Equal(1, evidence.LoadedModulesCount); + } + + [Fact] + public void ParseLine_NestedModule_ExtractsTopLevelPackage() + { + var collector = new PythonRuntimeEvidenceCollector(); + + var json = """{"type": "module_import", "module": "flask.views", "path": "/usr/lib/python3.11/site-packages/flask/views.py", "pid": 12345}"""; + collector.ParseLine(json); + + var evidence = collector.Build(); + + Assert.Contains("flask.views", evidence.LoadedModules); + Assert.Contains("flask", evidence.LoadedPackages); + } + + [Fact] + public void ParseLine_NativeLoad_TracksCapability() + { + var collector = new PythonRuntimeEvidenceCollector(); + + var json = """{"type": "native_load", "module": "numpy.core.multiarray", "path": "/usr/lib/python3.11/site-packages/numpy/core/multiarray.cpython-311-x86_64-linux-gnu.so", "pid": 12345}"""; + collector.ParseLine(json); + + var evidence = collector.Build(); + + Assert.Contains("numpy.core.multiarray", evidence.LoadedModules); + Assert.Contains("native_code", evidence.RuntimeCapabilities); + } + + [Fact] + public void ParseLine_DynamicImport_TracksCapability() + { + var collector = new PythonRuntimeEvidenceCollector(); + + var json = """{"type": "dynamic_import", "module": "plugin_module", "pid": 12345}"""; + collector.ParseLine(json); + + var evidence = collector.Build(); + + Assert.Contains("plugin_module", evidence.LoadedModules); + Assert.Contains("dynamic_import", evidence.RuntimeCapabilities); + } + + [Fact] + public void ParseLine_ProcessSpawn_TracksCapability() + { + var collector = new PythonRuntimeEvidenceCollector(); + + var json = """{"type": "process_spawn", "spawn_type": "subprocess", "pid": 12345}"""; + collector.ParseLine(json); + + var evidence = collector.Build(); + + Assert.Contains("process_spawn", evidence.RuntimeCapabilities); + } + + [Fact] + public void ParseLine_MultiprocessingSpawn_TracksMultiprocessingCapability() + { + var collector = new PythonRuntimeEvidenceCollector(); + + var json = """{"type": "process_spawn", "spawn_type": "multiprocessing", "pid": 12345}"""; + collector.ParseLine(json); + + var evidence = collector.Build(); + + Assert.Contains("process_spawn", evidence.RuntimeCapabilities); + Assert.Contains("multiprocessing", evidence.RuntimeCapabilities); + } + + [Fact] + public void ParseLine_ModuleError_CapturesError() + { + var collector = new PythonRuntimeEvidenceCollector(); + + var json = """{"type": "module_error", "module": "missing_module", "error": "ModuleNotFoundError: No module named 'missing_module'", "pid": 12345}"""; + collector.ParseLine(json); + + var evidence = collector.Build(); + + Assert.NotEmpty(evidence.Errors); + Assert.Contains(evidence.Errors, e => e.Message.Contains("missing_module")); + } + + [Fact] + public void ParseLine_PathModification_AddsHash() + { + var collector = new PythonRuntimeEvidenceCollector(); + + var json = """{"type": "path_modification", "path": "/usr/lib/python3.11/site-packages", "action": "append", "pid": 12345}"""; + collector.ParseLine(json); + + var evidence = collector.Build(); + + Assert.NotEmpty(evidence.PathHashes); + } + + [Fact] + public void ParseOutput_MultipleLines_ParsesAll() + { + var collector = new PythonRuntimeEvidenceCollector(); + + var ndjson = """ + {"type": "interpreter_start", "python_version": "3.11.5", "platform": "linux", "pid": 12345} + {"type": "module_import", "module": "os", "path": "/usr/lib/python3.11/os.py", "pid": 12345} + {"type": "module_import", "module": "sys", "path": null, "pid": 12345} + {"type": "module_import", "module": "json", "path": "/usr/lib/python3.11/json/__init__.py", "pid": 12345} + """; + + collector.ParseOutput(ndjson); + + var evidence = collector.Build(); + + Assert.True(evidence.HasEvidence); + Assert.Equal("3.11.5", evidence.RuntimePythonVersion); + Assert.Contains("os", evidence.LoadedModules); + Assert.Contains("sys", evidence.LoadedModules); + Assert.Contains("json", evidence.LoadedModules); + Assert.Equal(3, evidence.LoadedModulesCount); + } + + [Fact] + public void ParseLine_MalformedJson_Ignored() + { + var collector = new PythonRuntimeEvidenceCollector(); + + collector.ParseLine("not valid json"); + collector.ParseLine("{incomplete"); + + var evidence = collector.Build(); + + Assert.False(evidence.HasEvidence); + } + + [Fact] + public void ParseLine_EmptyLine_Ignored() + { + var collector = new PythonRuntimeEvidenceCollector(); + + collector.ParseLine(string.Empty); + collector.ParseLine(" "); + collector.ParseLine(null!); + + var evidence = collector.Build(); + + Assert.False(evidence.HasEvidence); + } + + [Fact] + public void Build_NoEvents_ReturnsEmptyEvidence() + { + var collector = new PythonRuntimeEvidenceCollector(); + + var evidence = collector.Build(); + + Assert.False(evidence.HasEvidence); + Assert.Null(evidence.RuntimePythonVersion); + Assert.Null(evidence.RuntimePlatform); + Assert.Equal(0, evidence.LoadedModulesCount); + Assert.Empty(evidence.LoadedModules); + Assert.Empty(evidence.LoadedPackages); + } + + [Fact] + public void Build_ModulesAreSorted() + { + var collector = new PythonRuntimeEvidenceCollector(); + + collector.ParseLine("""{"type": "module_import", "module": "zebra", "pid": 1}"""); + collector.ParseLine("""{"type": "module_import", "module": "alpha", "pid": 1}"""); + collector.ParseLine("""{"type": "module_import", "module": "middle", "pid": 1}"""); + + var evidence = collector.Build(); + + Assert.Equal(["alpha", "middle", "zebra"], evidence.LoadedModules); + } + + [Fact] + public void Events_ReturnsAllCapturedEvents() + { + var collector = new PythonRuntimeEvidenceCollector(); + + collector.ParseLine("""{"type": "interpreter_start", "python_version": "3.11.5", "pid": 1}"""); + collector.ParseLine("""{"type": "module_import", "module": "os", "pid": 1}"""); + + Assert.Equal(2, collector.Events.Count); + Assert.Equal(PythonRuntimeEventKind.InterpreterStart, collector.Events[0].Kind); + Assert.Equal(PythonRuntimeEventKind.ModuleImport, collector.Events[1].Kind); + } + + [Fact] + public void ParseLine_ScrubbsPathsInEvents() + { + var collector = new PythonRuntimeEvidenceCollector(); + + var json = """{"type": "module_import", "module": "mymodule", "path": "/home/user/project/mymodule.py", "pid": 12345}"""; + collector.ParseLine(json); + + var moduleEvent = collector.Events[0]; + Assert.Equal("[HOME]/project/mymodule.py", moduleEvent.ModulePath); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/Fixtures/opt/homebrew/Cellar/jq/1.7/INSTALL_RECEIPT.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/Fixtures/opt/homebrew/Cellar/jq/1.7/INSTALL_RECEIPT.json new file mode 100644 index 000000000..55fd0e547 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/Fixtures/opt/homebrew/Cellar/jq/1.7/INSTALL_RECEIPT.json @@ -0,0 +1,22 @@ +{ + "name": "jq", + "versions": { + "stable": "1.7" + }, + "revision": 0, + "tap": "homebrew/core", + "poured_from_bottle": true, + "time": 1700000000, + "installed_as_dependency": false, + "installed_on_request": true, + "runtime_dependencies": [], + "build_dependencies": [], + "source": { + "url": "https://github.com/jqlang/jq/releases/download/jq-1.7/jq-1.7.tar.gz", + "checksum": "sha256:jq17hash" + }, + "desc": "Lightweight and flexible command-line JSON processor", + "homepage": "https://jqlang.github.io/jq/", + "license": "MIT", + "arch": "arm64" +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/Fixtures/usr/local/Cellar/openssl@3/3.1.0/INSTALL_RECEIPT.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/Fixtures/usr/local/Cellar/openssl@3/3.1.0/INSTALL_RECEIPT.json new file mode 100644 index 000000000..10be3f089 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/Fixtures/usr/local/Cellar/openssl@3/3.1.0/INSTALL_RECEIPT.json @@ -0,0 +1,27 @@ +{ + "name": "openssl@3", + "versions": { + "stable": "3.1.0" + }, + "revision": 0, + "tap": "homebrew/core", + "poured_from_bottle": true, + "time": 1699000000, + "installed_as_dependency": false, + "installed_on_request": true, + "runtime_dependencies": [ + { + "full_name": "ca-certificates", + "version": "2023-01-10" + } + ], + "build_dependencies": [], + "source": { + "url": "https://www.openssl.org/source/openssl-3.1.0.tar.gz", + "checksum": "sha256:aafde89dd0e91c3d0e87c4b4e3f4d4c9f8f5a6e2b3d4c5a6f7e8d9c0a1b2c3d4e5" + }, + "desc": "Cryptography and SSL/TLS Toolkit", + "homepage": "https://openssl.org/", + "license": "Apache-2.0", + "arch": "x86_64" +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/Fixtures/usr/local/Cellar/wget/1.21.4/INSTALL_RECEIPT.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/Fixtures/usr/local/Cellar/wget/1.21.4/INSTALL_RECEIPT.json new file mode 100644 index 000000000..af103008c --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/Fixtures/usr/local/Cellar/wget/1.21.4/INSTALL_RECEIPT.json @@ -0,0 +1,31 @@ +{ + "name": "wget", + "versions": { + "stable": "1.21.4" + }, + "revision": 1, + "tap": "homebrew/core", + "poured_from_bottle": true, + "time": 1698500000, + "installed_as_dependency": true, + "installed_on_request": false, + "runtime_dependencies": [ + { + "full_name": "openssl@3", + "version": "3.1.0" + }, + { + "full_name": "gettext", + "version": "0.21.1" + } + ], + "build_dependencies": [], + "source": { + "url": "https://ftp.gnu.org/gnu/wget/wget-1.21.4.tar.gz", + "checksum": "sha256:abc123def456" + }, + "desc": "Internet file retriever", + "homepage": "https://www.gnu.org/software/wget/", + "license": "GPL-3.0-or-later", + "arch": "x86_64" +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/HomebrewPackageAnalyzerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/HomebrewPackageAnalyzerTests.cs new file mode 100644 index 000000000..9be949cf1 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/HomebrewPackageAnalyzerTests.cs @@ -0,0 +1,222 @@ +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Homebrew; +using Xunit; + +namespace StellaOps.Scanner.Analyzers.OS.Homebrew.Tests; + +public sealed class HomebrewPackageAnalyzerTests +{ + private static readonly string FixturesRoot = Path.Combine( + AppContext.BaseDirectory, + "Fixtures"); + + private readonly HomebrewPackageAnalyzer _analyzer; + private readonly ILogger _logger; + + public HomebrewPackageAnalyzerTests() + { + _logger = NullLoggerFactory.Instance.CreateLogger(); + _analyzer = new HomebrewPackageAnalyzer((ILogger)_logger); + } + + private OSPackageAnalyzerContext CreateContext(string rootPath) + { + return new OSPackageAnalyzerContext( + rootPath, + workspacePath: null, + TimeProvider.System, + _logger); + } + + [Fact] + public void AnalyzerId_ReturnsHomebrew() + { + Assert.Equal("homebrew", _analyzer.AnalyzerId); + } + + [Fact] + public async Task AnalyzeAsync_WithValidCellar_ReturnsPackages() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + Assert.NotNull(result); + Assert.Equal("homebrew", result.AnalyzerId); + Assert.True(result.Packages.Count > 0, "Expected at least one package"); + } + + [Fact] + public async Task AnalyzeAsync_FindsIntelCellarPackages() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var openssl = result.Packages.FirstOrDefault(p => p.Name == "openssl@3"); + Assert.NotNull(openssl); + Assert.Equal("3.1.0", openssl.Version); + Assert.Equal("x86_64", openssl.Architecture); + Assert.Contains("pkg:brew/homebrew%2Fcore/openssl%403@3.1.0", openssl.PackageUrl); + } + + [Fact] + public async Task AnalyzeAsync_FindsAppleSiliconCellarPackages() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var jq = result.Packages.FirstOrDefault(p => p.Name == "jq"); + Assert.NotNull(jq); + Assert.Equal("1.7", jq.Version); + Assert.Equal("arm64", jq.Architecture); + } + + [Fact] + public async Task AnalyzeAsync_PackageWithRevision_IncludesRevisionInPurl() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var wget = result.Packages.FirstOrDefault(p => p.Name == "wget"); + Assert.NotNull(wget); + Assert.Contains("?revision=1", wget.PackageUrl); + Assert.Equal("1", wget.Release); + } + + [Fact] + public async Task AnalyzeAsync_ExtractsDependencies() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var wget = result.Packages.FirstOrDefault(p => p.Name == "wget"); + Assert.NotNull(wget); + Assert.Contains("openssl@3", wget.Depends); + Assert.Contains("gettext", wget.Depends); + } + + [Fact] + public async Task AnalyzeAsync_ExtractsVendorMetadata() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var openssl = result.Packages.FirstOrDefault(p => p.Name == "openssl@3"); + Assert.NotNull(openssl); + Assert.Equal("homebrew/core", openssl.VendorMetadata["brew:tap"]); + Assert.Equal("true", openssl.VendorMetadata["brew:poured_from_bottle"]); + Assert.Equal("Cryptography and SSL/TLS Toolkit", openssl.VendorMetadata["description"]); + Assert.Equal("https://openssl.org/", openssl.VendorMetadata["homepage"]); + } + + [Fact] + public async Task AnalyzeAsync_SetsEvidenceSourceToHomebrewCellar() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + foreach (var package in result.Packages) + { + Assert.Equal(PackageEvidenceSource.HomebrewCellar, package.EvidenceSource); + } + } + + [Fact] + public async Task AnalyzeAsync_DiscoversBinFiles() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var wget = result.Packages.FirstOrDefault(p => p.Name == "wget"); + Assert.NotNull(wget); + Assert.Contains(wget.Files, f => f.Path.Contains("wget")); + } + + [Fact] + public async Task AnalyzeAsync_ResultsAreDeterministicallySorted() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result1 = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + var result2 = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + Assert.Equal(result1.Packages.Count, result2.Packages.Count); + for (int i = 0; i < result1.Packages.Count; i++) + { + Assert.Equal(result1.Packages[i].PackageUrl, result2.Packages[i].PackageUrl); + } + } + + [Fact] + public async Task AnalyzeAsync_NoCellar_ReturnsEmptyPackages() + { + // Arrange - use temp directory without Cellar structure + var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); + Directory.CreateDirectory(tempPath); + + try + { + var context = CreateContext(tempPath); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + Assert.Empty(result.Packages); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public async Task AnalyzeAsync_PopulatesTelemetry() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + Assert.NotNull(result.Telemetry); + Assert.True(result.Telemetry.PackageCount > 0); + Assert.True(result.Telemetry.Duration > TimeSpan.Zero); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/HomebrewReceiptParserTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/HomebrewReceiptParserTests.cs new file mode 100644 index 000000000..fe8f94341 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/HomebrewReceiptParserTests.cs @@ -0,0 +1,269 @@ +using System.Text; +using StellaOps.Scanner.Analyzers.OS.Homebrew; +using Xunit; + +namespace StellaOps.Scanner.Analyzers.OS.Homebrew.Tests; + +public sealed class HomebrewReceiptParserTests +{ + private readonly HomebrewReceiptParser _parser = new(); + + [Fact] + public void Parse_ValidReceipt_ReturnsExpectedValues() + { + // Arrange + var json = """ + { + "name": "openssl@3", + "versions": { "stable": "3.1.0" }, + "revision": 0, + "tap": "homebrew/core", + "poured_from_bottle": true, + "time": 1699000000, + "installed_as_dependency": false, + "installed_on_request": true, + "runtime_dependencies": [{ "full_name": "ca-certificates", "version": "2023-01-10" }], + "desc": "Cryptography and SSL/TLS Toolkit", + "homepage": "https://openssl.org/", + "license": "Apache-2.0", + "arch": "x86_64" + } + """; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var receipt = _parser.Parse(stream); + + // Assert + Assert.NotNull(receipt); + Assert.Equal("openssl@3", receipt.Name); + Assert.Equal("3.1.0", receipt.Version); + Assert.Equal(0, receipt.Revision); + Assert.Equal("homebrew/core", receipt.Tap); + Assert.True(receipt.PouredFromBottle); + Assert.False(receipt.InstalledAsDependency); + Assert.True(receipt.InstalledOnRequest); + Assert.Single(receipt.RuntimeDependencies); + Assert.Equal("ca-certificates", receipt.RuntimeDependencies[0]); + Assert.Equal("Cryptography and SSL/TLS Toolkit", receipt.Description); + Assert.Equal("https://openssl.org/", receipt.Homepage); + Assert.Equal("Apache-2.0", receipt.License); + Assert.Equal("x86_64", receipt.Architecture); + } + + [Fact] + public void Parse_WithRevision_ReturnsCorrectRevision() + { + // Arrange + var json = """ + { + "name": "wget", + "versions": { "stable": "1.21.4" }, + "revision": 1, + "tap": "homebrew/core", + "poured_from_bottle": true, + "arch": "x86_64" + } + """; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var receipt = _parser.Parse(stream); + + // Assert + Assert.NotNull(receipt); + Assert.Equal("wget", receipt.Name); + Assert.Equal("1.21.4", receipt.Version); + Assert.Equal(1, receipt.Revision); + } + + [Fact] + public void Parse_AppleSilicon_ReturnsArm64Architecture() + { + // Arrange + var json = """ + { + "name": "jq", + "versions": { "stable": "1.7" }, + "revision": 0, + "tap": "homebrew/core", + "arch": "arm64" + } + """; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var receipt = _parser.Parse(stream); + + // Assert + Assert.NotNull(receipt); + Assert.Equal("arm64", receipt.Architecture); + } + + [Fact] + public void Parse_WithSourceInfo_ExtractsSourceUrlAndChecksum() + { + // Arrange + var json = """ + { + "name": "test", + "versions": { "stable": "1.0.0" }, + "tap": "homebrew/core", + "source": { + "url": "https://example.com/test-1.0.0.tar.gz", + "checksum": "sha256:abcdef123456" + } + } + """; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var receipt = _parser.Parse(stream); + + // Assert + Assert.NotNull(receipt); + Assert.Equal("https://example.com/test-1.0.0.tar.gz", receipt.SourceUrl); + Assert.Equal("sha256:abcdef123456", receipt.SourceChecksum); + } + + [Fact] + public void Parse_MultipleDependencies_SortsAlphabetically() + { + // Arrange + var json = """ + { + "name": "test", + "versions": { "stable": "1.0.0" }, + "tap": "homebrew/core", + "runtime_dependencies": [ + { "full_name": "zlib" }, + { "full_name": "openssl" }, + { "full_name": "libpng" } + ] + } + """; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var receipt = _parser.Parse(stream); + + // Assert + Assert.NotNull(receipt); + Assert.Equal(3, receipt.RuntimeDependencies.Count); + Assert.Equal("libpng", receipt.RuntimeDependencies[0]); + Assert.Equal("openssl", receipt.RuntimeDependencies[1]); + Assert.Equal("zlib", receipt.RuntimeDependencies[2]); + } + + [Fact] + public void Parse_InvalidJson_ReturnsNull() + { + // Arrange + var invalidJson = "{ invalid json }"; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(invalidJson)); + + // Act + var receipt = _parser.Parse(stream); + + // Assert + Assert.Null(receipt); + } + + [Fact] + public void Parse_EmptyJson_ReturnsNull() + { + // Arrange + var emptyJson = "{}"; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(emptyJson)); + + // Act + var receipt = _parser.Parse(stream); + + // Assert + Assert.Null(receipt); + } + + [Fact] + public void Parse_MissingName_ReturnsNull() + { + // Arrange + var json = """ + { + "versions": { "stable": "1.0.0" }, + "tap": "homebrew/core" + } + """; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var receipt = _parser.Parse(stream); + + // Assert + Assert.Null(receipt); + } + + [Fact] + public void Parse_TappedFrom_UsesTappedFromOverTap() + { + // Arrange + var json = """ + { + "name": "test", + "versions": { "stable": "1.0.0" }, + "tap": "homebrew/core", + "tapped_from": "custom/tap" + } + """; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var receipt = _parser.Parse(stream); + + // Assert + Assert.NotNull(receipt); + Assert.Equal("custom/tap", receipt.Tap); + } + + [Fact] + public void Parse_FallbackVersion_UsesVersionFieldWhenVersionsStableMissing() + { + // Arrange - older receipt format uses version field directly + var json = """ + { + "name": "test", + "version": "2.0.0", + "tap": "homebrew/core" + } + """; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var receipt = _parser.Parse(stream); + + // Assert + Assert.NotNull(receipt); + Assert.Equal("2.0.0", receipt.Version); + } + + [Fact] + public void Parse_NormalizesArchitecture_AArch64ToArm64() + { + // Arrange + var json = """ + { + "name": "test", + "versions": { "stable": "1.0.0" }, + "tap": "homebrew/core", + "arch": "aarch64" + } + """; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var receipt = _parser.Parse(stream); + + // Assert + Assert.NotNull(receipt); + Assert.Equal("arm64", receipt.Architecture); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests.csproj new file mode 100644 index 000000000..5c395ec4a --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests/StellaOps.Scanner.Analyzers.OS.Homebrew.Tests.csproj @@ -0,0 +1,27 @@ + + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + + + + + + + diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/EntitlementsParserTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/EntitlementsParserTests.cs new file mode 100644 index 000000000..1491653ec --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/EntitlementsParserTests.cs @@ -0,0 +1,132 @@ +using StellaOps.Scanner.Analyzers.OS.MacOsBundle; +using Xunit; + +namespace StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests; + +public sealed class EntitlementsParserTests +{ + private static readonly string FixturesRoot = Path.Combine( + AppContext.BaseDirectory, + "Fixtures"); + + private readonly EntitlementsParser _parser = new(); + + [Fact] + public void Parse_ValidEntitlements_ReturnsEntitlements() + { + // Arrange + var entPath = Path.Combine(FixturesRoot, "Applications", "SandboxedApp.app", "Contents", "_CodeSignature", "test.xcent"); + + // Act + var result = _parser.Parse(entPath); + + // Assert + Assert.NotNull(result); + Assert.True(result.IsSandboxed); + } + + [Fact] + public void Parse_DetectsHighRiskEntitlements() + { + // Arrange + var entPath = Path.Combine(FixturesRoot, "Applications", "SandboxedApp.app", "Contents", "_CodeSignature", "test.xcent"); + + // Act + var result = _parser.Parse(entPath); + + // Assert + Assert.NotEmpty(result.HighRiskEntitlements); + Assert.Contains("com.apple.security.device.camera", result.HighRiskEntitlements); + Assert.Contains("com.apple.security.device.microphone", result.HighRiskEntitlements); + } + + [Fact] + public void Parse_CategorizeEntitlements() + { + // Arrange + var entPath = Path.Combine(FixturesRoot, "Applications", "SandboxedApp.app", "Contents", "_CodeSignature", "test.xcent"); + + // Act + var result = _parser.Parse(entPath); + + // Assert + Assert.Contains("network", result.Categories); + Assert.Contains("camera", result.Categories); + Assert.Contains("microphone", result.Categories); + Assert.Contains("filesystem", result.Categories); + Assert.Contains("sandbox", result.Categories); + } + + [Fact] + public void Parse_NonExistentFile_ReturnsEmpty() + { + // Arrange + var entPath = Path.Combine(FixturesRoot, "nonexistent.xcent"); + + // Act + var result = _parser.Parse(entPath); + + // Assert + Assert.Same(BundleEntitlements.Empty, result); + } + + [Fact] + public void FindEntitlementsFile_FindsXcentFile() + { + // Arrange + var bundlePath = Path.Combine(FixturesRoot, "Applications", "SandboxedApp.app"); + + // Act + var result = _parser.FindEntitlementsFile(bundlePath); + + // Assert + Assert.NotNull(result); + Assert.EndsWith(".xcent", result); + } + + [Fact] + public void FindEntitlementsFile_NoBundlePath_ReturnsNull() + { + // Act + var result = _parser.FindEntitlementsFile(""); + + // Assert + Assert.Null(result); + } + + [Fact] + public void FindEntitlementsFile_NoEntitlements_ReturnsNull() + { + // Arrange - bundle without entitlements + var bundlePath = Path.Combine(FixturesRoot, "Applications", "TestApp.app"); + + // Act + var result = _parser.FindEntitlementsFile(bundlePath); + + // Assert + Assert.Null(result); + } + + [Fact] + public void HasEntitlement_ReturnsTrueForExistingEntitlement() + { + // Arrange + var entPath = Path.Combine(FixturesRoot, "Applications", "SandboxedApp.app", "Contents", "_CodeSignature", "test.xcent"); + var result = _parser.Parse(entPath); + + // Act & Assert + Assert.True(result.HasEntitlement("com.apple.security.app-sandbox")); + Assert.True(result.HasEntitlement("com.apple.security.device.camera")); + } + + [Fact] + public void HasEntitlement_ReturnsFalseForMissingEntitlement() + { + // Arrange + var entPath = Path.Combine(FixturesRoot, "Applications", "SandboxedApp.app", "Contents", "_CodeSignature", "test.xcent"); + var result = _parser.Parse(entPath); + + // Act & Assert + Assert.False(result.HasEntitlement("com.apple.security.nonexistent")); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/SandboxedApp.app/Contents/Info.plist b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/SandboxedApp.app/Contents/Info.plist new file mode 100644 index 000000000..4db4b18d5 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/SandboxedApp.app/Contents/Info.plist @@ -0,0 +1,24 @@ + + + + + CFBundleIdentifier + com.stellaops.sandboxed + CFBundleName + SandboxedApp + CFBundleVersion + 100 + CFBundleShortVersionString + 2.0.0 + LSMinimumSystemVersion + 13.0 + CFBundleExecutable + SandboxedApp + CFBundlePackageType + APPL + CFBundleSupportedPlatforms + + MacOSX + + + diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/SandboxedApp.app/Contents/MacOS/SandboxedApp b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/SandboxedApp.app/Contents/MacOS/SandboxedApp new file mode 100644 index 000000000..d561652f3 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/SandboxedApp.app/Contents/MacOS/SandboxedApp @@ -0,0 +1,3 @@ +#!/bin/bash +# Placeholder executable +echo "SandboxedApp" diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/SandboxedApp.app/Contents/_CodeSignature/CodeResources b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/SandboxedApp.app/Contents/_CodeSignature/CodeResources new file mode 100644 index 000000000..0e435918e --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/SandboxedApp.app/Contents/_CodeSignature/CodeResources @@ -0,0 +1,16 @@ + + + + + files + + Contents/Info.plist + aGFzaA== + + rules + + ^.* + + + + diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/SandboxedApp.app/Contents/_CodeSignature/test.xcent b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/SandboxedApp.app/Contents/_CodeSignature/test.xcent new file mode 100644 index 000000000..69b09fcf2 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/SandboxedApp.app/Contents/_CodeSignature/test.xcent @@ -0,0 +1,16 @@ + + + + + com.apple.security.app-sandbox + + com.apple.security.network.client + + com.apple.security.device.camera + + com.apple.security.device.microphone + + com.apple.security.files.user-selected.read-write + + + diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/TestApp.app/Contents/Info.plist b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/TestApp.app/Contents/Info.plist new file mode 100644 index 000000000..77f99e92c --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/TestApp.app/Contents/Info.plist @@ -0,0 +1,26 @@ + + + + + CFBundleIdentifier + com.stellaops.testapp + CFBundleName + TestApp + CFBundleDisplayName + Test Application + CFBundleVersion + 123 + CFBundleShortVersionString + 1.2.3 + LSMinimumSystemVersion + 12.0 + CFBundleExecutable + TestApp + CFBundlePackageType + APPL + CFBundleSupportedPlatforms + + MacOSX + + + diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/TestApp.app/Contents/MacOS/TestApp b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/TestApp.app/Contents/MacOS/TestApp new file mode 100644 index 000000000..0d19a61c4 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/TestApp.app/Contents/MacOS/TestApp @@ -0,0 +1,3 @@ +#!/bin/bash +# Placeholder executable +echo "TestApp" diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/TestApp.app/Contents/_CodeSignature/CodeResources b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/TestApp.app/Contents/_CodeSignature/CodeResources new file mode 100644 index 000000000..0e435918e --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/Fixtures/Applications/TestApp.app/Contents/_CodeSignature/CodeResources @@ -0,0 +1,16 @@ + + + + + files + + Contents/Info.plist + aGFzaA== + + rules + + ^.* + + + + diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/InfoPlistParserTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/InfoPlistParserTests.cs new file mode 100644 index 000000000..fa315e3aa --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/InfoPlistParserTests.cs @@ -0,0 +1,115 @@ +using StellaOps.Scanner.Analyzers.OS.MacOsBundle; +using Xunit; + +namespace StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests; + +public sealed class InfoPlistParserTests +{ + private static readonly string FixturesRoot = Path.Combine( + AppContext.BaseDirectory, + "Fixtures"); + + private readonly InfoPlistParser _parser = new(); + + [Fact] + public void Parse_ValidInfoPlist_ReturnsBundleInfo() + { + // Arrange + var plistPath = Path.Combine(FixturesRoot, "Applications", "TestApp.app", "Contents", "Info.plist"); + + // Act + var result = _parser.Parse(plistPath); + + // Assert + Assert.NotNull(result); + Assert.Equal("com.stellaops.testapp", result.BundleIdentifier); + Assert.Equal("TestApp", result.BundleName); + Assert.Equal("Test Application", result.BundleDisplayName); + Assert.Equal("123", result.Version); + Assert.Equal("1.2.3", result.ShortVersion); + } + + [Fact] + public void Parse_ExtractsMinimumSystemVersion() + { + // Arrange + var plistPath = Path.Combine(FixturesRoot, "Applications", "TestApp.app", "Contents", "Info.plist"); + + // Act + var result = _parser.Parse(plistPath); + + // Assert + Assert.NotNull(result); + Assert.Equal("12.0", result.MinimumSystemVersion); + } + + [Fact] + public void Parse_ExtractsExecutable() + { + // Arrange + var plistPath = Path.Combine(FixturesRoot, "Applications", "TestApp.app", "Contents", "Info.plist"); + + // Act + var result = _parser.Parse(plistPath); + + // Assert + Assert.NotNull(result); + Assert.Equal("TestApp", result.Executable); + } + + [Fact] + public void Parse_ExtractsSupportedPlatforms() + { + // Arrange + var plistPath = Path.Combine(FixturesRoot, "Applications", "TestApp.app", "Contents", "Info.plist"); + + // Act + var result = _parser.Parse(plistPath); + + // Assert + Assert.NotNull(result); + Assert.Single(result.SupportedPlatforms); + Assert.Contains("MacOSX", result.SupportedPlatforms); + } + + [Fact] + public void Parse_NonExistentFile_ReturnsNull() + { + // Arrange + var plistPath = Path.Combine(FixturesRoot, "nonexistent.plist"); + + // Act + var result = _parser.Parse(plistPath); + + // Assert + Assert.Null(result); + } + + [Fact] + public void Parse_MissingBundleIdentifier_ReturnsNull() + { + // Arrange - Create a temp file without CFBundleIdentifier + var tempPath = Path.Combine(Path.GetTempPath(), $"{Guid.NewGuid()}.plist"); + File.WriteAllText(tempPath, @" + + + + CFBundleName + TestApp + +"); + + try + { + // Act + var result = _parser.Parse(tempPath); + + // Assert + Assert.Null(result); + } + finally + { + File.Delete(tempPath); + } + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/MacOsBundleAnalyzerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/MacOsBundleAnalyzerTests.cs new file mode 100644 index 000000000..72e27e68e --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/MacOsBundleAnalyzerTests.cs @@ -0,0 +1,322 @@ +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Scanner.Analyzers.OS.MacOsBundle; +using Xunit; + +namespace StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests; + +public sealed class MacOsBundleAnalyzerTests +{ + private static readonly string FixturesRoot = Path.Combine( + AppContext.BaseDirectory, + "Fixtures"); + + private readonly MacOsBundleAnalyzer _analyzer; + private readonly ILogger _logger; + + public MacOsBundleAnalyzerTests() + { + _logger = NullLoggerFactory.Instance.CreateLogger(); + _analyzer = new MacOsBundleAnalyzer((ILogger)_logger); + } + + private OSPackageAnalyzerContext CreateContext(string rootPath) + { + return new OSPackageAnalyzerContext( + rootPath, + workspacePath: null, + TimeProvider.System, + _logger); + } + + [Fact] + public void AnalyzerId_ReturnsMacosBundleIdentifier() + { + Assert.Equal("macos-bundle", _analyzer.AnalyzerId); + } + + [Fact] + public async Task AnalyzeAsync_WithValidBundles_ReturnsPackages() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + Assert.NotNull(result); + Assert.Equal("macos-bundle", result.AnalyzerId); + Assert.True(result.Packages.Count > 0, "Expected at least one bundle"); + } + + [Fact] + public async Task AnalyzeAsync_FindsTestApp() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var testApp = result.Packages.FirstOrDefault(p => + p.VendorMetadata.TryGetValue("macos:bundle_id", out var id) && + id == "com.stellaops.testapp"); + Assert.NotNull(testApp); + Assert.Equal("1.2.3", testApp.Version); + Assert.Equal("Test Application", testApp.Name); + } + + [Fact] + public async Task AnalyzeAsync_ExtractsVersionCorrectly() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var testApp = result.Packages.FirstOrDefault(p => + p.VendorMetadata.TryGetValue("macos:bundle_id", out var id) && + id == "com.stellaops.testapp"); + Assert.NotNull(testApp); + // ShortVersion takes precedence + Assert.Equal("1.2.3", testApp.Version); + // Build number goes to release + Assert.Equal("123", testApp.Release); + } + + [Fact] + public async Task AnalyzeAsync_BuildsCorrectPurl() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var testApp = result.Packages.FirstOrDefault(p => + p.VendorMetadata.TryGetValue("macos:bundle_id", out var id) && + id == "com.stellaops.testapp"); + Assert.NotNull(testApp); + Assert.Contains("pkg:generic/macos-app/com.stellaops.testapp@1.2.3", testApp.PackageUrl); + } + + [Fact] + public async Task AnalyzeAsync_ExtractsVendorFromBundleId() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var testApp = result.Packages.FirstOrDefault(p => + p.VendorMetadata.TryGetValue("macos:bundle_id", out var id) && + id == "com.stellaops.testapp"); + Assert.NotNull(testApp); + Assert.Equal("stellaops", testApp.SourcePackage); + } + + [Fact] + public async Task AnalyzeAsync_SetsEvidenceSourceToMacOsBundle() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + foreach (var package in result.Packages) + { + Assert.Equal(PackageEvidenceSource.MacOsBundle, package.EvidenceSource); + } + } + + [Fact] + public async Task AnalyzeAsync_ExtractsVendorMetadata() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var testApp = result.Packages.FirstOrDefault(p => + p.VendorMetadata.TryGetValue("macos:bundle_id", out var id) && + id == "com.stellaops.testapp"); + Assert.NotNull(testApp); + Assert.Equal("com.stellaops.testapp", testApp.VendorMetadata["macos:bundle_id"]); + Assert.Equal("APPL", testApp.VendorMetadata["macos:bundle_type"]); + Assert.Equal("12.0", testApp.VendorMetadata["macos:min_os_version"]); + Assert.Equal("TestApp", testApp.VendorMetadata["macos:executable"]); + Assert.Equal("MacOSX", testApp.VendorMetadata["macos:platforms"]); + } + + [Fact] + public async Task AnalyzeAsync_IncludesCodeResourcesHash() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var testApp = result.Packages.FirstOrDefault(p => + p.VendorMetadata.TryGetValue("macos:bundle_id", out var id) && + id == "com.stellaops.testapp"); + Assert.NotNull(testApp); + Assert.True(testApp.VendorMetadata.ContainsKey("macos:code_resources_hash")); + var hash = testApp.VendorMetadata["macos:code_resources_hash"]; + Assert.StartsWith("sha256:", hash); + } + + [Fact] + public async Task AnalyzeAsync_DetectsSandboxedApp() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var sandboxedApp = result.Packages.FirstOrDefault(p => + p.VendorMetadata.TryGetValue("macos:bundle_id", out var id) && + id == "com.stellaops.sandboxed"); + Assert.NotNull(sandboxedApp); + Assert.Equal("true", sandboxedApp.VendorMetadata["macos:sandboxed"]); + } + + [Fact] + public async Task AnalyzeAsync_DetectsHighRiskEntitlements() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var sandboxedApp = result.Packages.FirstOrDefault(p => + p.VendorMetadata.TryGetValue("macos:bundle_id", out var id) && + id == "com.stellaops.sandboxed"); + Assert.NotNull(sandboxedApp); + Assert.True(sandboxedApp.VendorMetadata.ContainsKey("macos:high_risk_entitlements")); + var highRisk = sandboxedApp.VendorMetadata["macos:high_risk_entitlements"]; + // Full entitlement keys are stored + Assert.Contains("com.apple.security.device.camera", highRisk); + Assert.Contains("com.apple.security.device.microphone", highRisk); + } + + [Fact] + public async Task AnalyzeAsync_DetectsCapabilityCategories() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var sandboxedApp = result.Packages.FirstOrDefault(p => + p.VendorMetadata.TryGetValue("macos:bundle_id", out var id) && + id == "com.stellaops.sandboxed"); + Assert.NotNull(sandboxedApp); + Assert.True(sandboxedApp.VendorMetadata.ContainsKey("macos:capability_categories")); + var categories = sandboxedApp.VendorMetadata["macos:capability_categories"]; + Assert.Contains("network", categories); + Assert.Contains("camera", categories); + Assert.Contains("microphone", categories); + Assert.Contains("filesystem", categories); + Assert.Contains("sandbox", categories); + } + + [Fact] + public async Task AnalyzeAsync_IncludesFileEvidence() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var testApp = result.Packages.FirstOrDefault(p => + p.VendorMetadata.TryGetValue("macos:bundle_id", out var id) && + id == "com.stellaops.testapp"); + Assert.NotNull(testApp); + Assert.True(testApp.Files.Count > 0); + + var executable = testApp.Files.FirstOrDefault(f => f.Path.Contains("MacOS/TestApp")); + Assert.NotNull(executable); + Assert.False(executable.IsConfigFile); + + var infoPlist = testApp.Files.FirstOrDefault(f => f.Path.Contains("Info.plist")); + Assert.NotNull(infoPlist); + Assert.True(infoPlist.IsConfigFile); + } + + [Fact] + public async Task AnalyzeAsync_ResultsAreDeterministicallySorted() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result1 = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + var result2 = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + Assert.Equal(result1.Packages.Count, result2.Packages.Count); + for (int i = 0; i < result1.Packages.Count; i++) + { + Assert.Equal(result1.Packages[i].PackageUrl, result2.Packages[i].PackageUrl); + } + } + + [Fact] + public async Task AnalyzeAsync_NoApplicationsDirectory_ReturnsEmptyPackages() + { + // Arrange - use temp directory without Applications + var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); + Directory.CreateDirectory(tempPath); + + try + { + var context = CreateContext(tempPath); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + Assert.Empty(result.Packages); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public async Task AnalyzeAsync_PopulatesTelemetry() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + Assert.NotNull(result.Telemetry); + Assert.True(result.Telemetry.PackageCount > 0); + Assert.True(result.Telemetry.Duration > TimeSpan.Zero); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests.csproj new file mode 100644 index 000000000..71705194f --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests/StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests.csproj @@ -0,0 +1,27 @@ + + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + + + + + + + diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests/Fixtures/var/db/receipts/com.apple.pkg.Safari.plist b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests/Fixtures/var/db/receipts/com.apple.pkg.Safari.plist new file mode 100644 index 000000000..062004d3d --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests/Fixtures/var/db/receipts/com.apple.pkg.Safari.plist @@ -0,0 +1,18 @@ + + + + + PackageIdentifier + com.apple.pkg.Safari + PackageVersion + 17.1 + InstallDate + 2024-01-15T12:00:00Z + InstallPrefixPath + / + VolumePath + / + InstallProcessName + installer + + diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests/Fixtures/var/db/receipts/com.example.app.plist b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests/Fixtures/var/db/receipts/com.example.app.plist new file mode 100644 index 000000000..a67754294 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests/Fixtures/var/db/receipts/com.example.app.plist @@ -0,0 +1,14 @@ + + + + + PackageIdentifier + com.example.app + PackageVersion + 2.5.0 + VolumePath + / + InstallProcessName + installer + + diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests/PkgutilPackageAnalyzerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests/PkgutilPackageAnalyzerTests.cs new file mode 100644 index 000000000..00e50620c --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests/PkgutilPackageAnalyzerTests.cs @@ -0,0 +1,171 @@ +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Pkgutil; +using Xunit; + +namespace StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests; + +public sealed class PkgutilPackageAnalyzerTests +{ + private static readonly string FixturesRoot = Path.Combine( + AppContext.BaseDirectory, + "Fixtures"); + + private readonly PkgutilPackageAnalyzer _analyzer; + private readonly ILogger _logger; + + public PkgutilPackageAnalyzerTests() + { + _logger = NullLoggerFactory.Instance.CreateLogger(); + _analyzer = new PkgutilPackageAnalyzer((ILogger)_logger); + } + + private OSPackageAnalyzerContext CreateContext(string rootPath) + { + return new OSPackageAnalyzerContext( + rootPath, + workspacePath: null, + TimeProvider.System, + _logger); + } + + [Fact] + public void AnalyzerId_ReturnsPkgutil() + { + Assert.Equal("pkgutil", _analyzer.AnalyzerId); + } + + [Fact] + public async Task AnalyzeAsync_WithValidReceipts_ReturnsPackages() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + Assert.NotNull(result); + Assert.Equal("pkgutil", result.AnalyzerId); + Assert.True(result.Packages.Count > 0, "Expected at least one package"); + } + + [Fact] + public async Task AnalyzeAsync_FindsSafariPackage() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var safari = result.Packages.FirstOrDefault(p => p.Name == "Safari"); + Assert.NotNull(safari); + Assert.Equal("17.1", safari.Version); + Assert.Contains("pkg:generic/apple/com.apple.pkg.Safari@17.1", safari.PackageUrl); + } + + [Fact] + public async Task AnalyzeAsync_ExtractsVendorFromIdentifier() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var safari = result.Packages.FirstOrDefault(p => p.Name == "Safari"); + Assert.NotNull(safari); + Assert.Equal("apple", safari.SourcePackage); + } + + [Fact] + public async Task AnalyzeAsync_SetsEvidenceSourceToPkgutilReceipt() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + foreach (var package in result.Packages) + { + Assert.Equal(PackageEvidenceSource.PkgutilReceipt, package.EvidenceSource); + } + } + + [Fact] + public async Task AnalyzeAsync_ExtractsVendorMetadata() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + var safari = result.Packages.FirstOrDefault(p => p.Name == "Safari"); + Assert.NotNull(safari); + Assert.Equal("com.apple.pkg.Safari", safari.VendorMetadata["pkgutil:identifier"]); + Assert.Equal("/", safari.VendorMetadata["pkgutil:volume"]); + } + + [Fact] + public async Task AnalyzeAsync_ResultsAreDeterministicallySorted() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result1 = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + var result2 = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + Assert.Equal(result1.Packages.Count, result2.Packages.Count); + for (int i = 0; i < result1.Packages.Count; i++) + { + Assert.Equal(result1.Packages[i].PackageUrl, result2.Packages[i].PackageUrl); + } + } + + [Fact] + public async Task AnalyzeAsync_NoReceiptsDirectory_ReturnsEmptyPackages() + { + // Arrange - use temp directory without receipts + var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); + Directory.CreateDirectory(tempPath); + + try + { + var context = CreateContext(tempPath); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + Assert.Empty(result.Packages); + } + finally + { + Directory.Delete(tempPath, recursive: true); + } + } + + [Fact] + public async Task AnalyzeAsync_PopulatesTelemetry() + { + // Arrange + var context = CreateContext(FixturesRoot); + + // Act + var result = await _analyzer.AnalyzeAsync(context, CancellationToken.None); + + // Assert + Assert.NotNull(result.Telemetry); + Assert.True(result.Telemetry.PackageCount > 0); + Assert.True(result.Telemetry.Duration > TimeSpan.Zero); + } +} diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests.csproj new file mode 100644 index 000000000..d3ea876f7 --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests/StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests.csproj @@ -0,0 +1,27 @@ + + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + + + + + + + diff --git a/src/StellaOps.sln b/src/StellaOps.sln index a6dd1aac9..a9295b9e6 100644 --- a/src/StellaOps.sln +++ b/src/StellaOps.sln @@ -445,6 +445,42 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.AirGap.Time", "Ai EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.AirGap.Importer", "AirGap\StellaOps.AirGap.Importer\StellaOps.AirGap.Importer.csproj", "{D3829E4D-6538-4533-A0E0-3418042D7BFE}" EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Scanner", "Scanner", "{A31A0899-6847-809B-913C-AB80CDCEC5C5}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Libraries", "__Libraries", "{1285E3E4-21C1-72C0-6EB2-84C0D86F9543}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Homebrew", "Scanner\__Libraries\StellaOps.Scanner.Analyzers.OS.Homebrew\StellaOps.Scanner.Analyzers.OS.Homebrew.csproj", "{AFEEC916-DE26-43A0-960A-30D18FC19901}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS", "Scanner\__Libraries\StellaOps.Scanner.Analyzers.OS\StellaOps.Scanner.Analyzers.OS.csproj", "{D005396A-993D-42C1-AD6B-52EACD77F2E2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Core", "Scanner\__Libraries\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj", "{FAF62DAD-7D52-4D94-AA76-25C3EA731D27}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "Authority\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{C7166020-40C2-4D4E-8694-15C34B1ACAD1}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{11389097-C534-4895-BC58-E38384B8388C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography.Plugin.Pkcs11Gost", "__Libraries\StellaOps.Cryptography.Plugin.Pkcs11Gost\StellaOps.Cryptography.Plugin.Pkcs11Gost.csproj", "{14DC8B32-1024-4F7B-BA8A-DA8E66A95DA7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography.DependencyInjection", "__Libraries\StellaOps.Cryptography.DependencyInjection\StellaOps.Cryptography.DependencyInjection.csproj", "{FE7796BB-A761-452E-A346-4177AC0BD14C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography.Plugin.OpenSslGost", "__Libraries\StellaOps.Cryptography.Plugin.OpenSslGost\StellaOps.Cryptography.Plugin.OpenSslGost.csproj", "{47C9F4AF-C404-4263-B5F1-A743F1AA13E7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Security", "__Libraries\StellaOps.Auth.Security\StellaOps.Auth.Security.csproj", "{32F3816D-277D-43C7-9F81-7426F9899B92}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Replay.Core", "__Libraries\StellaOps.Replay.Core\StellaOps.Replay.Core.csproj", "{B85B9955-163B-42C5-B9AF-1ED0577EA536}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{9E86431F-0E96-A7CC-FC1F-8519FE022244}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Homebrew.Tests", "Scanner\__Tests\StellaOps.Scanner.Analyzers.OS.Homebrew.Tests\StellaOps.Scanner.Analyzers.OS.Homebrew.Tests.csproj", "{728953F9-0A9E-4719-BD68-8C0635868772}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Pkgutil", "Scanner\__Libraries\StellaOps.Scanner.Analyzers.OS.Pkgutil\StellaOps.Scanner.Analyzers.OS.Pkgutil.csproj", "{D644C88C-14FE-45A3-91C1-D694BA6B8900}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests", "Scanner\__Tests\StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests\StellaOps.Scanner.Analyzers.OS.Pkgutil.Tests.csproj", "{94CF61AB-B671-4334-959F-6CF19B1A13C9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.MacOsBundle", "Scanner\__Libraries\StellaOps.Scanner.Analyzers.OS.MacOsBundle\StellaOps.Scanner.Analyzers.OS.MacOsBundle.csproj", "{ECF79786-C3DC-423F-8ECC-82C528C56FAE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests", "Scanner\__Tests\StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests\StellaOps.Scanner.Analyzers.OS.MacOsBundle.Tests.csproj", "{169D73D6-1630-4346-A80F-656399337D16}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -2915,6 +2951,186 @@ Global {D3829E4D-6538-4533-A0E0-3418042D7BFE}.Release|x64.Build.0 = Release|Any CPU {D3829E4D-6538-4533-A0E0-3418042D7BFE}.Release|x86.ActiveCfg = Release|Any CPU {D3829E4D-6538-4533-A0E0-3418042D7BFE}.Release|x86.Build.0 = Release|Any CPU + {AFEEC916-DE26-43A0-960A-30D18FC19901}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {AFEEC916-DE26-43A0-960A-30D18FC19901}.Debug|Any CPU.Build.0 = Debug|Any CPU + {AFEEC916-DE26-43A0-960A-30D18FC19901}.Debug|x64.ActiveCfg = Debug|Any CPU + {AFEEC916-DE26-43A0-960A-30D18FC19901}.Debug|x64.Build.0 = Debug|Any CPU + {AFEEC916-DE26-43A0-960A-30D18FC19901}.Debug|x86.ActiveCfg = Debug|Any CPU + {AFEEC916-DE26-43A0-960A-30D18FC19901}.Debug|x86.Build.0 = Debug|Any CPU + {AFEEC916-DE26-43A0-960A-30D18FC19901}.Release|Any CPU.ActiveCfg = Release|Any CPU + {AFEEC916-DE26-43A0-960A-30D18FC19901}.Release|Any CPU.Build.0 = Release|Any CPU + {AFEEC916-DE26-43A0-960A-30D18FC19901}.Release|x64.ActiveCfg = Release|Any CPU + {AFEEC916-DE26-43A0-960A-30D18FC19901}.Release|x64.Build.0 = Release|Any CPU + {AFEEC916-DE26-43A0-960A-30D18FC19901}.Release|x86.ActiveCfg = Release|Any CPU + {AFEEC916-DE26-43A0-960A-30D18FC19901}.Release|x86.Build.0 = Release|Any CPU + {D005396A-993D-42C1-AD6B-52EACD77F2E2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D005396A-993D-42C1-AD6B-52EACD77F2E2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D005396A-993D-42C1-AD6B-52EACD77F2E2}.Debug|x64.ActiveCfg = Debug|Any CPU + {D005396A-993D-42C1-AD6B-52EACD77F2E2}.Debug|x64.Build.0 = Debug|Any CPU + {D005396A-993D-42C1-AD6B-52EACD77F2E2}.Debug|x86.ActiveCfg = Debug|Any CPU + {D005396A-993D-42C1-AD6B-52EACD77F2E2}.Debug|x86.Build.0 = Debug|Any CPU + {D005396A-993D-42C1-AD6B-52EACD77F2E2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D005396A-993D-42C1-AD6B-52EACD77F2E2}.Release|Any CPU.Build.0 = Release|Any CPU + {D005396A-993D-42C1-AD6B-52EACD77F2E2}.Release|x64.ActiveCfg = Release|Any CPU + {D005396A-993D-42C1-AD6B-52EACD77F2E2}.Release|x64.Build.0 = Release|Any CPU + {D005396A-993D-42C1-AD6B-52EACD77F2E2}.Release|x86.ActiveCfg = Release|Any CPU + {D005396A-993D-42C1-AD6B-52EACD77F2E2}.Release|x86.Build.0 = Release|Any CPU + {FAF62DAD-7D52-4D94-AA76-25C3EA731D27}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FAF62DAD-7D52-4D94-AA76-25C3EA731D27}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FAF62DAD-7D52-4D94-AA76-25C3EA731D27}.Debug|x64.ActiveCfg = Debug|Any CPU + {FAF62DAD-7D52-4D94-AA76-25C3EA731D27}.Debug|x64.Build.0 = Debug|Any CPU + {FAF62DAD-7D52-4D94-AA76-25C3EA731D27}.Debug|x86.ActiveCfg = Debug|Any CPU + {FAF62DAD-7D52-4D94-AA76-25C3EA731D27}.Debug|x86.Build.0 = Debug|Any CPU + {FAF62DAD-7D52-4D94-AA76-25C3EA731D27}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FAF62DAD-7D52-4D94-AA76-25C3EA731D27}.Release|Any CPU.Build.0 = Release|Any CPU + {FAF62DAD-7D52-4D94-AA76-25C3EA731D27}.Release|x64.ActiveCfg = Release|Any CPU + {FAF62DAD-7D52-4D94-AA76-25C3EA731D27}.Release|x64.Build.0 = Release|Any CPU + {FAF62DAD-7D52-4D94-AA76-25C3EA731D27}.Release|x86.ActiveCfg = Release|Any CPU + {FAF62DAD-7D52-4D94-AA76-25C3EA731D27}.Release|x86.Build.0 = Release|Any CPU + {C7166020-40C2-4D4E-8694-15C34B1ACAD1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C7166020-40C2-4D4E-8694-15C34B1ACAD1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C7166020-40C2-4D4E-8694-15C34B1ACAD1}.Debug|x64.ActiveCfg = Debug|Any CPU + {C7166020-40C2-4D4E-8694-15C34B1ACAD1}.Debug|x64.Build.0 = Debug|Any CPU + {C7166020-40C2-4D4E-8694-15C34B1ACAD1}.Debug|x86.ActiveCfg = Debug|Any CPU + {C7166020-40C2-4D4E-8694-15C34B1ACAD1}.Debug|x86.Build.0 = Debug|Any CPU + {C7166020-40C2-4D4E-8694-15C34B1ACAD1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C7166020-40C2-4D4E-8694-15C34B1ACAD1}.Release|Any CPU.Build.0 = Release|Any CPU + {C7166020-40C2-4D4E-8694-15C34B1ACAD1}.Release|x64.ActiveCfg = Release|Any CPU + {C7166020-40C2-4D4E-8694-15C34B1ACAD1}.Release|x64.Build.0 = Release|Any CPU + {C7166020-40C2-4D4E-8694-15C34B1ACAD1}.Release|x86.ActiveCfg = Release|Any CPU + {C7166020-40C2-4D4E-8694-15C34B1ACAD1}.Release|x86.Build.0 = Release|Any CPU + {11389097-C534-4895-BC58-E38384B8388C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {11389097-C534-4895-BC58-E38384B8388C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {11389097-C534-4895-BC58-E38384B8388C}.Debug|x64.ActiveCfg = Debug|Any CPU + {11389097-C534-4895-BC58-E38384B8388C}.Debug|x64.Build.0 = Debug|Any CPU + {11389097-C534-4895-BC58-E38384B8388C}.Debug|x86.ActiveCfg = Debug|Any CPU + {11389097-C534-4895-BC58-E38384B8388C}.Debug|x86.Build.0 = Debug|Any CPU + {11389097-C534-4895-BC58-E38384B8388C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {11389097-C534-4895-BC58-E38384B8388C}.Release|Any CPU.Build.0 = Release|Any CPU + {11389097-C534-4895-BC58-E38384B8388C}.Release|x64.ActiveCfg = Release|Any CPU + {11389097-C534-4895-BC58-E38384B8388C}.Release|x64.Build.0 = Release|Any CPU + {11389097-C534-4895-BC58-E38384B8388C}.Release|x86.ActiveCfg = Release|Any CPU + {11389097-C534-4895-BC58-E38384B8388C}.Release|x86.Build.0 = Release|Any CPU + {14DC8B32-1024-4F7B-BA8A-DA8E66A95DA7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {14DC8B32-1024-4F7B-BA8A-DA8E66A95DA7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {14DC8B32-1024-4F7B-BA8A-DA8E66A95DA7}.Debug|x64.ActiveCfg = Debug|Any CPU + {14DC8B32-1024-4F7B-BA8A-DA8E66A95DA7}.Debug|x64.Build.0 = Debug|Any CPU + {14DC8B32-1024-4F7B-BA8A-DA8E66A95DA7}.Debug|x86.ActiveCfg = Debug|Any CPU + {14DC8B32-1024-4F7B-BA8A-DA8E66A95DA7}.Debug|x86.Build.0 = Debug|Any CPU + {14DC8B32-1024-4F7B-BA8A-DA8E66A95DA7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {14DC8B32-1024-4F7B-BA8A-DA8E66A95DA7}.Release|Any CPU.Build.0 = Release|Any CPU + {14DC8B32-1024-4F7B-BA8A-DA8E66A95DA7}.Release|x64.ActiveCfg = Release|Any CPU + {14DC8B32-1024-4F7B-BA8A-DA8E66A95DA7}.Release|x64.Build.0 = Release|Any CPU + {14DC8B32-1024-4F7B-BA8A-DA8E66A95DA7}.Release|x86.ActiveCfg = Release|Any CPU + {14DC8B32-1024-4F7B-BA8A-DA8E66A95DA7}.Release|x86.Build.0 = Release|Any CPU + {FE7796BB-A761-452E-A346-4177AC0BD14C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FE7796BB-A761-452E-A346-4177AC0BD14C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FE7796BB-A761-452E-A346-4177AC0BD14C}.Debug|x64.ActiveCfg = Debug|Any CPU + {FE7796BB-A761-452E-A346-4177AC0BD14C}.Debug|x64.Build.0 = Debug|Any CPU + {FE7796BB-A761-452E-A346-4177AC0BD14C}.Debug|x86.ActiveCfg = Debug|Any CPU + {FE7796BB-A761-452E-A346-4177AC0BD14C}.Debug|x86.Build.0 = Debug|Any CPU + {FE7796BB-A761-452E-A346-4177AC0BD14C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FE7796BB-A761-452E-A346-4177AC0BD14C}.Release|Any CPU.Build.0 = Release|Any CPU + {FE7796BB-A761-452E-A346-4177AC0BD14C}.Release|x64.ActiveCfg = Release|Any CPU + {FE7796BB-A761-452E-A346-4177AC0BD14C}.Release|x64.Build.0 = Release|Any CPU + {FE7796BB-A761-452E-A346-4177AC0BD14C}.Release|x86.ActiveCfg = Release|Any CPU + {FE7796BB-A761-452E-A346-4177AC0BD14C}.Release|x86.Build.0 = Release|Any CPU + {47C9F4AF-C404-4263-B5F1-A743F1AA13E7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {47C9F4AF-C404-4263-B5F1-A743F1AA13E7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {47C9F4AF-C404-4263-B5F1-A743F1AA13E7}.Debug|x64.ActiveCfg = Debug|Any CPU + {47C9F4AF-C404-4263-B5F1-A743F1AA13E7}.Debug|x64.Build.0 = Debug|Any CPU + {47C9F4AF-C404-4263-B5F1-A743F1AA13E7}.Debug|x86.ActiveCfg = Debug|Any CPU + {47C9F4AF-C404-4263-B5F1-A743F1AA13E7}.Debug|x86.Build.0 = Debug|Any CPU + {47C9F4AF-C404-4263-B5F1-A743F1AA13E7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {47C9F4AF-C404-4263-B5F1-A743F1AA13E7}.Release|Any CPU.Build.0 = Release|Any CPU + {47C9F4AF-C404-4263-B5F1-A743F1AA13E7}.Release|x64.ActiveCfg = Release|Any CPU + {47C9F4AF-C404-4263-B5F1-A743F1AA13E7}.Release|x64.Build.0 = Release|Any CPU + {47C9F4AF-C404-4263-B5F1-A743F1AA13E7}.Release|x86.ActiveCfg = Release|Any CPU + {47C9F4AF-C404-4263-B5F1-A743F1AA13E7}.Release|x86.Build.0 = Release|Any CPU + {32F3816D-277D-43C7-9F81-7426F9899B92}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {32F3816D-277D-43C7-9F81-7426F9899B92}.Debug|Any CPU.Build.0 = Debug|Any CPU + {32F3816D-277D-43C7-9F81-7426F9899B92}.Debug|x64.ActiveCfg = Debug|Any CPU + {32F3816D-277D-43C7-9F81-7426F9899B92}.Debug|x64.Build.0 = Debug|Any CPU + {32F3816D-277D-43C7-9F81-7426F9899B92}.Debug|x86.ActiveCfg = Debug|Any CPU + {32F3816D-277D-43C7-9F81-7426F9899B92}.Debug|x86.Build.0 = Debug|Any CPU + {32F3816D-277D-43C7-9F81-7426F9899B92}.Release|Any CPU.ActiveCfg = Release|Any CPU + {32F3816D-277D-43C7-9F81-7426F9899B92}.Release|Any CPU.Build.0 = Release|Any CPU + {32F3816D-277D-43C7-9F81-7426F9899B92}.Release|x64.ActiveCfg = Release|Any CPU + {32F3816D-277D-43C7-9F81-7426F9899B92}.Release|x64.Build.0 = Release|Any CPU + {32F3816D-277D-43C7-9F81-7426F9899B92}.Release|x86.ActiveCfg = Release|Any CPU + {32F3816D-277D-43C7-9F81-7426F9899B92}.Release|x86.Build.0 = Release|Any CPU + {B85B9955-163B-42C5-B9AF-1ED0577EA536}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B85B9955-163B-42C5-B9AF-1ED0577EA536}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B85B9955-163B-42C5-B9AF-1ED0577EA536}.Debug|x64.ActiveCfg = Debug|Any CPU + {B85B9955-163B-42C5-B9AF-1ED0577EA536}.Debug|x64.Build.0 = Debug|Any CPU + {B85B9955-163B-42C5-B9AF-1ED0577EA536}.Debug|x86.ActiveCfg = Debug|Any CPU + {B85B9955-163B-42C5-B9AF-1ED0577EA536}.Debug|x86.Build.0 = Debug|Any CPU + {B85B9955-163B-42C5-B9AF-1ED0577EA536}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B85B9955-163B-42C5-B9AF-1ED0577EA536}.Release|Any CPU.Build.0 = Release|Any CPU + {B85B9955-163B-42C5-B9AF-1ED0577EA536}.Release|x64.ActiveCfg = Release|Any CPU + {B85B9955-163B-42C5-B9AF-1ED0577EA536}.Release|x64.Build.0 = Release|Any CPU + {B85B9955-163B-42C5-B9AF-1ED0577EA536}.Release|x86.ActiveCfg = Release|Any CPU + {B85B9955-163B-42C5-B9AF-1ED0577EA536}.Release|x86.Build.0 = Release|Any CPU + {728953F9-0A9E-4719-BD68-8C0635868772}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {728953F9-0A9E-4719-BD68-8C0635868772}.Debug|Any CPU.Build.0 = Debug|Any CPU + {728953F9-0A9E-4719-BD68-8C0635868772}.Debug|x64.ActiveCfg = Debug|Any CPU + {728953F9-0A9E-4719-BD68-8C0635868772}.Debug|x64.Build.0 = Debug|Any CPU + {728953F9-0A9E-4719-BD68-8C0635868772}.Debug|x86.ActiveCfg = Debug|Any CPU + {728953F9-0A9E-4719-BD68-8C0635868772}.Debug|x86.Build.0 = Debug|Any CPU + {728953F9-0A9E-4719-BD68-8C0635868772}.Release|Any CPU.ActiveCfg = Release|Any CPU + {728953F9-0A9E-4719-BD68-8C0635868772}.Release|Any CPU.Build.0 = Release|Any CPU + {728953F9-0A9E-4719-BD68-8C0635868772}.Release|x64.ActiveCfg = Release|Any CPU + {728953F9-0A9E-4719-BD68-8C0635868772}.Release|x64.Build.0 = Release|Any CPU + {728953F9-0A9E-4719-BD68-8C0635868772}.Release|x86.ActiveCfg = Release|Any CPU + {728953F9-0A9E-4719-BD68-8C0635868772}.Release|x86.Build.0 = Release|Any CPU + {D644C88C-14FE-45A3-91C1-D694BA6B8900}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D644C88C-14FE-45A3-91C1-D694BA6B8900}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D644C88C-14FE-45A3-91C1-D694BA6B8900}.Debug|x64.ActiveCfg = Debug|Any CPU + {D644C88C-14FE-45A3-91C1-D694BA6B8900}.Debug|x64.Build.0 = Debug|Any CPU + {D644C88C-14FE-45A3-91C1-D694BA6B8900}.Debug|x86.ActiveCfg = Debug|Any CPU + {D644C88C-14FE-45A3-91C1-D694BA6B8900}.Debug|x86.Build.0 = Debug|Any CPU + {D644C88C-14FE-45A3-91C1-D694BA6B8900}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D644C88C-14FE-45A3-91C1-D694BA6B8900}.Release|Any CPU.Build.0 = Release|Any CPU + {D644C88C-14FE-45A3-91C1-D694BA6B8900}.Release|x64.ActiveCfg = Release|Any CPU + {D644C88C-14FE-45A3-91C1-D694BA6B8900}.Release|x64.Build.0 = Release|Any CPU + {D644C88C-14FE-45A3-91C1-D694BA6B8900}.Release|x86.ActiveCfg = Release|Any CPU + {D644C88C-14FE-45A3-91C1-D694BA6B8900}.Release|x86.Build.0 = Release|Any CPU + {94CF61AB-B671-4334-959F-6CF19B1A13C9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {94CF61AB-B671-4334-959F-6CF19B1A13C9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {94CF61AB-B671-4334-959F-6CF19B1A13C9}.Debug|x64.ActiveCfg = Debug|Any CPU + {94CF61AB-B671-4334-959F-6CF19B1A13C9}.Debug|x64.Build.0 = Debug|Any CPU + {94CF61AB-B671-4334-959F-6CF19B1A13C9}.Debug|x86.ActiveCfg = Debug|Any CPU + {94CF61AB-B671-4334-959F-6CF19B1A13C9}.Debug|x86.Build.0 = Debug|Any CPU + {94CF61AB-B671-4334-959F-6CF19B1A13C9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {94CF61AB-B671-4334-959F-6CF19B1A13C9}.Release|Any CPU.Build.0 = Release|Any CPU + {94CF61AB-B671-4334-959F-6CF19B1A13C9}.Release|x64.ActiveCfg = Release|Any CPU + {94CF61AB-B671-4334-959F-6CF19B1A13C9}.Release|x64.Build.0 = Release|Any CPU + {94CF61AB-B671-4334-959F-6CF19B1A13C9}.Release|x86.ActiveCfg = Release|Any CPU + {94CF61AB-B671-4334-959F-6CF19B1A13C9}.Release|x86.Build.0 = Release|Any CPU + {ECF79786-C3DC-423F-8ECC-82C528C56FAE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {ECF79786-C3DC-423F-8ECC-82C528C56FAE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {ECF79786-C3DC-423F-8ECC-82C528C56FAE}.Debug|x64.ActiveCfg = Debug|Any CPU + {ECF79786-C3DC-423F-8ECC-82C528C56FAE}.Debug|x64.Build.0 = Debug|Any CPU + {ECF79786-C3DC-423F-8ECC-82C528C56FAE}.Debug|x86.ActiveCfg = Debug|Any CPU + {ECF79786-C3DC-423F-8ECC-82C528C56FAE}.Debug|x86.Build.0 = Debug|Any CPU + {ECF79786-C3DC-423F-8ECC-82C528C56FAE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {ECF79786-C3DC-423F-8ECC-82C528C56FAE}.Release|Any CPU.Build.0 = Release|Any CPU + {ECF79786-C3DC-423F-8ECC-82C528C56FAE}.Release|x64.ActiveCfg = Release|Any CPU + {ECF79786-C3DC-423F-8ECC-82C528C56FAE}.Release|x64.Build.0 = Release|Any CPU + {ECF79786-C3DC-423F-8ECC-82C528C56FAE}.Release|x86.ActiveCfg = Release|Any CPU + {ECF79786-C3DC-423F-8ECC-82C528C56FAE}.Release|x86.Build.0 = Release|Any CPU + {169D73D6-1630-4346-A80F-656399337D16}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {169D73D6-1630-4346-A80F-656399337D16}.Debug|Any CPU.Build.0 = Debug|Any CPU + {169D73D6-1630-4346-A80F-656399337D16}.Debug|x64.ActiveCfg = Debug|Any CPU + {169D73D6-1630-4346-A80F-656399337D16}.Debug|x64.Build.0 = Debug|Any CPU + {169D73D6-1630-4346-A80F-656399337D16}.Debug|x86.ActiveCfg = Debug|Any CPU + {169D73D6-1630-4346-A80F-656399337D16}.Debug|x86.Build.0 = Debug|Any CPU + {169D73D6-1630-4346-A80F-656399337D16}.Release|Any CPU.ActiveCfg = Release|Any CPU + {169D73D6-1630-4346-A80F-656399337D16}.Release|Any CPU.Build.0 = Release|Any CPU + {169D73D6-1630-4346-A80F-656399337D16}.Release|x64.ActiveCfg = Release|Any CPU + {169D73D6-1630-4346-A80F-656399337D16}.Release|x64.Build.0 = Release|Any CPU + {169D73D6-1630-4346-A80F-656399337D16}.Release|x86.ActiveCfg = Release|Any CPU + {169D73D6-1630-4346-A80F-656399337D16}.Release|x86.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -3052,5 +3268,22 @@ Global {FB2C1275-6C67-403C-8F21-B07A48C74FE4} = {41F15E67-7190-CF23-3BC4-77E87134CADD} {0B4DD2CC-19C8-4FE0-A2DE-076A5FF1B704} = {704A59BF-CC38-09FA-CE4F-73B27EC8F04F} {D3829E4D-6538-4533-A0E0-3418042D7BFE} = {704A59BF-CC38-09FA-CE4F-73B27EC8F04F} + {1285E3E4-21C1-72C0-6EB2-84C0D86F9543} = {A31A0899-6847-809B-913C-AB80CDCEC5C5} + {AFEEC916-DE26-43A0-960A-30D18FC19901} = {1285E3E4-21C1-72C0-6EB2-84C0D86F9543} + {D005396A-993D-42C1-AD6B-52EACD77F2E2} = {1285E3E4-21C1-72C0-6EB2-84C0D86F9543} + {FAF62DAD-7D52-4D94-AA76-25C3EA731D27} = {1285E3E4-21C1-72C0-6EB2-84C0D86F9543} + {C7166020-40C2-4D4E-8694-15C34B1ACAD1} = {D09AE309-2C35-6780-54D1-97CCC67DFFDE} + {11389097-C534-4895-BC58-E38384B8388C} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {14DC8B32-1024-4F7B-BA8A-DA8E66A95DA7} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {FE7796BB-A761-452E-A346-4177AC0BD14C} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {47C9F4AF-C404-4263-B5F1-A743F1AA13E7} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {32F3816D-277D-43C7-9F81-7426F9899B92} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {B85B9955-163B-42C5-B9AF-1ED0577EA536} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {9E86431F-0E96-A7CC-FC1F-8519FE022244} = {A31A0899-6847-809B-913C-AB80CDCEC5C5} + {728953F9-0A9E-4719-BD68-8C0635868772} = {9E86431F-0E96-A7CC-FC1F-8519FE022244} + {D644C88C-14FE-45A3-91C1-D694BA6B8900} = {1285E3E4-21C1-72C0-6EB2-84C0D86F9543} + {94CF61AB-B671-4334-959F-6CF19B1A13C9} = {9E86431F-0E96-A7CC-FC1F-8519FE022244} + {ECF79786-C3DC-423F-8ECC-82C528C56FAE} = {1285E3E4-21C1-72C0-6EB2-84C0D86F9543} + {169D73D6-1630-4346-A80F-656399337D16} = {9E86431F-0E96-A7CC-FC1F-8519FE022244} EndGlobalSection EndGlobal