Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
@@ -7,12 +7,13 @@ networks:
  stellaops:
    driver: bridge

volumes:
  mongo-data:
  minio-data:
  rustfs-data:
  concelier-jobs:
  nats-data:
  scanner-surface-cache:

services:
  mongo:
@@ -28,8 +29,8 @@ services:
      - stellaops
    labels: *release-labels

  minio:
    image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
    command: ["server", "/data", "--console-address", ":9001"]
    restart: unless-stopped
    environment:
@@ -41,22 +42,22 @@ services:
      - "${MINIO_CONSOLE_PORT:-29001}:9001"
    networks:
      - stellaops
    labels: *release-labels

  rustfs:
    image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
    command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"]
    restart: unless-stopped
    environment:
      RUSTFS__LOG__LEVEL: info
      RUSTFS__STORAGE__PATH: /data
    volumes:
      - rustfs-data:/data
    ports:
      - "${RUSTFS_HTTP_PORT:-8080}:8080"
    networks:
      - stellaops
    labels: *release-labels

  nats:
    image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e
@@ -107,43 +108,43 @@ services:
      - stellaops
    labels: *release-labels

  attestor:
    image: registry.stella-ops.org/stellaops/attestor@sha256:1ff0a3124d66d3a2702d8e421df40fbd98cc75cb605d95510598ebbae1433c50
    restart: unless-stopped
    depends_on:
      - signer
    environment:
      ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
      ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
    ports:
      - "${ATTESTOR_PORT:-8442}:8442"
    networks:
      - stellaops
    labels: *release-labels

  issuer-directory:
    image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge
    restart: unless-stopped
    depends_on:
      - mongo
      - authority
    environment:
      ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml"
      ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
      ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440"
      ISSUERDIRECTORY__MONGO__CONNECTIONSTRING: "${ISSUER_DIRECTORY_MONGO_CONNECTION_STRING}"
      ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}"
    volumes:
      - ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro
    ports:
      - "${ISSUER_DIRECTORY_PORT:-8447}:8080"
    networks:
      - stellaops
    labels: *release-labels

  concelier:
    image: registry.stella-ops.org/stellaops/concelier@sha256:29e2e1a0972707e092cbd3d370701341f9fec2aa9316fb5d8100480f2a1c76b5
    restart: unless-stopped
    depends_on:
      - mongo
      - minio
@@ -163,69 +164,95 @@ services:
      - stellaops
    labels: *release-labels

  scanner-web:
    image: registry.stella-ops.org/stellaops/scanner-web@sha256:3df8ca21878126758203c1a0444e39fd97f77ddacf04a69685cda9f1e5e94718
    restart: unless-stopped
    depends_on:
      - concelier
      - rustfs
      - nats
    environment:
      SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
      SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
      SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
      SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
      SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}"
      SCANNER__EVENTS__ENABLED: "${SCANNER_EVENTS_ENABLED:-false}"
      SCANNER__EVENTS__DRIVER: "${SCANNER_EVENTS_DRIVER:-redis}"
      SCANNER__EVENTS__DSN: "${SCANNER_EVENTS_DSN:-}"
      SCANNER__EVENTS__STREAM: "${SCANNER_EVENTS_STREAM:-stella.events}"
      SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "${SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS:-5}"
      SCANNER__EVENTS__MAXSTREAMLENGTH: "${SCANNER_EVENTS_MAX_STREAM_LENGTH:-10000}"
      # Surface.Env configuration (see docs/modules/scanner/design/surface-env.md)
      SCANNER_SURFACE_FS_ENDPOINT: "${SCANNER_SURFACE_FS_ENDPOINT:-http://rustfs:8080}"
      SCANNER_SURFACE_FS_BUCKET: "${SCANNER_SURFACE_FS_BUCKET:-surface-cache}"
      SCANNER_SURFACE_CACHE_ROOT: "${SCANNER_SURFACE_CACHE_ROOT:-/var/lib/stellaops/surface}"
      SCANNER_SURFACE_CACHE_QUOTA_MB: "${SCANNER_SURFACE_CACHE_QUOTA_MB:-4096}"
      SCANNER_SURFACE_PREFETCH_ENABLED: "${SCANNER_SURFACE_PREFETCH_ENABLED:-false}"
      SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}"
      SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}"
      SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}"
      SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}"
      SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}"
    volumes:
      - scanner-surface-cache:/var/lib/stellaops/surface
    ports:
      - "${SCANNER_WEB_PORT:-8444}:8444"
    networks:
      - stellaops
    labels: *release-labels

  scanner-worker:
    image: registry.stella-ops.org/stellaops/scanner-worker@sha256:eea5d6cfe7835950c5ec7a735a651f2f0d727d3e470cf9027a4a402ea89c4fb5
    restart: unless-stopped
    depends_on:
      - scanner-web
      - rustfs
      - nats
    environment:
      SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
      SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
      SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
      SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
      SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}"
      # Surface.Env configuration (see docs/modules/scanner/design/surface-env.md)
      SCANNER_SURFACE_FS_ENDPOINT: "${SCANNER_SURFACE_FS_ENDPOINT:-http://rustfs:8080}"
      SCANNER_SURFACE_FS_BUCKET: "${SCANNER_SURFACE_FS_BUCKET:-surface-cache}"
      SCANNER_SURFACE_CACHE_ROOT: "${SCANNER_SURFACE_CACHE_ROOT:-/var/lib/stellaops/surface}"
      SCANNER_SURFACE_CACHE_QUOTA_MB: "${SCANNER_SURFACE_CACHE_QUOTA_MB:-4096}"
      SCANNER_SURFACE_PREFETCH_ENABLED: "${SCANNER_SURFACE_PREFETCH_ENABLED:-false}"
      SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}"
      SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}"
      SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}"
      SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}"
      SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}"
    volumes:
      - scanner-surface-cache:/var/lib/stellaops/surface
    networks:
      - stellaops
    labels: *release-labels

  scheduler-worker:
    image: registry.stella-ops.org/stellaops/scheduler-worker:2025.10.0-edge
    restart: unless-stopped
    depends_on:
      - mongo
      - nats
      - scanner-web
    command:
      - "dotnet"
      - "StellaOps.Scheduler.Worker.Host.dll"
    environment:
      SCHEDULER__QUEUE__KIND: "${SCHEDULER_QUEUE_KIND:-Nats}"
      SCHEDULER__QUEUE__NATS__URL: "${SCHEDULER_QUEUE_NATS_URL:-nats://nats:4222}"
      SCHEDULER__STORAGE__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      SCHEDULER__STORAGE__DATABASE: "${SCHEDULER_STORAGE_DATABASE:-stellaops_scheduler}"
      SCHEDULER__WORKER__RUNNER__SCANNER__BASEADDRESS: "${SCHEDULER_SCANNER_BASEADDRESS:-http://scanner-web:8444}"
    networks:
      - stellaops
    labels: *release-labels

  notify-web:
    image: ${NOTIFY_WEB_IMAGE:-registry.stella-ops.org/stellaops/notify-web:2025.09.2}
@@ -235,70 +262,70 @@ services:
      - authority
    environment:
      DOTNET_ENVIRONMENT: Production
    volumes:
      - ../../etc/notify.airgap.yaml:/app/etc/notify.yaml:ro
    ports:
      - "${NOTIFY_WEB_PORT:-9446}:8446"
    networks:
      - stellaops
    labels: *release-labels

  excititor:
    image: registry.stella-ops.org/stellaops/excititor@sha256:65c0ee13f773efe920d7181512349a09d363ab3f3e177d276136bd2742325a68
    restart: unless-stopped
    depends_on:
      - concelier
    environment:
      EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445"
      EXCITITOR__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
    networks:
      - stellaops
    labels: *release-labels

  advisory-ai-web:
    image: registry.stella-ops.org/stellaops/advisory-ai-web:2025.09.2-airgap
    restart: unless-stopped
    depends_on:
      - scanner-web
    environment:
      ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}"
      ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue"
      ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans"
      ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs"
      ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}"
      ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}"
      ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}"
    ports:
      - "${ADVISORY_AI_WEB_PORT:-8448}:8448"
    volumes:
      - advisory-ai-queue:/var/lib/advisory-ai/queue
      - advisory-ai-plans:/var/lib/advisory-ai/plans
      - advisory-ai-outputs:/var/lib/advisory-ai/outputs
    networks:
      - stellaops
    labels: *release-labels

  advisory-ai-worker:
    image: registry.stella-ops.org/stellaops/advisory-ai-worker:2025.09.2-airgap
    restart: unless-stopped
    depends_on:
      - advisory-ai-web
    environment:
      ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}"
      ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue"
      ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans"
      ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs"
      ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}"
      ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}"
      ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}"
    volumes:
      - advisory-ai-queue:/var/lib/advisory-ai/queue
      - advisory-ai-plans:/var/lib/advisory-ai/plans
      - advisory-ai-outputs:/var/lib/advisory-ai/outputs
    networks:
      - stellaops
    labels: *release-labels

  web-ui:
    image: registry.stella-ops.org/stellaops/web-ui@sha256:bee9668011ff414572131dc777faab4da24473fe12c230893f161cabee092a1d
    restart: unless-stopped
@@ -1,13 +1,57 @@
global:
  release:
    version: ""
    channel: ""
    manifestSha256: ""
    profile: ""
  image:
    pullPolicy: IfNotPresent
  labels: {}

# Surface.Env configuration for Scanner/Zastava components
# See docs/modules/scanner/design/surface-env.md for details
surface:
  # Surface.FS storage configuration
  fs:
    # Base URI for Surface.FS / RustFS / S3-compatible store (required)
    endpoint: ""
    # Bucket/container for manifests and artefacts
    bucket: "surface-cache"
    # Optional region for S3-compatible stores (AWS/GCS)
    region: ""
  # Local cache configuration
  cache:
    # Local directory for warm caches
    root: "/var/lib/stellaops/surface"
    # Soft limit for on-disk cache usage in MB (64-262144)
    quotaMb: 4096
    # Enable manifest prefetch threads
    prefetchEnabled: false
  # Tenant configuration
  tenant: "default"
  # Comma-separated feature switches
  features: ""
  # TLS configuration for client authentication
  tls:
    # Path to PEM/PKCS#12 certificate file
    certPath: ""
    # Optional private key path when cert/key stored separately
    keyPath: ""
    # Secret name containing TLS cert/key
    secretName: ""
  # Secrets provider configuration
  secrets:
    # Provider ID: kubernetes, file, inline
    provider: "kubernetes"
    # Kubernetes namespace for secrets provider
    namespace: ""
    # Path or base for file provider
    root: ""
    # Optional fallback provider ID
    fallbackProvider: ""
    # Allow inline secrets (disable in production)
    allowInline: false

telemetry:
  collector:
    enabled: false
@@ -35,6 +79,25 @@ telemetry:
    resources: {}

configMaps:
  # Surface.Env environment variables for Scanner/Zastava components
  surface-env:
    data:
      SCANNER_SURFACE_FS_ENDPOINT: "{{ .Values.surface.fs.endpoint }}"
      SCANNER_SURFACE_FS_BUCKET: "{{ .Values.surface.fs.bucket }}"
      SCANNER_SURFACE_FS_REGION: "{{ .Values.surface.fs.region }}"
      SCANNER_SURFACE_CACHE_ROOT: "{{ .Values.surface.cache.root }}"
      SCANNER_SURFACE_CACHE_QUOTA_MB: "{{ .Values.surface.cache.quotaMb }}"
      SCANNER_SURFACE_PREFETCH_ENABLED: "{{ .Values.surface.cache.prefetchEnabled }}"
      SCANNER_SURFACE_TENANT: "{{ .Values.surface.tenant }}"
      SCANNER_SURFACE_FEATURES: "{{ .Values.surface.features }}"
      SCANNER_SURFACE_TLS_CERT_PATH: "{{ .Values.surface.tls.certPath }}"
      SCANNER_SURFACE_TLS_KEY_PATH: "{{ .Values.surface.tls.keyPath }}"
      SCANNER_SURFACE_SECRETS_PROVIDER: "{{ .Values.surface.secrets.provider }}"
      SCANNER_SURFACE_SECRETS_NAMESPACE: "{{ .Values.surface.secrets.namespace }}"
      SCANNER_SURFACE_SECRETS_ROOT: "{{ .Values.surface.secrets.root }}"
      SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER: "{{ .Values.surface.secrets.fallbackProvider }}"
      SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "{{ .Values.surface.secrets.allowInline }}"

  issuer-directory-config:
    data:
      issuer-directory.yaml: |
@@ -41,6 +41,48 @@ completely isolated network:

The PHP analyzer parses `composer.lock` for Composer dependencies and supports optional runtime evidence via the `stella-trace.php` shim; set `STELLA_PHP_OPCACHE=1` to enable opcache statistics collection.

**Python analyzer features:**

- **Wheel/sdist/editable** parsing with dependency edges from `METADATA`, `PKG-INFO`, `requirements.txt`, and `pyproject.toml`
- **Virtual environment** support for virtualenv, venv, and conda prefix layouts
- **PEP 420 namespace packages** with proper `importlib` resolution semantics across `sys.path`
- **Python version detection** via `pyproject.toml`, `runtime.txt`, Dockerfile `FROM python:*`, `.python-version`
- **Native extension detection** for `.so`, `.pyd`, CFFI modules, ctypes loaders, and embedded WASM
- **Framework/config heuristics** for Django, Flask, FastAPI, Celery, AWS Lambda, Gunicorn, Click/Typer CLIs
- **AOC-compliant observations**: entrypoints (module `__main__`, console_scripts, zipapp), components (modules/packages/native), edges (import, namespace, dynamic-hint, native-extension) with resolver traces
- **Optional runtime evidence** via import hook; the bundled `stellaops_trace.py` module captures module load events with SHA-256 path hashing for secure evidence correlation (see the sketch below)
- **CLI inspection**: run `stella python inspect --root /path/to/app` to analyze a Python workspace locally
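
The runtime-evidence hook mentioned above ships as `stellaops_trace.py`; the following is only a minimal sketch of the general technique — a meta-path finder that records each imported module together with a SHA-256 hash of its file path — not the bundled module. Class name, sink path, and JSON fields are invented for the example.

```python
# Illustrative sketch only: log module loads with SHA-256 path hashing,
# approximating the kind of evidence a runtime tracing shim could collect.
import hashlib
import importlib.abc
import json
import sys
import time


class TraceFinder(importlib.abc.MetaPathFinder):
    """Record (module name, hashed origin path, timestamp) for every resolvable import."""

    def __init__(self, sink_path="/tmp/python-trace.jsonl"):
        self.sink_path = sink_path

    def find_spec(self, fullname, path=None, target=None):
        # Delegate actual resolution to the remaining finders, then record the result.
        for finder in sys.meta_path:
            if finder is self:
                continue
            find_spec = getattr(finder, "find_spec", None)
            if find_spec is None:
                continue
            spec = find_spec(fullname, path, target)
            if spec is not None:
                origin = spec.origin or ""
                event = {
                    "module": fullname,
                    "path_sha256": hashlib.sha256(origin.encode("utf-8")).hexdigest(),
                    "ts": time.time(),
                }
                with open(self.sink_path, "a", encoding="utf-8") as sink:
                    sink.write(json.dumps(event) + "\n")
                return spec
        return None


# Install the finder first so it observes all subsequent imports.
sys.meta_path.insert(0, TraceFinder())
```

In the real analyzer such events feed AOC observations; here they simply land in a local JSONL file.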

**Surface.Env configuration:** Scanner Worker and WebService components use the Surface.Env library for configuration discovery. In air-gapped deployments, configure the following environment variables (see `docs/modules/scanner/design/surface-env.md` for details):

| Variable | Description | Air-gap Default |
|----------|-------------|-----------------|
| `SCANNER_SURFACE_FS_ENDPOINT` | Base URI for Surface.FS / RustFS storage | `http://rustfs:8080` |
| `SCANNER_SURFACE_FS_BUCKET` | Bucket for manifests/artefacts | `surface-cache` |
| `SCANNER_SURFACE_CACHE_ROOT` | Local cache directory | `/var/lib/stellaops/surface` |
| `SCANNER_SURFACE_CACHE_QUOTA_MB` | Cache quota in MB (64-262144) | `4096` |
| `SCANNER_SURFACE_PREFETCH_ENABLED` | Enable manifest prefetch | `false` |
| `SCANNER_SURFACE_TENANT` | Tenant namespace | `default` |
| `SCANNER_SURFACE_SECRETS_PROVIDER` | Secrets provider (`file`, `kubernetes`) | `file` |
| `SCANNER_SURFACE_SECRETS_ROOT` | Root path for file provider | `/etc/stellaops/secrets` |
| `SCANNER_SURFACE_SECRETS_ALLOW_INLINE` | Allow inline secrets | `false` |
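
As a hedged illustration of how these variables and their air-gap defaults combine (plain Python for demonstration only; the services themselves resolve them through the .NET Surface.Env library):

```python
# Illustration only: resolve the documented SCANNER_SURFACE_* variables with
# their air-gap defaults, mirroring the precedence "environment over default".
import os

AIRGAP_DEFAULTS = {
    "SCANNER_SURFACE_FS_ENDPOINT": "http://rustfs:8080",
    "SCANNER_SURFACE_FS_BUCKET": "surface-cache",
    "SCANNER_SURFACE_CACHE_ROOT": "/var/lib/stellaops/surface",
    "SCANNER_SURFACE_CACHE_QUOTA_MB": "4096",
    "SCANNER_SURFACE_PREFETCH_ENABLED": "false",
    "SCANNER_SURFACE_TENANT": "default",
    "SCANNER_SURFACE_SECRETS_PROVIDER": "file",
    "SCANNER_SURFACE_SECRETS_ROOT": "/etc/stellaops/secrets",
    "SCANNER_SURFACE_SECRETS_ALLOW_INLINE": "false",
}


def resolve_surface_env():
    """Return the effective Surface.Env settings for this process."""
    settings = {name: os.environ.get(name, default) for name, default in AIRGAP_DEFAULTS.items()}
    # Basic sanity check matching the documented quota range.
    quota = int(settings["SCANNER_SURFACE_CACHE_QUOTA_MB"])
    if not 64 <= quota <= 262144:
        raise ValueError(f"SCANNER_SURFACE_CACHE_QUOTA_MB out of range: {quota}")
    return settings


if __name__ == "__main__":
    for key, value in sorted(resolve_surface_env().items()):
        print(f"{key}={value}")
```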

For Helm deployments, configure via `values.yaml`:

```yaml
surface:
  fs:
    endpoint: "http://rustfs:8080"
    bucket: "surface-cache"
  cache:
    root: "/var/lib/stellaops/surface"
    quotaMb: 4096
  tenant: "default"
  secrets:
    provider: "file"
    root: "/etc/stellaops/secrets"
```

For Docker Compose, these variables are pre-configured in `docker-compose.airgap.yaml` with sensible defaults.

**Advisory AI volume primer:** ship a tarball containing empty `queue/`, `plans/`, and `outputs/` directories plus their ownership metadata. During import, extract it onto the RWX volume used by `advisory-ai-web` and `advisory-ai-worker` so pods start with the expected directory tree even on air-gapped nodes.

*Scanner core:* C# 12 on **.NET {{ dotnet }}**.
@@ -34,8 +34,8 @@
| 8 | CONCELIER-LNM-21-004 | DONE (2025-11-27) | Completed: AOC write guards + tests + docs | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Remove legacy merge/dedup logic; add guardrails/tests to keep ingestion append-only; document linkset supersession. |
| 9 | CONCELIER-LNM-21-005 | DONE (2025-11-27) | Completed: Event contract + publisher interfaces + tests + docs | Concelier Core Guild · Platform Events Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Emit `advisory.linkset.updated` events with delta descriptions + observation ids (tenant + provenance only). |
| 10 | CONCELIER-LNM-21-101-DEV | DONE (2025-11-27) | Completed: Sharding + TTL migration + event collection | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Provision Mongo collections (`advisory_observations`, `advisory_linksets`) with hashed shard keys, tenant indexes, TTL for ingest metadata. |
| 11 | CONCELIER-LNM-21-102-DEV | TODO | Unblocked by 21-101-DEV completion; CI runner available for migrations. | Concelier Storage Guild · DevOps Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Backfill legacy merged advisories; seed tombstones; provide rollback tooling for Offline Kit. |
| 12 | CONCELIER-LNM-21-103-DEV | BLOCKED (awaits 21-102-DEV) | Requires 21-102-DEV completion; CI runner available for object-store bootstrap tests. | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Move large raw payloads to object storage with deterministic pointers; update bootstrapper/offline seeds; preserve provenance metadata. |
| 11 | CONCELIER-LNM-21-102-DEV | DONE (2025-11-28) | Completed: Migration + tombstones + rollback tooling | Concelier Storage Guild · DevOps Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Backfill legacy merged advisories; seed tombstones; provide rollback tooling for Offline Kit. |
| 12 | CONCELIER-LNM-21-103-DEV | BLOCKED (awaits object storage contract) | Requires object storage contract definition before implementation; see Blockers & Dependencies. | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Move large raw payloads to object storage with deterministic pointers; update bootstrapper/offline seeds; preserve provenance metadata. |
| 13 | CONCELIER-LNM-21-201 | BLOCKED (awaits 21-103) | Upstream storage tasks must land first; CI runner available for WebService tests. | Concelier WebService Guild · BE-Base Platform Guild (`src/Concelier/StellaOps.Concelier.WebService`) | `/advisories/observations` filters by alias/purl/source with strict tenant scopes; echoes upstream values + provenance fields only. |
| 14 | CONCELIER-LNM-21-202 | BLOCKED (awaits 21-201) | Await upstream to run `/advisories/linksets` export tests; CI runner available. | Concelier WebService Guild (`src/Concelier/StellaOps.Concelier.WebService`) | `/advisories/linksets`/`export`/`evidence` endpoints surface correlation + conflict payloads and `ERR_AGG_*` mapping; no synthesis/merge. |
| 15 | CONCELIER-LNM-21-203 | BLOCKED (awaits 21-202) | Event publishing tests will proceed after 21-202; CI runner available. | Concelier WebService Guild · Platform Events Guild (`src/Concelier/StellaOps.Concelier.WebService`) | Publish idempotent NATS/Redis events for new observations/linksets with documented schemas; include tenant + provenance references only. |
@@ -46,6 +46,8 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-11-28 | CONCELIER-LNM-21-103-DEV BLOCKED: Object storage contract for raw payloads not yet defined. Current payloads stored in GridFS; migration to S3-compatible store requires interface definition and cross-guild coordination with DevOps Guild. Marked task blocked and documented in Decisions & Risks. | Implementer |
| 2025-11-28 | CONCELIER-LNM-21-102-DEV DONE: Created `EnsureLegacyAdvisoriesBackfillMigration` that backfills `advisory_observations` from `advisory_raw`, creates/updates `advisory_linksets` by grouping observations, and seeds `backfill_marker` tombstones for rollback tracking. Added rollback script at `ops/devops/scripts/rollback-lnm-backfill.js` for Offline Kit. Updated MIGRATIONS.md with migration entry and operator runbook. Build passed. | Implementer |
| 2025-11-27 | CONCELIER-LNM-21-101-DEV DONE: Created `EnsureLinkNotMergeShardingAndTtlMigration` adding hashed shard key indexes on `tenantId` for horizontal scaling, optional TTL indexes for `ObservationRetention`/`LinksetRetention`/`EventRetention` options, and `advisory_linkset_events` collection for linkset event outbox. Updated `MongoStorageOptions` with retention properties. Registered both `EnsureLinkNotMergeCollectionsMigration` and new sharding/TTL migration in DI. | Implementer |
| 2025-11-27 | CONCELIER-LNM-21-005 DONE: Implemented `advisory.linkset.updated@1` event infrastructure (`AdvisoryLinksetUpdatedEvent`, `IAdvisoryLinksetEventPublisher`, `IAdvisoryLinksetEventOutbox`, `AdvisoryLinksetEventPublisherOptions`). Added 9 unit tests covering delta computation, conflict summaries, and provenance mapping. Documented event contract at `docs/modules/concelier/events/advisory.linkset.updated@1.md`. | Implementer |
| 2025-11-27 | CONCELIER-LNM-21-004 DONE: Implemented AOC write guard infrastructure (`IAdvisoryObservationWriteGuard`, `AdvisoryObservationWriteGuard`, `AppendOnlyViolationException`). Added 13 unit tests covering Proceed/SkipIdentical/RejectMutation dispositions. Documented AOC and linkset supersession model in `docs/modules/concelier/link-not-merge-schema.md`. Legacy merge logic already deprecated with `[Obsolete]` and gated by `NoMergeEnabled` flag (defaults true). | Implementer |
@@ -115,6 +117,7 @@
- CONCELIER-GRAPH-28-102 implemented: contract lives at `docs/modules/concelier/api/evidence-batch.md`; integration test covers empty-match path. Ensure consumers align on tenant header + limits before rollout.
- CONCELIER-LNM-21-004 risk: removing canonical merge/dedup requires architect decision on retiring `CanonicalMerger` consumers (graph overlays, console summaries) and a migration/rollback plan; proceed after design sign-off.
- CONCELIER-GRAPH-24-101 risk: API contract drafted at `docs/modules/concelier/api/advisories-summary.md`; implementation pending WebService wiring and consumer alignment.
- CONCELIER-LNM-21-103-DEV blocked: Object storage contract for raw payloads not yet defined. Requires cross-guild coordination between Storage Guild and DevOps Guild. Current payloads stored in GridFS; migration to S3-compatible store (MinIO) requires interface definition, migration strategy, bootstrapper updates, and offline seed support.
## Next Checkpoints
- Next LNM schema review: align with CARTO-GRAPH/LNM owners (date TBD); unblock tasks 1–2 and 5–15.
@@ -44,7 +44,8 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-11-25 | Runner disk is full (“No space left on device”); orchestrator WebService tests cannot be re-run. Free bin/obj/TestResults and `ops/devops/artifacts/ci-110` before continuing ORCH-32/33/34. | Concelier Core |
| 2025-11-28 | Disk space issue resolved (56GB available). Fixed `InitializeMongoAsync` to skip in testing mode. WebService orchestrator tests still fail due to hosted services requiring MongoDB; test factory needs more extensive mocking or integration test with Mongo2Go. ORCH tasks remain BLOCKED pending test infrastructure fix. | Implementer |
| 2025-11-25 | Runner disk is full ("No space left on device"); orchestrator WebService tests cannot be re-run. Free bin/obj/TestResults and `ops/devops/artifacts/ci-110` before continuing ORCH-32/33/34. | Concelier Core |
| 2025-11-25 | Storage.Mongo job-store slice executed locally: `dotnet test src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/StellaOps.Concelier.Storage.Mongo.Tests.csproj -c Debug --no-restore --no-build --filter FullyQualifiedName~MongoJobStore` (3/3 pass). TRX: `ops/devops/artifacts/ci-110/20251125T034529Z/trx/concelier-storage-jobstore.trx`. Broader suite still pending CI. | Concelier Core |
| 2025-11-25 | WebService orchestrator filter run (`dotnet test ...WebService.Tests.csproj --filter FullyQualifiedName~Orchestrator`) produced no matching tests; TRX recorded at `ops/devops/artifacts/ci-110/20251125T040900Z/trx/concelier-web-orch.trx`. Need to add orchestrator WebService tests before closing ORCH-32/33/34. | Concelier Core |
| 2025-11-25 | Attempted to add WebService orchestrator tests with Mongo bypass; repo disk is full (`No space left on device`), preventing further builds/tests. Cleanup of bin/obj/TestResults and ops/devops artifacts required before rerunning orchestrator test slice. | Concelier Core |
@@ -26,22 +26,29 @@
| P3 | PREP-CONCELIER-VULN-29-001 | DONE (2025-11-19) | Bridge contract published at `docs/modules/concelier/bridges/vuln-29-001.md`; sample fixture location noted. | Concelier WebService Guild · Vuln Explorer Guild (`src/Concelier/StellaOps.Concelier.WebService`) | Provide Concelier/Vuln bridge contract (advisory keys, search params, sample responses) that VEX Lens + Vuln Explorer rely on; publish OpenAPI excerpt and fixtures. |
| 0 | POLICY-AUTH-SIGNALS-LIB-115 | DONE (2025-11-19) | Package `StellaOps.Policy.AuthSignals` 0.1.0-alpha published to `local-nugets/`; schema/fixtures at `docs/policy/*`. | Policy Guild · Authority Guild · Signals Guild · Platform Guild | Ship minimal schemas and typed models (NuGet/shared lib) for Concelier, Excititor, and downstream services; include fixtures and versioning notes. |
| 1 | CONCELIER-POLICY-20-002 | DONE (2025-11-20) | Vendor alias + SemVer range normalization landed; tests green. | Concelier Core Guild · Policy Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Expand linkset builders with vendor equivalence, NEVRA/PURL normalization, version-range parsing so policy joins are accurate without prioritizing sources. |
| 2 | CONCELIER-POLICY-20-003 | BLOCKED | Upstream POLICY-20-001 outputs missing; 20-002 complete. | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Advisory selection cursors + change-stream checkpoints for deterministic policy deltas; include offline migration scripts. |
| 3 | CONCELIER-POLICY-23-001 | BLOCKED | Depends on 20-003 (blocked). | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Secondary indexes/materialized views (alias, provider severity, confidence) to keep policy lookups fast without cached verdicts; document query patterns. |
| 4 | CONCELIER-POLICY-23-002 | BLOCKED | Depends on 23-001 (blocked). | Concelier Core Guild · Platform Events Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Ensure `advisory.linkset.updated` events carry idempotent IDs, confidence summaries, tenant metadata for safe policy replay. |
| 5 | CONCELIER-RISK-66-001 | BLOCKED | Blocked on POLICY-AUTH-SIGNALS-LIB-115 and POLICY chain. | Concelier Core Guild · Risk Engine Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Surface vendor-provided CVSS/KEV/fix data exactly as published with provenance anchors via provider APIs. |
| 6 | CONCELIER-RISK-66-002 | BLOCKED | Blocked on POLICY-AUTH-SIGNALS-LIB-115 and 66-001. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Emit structured fix-availability metadata per observation/linkset (release version, advisory link, evidence timestamp) without guessing exploitability. |
| 7 | CONCELIER-RISK-67-001 | BLOCKED | Blocked on POLICY-AUTH-SIGNALS-LIB-115 and 66-001. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Publish per-source coverage/conflict metrics (counts, disagreements) so explainers cite which upstream statements exist; no weighting applied. |
| 8 | CONCELIER-RISK-68-001 | BLOCKED | Blocked on POLICY-AUTH-SIGNALS-LIB-115 and POLICY-RISK-68-001. | Concelier Core Guild · Policy Studio Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Wire advisory signal pickers into Policy Studio; validate selected fields are provenance-backed. |
| 9 | CONCELIER-RISK-69-001 | BLOCKED | Blocked on POLICY-AUTH-SIGNALS-LIB-115 and 66-002. | Concelier Core Guild · Notifications Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Emit notifications on upstream advisory field changes (e.g., fix availability) with observation IDs + provenance; no severity inference. |
| 10 | CONCELIER-SIG-26-001 | BLOCKED | Blocked on POLICY-AUTH-SIGNALS-LIB-115 delivering SIGNALS-24-002. | Concelier Core Guild · Signals Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Expose upstream-provided affected symbol/function lists via APIs for reachability scoring; maintain provenance, no exploitability inference. |
| 2 | CONCELIER-POLICY-20-003 | DONE (2025-11-28) | Implemented `PolicyDeltaCheckpoint` model, `IPolicyDeltaCheckpointStore` interface, MongoDB store + migration `20251128_policy_delta_checkpoints`. | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Advisory selection cursors + change-stream checkpoints for deterministic policy deltas; include offline migration scripts. |
| 3 | CONCELIER-POLICY-23-001 | DONE (2025-11-28) | Implemented migration `20251128_policy_lookup_indexes` with alias multikey, confidence, and severity indexes. Query patterns documented in migration XML docs. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Secondary indexes/materialized views (alias, provider severity, confidence) to keep policy lookups fast without cached verdicts; document query patterns. |
| 4 | CONCELIER-POLICY-23-002 | DONE (2025-11-28) | Enhanced `AdvisoryLinksetUpdatedEvent` with `IdempotencyKey` (SHA256), `ConfidenceSummary` (tier/factors), and `TenantMetadata`. | Concelier Core Guild · Platform Events Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Ensure `advisory.linkset.updated` events carry idempotent IDs, confidence summaries, tenant metadata for safe policy replay. |
| 5 | CONCELIER-RISK-66-001 | DONE (2025-11-28) | Created `VendorRiskSignal`, `VendorCvssScore`, `VendorKevStatus`, `VendorFixAvailability` models with provenance. Extractor parses OSV/NVD formats. | Concelier Core Guild · Risk Engine Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Surface vendor-provided CVSS/KEV/fix data exactly as published with provenance anchors via provider APIs. |
| 6 | CONCELIER-RISK-66-002 | TODO | Upstream 66-001 DONE. Ready to emit fix-availability metadata. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Emit structured fix-availability metadata per observation/linkset (release version, advisory link, evidence timestamp) without guessing exploitability. |
| 7 | CONCELIER-RISK-67-001 | TODO | Upstream 66-001 DONE. Ready to publish coverage/conflict metrics. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Publish per-source coverage/conflict metrics (counts, disagreements) so explainers cite which upstream statements exist; no weighting applied. |
| 8 | CONCELIER-RISK-68-001 | BLOCKED | Blocked on POLICY-RISK-68-001. | Concelier Core Guild · Policy Studio Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Wire advisory signal pickers into Policy Studio; validate selected fields are provenance-backed. |
| 9 | CONCELIER-RISK-69-001 | BLOCKED | Blocked on 66-002. | Concelier Core Guild · Notifications Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Emit notifications on upstream advisory field changes (e.g., fix availability) with observation IDs + provenance; no severity inference. |
| 10 | CONCELIER-SIG-26-001 | BLOCKED | Blocked on SIGNALS-24-002. | Concelier Core Guild · Signals Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Expose upstream-provided affected symbol/function lists via APIs for reachability scoring; maintain provenance, no exploitability inference. |
| 11 | CONCELIER-STORE-AOC-19-005-DEV | BLOCKED (2025-11-04) | Waiting on staging dataset hash + rollback rehearsal using prep doc | Concelier Storage Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo`) | Execute raw-linkset backfill/rollback plan so Mongo reflects Link-Not-Merge data; rehearse rollback (dev/staging). |
| 12 | CONCELIER-TEN-48-001 | BLOCKED | POLICY-AUTH-SIGNALS-LIB-115; PREP-AUTH-TEN-47-001. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Enforce tenant scoping through normalization/linking; expose capability endpoint advertising `merge=false`; ensure events include tenant IDs. |
| 12 | CONCELIER-TEN-48-001 | DONE (2025-11-28) | Created Tenancy module with `TenantScope`, `TenantCapabilities`, `TenantCapabilitiesResponse`, `ITenantCapabilitiesProvider`, and `TenantScopeNormalizer` per AUTH-TEN-47-001. | Concelier Core Guild (`src/Concelier/__Libraries/StellaOps.Concelier.Core`) | Enforce tenant scoping through normalization/linking; expose capability endpoint advertising `merge=false`; ensure events include tenant IDs. |
| 13 | CONCELIER-VEXLENS-30-001 | BLOCKED | PREP-CONCELIER-VULN-29-001; VEXLENS-30-005 | Concelier WebService Guild · VEX Lens Guild (`src/Concelier/StellaOps.Concelier.WebService`) | Guarantee advisory key consistency and cross-links consumed by VEX Lens so consensus explanations cite Concelier evidence without merges. |
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-11-28 | Completed CONCELIER-TEN-48-001: created Tenancy module with `TenantScope`, `TenantCapabilities`, `TenantCapabilitiesResponse`, `ITenantCapabilitiesProvider`, `LinkNotMergeTenantCapabilitiesProvider`, and `TenantScopeNormalizer`. Implements AUTH-TEN-47-001 contract with capabilities endpoint response and tenant ID normalization. Build green. | Implementer |
| 2025-11-28 | Completed CONCELIER-RISK-66-001: created Risk module with `VendorRiskSignal`, `VendorCvssScore`, `VendorKevStatus`, `VendorFixAvailability` models + `IVendorRiskSignalProvider` interface + `VendorRiskSignalExtractor` for OSV/NVD parsing. All with provenance anchors. Build green. Tasks 6 and 7 now TODO. | Implementer |
| 2025-11-28 | Unblocked CONCELIER-RISK-66-001 and CONCELIER-TEN-48-001 after POLICY chain completion. Tasks 5 and 12 moved to TODO. | Implementer |
| 2025-11-28 | Completed CONCELIER-POLICY-23-002: enhanced `AdvisoryLinksetUpdatedEvent` with `IdempotencyKey` (SHA256 of linkset identity + content), `ConfidenceSummary` (tier classification: high/medium/low/very-low/unknown + contributing factors), and `TenantMetadata` (URN + namespace extraction). Build green. POLICY chain (20-002/003, 23-001/002) now complete. | Implementer |
| 2025-11-28 | Completed CONCELIER-POLICY-23-001: implemented migration `20251128_policy_lookup_indexes` with indexes for alias lookups (multikey on `linkset.aliases`), confidence filtering (`confidence`, `tenantId+confidence`), severity queries (`normalized.severities.system/score`), and pagination (`tenantId+createdAt`). Query patterns documented in XML docs. Build green; MIGRATIONS.md updated. Task 4 (23-002) now TODO. | Implementer |
| 2025-11-28 | Completed CONCELIER-POLICY-20-003: implemented `PolicyDeltaCheckpoint` model + `IPolicyDeltaCheckpointStore` interface in Core/Linksets; MongoDB document, store, and migration (`20251128_policy_delta_checkpoints`) in Storage.Mongo. Indexes on tenantId, consumerId, compound, and updatedAt. Build green; MIGRATIONS.md updated. Task 3 (23-001) now TODO. | Implementer |
| 2025-11-28 | Unblocked CONCELIER-POLICY-20-003 after verifying POLICY-20-001 DONE in Sprint 0114. Task moved to TODO; ready for implementation. | Implementer |
| 2025-11-25 | Synced status with tasks-all: RISK-66/68/69, SIG-26-001, TEN-48-001, VEXLENS-30-001 remain BLOCKED despite signals library shipping; blockers are POLICY-20-001 outputs, AUTH-TEN-47-001, SIGNALS-24-002, VEXLENS-30-005. | Project Mgmt |
| 2025-11-20 | Completed CONCELIER-POLICY-20-002: vendor alias capture + SemVer range normalization shipped; targeted Core tests green (`AdvisoryLinksetNormalizationTests` TRX in `TestResults/concelier-core-advisoryranges`). | Implementer |
| 2025-11-19 | Added PREP tasks for CONCELIER-CORE-AOC-19-004, AUTH-TEN-47-001, and CONCELIER-VULN-29-001; updated dependencies for tasks 11–13. | Project Mgmt |
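
The POLICY-23-002 entries above describe the event idempotency key as a SHA-256 over the linkset identity plus content. The sketch below illustrates that style of deterministic key in Python; the field names and canonicalisation are assumptions for the example, not the actual `AdvisoryLinksetUpdatedEvent` implementation.

```python
# Illustrative only: derive a deterministic idempotency key from a linkset's
# identity and content so replays of the same update hash to the same key.
import hashlib
import json


def idempotency_key(tenant_id: str, linkset_id: str, content: dict) -> str:
    """SHA-256 over a canonical (sorted-key, compact) JSON encoding."""
    canonical = json.dumps(
        {"tenant": tenant_id, "linkset": linkset_id, "content": content},
        sort_keys=True,
        separators=(",", ":"),
    )
    return hashlib.sha256(canonical.encode("utf-8")).hexdigest()


key_a = idempotency_key("t1", "ls-42", {"aliases": ["CVE-2025-0001"]})
key_b = idempotency_key("t1", "ls-42", {"aliases": ["CVE-2025-0001"]})
assert key_a == key_b  # same inputs -> same key, so duplicate events can be dropped
```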
@@ -27,15 +27,17 @@
| 6 | POLICY-ENGINE-20-006 | BLOCKED (2025-11-27) | Depends on 20-005. | Policy · Scheduler Worker Guild / `src/Policy/StellaOps.Policy.Engine` |
| 7 | POLICY-ENGINE-20-007 | BLOCKED (2025-11-27) | Depends on 20-006. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` |
| 8 | POLICY-ENGINE-20-008 | BLOCKED (2025-11-27) | Depends on 20-007. | Policy · QA Guild / `src/Policy/StellaOps.Policy.Engine` |
| 9 | POLICY-ENGINE-20-009 | BLOCKED (2025-11-27) | Depends on 20-008. | Policy · Storage Guild / `src/Policy/StellaOps.Policy.Engine` |
| 10 | POLICY-ENGINE-27-001 | BLOCKED (2025-11-27) | Depends on 20-009. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` |
| 11 | POLICY-ENGINE-27-002 | BLOCKED (2025-11-27) | Depends on 27-001. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` |
| 9 | POLICY-ENGINE-20-009 | DONE (2025-11-28) | MongoDB schemas/indexes for policies, policy_runs, effective_finding_* with migrations and tenant enforcement. | Policy · Storage Guild / `src/Policy/StellaOps.Policy.Engine` |
| 10 | POLICY-ENGINE-27-001 | DONE (2025-11-28) | Extended compile outputs with symbol table, rule index, documentation, coverage metadata, and deterministic hashes. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` |
| 11 | POLICY-ENGINE-27-002 | DONE (2025-11-28) | Enhanced simulate endpoints with rule firing counts, heatmap aggregates, sampled explain traces with deterministic ordering, and delta summaries. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` |
| 12 | POLICY-ENGINE-29-001 | BLOCKED (2025-11-27) | Depends on 27-004. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` |
| 13 | POLICY-ENGINE-29-002 | DONE (2025-11-23) | Contract published at `docs/modules/policy/contracts/29-002-streaming-simulation.md`. | Policy · Findings Ledger Guild / `src/Policy/StellaOps.Policy.Engine` |
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-11-28 | Completed POLICY-ENGINE-27-002: Enhanced simulation analytics with SimulationAnalytics models (RuleFiringCounts, SimulationHeatmap, SampledExplainTraces, SimulationDeltaSummary) and SimulationAnalyticsService. Integrated into RiskSimulationResult. 15 new unit tests. | Policy Guild |
| 2025-11-28 | Completed POLICY-ENGINE-20-009: MongoDB schemas/indexes with migration infrastructure (PolicyEngineMongoContext, migrations, TenantFilterBuilder). Completed POLICY-ENGINE-27-001: Extended compile outputs with PolicyCompileMetadata (symbol table, rule index, documentation, coverage metadata, deterministic hashes) via PolicyMetadataExtractor. 16 new unit tests. | Policy Guild |
| 2025-11-27 | Marked POLICY-CONSOLE-23-002 and POLICY-ENGINE-20-003..29-001 BLOCKED due to unmet upstream contracts (POLICY-CONSOLE-23-001, deterministic evaluator 20-002 chain). | Policy Guild |
| 2025-11-23 | Published POLICY-ENGINE-29-002 streaming simulation contract (`docs/modules/policy/contracts/29-002-streaming-simulation.md`); marked task 13 DONE. | Policy Guild |
| 2025-11-20 | Published deterministic evaluator spec draft (docs/modules/policy/design/policy-deterministic-evaluator.md); moved PREP-POLICY-ENGINE-20-002 to DOING. | Project Mgmt |
@@ -26,14 +26,15 @@
| --- | --- | --- | --- | --- | --- |
| P1 | PREP-140-D-ZASTAVA-WAVE-WAITING-ON-SURFACE-FS | DONE (2025-11-20) | Due 2025-11-22 · Accountable: Zastava Observer/Webhook Guilds · Surface Guild | Zastava Observer/Webhook Guilds · Surface Guild | Prep artefact published at `docs/modules/zastava/prep/2025-11-20-surface-fs-env-prep.md` (cache drop cadence, env helper ownership, DSSE requirements). |
| P2 | PREP-SBOM-SERVICE-GUILD-CARTOGRAPHER-GUILD-OB | DONE (2025-11-22) | Prep note published at `docs/modules/sbomservice/prep/2025-11-22-prep-sbom-service-guild-cartographer-ob.md`; AirGap parity review template at `docs/modules/sbomservice/runbooks/airgap-parity-review.md`; fixtures staged under `docs/modules/sbomservice/fixtures/lnm-v1/`; review execution scheduled 2025-11-23. | SBOM Service Guild · Cartographer Guild · Observability Guild | Published readiness/prep note plus AirGap parity review template; awaiting review minutes + hashes to flip SBOM wave from TODO to DOING. |
| 1 | 140.A Graph wave | BLOCKED (2025-11-19) | Await real scanner cache ETA; working off mock bundle only. | Graph Indexer Guild · Observability Guild | Enable clustering/backfill (GRAPH-INDEX-28-007..010) against mock bundle; revalidate once real cache lands. |
| 2 | 140.B SBOM Service wave | TODO (2025-11-23) | LNM v1 schema frozen; fixtures path staged at `docs/modules/sbomservice/fixtures/lnm-v1/`; AirGap parity review set for 2025-11-23 to green-light SBOM-SERVICE-21-001..004. | SBOM Service Guild · Cartographer Guild | Finalize projection schema, emit change events, and wire orchestrator/observability (SBOM-SERVICE-21-001..004, SBOM-AIAI-31-001/002). |
| 3 | 140.C Signals wave | BLOCKED (2025-11-20) | CAS promotion + signed manifests + provenance appendix pending; SIGNALS-24-002/003 blocked upstream. TRACTORS: see `docs/signals/cas-promotion-24-002.md` and `docs/signals/provenance-24-003.md`. | Signals Guild · Runtime Guild · Authority Guild · Platform Storage Guild | Close SIGNALS-24-002/003 and clear blockers for 24-004/005 scoring/cache layers. |
| 4 | 140.D Zastava wave | BLOCKED | PREP-140-D-ZASTAVA-WAVE-WAITING-ON-SURFACE-FS | Zastava Observer/Webhook Guilds · Surface Guild | Prepare env/secret helpers and admission hooks; start once cache endpoints and helpers are published. |
| 1 | 140.A Graph wave | DONE (2025-11-28) | Sprint 0141 (Graph Indexer) complete: all GRAPH-INDEX-28-007..010 tasks DONE. | Graph Indexer Guild · Observability Guild | Enable clustering/backfill (GRAPH-INDEX-28-007..010) against mock bundle; revalidate once real cache lands. |
| 2 | 140.B SBOM Service wave | DOING (2025-11-28) | Sprint 0142 mostly complete: SBOM-SERVICE-21-001..004, SBOM-AIAI-31-001/002, SBOM-ORCH-32/33/34-001, SBOM-VULN-29-001/002 all DONE. Only SBOM-CONSOLE-23-001/002 remain BLOCKED. | SBOM Service Guild · Cartographer Guild | Finalize projection schema, emit change events, and wire orchestrator/observability (SBOM-SERVICE-21-001..004, SBOM-AIAI-31-001/002). |
| 3 | 140.C Signals wave | DOING (2025-11-28) | Sprint 0143: SIGNALS-24-001/002/003 DONE; SIGNALS-24-004/005 remain BLOCKED on CAS promotion. | Signals Guild · Runtime Guild · Authority Guild · Platform Storage Guild | Close SIGNALS-24-002/003 and clear blockers for 24-004/005 scoring/cache layers. |
| 4 | 140.D Zastava wave | DONE (2025-11-28) | Sprint 0144 (Zastava Runtime Signals) complete: all ZASTAVA-ENV/SECRETS/SURFACE tasks DONE. | Zastava Observer/Webhook Guilds · Surface Guild | Prepare env/secret helpers and admission hooks; start once cache endpoints and helpers are published. |
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-11-28 | Synced wave status with downstream sprints: 140.A Graph (DONE per Sprint 0141); 140.B SBOM (DOING, mostly complete per Sprint 0142); 140.C Signals (DOING, 3/5 done per Sprint 0143); 140.D Zastava (DONE per Sprint 0144). Updated Delivery Tracker and unblocked Sprint 0150 dependencies. | Implementer |
| 2025-11-20 | Completed PREP-140-D-ZASTAVA-WAVE-WAITING-ON-SURFACE-FS: published cache/env helper prep at `docs/modules/zastava/prep/2025-11-20-surface-fs-env-prep.md`; status set to DONE. | Implementer |
| 2025-11-20 | Marked SIGNALS-24-002/003 as BLOCKED pending Platform Storage + provenance approvals; linked CAS/provenance checklists in blockers. | Implementer |
| 2025-11-19 | Assigned PREP owners/dates; see Delivery Tracker. | Planning |
@@ -93,17 +94,17 @@ This file now only tracks the runtime & signals status snapshot. Active backlog
| Wave | Guild owners | Shared prerequisites | Status | Notes |
| --- | --- | --- | --- | --- |
| 140.A Graph | Graph Indexer Guild · Observability Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner (phase I tracked under `docs/implplan/SPRINT_130_scanner_surface.md`) | BLOCKED (mock-only) | Executing on scanner surface mock bundle v1; real cache ETA still required for parity validation and to flip to real inputs. |
| 140.B SbomService | SBOM Service Guild · Cartographer Guild · Observability Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner | PREP-SBOM-SERVICE-GUILD-CARTOGRAPHER-GUILD-OB | Prep note published 2025-11-22 at `docs/modules/sbomservice/prep/2025-11-22-prep-sbom-service-guild-cartographer-ob.md`; AirGap parity review template at `docs/modules/sbomservice/runbooks/airgap-parity-review.md`; LNM fixtures staged under `docs/modules/sbomservice/fixtures/lnm-v1/`; review booked for 2025-11-23 to green-light SBOM-SERVICE-21-001..004. |
| 140.C Signals | Signals Guild · Authority Guild (for scopes) · Runtime Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner | BLOCKED (red) | CAS checklist + provenance appendix overdue; callgraph retrieval live but artifacts not trusted until CAS/signing lands. |
| 140.D Zastava | Zastava Observer/Webhook Guilds · Security Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner | PREP-SBOM-SERVICE-GUILD-CARTOGRAPHER-GUILD-OB | Surface.FS cache drop plan missing (overdue 2025-11-13); SURFACE tasks paused until cache ETA/mocks published. |
| 140.A Graph | Graph Indexer Guild · Observability Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner (phase I tracked under `docs/implplan/SPRINT_130_scanner_surface.md`) | DONE (2025-11-28) | Sprint 0141 complete: GRAPH-INDEX-28-007..010 all DONE. |
| 140.B SbomService | SBOM Service Guild · Cartographer Guild · Observability Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner | DOING (2025-11-28) | Sprint 0142 mostly complete: SBOM-SERVICE-21-001..004, SBOM-AIAI-31-001/002, SBOM-ORCH-32/33/34-001, SBOM-VULN-29-001/002 DONE. SBOM-CONSOLE-23-001/002 remain BLOCKED. |
| 140.C Signals | Signals Guild · Authority Guild (for scopes) · Runtime Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner | DOING (2025-11-28) | Sprint 0143: SIGNALS-24-001/002/003 DONE; SIGNALS-24-004/005 remain BLOCKED on CAS promotion. |
| 140.D Zastava | Zastava Observer/Webhook Guilds · Security Guild | Sprint 120.A – AirGap; Sprint 130.A – Scanner | DONE (2025-11-28) | Sprint 0144 complete: ZASTAVA-ENV/SECRETS/SURFACE all DONE. |
# Status snapshot (2025-11-18)
# Status snapshot (2025-11-28)
- **140.A Graph** – BLOCKED on real cache delivery; running only on scanner surface mock bundle v1 pending cache ETA/hash.
- **140.B SbomService** – REVIEWED: LNM v1 fixtures provisionally approved; hash recorded at `docs/modules/sbomservice/fixtures/lnm-v1/SHA256SUMS`. Minutes: `docs/modules/sbomservice/reviews/2025-11-23-airgap-parity.md`. SBOM-SERVICE-21-001 is DOING; 21-002..004 next in sequence.
- **140.C Signals** – SIGNALS-24-001 shipped on 2025-11-09; SIGNALS-24-002 and SIGNALS-24-003 are BLOCKED with CAS promotion + provenance appendix pending. Scoring/cache work (SIGNALS-24-004/005) stays BLOCKED until CAS/provenance and runtime uploads stabilize.
- **140.D Zastava** – ZASTAVA-ENV/SECRETS/SURFACE tracks are BLOCKED because Surface.FS cache outputs from Scanner are still unavailable; guilds continue prepping Surface.Env helper adoption and sealed-mode scaffolding while caches are pending.
- **140.A Graph** – DONE. Sprint 0141 complete: GRAPH-INDEX-28-007..010 all shipped.
- **140.B SbomService** – DOING. Sprint 0142 mostly complete: SBOM-SERVICE-21-001..004, SBOM-AIAI-31-001/002, SBOM-ORCH-32/33/34-001, SBOM-VULN-29-001/002 all DONE. Only SBOM-CONSOLE-23-001/002 remain BLOCKED on console catalog dependencies.
- **140.C Signals** – DOING. Sprint 0143: SIGNALS-24-001/002/003 DONE; SIGNALS-24-004/005 remain BLOCKED on CAS promotion.
- **140.D Zastava** – DONE. Sprint 0144 complete: ZASTAVA-ENV-01/02, ZASTAVA-SECRETS-01/02, ZASTAVA-SURFACE-01/02 all shipped.
## Wave task tracker (refreshed 2025-11-18)
@@ -21,27 +21,42 @@
## Delivery Tracker
| # | Task ID | Status | Key dependency / next step | Owners | Task Definition |
| --- | --- | --- | --- | --- | --- |
| 1 | 150.A-Orchestrator | TODO | Wait for Sprint 0120.A/0130.A/0140.A readiness; verify Scanner surface artifacts | Orchestrator Service Guild · AirGap Policy/Controller Guilds · Observability Guild | Kick off orchestration scheduling/telemetry baseline for automation epic. |
| 1 | 150.A-Orchestrator | TODO | 0140.A (Graph) ✅ DONE, 0140.D (Zastava) ✅ DONE. Remaining blockers: 0120.A AirGap staleness + 0130.A Scanner surface | Orchestrator Service Guild · AirGap Policy/Controller Guilds · Observability Guild | Kick off orchestration scheduling/telemetry baseline for automation epic. |
| 2 | 150.B-PacksRegistry | TODO | 150.A must reach DOING; confirm tenancy scaffolding from Orchestrator | Packs Registry Guild · Exporter Guild · Security Guild | Packs registry automation stream staged; start after Orchestrator scaffolding. |
| 3 | 150.C-Scheduler | TODO | Hold until 0140.A Graph overlays and 0130.A Scanner surface green | Scheduler WebService/Worker Guilds · Findings Ledger Guild · Observability Guild | Scheduler impact index improvements gated on Graph overlays. |
| 3 | 150.C-Scheduler | TODO | 0140.A Graph ✅ DONE. Remaining blocker: 0130.A Scanner surface | Scheduler WebService/Worker Guilds · Findings Ledger Guild · Observability Guild | Scheduler impact index improvements gated on Graph overlays. |
| 4 | 150.D-TaskRunner | TODO | Requires Orchestrator/Scheduler telemetry baselines (150.A/150.C) | Task Runner Guild · AirGap Guilds · Evidence Locker Guild | Execution engine upgrades and evidence integration to start post-baselines. |
## Wave Coordination Snapshot
| Wave | Guild owners | Shared prerequisites | Status | Notes |
| --- | --- | --- | --- | --- |
| 150.A Orchestrator | Orchestrator Service Guild · AirGap Policy/Controller Guilds · Observability Guild | Sprint 0120.A – AirGap; Sprint 0130.A – Scanner; Sprint 0140.A – Graph | TODO | Pending confirmation that Scanner surface artifacts are ready; keep job telemetry work prepped for fast start. |
| 150.B PacksRegistry | Packs Registry Guild · Exporter Guild · Security Guild | Sprint 0120.A – AirGap; Sprint 0130.A – Scanner; Sprint 0140.A – Graph | TODO | Blocked on Orchestrator tenancy scaffolding; specs are ready once 150.A flips to DOING. |
| 150.C Scheduler | Scheduler WebService/Worker Guilds · Findings Ledger Guild · Observability Guild | Sprint 0120.A – AirGap; Sprint 0130.A – Scanner; Sprint 0140.A – Graph | TODO | Impact index improvements need Graph overlays; hold until 0140.A status improves. |
| 150.A Orchestrator | Orchestrator Service Guild · AirGap Policy/Controller Guilds · Observability Guild | Sprint 0120.A – AirGap; Sprint 0130.A – Scanner; Sprint 0140.A – Graph | TODO | Graph (0140.A) and Zastava (0140.D) now DONE. AirGap staleness (0120.A 56-002/57/58) and Scanner surface (0130.A) remain blockers. Approaching readiness. |
| 150.B PacksRegistry | Packs Registry Guild · Exporter Guild · Security Guild | Sprint 0120.A – AirGap; Sprint 0130.A – Scanner; Sprint 0140.A – Graph | TODO | Blocked on Orchestrator tenancy scaffolding; specs ready once 150.A flips to DOING. |
| 150.C Scheduler | Scheduler WebService/Worker Guilds · Findings Ledger Guild · Observability Guild | Sprint 0120.A – AirGap; Sprint 0130.A – Scanner; Sprint 0140.A – Graph | TODO | Graph overlays (0140.A) now DONE. Scheduler impact index work can proceed once Scanner surface (0130.A) clears. |
| 150.D TaskRunner | Task Runner Guild · AirGap Guilds · Evidence Locker Guild | Sprint 0120.A – AirGap; Sprint 0130.A – Scanner; Sprint 0140.A – Graph | TODO | Execution engine upgrades staged; start once Orchestrator/Scheduler telemetry baselines exist. |
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-11-28 | Synced with downstream sprints: Sprint 0141 (Graph) DONE, Sprint 0142 (SBOM) mostly DONE, Sprint 0143 (Signals) 3/5 DONE, Sprint 0144 (Zastava) DONE. Updated Sprint 0140 tracker and revised 150.* upstream dependency status. 150.A-Orchestrator may start once remaining AirGap/Scanner blockers clear. | Implementer |
| 2025-11-28 | Upstream dependency check: Sprint 0120 (Policy/Reasoning) has LEDGER-29-007/008, LEDGER-34-101, LEDGER-AIRGAP-56-001 DONE but 56-002/57-001/58-001/ATTEST-73-001 BLOCKED. Sprint 0140 (Runtime/Signals) has all waves BLOCKED except SBOM (TODO). No Sprint 0130.A file found. All 150.* tasks remain TODO pending upstream readiness. | Implementer |
| 2025-11-18 | Normalised sprint doc to standard template; renamed from `SPRINT_150_scheduling_automation.md`. | Planning |
## Upstream Dependency Status (as of 2025-11-28)
| Upstream Sprint | Key Deliverable | Status | Impact on 150.* |
| --- | --- | --- | --- |
| Sprint 0120.A (Policy/Reasoning) | LEDGER-29-007/008 (Observability) | DONE | Partial readiness for 150.A |
| Sprint 0120.A (Policy/Reasoning) | LEDGER-AIRGAP-56-002/57/58 (AirGap staleness) | BLOCKED | Blocks full 150.A readiness |
| Sprint 0130.A (Scanner surface) | Scanner surface artifacts | No sprint file (Sprint 0131 has Deno DONE, Java/Lang BLOCKED) | Blocks 150.A, 150.C verification |
| Sprint 0140.A (Graph overlays) | 140.A Graph wave | **DONE** (Sprint 0141 complete) | Unblocks 150.C Scheduler graph deps |
| Sprint 0140 (Runtime/Signals) | 140.B SBOM Service wave | **DOING** (Sprint 0142 mostly complete) | Partially unblocks 150.A/150.C |
| Sprint 0140 (Runtime/Signals) | 140.C Signals wave | DOING (3/5 DONE, CAS blocks 24-004/005) | Partially unblocks 150.A telemetry |
| Sprint 0140 (Runtime/Signals) | 140.D Zastava wave | **DONE** (Sprint 0144 complete) | Unblocks 150.A surface deps |
## Decisions & Risks
- All waves remain gated on upstream AirGap/Scanner readiness (the Graph gate cleared with 0140.A); no new intra-decade dependencies introduced.
- **Progress (2025-11-28):** Graph (0140.A) and Zastava (0140.D) waves now DONE; SBOM Service (0140.B) and Signals (0140.C) waves DOING. Main remaining blockers are 0120.A AirGap staleness tasks and 0130.A Scanner surface artifacts.
- 150.A Orchestrator and 150.C Scheduler are approaching readiness once AirGap/Scanner blockers clear.
- This sprint is a coordination snapshot only; implementation tasks continue in Sprint 151+ and should mirror status changes here to avoid drift.
- Sprint 0130.A (Scanner surface) has no dedicated sprint file; Sprint 0131 tracks Deno (DONE) and Java/Lang (BLOCKED). Coordinate with Scanner Guild to finalize.
## Next Checkpoints
- None scheduled; add next scheduling/automation sync once upstream readiness dates are confirmed.
@@ -26,7 +26,7 @@
| 3 | WEB-AIAI-31-003 | BLOCKED (2025-11-22) | Blocked by WEB-AIAI-31-002; telemetry targets depend on routing/batching contract. | BE-Base Platform Guild; Observability Guild | Telemetry + audit for advisory AI, guardrail block visibility. |
| 4 | WEB-AOC-19-002 | TODO | Depends on WEB-AOC-19-001; align DSSE/CMS helper APIs. | BE-Base Platform Guild | Ship `ProvenanceBuilder`, checksum utilities, signature verification helper with tests. |
| 5 | WEB-AOC-19-003 | TODO | Depends on WEB-AOC-19-002; confirm Roslyn analyzer rules. | QA Guild; BE-Base Platform Guild | Analyzer to prevent forbidden key writes; shared guard-validation fixtures. |
| 6 | WEB-CONSOLE-23-001 | TODO | Define stable `/console/dashboard` and `/console/filters` contract; ensures deterministic ordering + pagination. | BE-Base Platform Guild; Product Analytics Guild | Tenant-scoped aggregates for findings, VEX overrides, advisory deltas, run health, policy change log. |
| 6 | WEB-CONSOLE-23-001 | DONE (2025-11-28) | `/console/dashboard` and `/console/filters` endpoints implemented with tenant-scoped aggregates. | BE-Base Platform Guild; Product Analytics Guild | Tenant-scoped aggregates for findings, VEX overrides, advisory deltas, run health, policy change log. |
| 7 | CONSOLE-VULN-29-001 | BLOCKED (2025-11-19) | Blocked on WEB-CONSOLE-23-001 contract and Concelier graph schema freeze. | Console Guild; BE-Base Platform Guild | `/console/vuln/*` workspace endpoints with filters/reachability badges and DTOs once schemas stabilize. |
| 8 | CONSOLE-VEX-30-001 | BLOCKED (2025-11-19) | Blocked on WEB-CONSOLE-23-001 and Excititor console contract (SSE payload validation). | Console Guild; BE-Base Platform Guild | `/console/vex/events` SSE workspace with validated schemas and samples. |
| 9 | WEB-CONSOLE-23-002 | TODO | Depends on WEB-CONSOLE-23-001; design heartbeat/backoff + auth scopes. | BE-Base Platform Guild; Scheduler Guild | `/console/status` polling and `/console/runs/{id}/stream` SSE/WebSocket proxy with queue lag metrics. |
@@ -79,3 +79,4 @@
| 2025-11-22 | Synced `docs/implplan/tasks-all.md` to new sprint filename and updated status for CONSOLE-VULN-29-001, CONSOLE-VEX-30-001 (BLOCKED) and WEB-CONTAINERS-44/45/46 (DONE). | Planning |
| 2025-11-22 | Added completion dates in `tasks-all` for WEB-CONTAINERS-44/45/46 and aligned BLOCKED dates for VULN-29-001/VEX-30-001. | Planning |
| 2025-11-22 | Harmonized all `CONTAINERS-44/45/46` rows in `tasks-all` to DONE with dates to match sprint status. | Planning |
| 2025-11-28 | Completed WEB-CONSOLE-23-001: Implemented `/console/dashboard` and `/console/filters` endpoints in Authority module. Dashboard returns tenant-scoped aggregates (findings summary, VEX overrides, advisory deltas, run health, policy change log) with 30-day trend data. Filters endpoint returns deterministic filter categories with counts and cache-validation hash. Added 8 unit tests for dashboard/filters endpoints. Implementation in `src/Authority/StellaOps.Authority/StellaOps.Authority/Console/`. | Policy Guild |
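
The endpoints recorded above are aggregate, tenant-scoped reads. As a rough illustration only (route shapes, query binding, and aggregate fields below are assumptions, not the actual Authority Console contract), the dashboard/filters pair can be thought of as two deterministic GET handlers, with the filters payload hashed so clients can validate cached copies:

```csharp
// Illustrative sketch only; not the StellaOps.Authority Console implementation.
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;

var builder = WebApplication.CreateBuilder(args);
var app = builder.Build();

// Tenant-scoped dashboard aggregates (findings, VEX overrides, advisory deltas, run health).
app.MapGet("/console/dashboard", (string tenant) =>
{
    // Real handlers would run tenant-filtered queries; constants stand in here.
    var dashboard = new
    {
        tenant,
        findings = new { critical = 3, high = 12, medium = 40 },
        vexOverrides = 5,
        advisoryDeltas = 17,
        runHealth = new { succeeded = 98, failed = 2 },
        trendDays = 30
    };
    return Results.Ok(dashboard);
});

// Deterministic filter categories with counts plus a cache-validation hash.
app.MapGet("/console/filters", (string tenant) =>
{
    var categories = new SortedDictionary<string, int>   // sorted => stable ordering
    {
        ["severity:critical"] = 3,
        ["severity:high"] = 12,
        ["status:open"] = 48
    };
    var canonical = JsonSerializer.Serialize(categories);
    var hash = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(canonical)));
    return Results.Ok(new { tenant, categories, cacheValidationHash = hash });
});

app.Run();
```
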
@@ -1,6 +1,6 @@
# Sprint 124 - Policy & Reasoning
_Last updated: November 8, 2025. Implementation order is DOING → TODO → BLOCKED._
_Last updated: November 28, 2025. Implementation order is DOING → TODO → BLOCKED._
Focus areas below were split out of the previous combined sprint; execute sections in order unless noted.
@@ -18,8 +18,8 @@ Focus: Policy & Reasoning focus on Policy (phase II).
| 5 | POLICY-ENGINE-20-005 | DONE (2025-11-27) | Determinism guard implemented in `src/Policy/StellaOps.Policy.Engine/DeterminismGuard/` with static analyzer (`ProhibitedPatternAnalyzer`), runtime sandbox (`DeterminismGuardService`, `EvaluationScope`), and guarded evaluator integration (`GuardedPolicyEvaluator`) | Policy Guild, Security Engineering / src/Policy/StellaOps.Policy.Engine |
| 6 | POLICY-ENGINE-20-006 | DONE (2025-11-27) | Incremental orchestrator implemented in `src/Policy/StellaOps.Policy.Engine/IncrementalOrchestrator/` with `PolicyChangeEvent` models (advisory/VEX/SBOM change types), `IncrementalPolicyOrchestrator` (batching, deduplication, retry logic), and `IncrementalOrchestratorBackgroundService` (continuous processing, metrics) | Policy Guild, Scheduler Worker Guild / src/Policy/StellaOps.Policy.Engine |
| 7 | POLICY-ENGINE-20-007 | DONE (2025-11-27) | Structured traces implemented in `src/Policy/StellaOps.Policy.Engine/Telemetry/` with `RuleHitTrace.cs` (trace models, statistics), `RuleHitTraceCollector.cs` (sampling controls, exporters), and `ExplainTraceExport.cs` (JSON/NDJSON/Text/Markdown export formats) | Policy Guild, Observability Guild / src/Policy/StellaOps.Policy.Engine |
| 8 | POLICY-ENGINE-20-008 | TODO | Add unit/property/golden/perf suites covering policy compilation, evaluation correctness, determinism, and SLA targets (Deps: POLICY-ENGINE-20-007) | Policy Guild, QA Guild / src/Policy/StellaOps.Policy.Engine |
| 9 | POLICY-ENGINE-20-009 | TODO | Define Mongo schemas/indexes for `policies`, `policy_runs`, and `effective_finding_*`; implement migrations and tenant enforcement (Deps: POLICY-ENGINE-20-008) | Policy Guild, Storage Guild / src/Policy/StellaOps.Policy.Engine |
| 8 | POLICY-ENGINE-20-008 | DONE (2025-11-28) | Unit test suites added in `src/Policy/__Tests/StellaOps.Policy.Engine.Tests/` for DeterminismGuard, SelectionJoin, IncrementalOrchestrator, Materialization, and Telemetry components (99 tests passing) | Policy Guild, QA Guild / src/Policy/StellaOps.Policy.Engine |
| 9 | POLICY-ENGINE-20-009 | DONE (2025-11-28) | MongoDB schemas implemented in `src/Policy/StellaOps.Policy.Engine/Storage/Mongo/` with document classes (`PolicyDocuments.cs`, `PolicyRunDocument.cs`, `EffectiveFindingDocument.cs`, `PolicyAuditDocument.cs`), options (`PolicyEngineMongoOptions.cs`), context (`PolicyEngineMongoContext.cs`), migrations (`EnsurePolicyCollectionsMigration.cs`, `EnsurePolicyIndexesMigration.cs`, `EffectiveFindingCollectionInitializer.cs`), migration runner, and tenant enforcement (`TenantFilterBuilder.cs`) | Policy Guild, Storage Guild / src/Policy/StellaOps.Policy.Engine |
| 10 | POLICY-ENGINE-27-001 | TODO | Extend compile outputs to include rule coverage metadata, symbol table, inline documentation, and rule index for editor autocomplete; persist deterministic hashes (Deps: POLICY-ENGINE-20-009) | Policy Guild / src/Policy/StellaOps.Policy.Engine |
| 11 | POLICY-ENGINE-27-002 | TODO | Enhance simulate endpoints to emit rule firing counts, heatmap aggregates, sampled explain traces with deterministic ordering, and delta summaries for quick/batch sims (Deps: POLICY-ENGINE-27-001) | Policy Guild, Observability Guild / src/Policy/StellaOps.Policy.Engine |
| 12 | POLICY-ENGINE-29-001 | TODO | Implement batch evaluation endpoint (`POST /policy/eval/batch`) returning determinations + rationale chain for sets of `(artifact,purl,version,advisory)` tuples; support pagination and cost budgets (Deps: POLICY-ENGINE-27-004) | Policy Guild / src/Policy/StellaOps.Policy.Engine |
@@ -29,6 +29,9 @@ Focus: Policy & Reasoning focus on Policy (phase II).
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-11-28 | POLICY-ENGINE-20-009: Completed MongoDB storage layer - document schemas for policies, policy_revisions, policy_bundles, policy_runs, effective_finding_*, effective_finding_history_*, and policy_audit collections. Created `PolicyEngineMongoOptions.cs` (connection/collection configuration with TTL settings), `PolicyEngineMongoContext.cs` (database access with read/write concerns), migration infrastructure (`IPolicyEngineMongoMigration`, `PolicyEngineMigrationRunner`, `PolicyEngineMongoInitializer`), `EnsurePolicyCollectionsMigration.cs` (creates base collections), `EnsurePolicyIndexesMigration.cs` (indexes for policies, revisions, bundles, runs, audit), `EffectiveFindingCollectionInitializer.cs` (dynamic per-policy collection creation with indexes), `TenantFilterBuilder.cs` (tenant enforcement utilities), and `ServiceCollectionExtensions.cs` (DI registration). Status → DONE. | Implementer |
| 2025-11-28 | POLICY-ENGINE-20-008: Completed unit test suites - `DeterminismGuardTests.cs` (static analyzer, runtime sandbox, guarded evaluator), `SelectionJoinTests.cs` (PURL equivalence, tuple resolution, VEX overlay), `IncrementalOrchestratorTests.cs` (event processing, deduplication, priority batching), `MaterializationTests.cs` (deterministic IDs, content hashing), `TelemetryTests.cs` (trace factory, statistics, sampling). 99 tests passing. Status → DONE. | Implementer |
| 2025-11-27 | POLICY-ENGINE-20-007: Completed structured traces - `RuleHitTrace.cs` (trace models, factory, statistics aggregation), `RuleHitTraceCollector.cs` (sampling controls with VEX/severity-aware rates, incident mode, exporters), `ExplainTraceExport.cs` (JSON/NDJSON/Text/Markdown formats, builder pattern). Status → DONE. | Implementer |
| 2025-11-27 | POLICY-ENGINE-20-006: Completed incremental orchestrator - `PolicyChangeEvent.cs` (change event models with factory for advisory/VEX/SBOM changes, deterministic content hashing, batching), `IncrementalPolicyOrchestrator.cs` (event processing with idempotency, retry logic, priority-based batching), `IncrementalOrchestratorBackgroundService.cs` (continuous processing with metrics). Status → DONE. | Implementer |
| 2025-11-27 | POLICY-ENGINE-20-005: Completed determinism guard - `DeterminismViolation.cs` (violation models/options), `ProhibitedPatternAnalyzer.cs` (static analysis with regex patterns for DateTime.Now, Random, Guid.NewGuid, HttpClient, File.Read, etc.), `DeterminismGuardService.cs` (runtime sandbox with EvaluationScope, DeterministicTimeProvider), `GuardedPolicyEvaluator.cs` (integration layer). Status → DONE. | Implementer |
| 2025-11-27 | POLICY-ENGINE-20-004: Completed materialization writer - `EffectiveFindingModels.cs` (document schema), `EffectiveFindingWriter.cs` (upsert + append-only history). Tenant-scoped collections, trace references, content hash deduplication. Status → DONE. | Implementer |
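
The 20-004 entry above pairs an upsert of the current effective finding with an append-only history record, deduplicated by content hash. A minimal sketch of that pattern follows; the document shape, collection names, and hash inputs are assumptions for illustration, not the actual `EffectiveFindingWriter`:

```csharp
// Hedged sketch of the upsert + append-only-history pattern described in the
// POLICY-ENGINE-20-004 entry above. Document shape, collection names, and hash
// inputs are illustrative assumptions, not the actual EffectiveFindingWriter.
using System;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;

public sealed record EffectiveFinding(string Id, string Tenant, string FindingKey, string Verdict, string ContentHash);

public sealed class EffectiveFindingWriterSketch
{
    private readonly IMongoCollection<EffectiveFinding> _current;
    private readonly IMongoCollection<EffectiveFinding> _history;

    public EffectiveFindingWriterSketch(IMongoDatabase db, string tenant)
    {
        // Tenant-scoped collections, echoing the effective_finding_* naming above.
        _current = db.GetCollection<EffectiveFinding>($"effective_finding_{tenant}");
        _history = db.GetCollection<EffectiveFinding>($"effective_finding_history_{tenant}");
    }

    public static string ComputeContentHash(string findingKey, string verdict)
    {
        // Deterministic digest over a canonical payload; stable field order matters.
        var canonical = JsonSerializer.Serialize(new { findingKey, verdict });
        return Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(canonical)));
    }

    public async Task UpsertAsync(EffectiveFinding candidate, CancellationToken ct)
    {
        var filter = Builders<EffectiveFinding>.Filter.Eq(f => f.FindingKey, candidate.FindingKey);
        var existing = await _current.Find(filter).FirstOrDefaultAsync(ct);

        // Content-hash deduplication: skip the write when nothing materially changed.
        if (existing is not null && existing.ContentHash == candidate.ContentHash)
        {
            return;
        }

        await _current.ReplaceOneAsync(filter, candidate, new ReplaceOptions { IsUpsert = true }, ct);

        // Append-only history entry keyed deterministically by finding + content hash.
        var historyEntry = candidate with { Id = $"{candidate.FindingKey}:{candidate.ContentHash}" };
        await _history.InsertOneAsync(historyEntry, cancellationToken: ct);
    }
}
```
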
@@ -12,16 +12,16 @@ Focus: Policy & Reasoning focus on Policy (phase IV).
| --- | --- | --- | --- | --- |
| 1 | POLICY-ENGINE-40-003 | DONE | Provide API/SDK utilities for consumers (Web Scanner, Graph Explorer) to request policy decisions with source evidence summaries (top severity sources, conflict counts) (Deps: POLICY-ENGINE-40-002) | Policy Guild, Web Scanner Guild / src/Policy/StellaOps.Policy.Engine |
| 2 | POLICY-ENGINE-50-001 | DONE | Implement SPL compiler: validate YAML, canonicalize, produce signed bundle, store artifact in object storage, write `policy_revisions` with AOC metadata (Deps: POLICY-ENGINE-40-003) | Policy Guild, Platform Security / src/Policy/StellaOps.Policy.Engine |
| 3 | POLICY-ENGINE-50-002 | TODO | Build runtime evaluator executing compiled plans over advisory/vex linksets + SBOM asset metadata with deterministic caching (Redis) and fallback path (Deps: POLICY-ENGINE-50-001) | Policy Guild, Runtime Guild / src/Policy/StellaOps.Policy.Engine |
| 4 | POLICY-ENGINE-50-003 | TODO | Implement evaluation/compilation metrics, tracing, and structured logs (`policy_eval_seconds`, `policy_compiles_total`, explanation sampling) (Deps: POLICY-ENGINE-50-002) | Policy Guild, Observability Guild / src/Policy/StellaOps.Policy.Engine |
| 5 | POLICY-ENGINE-50-004 | TODO | Build event pipeline: subscribe to linkset/SBOM updates, schedule re-eval jobs, emit `policy.effective.updated` events with diff metadata (Deps: POLICY-ENGINE-50-003) | Policy Guild, Platform Events Guild / src/Policy/StellaOps.Policy.Engine |
| 6 | POLICY-ENGINE-50-005 | TODO | Design and implement `policy_packs`, `policy_revisions`, `policy_runs`, `policy_artifacts` collections with indexes, TTL, and tenant scoping (Deps: POLICY-ENGINE-50-004) | Policy Guild, Storage Guild / src/Policy/StellaOps.Policy.Engine |
| 7 | POLICY-ENGINE-50-006 | TODO | Implement explainer persistence + retrieval APIs linking decisions to explanation tree and AOC chain (Deps: POLICY-ENGINE-50-005) | Policy Guild, QA Guild / src/Policy/StellaOps.Policy.Engine |
| 8 | POLICY-ENGINE-50-007 | TODO | Provide evaluation worker host/DI wiring and job orchestration hooks for batch re-evaluations after policy activation (Deps: POLICY-ENGINE-50-006) | Policy Guild, Scheduler Worker Guild / src/Policy/StellaOps.Policy.Engine |
| 9 | POLICY-ENGINE-60-001 | TODO | Maintain Redis effective decision maps per asset/snapshot for Graph overlays; implement versioning and eviction strategy (Deps: POLICY-ENGINE-50-007) | Policy Guild, SBOM Service Guild / src/Policy/StellaOps.Policy.Engine |
| 10 | POLICY-ENGINE-60-002 | TODO | Expose simulation bridge for Graph What-if APIs, supporting hypothetical SBOM diffs and draft policies without persisting results (Deps: POLICY-ENGINE-60-001) | Policy Guild, BE-Base Platform Guild / src/Policy/StellaOps.Policy.Engine |
| 11 | POLICY-ENGINE-70-002 | TODO | Design and create Mongo collections (`exceptions`, `exception_reviews`, `exception_bindings`) with indexes and migrations; expose repository APIs (Deps: POLICY-ENGINE-60-002) | Policy Guild, Storage Guild / src/Policy/StellaOps.Policy.Engine |
| 12 | POLICY-ENGINE-70-003 | TODO | Build Redis exception decision cache (`exceptions_effective_map`) with warm/invalidation logic reacting to `exception.*` events (Deps: POLICY-ENGINE-70-002) | Policy Guild, Runtime Guild / src/Policy/StellaOps.Policy.Engine |
| 3 | POLICY-ENGINE-50-002 | DONE | Build runtime evaluator executing compiled plans over advisory/vex linksets + SBOM asset metadata with deterministic caching (Redis) and fallback path (Deps: POLICY-ENGINE-50-001); a cache-key sketch follows this table | Policy Guild, Runtime Guild / src/Policy/StellaOps.Policy.Engine |
| 4 | POLICY-ENGINE-50-003 | DONE | Implement evaluation/compilation metrics, tracing, and structured logs (`policy_eval_seconds`, `policy_compiles_total`, explanation sampling) (Deps: POLICY-ENGINE-50-002) | Policy Guild, Observability Guild / src/Policy/StellaOps.Policy.Engine |
| 5 | POLICY-ENGINE-50-004 | DONE | Build event pipeline: subscribe to linkset/SBOM updates, schedule re-eval jobs, emit `policy.effective.updated` events with diff metadata (Deps: POLICY-ENGINE-50-003) | Policy Guild, Platform Events Guild / src/Policy/StellaOps.Policy.Engine |
| 6 | POLICY-ENGINE-50-005 | DONE | Design and implement `policy_packs`, `policy_revisions`, `policy_runs`, `policy_artifacts` collections with indexes, TTL, and tenant scoping (Deps: POLICY-ENGINE-50-004) | Policy Guild, Storage Guild / src/Policy/StellaOps.Policy.Engine |
| 7 | POLICY-ENGINE-50-006 | DONE | Implement explainer persistence + retrieval APIs linking decisions to explanation tree and AOC chain (Deps: POLICY-ENGINE-50-005) | Policy Guild, QA Guild / src/Policy/StellaOps.Policy.Engine |
| 8 | POLICY-ENGINE-50-007 | DONE | Provide evaluation worker host/DI wiring and job orchestration hooks for batch re-evaluations after policy activation (Deps: POLICY-ENGINE-50-006) | Policy Guild, Scheduler Worker Guild / src/Policy/StellaOps.Policy.Engine |
| 9 | POLICY-ENGINE-60-001 | DONE | Maintain Redis effective decision maps per asset/snapshot for Graph overlays; implement versioning and eviction strategy (Deps: POLICY-ENGINE-50-007) | Policy Guild, SBOM Service Guild / src/Policy/StellaOps.Policy.Engine |
| 10 | POLICY-ENGINE-60-002 | DONE | Expose simulation bridge for Graph What-if APIs, supporting hypothetical SBOM diffs and draft policies without persisting results (Deps: POLICY-ENGINE-60-001) | Policy Guild, BE-Base Platform Guild / src/Policy/StellaOps.Policy.Engine |
| 11 | POLICY-ENGINE-70-002 | DONE | Design and create Mongo collections (`exceptions`, `exception_reviews`, `exception_bindings`) with indexes and migrations; expose repository APIs (Deps: POLICY-ENGINE-60-002) | Policy Guild, Storage Guild / src/Policy/StellaOps.Policy.Engine |
| 12 | POLICY-ENGINE-70-003 | DONE | Build Redis exception decision cache (`exceptions_effective_map`) with warm/invalidation logic reacting to `exception.*` events (Deps: POLICY-ENGINE-70-002) | Policy Guild, Runtime Guild / src/Policy/StellaOps.Policy.Engine |
| 13 | POLICY-ENGINE-70-004 | TODO | Extend metrics/tracing/logging for exception application (latency, counts, expiring events) and include AOC references in logs (Deps: POLICY-ENGINE-70-003) | Policy Guild, Observability Guild / src/Policy/StellaOps.Policy.Engine |
| 14 | POLICY-ENGINE-70-005 | TODO | Provide APIs/workers hook for exception activation/expiry (auto start/end) and event emission (`exception.activated/expired`) (Deps: POLICY-ENGINE-70-004) | Policy Guild, Scheduler Worker Guild / src/Policy/StellaOps.Policy.Engine |
| 15 | POLICY-ENGINE-80-001 | TODO | Integrate reachability/exploitability inputs into evaluation pipeline (state/score/confidence) with caching and explain support (Deps: POLICY-ENGINE-70-005) | Policy Guild, Signals Guild / src/Policy/StellaOps.Policy.Engine |
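
The deterministic Redis caching called out in row 3 above hinges on cache keys computed purely from content digests, never from wall-clock state. A minimal sketch of that idea; the digest inputs and key prefix are assumptions for illustration, not the actual `IPolicyEvaluationCache` contract:

```csharp
// Hedged sketch of a deterministic evaluation cache key: identical policy, subject,
// and context digests always map to the same key, so cached verdicts can be reused
// safely across processes and replays. Inputs and key prefix are assumptions.
using System;
using System.Security.Cryptography;
using System.Text;

public static class PolicyEvaluationCacheKeySketch
{
    public static string Compute(string policyDigest, string subjectDigest, string contextDigest)
    {
        // Fixed ordering and separator keep the key stable for the same logical inputs.
        var canonical = $"{policyDigest}\n{subjectDigest}\n{contextDigest}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
        return $"policy-eval:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}

// Example: a TTL cache keyed this way can never return a verdict computed for a
// different policy revision, subject, or evaluation context.
// var key = PolicyEvaluationCacheKeySketch.Compute(policySha256, subjectSha256, contextSha256);
```
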
@@ -48,3 +48,13 @@ Focus: Policy & Reasoning focus on Policy (phase IV).
| 2025-11-27 | Started POLICY-ENGINE-40-003; implemented PolicyDecisionService, PolicyDecisionEndpoint, PolicyDecisionModels, tests. Blocked by pre-existing build issues in Telemetry.Core and RiskProfile projects. | Implementer |
| 2025-11-27 | Fixed pre-existing build issues (TelemetryContext API mismatch, JsonSchema.Net v5 API changes, OpenTelemetry Meter API changes, test project missing packages/namespaces). All 9 PolicyDecisionServiceTests pass. POLICY-ENGINE-40-003 marked DONE. | Implementer |
| 2025-11-27 | Implemented POLICY-ENGINE-50-001: Extended SPL compiler with AOC metadata support. Added PolicyAocMetadata, PolicyProvenance, PolicyAttestationRef models. Updated PolicyBundleService to capture compilation metadata, source/artifact digests, complexity metrics, provenance info. Added 4 new tests (all pass). POLICY-ENGINE-50-001 marked DONE. | Implementer |
| 2025-11-28 | Implemented POLICY-ENGINE-50-002: Built runtime evaluator with deterministic caching. Created `IPolicyEvaluationCache` interface, `InMemoryPolicyEvaluationCache` implementation with TTL/eviction, `PolicyRuntimeEvaluationService` with batch evaluation support, cache key generation using SHA256 digests (policy, subject, context). Extended `PolicyBundleRecord` to store compiled `PolicyIrDocument`. Added 8 tests (all pass). POLICY-ENGINE-50-002 marked DONE. | Implementer |
| 2025-11-28 | Implemented POLICY-ENGINE-50-003: Integrated telemetry into PolicyCompilationService and PolicyRuntimeEvaluationService. Added OpenTelemetry Activity tracing for `policy.compile`, `policy.evaluate`, and `policy.evaluate_batch` operations. Integrated existing metrics (RecordCompilation, RecordEvaluation, RecordEvaluationLatency, RecordRuleFired, RecordError, RecordEvaluationFailure). Added structured logging with context (duration, rule counts, complexity, cache hits). All 23 core tests pass. POLICY-ENGINE-50-003 marked DONE. | Implementer |
| 2025-11-28 | Implemented POLICY-ENGINE-50-004: Built event pipeline for linkset/SBOM updates. Created `PolicyEffectiveEventModels.cs` with event types (`policy.effective.updated`, `policy.effective.added`, `policy.effective.removed`, `policy.effective.batch_completed`), `PolicyDecisionDiff` for diff metadata. Created `PolicyEventProcessor.cs` with `IPolicyEffectiveEventPublisher`, `IReEvaluationJobScheduler` interfaces. Processor handles PolicyChangeEvents, schedules re-evaluation jobs, and emits effective events with diffs. Added 3 new telemetry counters. Build succeeds. POLICY-ENGINE-50-004 marked DONE. | Implementer |
| 2025-11-28 | Implemented POLICY-ENGINE-50-005: MongoDB collections with tenant scoping and indexes. Pre-existing infrastructure includes: `PolicyDocument`, `PolicyRevisionDocument`, `PolicyBundleDocument`, `PolicyRunDocument` classes in Documents folder; `EnsurePolicyIndexesMigration` with TTL indexes for policy_runs collection; `PolicyEngineMongoOptions` for configuration. Created `MongoPolicyPackRepository.cs` implementing `IPolicyPackRepository` with tenant-scoped CRUD operations for policy packs, revisions, bundles; approval workflow; activation tracking. Fixed pre-existing bug in `PolicyMetadataExtractor.cs` (string comparisons for enum operators). All 11 core tests pass. POLICY-ENGINE-50-005 marked DONE. | Implementer |
| 2025-11-28 | Implemented POLICY-ENGINE-50-006: Explainer persistence and retrieval APIs with AOC chain linkage. Created `PolicyExplainDocument.cs` with MongoDB documents for explain traces including `ExplainInputContextDocument`, `ExplainRuleStepDocument`, `ExplainVexEvidenceDocument`, `ExplainStatisticsDocument`, `ExplainAocChainDocument`. Created `PolicyExplainerService.cs` with `IExplainTraceRepository` interface, `StoredExplainTrace`/`ExplainAocChain` records, `ExplainQueryOptions` for filtering/pagination, `AocChainValidationResult` for verifying attestation chain integrity. Service links explain traces to policy bundle AOC metadata (compilation ID, source/artifact digests, attestation references). Added `policy_explain_traces_stored_total` telemetry counter. Added `PolicyExplainsCollection` and `ExplainTraceRetention` to options. Added indexes for `policy_explains` collection (tenant_runId, tenant_policy_evaluatedAt_desc, tenant_subjectHash, aocChain_compilationId, expiresAt_ttl). All 11 core tests pass. POLICY-ENGINE-50-006 marked DONE. | Implementer |
| 2025-11-28 | Implemented POLICY-ENGINE-50-007: Evaluation worker host and DI wiring for job orchestration. Created `PolicyEvaluationWorkerService.cs` integrating with existing `PolicyEventProcessor.DequeueJob()` for job scheduling, with `EvaluationJobResult` record for tracking job outcomes. Created `PolicyEvaluationWorkerHost.cs` as BackgroundService with configurable concurrency from `PolicyEngineWorkerOptions`. Created `PolicyEngineServiceCollectionExtensions.cs` with `AddPolicyEngineCore()`, `AddPolicyEngineEventPipeline()`, `AddPolicyEngineWorker()`, `AddPolicyEngineExplainer()`, and combined `AddPolicyEngine()` extension methods. Worker integrates with existing `IPolicyEffectiveEventPublisher` and `IReEvaluationJobScheduler` interfaces. Added `ScheduleActivationReEvalAsync()` hook for triggering re-evaluations after policy activation. All 182 tests pass. POLICY-ENGINE-50-007 marked DONE. | Implementer |
| 2025-11-28 | Implemented POLICY-ENGINE-60-001: Redis effective decision maps for Graph overlays. Added StackExchange.Redis package. Created `EffectiveDecisionMap/EffectiveDecisionModels.cs` with `EffectiveDecisionEntry`, `EffectiveDecisionQueryResult`, `EffectiveDecisionSummary`, `EffectiveDecisionFilter` records for storing/querying policy decisions per asset/snapshot. Created `EffectiveDecisionMap/IEffectiveDecisionMap.cs` interface with Set/Get/Query/Invalidate operations plus versioning (`GetVersionAsync`, `IncrementVersionAsync`). Created `EffectiveDecisionMap/RedisEffectiveDecisionMap.cs` with TTL-based eviction using Redis key structure `stellaops:edm:{tenant}:{snapshot}:e:{asset}` for entries, `:idx` sorted sets for indexing, `:v` for version counters. Added `EffectiveDecisionMapOptions` to `PolicyEngineOptions`. Added `policy_effective_decision_map_operations_total` telemetry counter. Added `AddEffectiveDecisionMap()` and `AddPolicyEngineRedis()` DI extensions. All 182 tests pass. POLICY-ENGINE-60-001 marked DONE. | Implementer |
| 2025-11-28 | Implemented POLICY-ENGINE-60-002: What-If simulation bridge for Graph APIs. Created `WhatIfSimulation/WhatIfSimulationModels.cs` with comprehensive request/response models (`WhatIfSimulationRequest`, `WhatIfSimulationResponse`, `WhatIfDraftPolicy`, `WhatIfSbomDiff`, `WhatIfDecisionChange`, `WhatIfDecision`, `WhatIfExplanation`, `WhatIfSummary`, `WhatIfImpact`, `WhatIfPolicyRef`). Created `WhatIfSimulation/WhatIfSimulationService.cs` supporting: hypothetical SBOM diffs (add/remove/upgrade/downgrade operations), draft policy comparison, baseline decision lookup from effective decision map, simulated decision computation considering VEX status and reachability, change detection and diff computation, impact assessment with risk delta recommendations. Service integrates with `IEffectiveDecisionMap` for baseline lookups, `IPolicyPackRepository` for policy retrieval, `PolicyCompilationService` for potential on-the-fly compilation. Added `AddWhatIfSimulation()` DI extension. Telemetry via existing `RecordSimulation()` counter. All 181 core tests pass. POLICY-ENGINE-60-002 marked DONE. | Implementer |
| 2025-11-28 | Implemented POLICY-ENGINE-70-002: MongoDB collections for policy exceptions with indexes and repository APIs. Created `Storage/Mongo/Documents/PolicyExceptionDocuments.cs` with `PolicyExceptionDocument` (exceptions with scope, risk assessment, compensating controls, workflow states), `ExceptionScopeDocument` (advisory/CVE/PURL/asset targeting), `ExceptionRiskAssessmentDocument` (risk levels, justification), `ExceptionReviewDocument` (multi-reviewer approval workflow), `ReviewDecisionDocument` (individual decisions with conditions), `ExceptionBindingDocument` (asset-specific bindings with time ranges). Created `Storage/Mongo/Repositories/IExceptionRepository.cs` interface with CRUD operations for exceptions, reviews, and bindings; query options for filtering/pagination; methods for finding applicable exceptions, pending activations, expiring exceptions. Created `Storage/Mongo/Repositories/MongoExceptionRepository.cs` MongoDB implementation with tenant scoping. Added collection names to `PolicyEngineMongoOptions` (exceptions, exception_reviews, exception_bindings). Created `Storage/Mongo/Migrations/EnsureExceptionIndexesMigration.cs` with comprehensive indexes: tenant+status, tenant+type+status, tenant+created, tenant+tags, scope.advisoryIds, scope.assetIds, scope.cveIds, expiry tracking, reviewer queues, binding lookups. Added `policy_exception_operations_total` telemetry counter with `RecordExceptionOperation()` method. Registered migration and repository in `ServiceCollectionExtensions`. All 196 core tests pass. POLICY-ENGINE-70-002 marked DONE. | Implementer |
| 2025-11-28 | Implemented POLICY-ENGINE-70-003: Redis exception decision cache with warm/invalidation logic. Created `ExceptionCache/ExceptionCacheModels.cs` with `ExceptionCacheEntry` (cached exception for fast lookup with priority, decision override, expiry), `ExceptionCacheQueryResult` (query results with cache metadata), `ExceptionCacheSummary` (tenant summary with counts by type/decision), `ExceptionCacheOptions` (TTL, auto-warm, max entries), `ExceptionCacheStats` (hit/miss counts, memory usage). Created `ExceptionCache/IExceptionEffectiveCache.cs` interface with `GetForAssetAsync`, `GetBatchAsync`, `SetAsync`, `SetBatchAsync`, `InvalidateExceptionAsync`, `InvalidateAssetAsync`, `InvalidateTenantAsync`, `WarmAsync`, `HandleExceptionEventAsync` for event-driven invalidation; `ExceptionEvent` record for exception lifecycle events (activated, expired, revoked, updated, created, deleted). Created `ExceptionCache/RedisExceptionEffectiveCache.cs` Redis implementation with key structure: `stellaops:exc:{tenant}:a:{asset}:{advisory}` for asset entries, `stellaops:exc:{tenant}:idx:e:{exceptionId}` for exception-to-asset index, `stellaops:exc:{tenant}:v` for version counter. Warm logic loads from `IExceptionRepository` for active/pending exceptions. Invalidation reacts to exception events. Added `ExceptionCacheOptions` to `PolicyEngineOptions`. Added `policy_exception_cache_operations_total` telemetry counter with `RecordExceptionCacheOperation()` method. Added `AddExceptionEffectiveCache()` DI extension. All 197 core tests pass. POLICY-ENGINE-70-003 marked DONE. | Implementer |
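
The 70-003 entry above fixes a concrete Redis key layout: per-asset entries, an exception-to-asset index for targeted purges, and a tenant version counter. A minimal StackExchange.Redis sketch of that layout and its invalidation path; the class shape and method names are simplified assumptions, not the real `RedisExceptionEffectiveCache`:

```csharp
// Hedged sketch of the Redis layout named in the 70-003 entry above. Only the key
// structure mirrors the entry; everything else is simplified for illustration.
using System;
using System.Threading.Tasks;
using StackExchange.Redis;

public sealed class ExceptionEffectiveCacheSketch
{
    private readonly IDatabase _db;

    public ExceptionEffectiveCacheSketch(IConnectionMultiplexer redis) => _db = redis.GetDatabase();

    private static string AssetKey(string tenant, string asset, string advisory) =>
        $"stellaops:exc:{tenant}:a:{asset}:{advisory}";

    private static string ExceptionIndexKey(string tenant, string exceptionId) =>
        $"stellaops:exc:{tenant}:idx:e:{exceptionId}";

    private static string VersionKey(string tenant) => $"stellaops:exc:{tenant}:v";

    public async Task SetAsync(string tenant, string exceptionId, string asset, string advisory, string entryJson, TimeSpan ttl)
    {
        var assetKey = AssetKey(tenant, asset, advisory);
        await _db.StringSetAsync(assetKey, entryJson, ttl);

        // Index the asset key under its exception so revocation/expiry can purge precisely.
        await _db.SetAddAsync(ExceptionIndexKey(tenant, exceptionId), assetKey);
    }

    public async Task InvalidateExceptionAsync(string tenant, string exceptionId)
    {
        var indexKey = ExceptionIndexKey(tenant, exceptionId);
        foreach (var member in await _db.SetMembersAsync(indexKey))
        {
            await _db.KeyDeleteAsync(member.ToString());
        }
        await _db.KeyDeleteAsync(indexKey);

        // Bump the tenant version so readers holding older snapshots can detect the change.
        await _db.StringIncrementAsync(VersionKey(tenant));
    }
}
```
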
@@ -17,9 +17,9 @@ Dependency: Sprint 133 - 4. Scanner.IV — Scanner & Surface focus on Scanner (p
| `SCANNER-ANALYZERS-PYTHON-23-003` | DONE | Static import graph builder using AST and bytecode fallback. Support `import`, `from ... import`, relative imports, `importlib.import_module`, `__import__` with literal args, `pkgutil.extend_path`. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-002 |
| `SCANNER-ANALYZERS-PYTHON-23-004` | DONE | Python resolver engine (importlib semantics) handling namespace packages (PEP 420), package discovery order, `.pth` files, `sys.path` composition, zipimport, and site-packages precedence across virtualenv/container roots. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-003 |
| `SCANNER-ANALYZERS-PYTHON-23-005` | DONE | Packaging adapters: pip editable (`.egg-link`), Poetry/Flit layout, Conda prefix, `.dist-info/RECORD` cross-check, container layer overlays. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-004 |
| `SCANNER-ANALYZERS-PYTHON-23-006` | TODO | Detect native extensions (`*.so`, `*.pyd`), CFFI modules, ctypes loaders, embedded WASM, and runtime capability signals (subprocess, multiprocessing, ctypes, eval). | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-005 |
| `SCANNER-ANALYZERS-PYTHON-23-007` | TODO | Framework/config heuristics: Django, Flask, FastAPI, Celery, AWS Lambda handlers, Gunicorn, Click/Typer CLIs, logging configs, pyproject optional dependencies. Tagged as hints only. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-006 |
| `SCANNER-ANALYZERS-PYTHON-23-008` | TODO | Produce AOC-compliant observations: entrypoints, components (modules/packages/native), edges (import, namespace, dynamic-hint, native-extension) with reason codes/confidence and resolver traces. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-007 |
| `SCANNER-ANALYZERS-PYTHON-23-009` | TODO | Fixture suite + perf benchmarks covering virtualenv, namespace packages, zipapp, editable installs, containers, lambda handler. | Python Analyzer Guild, QA Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-008 |
| `SCANNER-ANALYZERS-PYTHON-23-010` | TODO | Optional runtime evidence: import hook capturing module load events with path scrubbing, optional bytecode instrumentation for `importlib` hooks, multiprocessing tracer. | Python Analyzer Guild, Signals Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-009 |
| `SCANNER-ANALYZERS-PYTHON-23-011` | TODO | Package analyzer plug-in, add CLI commands (`stella python inspect`), refresh Offline Kit documentation. | Python Analyzer Guild, DevOps Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-010 |
| `SCANNER-ANALYZERS-PYTHON-23-006` | DONE | Detect native extensions (`*.so`, `*.pyd`), CFFI modules, ctypes loaders, embedded WASM, and runtime capability signals (subprocess, multiprocessing, ctypes, eval); a scan sketch follows this table. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-005 |
| `SCANNER-ANALYZERS-PYTHON-23-007` | DONE | Framework/config heuristics: Django, Flask, FastAPI, Celery, AWS Lambda handlers, Gunicorn, Click/Typer CLIs, logging configs, pyproject optional dependencies. Tagged as hints only. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-006 |
| `SCANNER-ANALYZERS-PYTHON-23-008` | DONE | Produce AOC-compliant observations: entrypoints, components (modules/packages/native), edges (import, namespace, dynamic-hint, native-extension) with reason codes/confidence and resolver traces. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-007 |
| `SCANNER-ANALYZERS-PYTHON-23-009` | DONE | Fixture suite + perf benchmarks covering virtualenv, namespace packages, zipapp, editable installs, containers, lambda handler. | Python Analyzer Guild, QA Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-008 |
| `SCANNER-ANALYZERS-PYTHON-23-010` | DONE | Optional runtime evidence: import hook capturing module load events with path scrubbing, optional bytecode instrumentation for `importlib` hooks, multiprocessing tracer. | Python Analyzer Guild, Signals Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-009 |
| `SCANNER-ANALYZERS-PYTHON-23-011` | DONE | Package analyzer plug-in, add CLI commands (`stella python inspect`), refresh Offline Kit documentation. | Python Analyzer Guild, DevOps Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-010 |
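
The 23-006 row above is essentially a file-system and source scan. A rough C# sketch of the shape of such a scan; the patterns, hint list, and return shape are illustrative assumptions, and the real analyzer emits structured observations rather than path lists:

```csharp
// Hedged sketch of the 23-006 checks: flag native-extension artefacts and grep-style
// capability hints in Python sources. All names here are illustrative assumptions.
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.RegularExpressions;

public static class PythonNativeExtensionScanSketch
{
    private static readonly string[] NativeExtensionPatterns = { "*.so", "*.pyd" };

    private static readonly Regex CapabilityHint =
        new(@"\b(subprocess|multiprocessing|ctypes|eval)\b", RegexOptions.Compiled);

    public static (IReadOnlyList<string> NativeExtensions, IReadOnlyList<string> CapabilityHits) Scan(string rootPath)
    {
        var nativeExtensions = new List<string>();
        foreach (var pattern in NativeExtensionPatterns)
        {
            nativeExtensions.AddRange(Directory.EnumerateFiles(rootPath, pattern, SearchOption.AllDirectories));
        }

        var capabilityHits = new List<string>();
        foreach (var pyFile in Directory.EnumerateFiles(rootPath, "*.py", SearchOption.AllDirectories))
        {
            if (CapabilityHint.IsMatch(File.ReadAllText(pyFile)))
            {
                capabilityHits.Add(pyFile);   // recorded as a hint, not a verdict
            }
        }

        // Deterministic ordering so repeated scans produce identical observations.
        nativeExtensions.Sort(StringComparer.Ordinal);
        capabilityHits.Sort(StringComparer.Ordinal);
        return (nativeExtensions, capabilityHits);
    }
}
```
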
@@ -7,7 +7,7 @@ Dependency: Sprint 134 - 5. Scanner.V — Scanner & Surface focus on Scanner (ph
| Task ID | State | Summary | Owner / Source | Depends On |
| --- | --- | --- | --- | --- |
| `SCANNER-ANALYZERS-PYTHON-23-012` | TODO | Container/zipapp adapter enhancements: parse OCI layers for Python runtime, detect `PYTHONPATH`/`PYTHONHOME` env, record warnings for sitecustomize/startup hooks. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-011 |
| `SCANNER-ANALYZERS-PYTHON-23-012` | DONE | Container/zipapp adapter enhancements: parse OCI layers for Python runtime, detect `PYTHONPATH`/`PYTHONHOME` env, record warnings for sitecustomize/startup hooks. | Python Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python) | SCANNER-ANALYZERS-PYTHON-23-011 |
| `SCANNER-ANALYZERS-RUBY-28-001` | DONE | Build input normalizer & VFS for Ruby projects: merge source trees, Gemfile/Gemfile.lock, vendor/bundle, .gem archives, `.bundle/config`, Rack configs, containers. Detect framework/job fingerprints deterministically. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby) | — |
| `SCANNER-ANALYZERS-RUBY-28-002` | DONE | Gem & Bundler analyzer: parse Gemfile/Gemfile.lock, vendor specs, .gem archives, produce package nodes (PURLs), dependency edges, bin scripts, Bundler group metadata. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby) | SCANNER-ANALYZERS-RUBY-28-001 |
| `SCANNER-ANALYZERS-RUBY-28-003` | DONE | Require/autoload graph builder: resolve static/dynamic require, require_relative, load; infer Zeitwerk autoload paths and Rack boot chain. | Ruby Analyzer Guild (src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby) | SCANNER-ANALYZERS-RUBY-28-002 |
@@ -17,7 +17,7 @@ Dependency: Sprint 135 - 6. Scanner.VI — Scanner & Surface focus on Scanner (p
| `SURFACE-ENV-02` | DONE (2025-11-18) | Strongly-typed env accessors implemented; validation covers required endpoint, bounds, TLS cert path; regression tests passing. | Scanner Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env) | SURFACE-ENV-01 |
| `SURFACE-ENV-03` | DONE (2025-11-27) | Adopt the env helper across Scanner Worker/WebService/BuildX plug-ins. | Scanner Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env) | SURFACE-ENV-02 |
| `SURFACE-ENV-04` | DONE (2025-11-27) | Wire env helper into Zastava Observer/Webhook containers. | Zastava Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env) | SURFACE-ENV-02 |
| `SURFACE-ENV-05` | TODO | Update Helm/Compose/offline kit templates with new env knobs and documentation. | Ops Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env) | SURFACE-ENV-03, SURFACE-ENV-04 |
| `SURFACE-ENV-05` | DONE | Update Helm/Compose/offline kit templates with new env knobs and documentation. | Ops Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Env) | SURFACE-ENV-03, SURFACE-ENV-04 |
| `SCANNER-EVENTS-16-301` | BLOCKED (2025-10-26) | Emit orchestrator-compatible envelopes (`scanner.event.*`) and update integration tests to verify Notifier ingestion (no Redis queue coupling). | Scanner WebService Guild (src/Scanner/StellaOps.Scanner.WebService) | — |
| `SCANNER-GRAPH-21-001` | DONE (2025-11-27) | Provide webhook/REST endpoint for Cartographer to request policy overlays and runtime evidence for graph nodes, ensuring determinism and tenant scoping. | Scanner WebService Guild, Cartographer Guild (src/Scanner/StellaOps.Scanner.WebService) | — |
| `SCANNER-LNM-21-001` | BLOCKED (2025-11-27) | Update `/reports` and `/policy/runtime` payloads to consume advisory/vex linksets, exposing source severity arrays and conflict summaries alongside effective verdicts. Blocked: requires Concelier HTTP client integration or shared library; no existing Concelier dependency in Scanner WebService. | Scanner WebService Guild, Policy Guild (src/Scanner/StellaOps.Scanner.WebService) | — |
@@ -29,8 +29,8 @@ Dependency: Sprint 135 - 6. Scanner.VI — Scanner & Surface focus on Scanner (p
| `SURFACE-SECRETS-04` | DONE (2025-11-27) | Integrate Surface.Secrets into Scanner Worker/WebService/BuildX for registry + CAS creds. | Scanner Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets) | SURFACE-SECRETS-02 |
| `SURFACE-SECRETS-05` | DONE (2025-11-27) | Invoke Surface.Secrets from Zastava Observer/Webhook for CAS & attestation secrets. | Zastava Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets) | SURFACE-SECRETS-02 |
| `SURFACE-SECRETS-06` | BLOCKED (2025-11-27) | Update deployment manifests/offline kit bundles to provision secret references instead of raw values. Requires Ops Guild input on Helm/Compose patterns for Surface.Secrets provider configuration. | Ops Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Secrets) | SURFACE-SECRETS-03 |
| `SCANNER-ENG-0020` | TODO | Implement Homebrew collector & fragment mapper per `design/macos-analyzer.md` §3.1. | Scanner Guild (docs/modules/scanner) | — |
| `SCANNER-ENG-0021` | TODO | Implement pkgutil receipt collector per `design/macos-analyzer.md` §3.2. | Scanner Guild (docs/modules/scanner) | — |
| `SCANNER-ENG-0020` | DONE (2025-11-28) | Implement Homebrew collector & fragment mapper per `design/macos-analyzer.md` §3.1. | Scanner Guild (docs/modules/scanner) | — |
| `SCANNER-ENG-0021` | DONE (2025-11-28) | Implement pkgutil receipt collector per `design/macos-analyzer.md` §3.2. | Scanner Guild (docs/modules/scanner) | — |
| `SCANNER-ENG-0022` | TODO | Implement macOS bundle inspector & capability overlays per `design/macos-analyzer.md` §3.3. | Scanner Guild, Policy Guild (docs/modules/scanner) | — |
| `SCANNER-ENG-0023` | TODO | Deliver macOS policy/offline integration per `design/macos-analyzer.md` §5–6. | Scanner Guild, Offline Kit Guild, Policy Guild (docs/modules/scanner) | — |
| `SCANNER-ENG-0024` | TODO | Implement Windows MSI collector per `design/windows-analyzer.md` §3.1. | Scanner Guild (docs/modules/scanner) | — |
@@ -42,7 +42,7 @@ Dependency: Sprint 135 - 6. Scanner.VI — Scanner & Surface focus on Scanner (p
| `SURFACE-FS-03` | DONE (2025-11-27) | Integrate Surface.FS writer into Scanner Worker analyzer pipeline to persist layer + entry-trace fragments. | Scanner Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS) | SURFACE-FS-02 |
| `SURFACE-FS-04` | DONE (2025-11-27) | Integrate Surface.FS reader into Zastava Observer runtime drift loop. | Zastava Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS) | SURFACE-FS-02 |
| `SURFACE-FS-05` | DONE (2025-11-27) | Expose Surface.FS pointers via Scanner WebService reports and coordinate rescan planning with Scheduler. | Scanner Guild, Scheduler Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS) | SURFACE-FS-03 |
| `SURFACE-FS-06` | TODO | Update scanner-engine guide and offline kit docs with Surface.FS workflow. | Docs Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS) | SURFACE-FS-02..05 |
| `SURFACE-FS-06` | DONE (2025-11-28) | Update scanner-engine guide and offline kit docs with Surface.FS workflow. | Docs Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS) | SURFACE-FS-02..05 |
| `SCANNER-SURFACE-04` | TODO | DSSE-sign every `layer.fragments` payload, emit `_composition.json`, and persist DSSE envelopes so offline kits can replay deterministically (see `docs/modules/scanner/deterministic-sbom-compose.md` §2.1). | Scanner Worker Guild (src/Scanner/StellaOps.Scanner.Worker) | SCANNER-SURFACE-01, SURFACE-FS-03 |
| `SURFACE-FS-07` | TODO | Extend Surface.FS manifest schema with `composition.recipe`, fragment attestation metadata, and verification helpers per deterministic SBOM spec. | Scanner Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.FS) | SCANNER-SURFACE-04 |
| `SCANNER-EMIT-15-001` | TODO | Enforce canonical JSON (`stella.contentHash`, Merkle root metadata, zero timestamps) for fragments and composed CycloneDX inventory/usage BOMs. Documented in `docs/modules/scanner/deterministic-sbom-compose.md` §2.2. | Scanner Emit Guild (src/Scanner/__Libraries/StellaOps.Scanner.Emit) | SCANNER-SURFACE-04 |
@@ -51,7 +51,7 @@ Dependency: Sprint 135 - 6. Scanner.VI — Scanner & Surface focus on Scanner (p
| `SURFACE-VAL-02` | DONE (2025-11-23) | Validation library now enforces secrets schema, fallback/provider checks, and inline/file guardrails; tests added. | Scanner Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation) | SURFACE-VAL-01, SURFACE-ENV-02, SURFACE-FS-02 |
| `SURFACE-VAL-03` | DONE (2025-11-23) | Validation runner wired into Worker/WebService startup and pre-analyzer paths (OS, language, EntryTrace). | Scanner Guild, Analyzer Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation) | SURFACE-VAL-02 |
| `SURFACE-VAL-04` | DONE (2025-11-27) | Expose validation helpers to Zastava and other runtime consumers for preflight checks. | Scanner Guild, Zastava Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation) | SURFACE-VAL-02 |
| `SURFACE-VAL-05` | TODO | Document validation extensibility, registration, and customization in scanner-engine guides. | Docs Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation) | SURFACE-VAL-02 |
| `SURFACE-VAL-05` | DONE (2025-11-28) | Document validation extensibility, registration, and customization in scanner-engine guides. | Docs Guild (src/Scanner/__Libraries/StellaOps.Scanner.Surface.Validation) | SURFACE-VAL-02 |
## Execution Log
| Date (UTC) | Update | Owner |
@@ -86,3 +86,7 @@ Dependency: Sprint 135 - 6. Scanner.VI — Scanner & Surface focus on Scanner (p
| 2025-11-18 | Attempted `dotnet test` for Worker Surface manifest configurator; restore failed fetching StackExchange.Redis from nuget.org (network timeout); tests still pending CI. | Implementer |
| 2025-11-18 | SCANNER-ENV-03 started: BuildX plugin now loads Surface.Env defaults (SCANNER/SURFACE prefixes) for cache root/bucket/tenant when args/env missing; tests not yet added. | Implementer |
| 2025-11-19 | Marked SCANNER-ENV-03, SURFACE-SECRETS-01/02, and SURFACE-VAL-01 BLOCKED pending Security/Surface schema approvals and published env/secrets artifacts; move back to TODO once upstream contracts land. | Implementer |
| 2025-11-28 | Created `docs/modules/scanner/guides/surface-validation-extensibility.md` covering custom validators, reporters, configuration, and testing; SURFACE-VAL-05 DONE. | Implementer |
| 2025-11-28 | Created `docs/modules/scanner/guides/surface-fs-workflow.md` with end-to-end workflow including artefact generation, storage layout, consumption, and offline kit handling; SURFACE-FS-06 DONE. | Implementer |
| 2025-11-28 | Created `StellaOps.Scanner.Analyzers.OS.Homebrew` library with `HomebrewReceiptParser` (INSTALL_RECEIPT.json parsing), `HomebrewPackageAnalyzer` (Cellar discovery for Intel/Apple Silicon), and `HomebrewAnalyzerPlugin`; added `BuildHomebrew` PURL builder, `HomebrewCellar` evidence source; 23 tests passing. SCANNER-ENG-0020 DONE. A receipt-parsing sketch follows this log. | Implementer |
| 2025-11-28 | Created `StellaOps.Scanner.Analyzers.OS.Pkgutil` library with `PkgutilReceiptParser` (plist parsing), `BomParser` (BOM file enumeration), `PkgutilPackageAnalyzer` (receipt discovery from /var/db/receipts), and `PkgutilAnalyzerPlugin`; added `BuildPkgutil` PURL builder, `PkgutilReceipt` evidence source; 9 tests passing. SCANNER-ENG-0021 DONE. | Implementer |
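
The SCANNER-ENG-0020 entry above boils down to walking a Homebrew Cellar and reading each `INSTALL_RECEIPT.json`. A minimal sketch of that walk; the receipt field path `source.versions.stable` and the `<cellar>/<formula>/<version>/` layout are assumptions about Homebrew's on-disk format, and the real `HomebrewReceiptParser` handles far more fields:

```csharp
// Hedged sketch only; field names and layout are assumptions, not the shipped parser.
using System.Collections.Generic;
using System.IO;
using System.Text.Json;

public sealed record HomebrewPackageSketch(string Name, string? Version, string CellarPath);

public static class HomebrewReceiptScanSketch
{
    public static IEnumerable<HomebrewPackageSketch> Scan(string cellarRoot)
    {
        foreach (var receiptPath in Directory.EnumerateFiles(cellarRoot, "INSTALL_RECEIPT.json", SearchOption.AllDirectories))
        {
            using var doc = JsonDocument.Parse(File.ReadAllText(receiptPath));

            string? version = null;
            if (doc.RootElement.TryGetProperty("source", out var source) &&
                source.TryGetProperty("versions", out var versions) &&
                versions.TryGetProperty("stable", out var stable))
            {
                version = stable.GetString();
            }

            var versionDir = Path.GetDirectoryName(receiptPath)!;
            var formulaDir = Path.GetDirectoryName(versionDir)!;

            yield return new HomebrewPackageSketch(
                Name: Path.GetFileName(formulaDir),
                Version: version ?? Path.GetFileName(versionDir),   // fall back to the directory name
                CellarPath: versionDir);
        }
    }
}
```
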
@@ -7,15 +7,15 @@ Depends on: Sprint 180.A - Cli.IV
Summary: Experience & SDKs focus on Cli (phase V).
Task ID | State | Task description | Owners (Source)
--- | --- | --- | ---
CLI-TEN-47-001 | DOING | Implement `stella login`, `whoami`, `tenants list`, persistent profiles, secure token storage, and `--tenant` override with validation. Partial: `auth login`, `auth whoami` already exist; `tenants list` implemented. Remaining: persistent profiles, secure token storage enhancements, `--tenant` override validation. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
CLI-TEN-49-001 | TODO | Add service account token minting, delegation (`stella token delegate`), impersonation banner, and audit-friendly logging. Dependencies: CLI-TEN-47-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
CLI-VEX-30-001 | TODO | Implement `stella vex consensus list` with filters, paging, policy selection, `--json/--csv`. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
CLI-VEX-30-002 | TODO | Implement `stella vex consensus show` displaying quorum, evidence, rationale, signature status. Dependencies: CLI-VEX-30-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
CLI-VEX-30-003 | TODO | Implement `stella vex simulate` for trust/threshold overrides with JSON diff output. Dependencies: CLI-VEX-30-002. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
CLI-VEX-30-004 | TODO | Implement `stella vex export` for consensus NDJSON bundles with signature verification helper. Dependencies: CLI-VEX-30-003. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VULN-29-001 | TODO | Implement `stella vuln list` with grouping, paging, filters, `--json/--csv`, and policy selection. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VULN-29-002 | TODO | Implement `stella vuln show` displaying evidence, policy rationale, paths, ledger summary; support `--json` for automation. Dependencies: CLI-VULN-29-001. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VULN-29-003 | TODO | Add workflow commands (`assign`, `comment`, `accept-risk`, `verify-fix`, `target-fix`, `reopen`) with filter selection (`--filter`) and idempotent retries. Dependencies: CLI-VULN-29-002. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VULN-29-004 | TODO | Implement `stella vuln simulate` producing delta summaries and optional Markdown report for CI. Dependencies: CLI-VULN-29-003. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VULN-29-005 | TODO | Add `stella vuln export` and `stella vuln bundle verify` commands to trigger/download evidence bundles and verify signatures. Dependencies: CLI-VULN-29-004. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VULN-29-006 | TODO | Update CLI docs/examples for Vulnerability Explorer with compliance checklist and CI snippets. Dependencies: CLI-VULN-29-005. | DevEx/CLI Guild, Docs Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-TEN-47-001 | DONE | Implement `stella login`, `whoami`, `tenants list`, persistent profiles, secure token storage, and `--tenant` override with validation. Completed: `auth login`/`auth whoami` existed; `tenants list`/`use`/`current`/`clear` commands added; TenantProfileStore for persistent profiles at ~/.stellaops/profile.json; global `--tenant` option with profile fallback; tenant validation against Authority when available. Token storage uses existing file cache at ~/.stellaops/tokens/. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-TEN-49-001 | DONE | Add service account token minting, delegation (`stella token delegate`), impersonation banner, and audit-friendly logging. Completed: `auth token mint` and `auth token delegate` commands; TokenMint/DelegateRequest/Response models; AuthorityConsoleClient extended with MintTokenAsync, DelegateTokenAsync, IntrospectTokenAsync; CheckAndDisplayImpersonationBannerAsync helper for audit-aware impersonation notices. Note: Authority service endpoints (POST /console/token/mint, /delegate, /introspect) need backend implementation. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VEX-30-001 | DONE | Implement `stella vex consensus list` with filters, paging, policy selection, `--json/--csv`. Completed: VexModels.cs with request/response records; IBackendOperationsClient.ListVexConsensusAsync; BackendOperationsClient implementation calling GET /api/vex/consensus; BuildVexCommand in CommandFactory.cs with `vex consensus list` subcommand; HandleVexConsensusListAsync handler with table/JSON/CSV output, tenant resolution via TenantProfileStore, pagination support. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VEX-30-002 | DONE | Implement `stella vex consensus show` displaying quorum, evidence, rationale, signature status. Dependencies: CLI-VEX-30-001. Completed: VexConsensusDetailResponse with quorum/rationale/signature/evidence models; IBackendOperationsClient.GetVexConsensusAsync; BackendOperationsClient implementation calling GET /api/vex/consensus/{vulnId}/{productKey}; `vex consensus show` subcommand in CommandFactory.cs; HandleVexConsensusShowAsync handler with rich Spectre.Console formatted output including panels and tables for all sections. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VEX-30-003 | DONE | Implement `stella vex simulate` for trust/threshold overrides with JSON diff output. Dependencies: CLI-VEX-30-002. Completed: VexSimulationRequest/Response models with TrustOverrides, ThresholdOverride, QuorumOverride, ExcludeProviders; SimulateVexConsensusAsync interface and implementation calling POST /api/vex/consensus/simulate; `vex simulate` command with --trust provider=weight, --threshold, --quorum, --exclude, --include-only, --changed-only options; HandleVexSimulateAsync handler with before/after diff table and summary panel. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VEX-30-004 | DONE | Implement `stella vex export` for consensus NDJSON bundles with signature verification helper. Dependencies: CLI-VEX-30-003. Completed: VexExportRequest/Response models with format, signed, filter options; VexExportVerifyRequest/Result for local verification; IBackendOperationsClient.ExportVexConsensusAsync (POST /api/vex/consensus/export) and DownloadVexExportAsync (GET /api/vex/consensus/export/{exportId}); `vex export` command with --vuln-id, --product-key, --purl, --status, --output, --unsigned filters; `vex export verify` subcommand with --expected-digest and --public-key for local digest/signature verification; HandleVexExportAsync handler with download and progress display; HandleVexVerifyAsync for offline verification with SHA-256 digest calculation. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VULN-29-001 | DONE | Implement `stella vuln list` with grouping, paging, filters, `--json/--csv`, and policy selection. Completed: VulnModels.cs with VulnListRequest/Response, VulnItem, VulnSeverityInfo, VulnAffectedPackage, VulnGroupingInfo, VulnGroup and all models for CLI-VULN-29-002 through CLI-VULN-29-005; IBackendOperationsClient extended with ListVulnerabilitiesAsync, GetVulnerabilityAsync, ExecuteVulnWorkflowAsync, SimulateVulnerabilitiesAsync, ExportVulnerabilitiesAsync, DownloadVulnExportAsync; BackendOperationsClient HTTP implementations calling GET/POST /api/vuln/*; `vuln list` command with --vuln-id, --severity, --status, --purl, --cpe, --sbom-id, --policy-id, --policy-version, --group-by, --limit, --offset, --cursor, --tenant, --json, --csv options; HandleVulnListAsync handler with grouped and individual table output, CSV output, color-coded severity/status display. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VULN-29-002 | DONE | Implement `stella vuln show` displaying evidence, policy rationale, paths, ledger summary; support `--json` for automation. Dependencies: CLI-VULN-29-001. Completed: `vuln show` subcommand with vulnerability-id argument, --tenant, --json, --verbose options; HandleVulnShowAsync handler; RenderVulnDetail helper with Spectre.Console panels and tables for: header (ID, status, severity, VEX, aliases, assignee, dates), description, affected packages table, policy rationale panel with rules, evidence table, dependency paths, workflow ledger history table, references list. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VULN-29-003 | DONE | Add workflow commands (`assign`, `comment`, `accept-risk`, `verify-fix`, `target-fix`, `reopen`) with filter selection (`--filter`) and idempotent retries. Dependencies: CLI-VULN-29-002. Completed: Six workflow subcommands under `vuln` command: `assign <assignee>`, `comment <text>`, `accept-risk <justification> [--due-date]`, `verify-fix <fix-version>`, `target-fix <version> [--due-date]`, `reopen <comment>`. All commands share common options: --vuln-id (multi-value), --filter-severity, --filter-status, --filter-purl, --filter-sbom for bulk operations; --tenant, --idempotency-key for retries, --json for automation. HandleVulnWorkflowAsync handler builds VulnWorkflowRequest with action-specific fields, calls ExecuteVulnWorkflowAsync (POST /api/vuln/workflow), renders success/error table with affected counts. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VULN-29-004 | DONE | Implement `stella vuln simulate` producing delta summaries and optional Markdown report for CI. Dependencies: CLI-VULN-29-003. Completed: `vuln simulate` subcommand with --policy-id, --policy-version, --vex-override vulnId=status (multi), --severity-threshold, --sbom-id (multi), --markdown, --changed-only, --output (file), --tenant, --json options; HandleVulnSimulateAsync handler parsing VEX overrides into Dictionary, building VulnSimulationRequest, calling SimulateVulnerabilitiesAsync (POST /api/vuln/simulate); output includes simulation summary panel (total/changed/upgrades/downgrades/nochange), delta table with before/after status and change indicator (UPGRADE/DOWNGRADE), optional Markdown report to file or console for CI integration. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VULN-29-005 | DONE | Add `stella vuln export` and `stella vuln bundle verify` commands to trigger/download evidence bundles and verify signatures. Dependencies: CLI-VULN-29-004. Completed: `vuln export` command with --vuln-id (multi), --sbom-id (multi), --policy-id, --format (ndjson/json), --include-evidence, --include-ledger, --signed (defaults true), --output (required), --tenant options; HandleVulnExportAsync handler calling ExportVulnerabilitiesAsync (POST /api/vuln/export) and DownloadVulnExportAsync to stream bundle to file; output displays item count, format, signature info, digest; `vuln export verify` subcommand with file argument, --expected-digest, --public-key options; HandleVulnExportVerifyAsync performs SHA-256 digest calculation, optional signature file detection (.sig), renders verification panel with pass/fail status. | DevEx/CLI Guild (src/Cli/StellaOps.Cli)
|
||||
CLI-VULN-29-006 | DONE | Update CLI docs/examples for Vulnerability Explorer with compliance checklist and CI snippets. Dependencies: CLI-VULN-29-005. Completed: Created docs/modules/cli/guides/vuln-explorer-cli.md with comprehensive documentation covering: Prerequisites (scopes, connectivity); vuln list with filters, grouping, pagination, --json/--csv; vuln show with all output sections; Workflow commands (assign, comment, accept-risk, verify-fix, target-fix, reopen) with idempotency support; vuln simulate for policy/VEX delta analysis with CI Markdown output; vuln export and export verify for compliance bundles; Exit codes table; Compliance checklist (inventory, SLA, risk acceptance audit, evidence bundles); CI pipeline snippets for GitHub Actions, GitLab CI, Jenkins; Offline operation guidance. | DevEx/CLI Guild, Docs Guild (src/Cli/StellaOps.Cli)
|
||||
docs/modules/cli/guides/vuln-explorer-cli.md (new file, 502 lines)
@@ -0,0 +1,502 @@
|
||||
# CLI Vulnerability Explorer Commands Reference
|
||||
|
||||
> **Audience:** DevEx engineers, security operators, and CI authors managing vulnerabilities through the `stella` CLI.
|
||||
> **Scope:** Command synopsis, options, exit codes, and CI integration patterns for `stella vuln` commands as introduced in Sprint 205.
|
||||
|
||||
The Vulnerability Explorer CLI enables comprehensive vulnerability management including listing, inspection, workflow operations, policy simulation, and export. All commands support multi-tenant environments and integrate with StellaOps Authority for authentication.
|
||||
|
||||
---
|
||||
|
||||
## 1. Prerequisites
|
||||
|
||||
- CLI version: `stella` >= 0.21.0 (Vulnerability Explorer feature gate enabled).
|
||||
- Required scopes (DPoP-bound):
|
||||
- `vuln:view` for listing and viewing vulnerabilities.
|
||||
- `vuln:workflow` for workflow operations (assign, comment, accept-risk, etc.).
|
||||
- `vuln:simulate` for policy simulation.
|
||||
- `vuln:export` for exporting evidence bundles.
|
||||
- `tenant:select` if using tenant switching.
|
||||
- Connectivity: direct access to Backend APIs or configured backend URL.
|
||||
- Environment: set `STELLAOPS_BACKEND_URL`, `STELLA_TENANT`, and authenticate via `stella auth login` (see the example below).
|
||||
|
||||
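A minimal session bootstrap, assuming the backend URL and tenant values below are placeholders for your deployment:

```bash
export STELLAOPS_BACKEND_URL="https://stellaops.example.internal"   # placeholder backend URL
export STELLA_TENANT="acme"                                         # placeholder tenant id

stella auth login
stella auth whoami      # confirm the authenticated identity and tenant
stella tenants list     # optional: enumerate tenants available for --tenant overrides
```
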
---
|
||||
|
||||
## 2. `stella vuln list`
|
||||
|
||||
### 2.1 Synopsis
|
||||
|
||||
```bash
|
||||
stella vuln list \
|
||||
[--vuln-id <id>] \
|
||||
[--severity critical|high|medium|low] \
|
||||
[--status open|triaged|accepted|fixed|risk_accepted] \
|
||||
[--purl <package-url>] \
|
||||
[--cpe <cpe>] \
|
||||
[--sbom-id <sbom-id>] \
|
||||
[--policy-id <policy-id>] \
|
||||
[--policy-version <version>] \
|
||||
[--group-by severity|status|sbom|policy] \
|
||||
[--limit <n>] [--offset <n>] [--cursor <token>] \
|
||||
[--tenant <tenant-id>] \
|
||||
[--json] [--csv] [--verbose]
|
||||
```
|
||||
|
||||
### 2.2 Description
|
||||
|
||||
Lists vulnerabilities matching the specified filters with pagination support. Supports grouped summaries for reporting and machine-readable output for automation.
|
||||
|
||||
### 2.3 Options
|
||||
|
||||
| Option | Description |
|
||||
|--------|-------------|
|
||||
| `--vuln-id <id>` | Filter by vulnerability ID (e.g., CVE-2024-1234). |
|
||||
| `--severity <level>` | Filter by severity (critical, high, medium, low). |
|
||||
| `--status <status>` | Filter by workflow status. |
|
||||
| `--purl <package-url>` | Filter by Package URL pattern. |
|
||||
| `--cpe <cpe>` | Filter by CPE pattern. |
|
||||
| `--sbom-id <sbom-id>` | Filter by SBOM identifier. |
|
||||
| `--policy-id <policy-id>` | Filter by policy ID. |
|
||||
| `--policy-version <version>` | Filter by policy version. |
|
||||
| `--group-by <field>` | Group results by field (shows summary counts). |
|
||||
| `--limit <n>` | Maximum results to return (default 50). |
|
||||
| `--offset <n>` | Number of results to skip. |
|
||||
| `--cursor <token>` | Pagination cursor from previous response. |
|
||||
| `--tenant <tenant-id>` | Override tenant for multi-tenant deployments. |
|
||||
| `--json` | Output as JSON for automation. |
|
||||
| `--csv` | Output as CSV for spreadsheet import. |
|
||||
| `--verbose` | Enable debug logging. |
|
||||
|
||||
### 2.4 Examples
|
||||
|
||||
List critical vulnerabilities:
|
||||
|
||||
```bash
|
||||
stella vuln list --severity critical
|
||||
```
|
||||
|
||||
Group by status for reporting:
|
||||
|
||||
```bash
|
||||
stella vuln list --group-by status --json > status-summary.json
|
||||
```
|
||||
|
||||
Export CSV for compliance audit:
|
||||
|
||||
```bash
|
||||
stella vuln list --severity critical --severity high --csv > critical-vulns.csv
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 3. `stella vuln show`
|
||||
|
||||
### 3.1 Synopsis
|
||||
|
||||
```bash
|
||||
stella vuln show <vulnerability-id> \
|
||||
[--tenant <tenant-id>] \
|
||||
[--json] [--verbose]
|
||||
```
|
||||
|
||||
### 3.2 Description
|
||||
|
||||
Displays detailed information about a specific vulnerability including severity, affected packages, policy rationale, evidence, dependency paths, and workflow history.
|
||||
|
||||
### 3.3 Output Sections
|
||||
|
||||
- **Header:** Vulnerability ID, status, severity, VEX status, aliases, assignee, dates.
|
||||
- **Description:** Full vulnerability description.
|
||||
- **Affected Packages:** Table of affected packages with versions and fix status.
|
||||
- **Policy Rationale:** Active policy rules and their evaluation results.
|
||||
- **Evidence:** Timeline of evidence collected.
|
||||
- **Dependency Paths:** Transitive dependency chains leading to vulnerability.
|
||||
- **Workflow History:** Audit ledger of all workflow actions.
|
||||
- **References:** Links to advisories, patches, and documentation.
|
||||
|
||||
### 3.4 Example
|
||||
|
||||
```bash
|
||||
stella vuln show CVE-2024-1234 --json
|
||||
```
|
||||
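For automation, the JSON output is typically piped through `jq`; the field name below is an assumption about the payload shape rather than a documented contract:

```bash
# Extract only the workflow status for a single CVE (field name assumed)
stella vuln show CVE-2024-1234 --json | jq -r '.status'
```
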
|
||||
---
|
||||
|
||||
## 4. Workflow Commands
|
||||
|
||||
All workflow commands support bulk operations via `--vuln-id` (repeatable) or filter options.
|
||||
|
||||
### 4.1 `stella vuln assign`
|
||||
|
||||
Assign vulnerabilities to a team member.
|
||||
|
||||
```bash
|
||||
stella vuln assign <assignee> \
|
||||
[--vuln-id <id>]... \
|
||||
[--filter-severity <level>] \
|
||||
[--filter-status <status>] \
|
||||
[--filter-purl <pattern>] \
|
||||
[--filter-sbom <sbom-id>] \
|
||||
[--tenant <tenant-id>] \
|
||||
[--idempotency-key <key>] \
|
||||
[--json] [--verbose]
|
||||
```
|
||||
|
||||
Example:
|
||||
|
||||
```bash
|
||||
stella vuln assign security-team \
|
||||
--filter-severity critical \
|
||||
--filter-status open
|
||||
```
|
||||
|
||||
### 4.2 `stella vuln comment`
|
||||
|
||||
Add a comment to vulnerabilities.
|
||||
|
||||
```bash
|
||||
stella vuln comment "<text>" \
|
||||
--vuln-id CVE-2024-1234 \
|
||||
[--json]
|
||||
```
|
||||
|
||||
### 4.3 `stella vuln accept-risk`
|
||||
|
||||
Accept risk for vulnerabilities with documented justification.
|
||||
|
||||
```bash
|
||||
stella vuln accept-risk "<justification>" \
|
||||
--vuln-id CVE-2024-1234 \
|
||||
[--due-date 2025-12-31] \
|
||||
[--json]
|
||||
```
|
||||
|
||||
### 4.4 `stella vuln verify-fix`
|
||||
|
||||
Mark vulnerabilities as fixed and verified.
|
||||
|
||||
```bash
|
||||
stella vuln verify-fix <fix-version> \
|
||||
--vuln-id CVE-2024-1234 \
|
||||
[--json]
|
||||
```
|
||||
|
||||
### 4.5 `stella vuln target-fix`
|
||||
|
||||
Set target fix version and due date.
|
||||
|
||||
```bash
|
||||
stella vuln target-fix <version> \
|
||||
--vuln-id CVE-2024-1234 \
|
||||
[--due-date 2025-06-30] \
|
||||
[--json]
|
||||
```
|
||||
|
||||
### 4.6 `stella vuln reopen`
|
||||
|
||||
Reopen previously closed vulnerabilities.
|
||||
|
||||
```bash
|
||||
stella vuln reopen "<reason>" \
|
||||
--vuln-id CVE-2024-1234 \
|
||||
[--json]
|
||||
```
|
||||
|
||||
### 4.7 Idempotency
|
||||
|
||||
All workflow commands support `--idempotency-key` for safe retries in CI pipelines:
|
||||
|
||||
```bash
|
||||
stella vuln assign security-team \
|
||||
--vuln-id CVE-2024-1234 \
|
||||
--idempotency-key "assign-cve-2024-1234-$(date +%Y%m%d)"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 5. `stella vuln simulate`
|
||||
|
||||
### 5.1 Synopsis
|
||||
|
||||
```bash
|
||||
stella vuln simulate \
|
||||
[--policy-id <id>] \
|
||||
[--policy-version <version>] \
|
||||
[--vex-override <vulnId>=<status>]... \
|
||||
[--severity-threshold <level>] \
|
||||
[--sbom-id <id>]... \
|
||||
[--markdown] \
|
||||
[--changed-only] \
|
||||
[--output <file>] \
|
||||
[--tenant <tenant-id>] \
|
||||
[--json] [--verbose]
|
||||
```
|
||||
|
||||
### 5.2 Description
|
||||
|
||||
Simulates the impact of policy or VEX changes without modifying data. Produces delta summaries showing which vulnerabilities would change status, useful for policy review and CI gates.
|
||||
|
||||
### 5.3 Options
|
||||
|
||||
| Option | Description |
|
||||
|--------|-------------|
|
||||
| `--policy-id <id>` | Policy ID to simulate. |
|
||||
| `--policy-version <version>` | Policy version to simulate against. |
|
||||
| `--vex-override <vulnId>=<status>` | Override VEX status for simulation (repeatable). |
|
||||
| `--severity-threshold <level>` | Minimum severity to include. |
|
||||
| `--sbom-id <id>` | Limit simulation to specific SBOMs (repeatable). |
|
||||
| `--markdown` | Include Markdown report for CI. |
|
||||
| `--changed-only` | Only show items that would change. |
|
||||
| `--output <file>` | Write Markdown report to file. |
|
||||
| `--json` | Output full simulation results as JSON. |
|
||||
|
||||
### 5.4 Output
|
||||
|
||||
The command displays:
|
||||
- **Summary Panel:** Total evaluated, changed, upgrades, downgrades.
|
||||
- **Delta Table:** Before/after status comparison with UPGRADE/DOWNGRADE indicators.
|
||||
- **Markdown Report:** Optional CI-friendly report.
|
||||
|
||||
### 5.5 CI Integration Example
|
||||
|
||||
```bash
|
||||
# Run simulation and write a Markdown delta report for CI
stella vuln simulate \
  --policy-id prod-policy \
  --changed-only \
  --markdown \
  --output simulation-report.md

# Fail the pipeline if the simulation command itself reported an error
if [ $? -ne 0 ]; then
  echo "Simulation failed - see simulation-report.md"
  exit 1
fi
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 6. `stella vuln export`
|
||||
|
||||
### 6.1 Synopsis
|
||||
|
||||
```bash
|
||||
stella vuln export \
|
||||
--output <file> \
|
||||
[--vuln-id <id>]... \
|
||||
[--sbom-id <id>]... \
|
||||
[--policy-id <id>] \
|
||||
[--format ndjson|json] \
|
||||
[--include-evidence] \
|
||||
[--include-ledger] \
|
||||
[--signed] \
|
||||
[--tenant <tenant-id>] \
|
||||
[--verbose]
|
||||
```
|
||||
|
||||
### 6.2 Description
|
||||
|
||||
Exports vulnerability evidence bundles for compliance documentation, audits, or offline analysis. Bundles can be cryptographically signed for integrity verification.
|
||||
|
||||
### 6.3 Options
|
||||
|
||||
| Option | Description |
|
||||
|--------|-------------|
|
||||
| `--output <file>` | Output file path (required). |
|
||||
| `--vuln-id <id>` | Vulnerability IDs to include (repeatable). |
|
||||
| `--sbom-id <id>` | SBOM IDs to scope export (repeatable). |
|
||||
| `--policy-id <id>` | Policy ID for filtering. |
|
||||
| `--format <fmt>` | Output format: `ndjson` (default) or `json`. |
|
||||
| `--include-evidence` | Include evidence data (default: true). |
|
||||
| `--include-ledger` | Include workflow ledger (default: true). |
|
||||
| `--signed` | Request signed bundle (default: true). |
|
||||
|
||||
### 6.4 Example
|
||||
|
||||
```bash
|
||||
stella vuln export \
|
||||
--output compliance-bundle.ndjson \
|
||||
--sbom-id prod-app-sbom \
|
||||
--signed
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 7. `stella vuln export verify`
|
||||
|
||||
### 7.1 Synopsis
|
||||
|
||||
```bash
|
||||
stella vuln export verify <file> \
|
||||
[--expected-digest <sha256:hex>] \
|
||||
[--public-key <key-file>] \
|
||||
[--verbose]
|
||||
```
|
||||
|
||||
### 7.2 Description
|
||||
|
||||
Verifies the integrity and optional signature of an exported vulnerability bundle. Use this to validate bundles received from external sources or stored archives.
|
||||
|
||||
### 7.3 Example
|
||||
|
||||
```bash
|
||||
stella vuln export verify compliance-bundle.ndjson \
|
||||
--expected-digest sha256:abc123... \
|
||||
--public-key /path/to/public.pem
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 8. Exit Codes
|
||||
|
||||
| Exit Code | Meaning |
|
||||
|-----------|---------|
|
||||
| `0` | Command completed successfully. |
|
||||
| `1` | General error (see error message). |
|
||||
| `130` | Operation cancelled by user (Ctrl+C). |
|
||||
|
||||
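A sketch of branching on these exit codes in a wrapper script; the `stella` invocation itself is illustrative:

```bash
stella vuln list --severity critical --status open --json > open-criticals.json
case $? in
  0)   echo "query succeeded" ;;
  130) echo "cancelled by user"; exit 130 ;;
  *)   echo "stella returned an error"; exit 1 ;;
esac
```
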
---
|
||||
|
||||
## 9. Compliance Checklist
|
||||
|
||||
Use these commands to demonstrate vulnerability management compliance:
|
||||
|
||||
### 9.1 Vulnerability Inventory
|
||||
|
||||
```bash
|
||||
# Generate complete vulnerability inventory
|
||||
stella vuln list --json > inventory.json
|
||||
|
||||
# Summary by severity
|
||||
stella vuln list --group-by severity --json > severity-summary.json
|
||||
```
|
||||
|
||||
### 9.2 SLA Compliance
|
||||
|
||||
```bash
|
||||
# Find critical vulns older than 30 days
|
||||
stella vuln list \
|
||||
--severity critical \
|
||||
--status open \
|
||||
--json | jq '.items[] | select(.updatedAt < (now - 2592000 | todate))'
|
||||
```
|
||||
|
||||
### 9.3 Risk Acceptance Audit
|
||||
|
||||
```bash
|
||||
# Export all risk-accepted vulnerabilities with justifications
|
||||
stella vuln list --status risk_accepted --json > risk-accepted.json
|
||||
```
|
||||
|
||||
### 9.4 Evidence Bundle for Audit
|
||||
|
||||
```bash
|
||||
# Export signed evidence bundle
|
||||
stella vuln export \
|
||||
--output audit-$(date +%Y%m%d).ndjson \
|
||||
--signed
|
||||
|
||||
# Verify bundle integrity
|
||||
stella vuln export verify audit-$(date +%Y%m%d).ndjson
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 10. CI Pipeline Snippets
|
||||
|
||||
### 10.1 GitHub Actions
|
||||
|
||||
```yaml
|
||||
- name: Check Critical Vulnerabilities
|
||||
run: |
|
||||
count=$(stella vuln list --severity critical --status open --json | jq '.total')
|
||||
if [ "$count" -gt 0 ]; then
|
||||
echo "::error::Found $count critical open vulnerabilities"
|
||||
stella vuln list --severity critical --status open
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Policy Simulation Gate
|
||||
run: |
|
||||
stella vuln simulate \
|
||||
--policy-id ${{ env.POLICY_ID }} \
|
||||
--changed-only \
|
||||
--markdown \
|
||||
--output ${{ github.workspace }}/simulation.md
|
||||
|
||||
cat ${{ github.workspace }}/simulation.md >> $GITHUB_STEP_SUMMARY
|
||||
```
|
||||
|
||||
### 10.2 GitLab CI
|
||||
|
||||
```yaml
|
||||
vuln-check:
  script:
    - stella auth login --token $STELLA_TOKEN
    - count=$(stella vuln list --severity critical --status open --json | jq '.total')
    - echo "CRITICAL_OPEN_COUNT=$count" > vuln-status.env
    - |
      if [ "$count" -gt 0 ]; then
        echo "Critical vulnerabilities found!"
        stella vuln list --severity critical --status open
        exit 1
      fi
  artifacts:
    reports:
      dotenv: vuln-status.env
|
||||
```
|
||||
|
||||
### 10.3 Jenkins Pipeline
|
||||
|
||||
```groovy
|
||||
stage('Vulnerability Check') {
|
||||
steps {
|
||||
sh '''
|
||||
stella vuln list \
|
||||
--severity critical \
|
||||
--severity high \
|
||||
--status open \
|
||||
--csv > vulns.csv
|
||||
'''
|
||||
archiveArtifacts artifacts: 'vulns.csv'
|
||||
|
||||
script {
|
||||
def count = sh(
|
||||
script: "stella vuln list --severity critical --status open --json | jq '.total'",
|
||||
returnStdout: true
|
||||
).trim().toInteger()
|
||||
|
||||
if (count > 0) {
|
||||
error("Found ${count} critical vulnerabilities")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 11. Offline Operation
|
||||
|
||||
When operating in air-gapped environments:
|
||||
|
||||
1. Export vulnerability data before going offline:
|
||||
```bash
|
||||
stella vuln export --output vuln-bundle.ndjson --signed
|
||||
```
|
||||
|
||||
2. Transfer bundle to air-gapped system.
|
||||
|
||||
3. Verify bundle integrity:
|
||||
```bash
|
||||
stella vuln export verify vuln-bundle.ndjson \
|
||||
--expected-digest sha256:...
|
||||
```
|
||||
|
||||
For full offline kit support, see the [Offline Kit documentation](../../../24_OFFLINE_KIT.md).
|
||||
|
||||
---
|
||||
|
||||
## 12. Related Documentation
|
||||
|
||||
- [VEX Consensus CLI](./vex-cli.md) - VEX status management
|
||||
- [Policy Simulation](../../policy/guides/simulation.md) - Policy testing
|
||||
- [Authentication Guide](./auth-cli.md) - Token management
|
||||
- [API Reference](../../../09_API_CLI_REFERENCE.md) - Full API documentation
|
||||
docs/modules/scanner/guides/surface-fs-workflow.md (new file, 414 lines)
@@ -0,0 +1,414 @@
|
||||
# Surface.FS Workflow Guide
|
||||
|
||||
> **Version:** 1.0 (2025-11-28)
|
||||
>
|
||||
> **Audience:** Scanner Worker/WebService integrators, Zastava operators, Offline Kit builders
|
||||
|
||||
## Overview
|
||||
|
||||
Surface.FS provides a content-addressable storage layer for Scanner-derived artefacts. This guide covers the end-to-end workflow from artefact generation to consumption, including offline bundle handling.
|
||||
|
||||
## Workflow Stages
|
||||
|
||||
```
|
||||
┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐
|
||||
│ Scanner Worker │───▶│ Surface.FS │───▶│ Consumers │
|
||||
│ - Scan image │ │ - Store manifest│ │ - WebService │
|
||||
│ - Generate │ │ - Store payload │ │ - Zastava │
|
||||
│ artefacts │ │ - Local cache │ │ - CLI │
|
||||
└─────────────────┘ └─────────────────┘ └─────────────────┘
|
||||
│ │ │
|
||||
▼ ▼ ▼
|
||||
┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐
|
||||
│ Generate: │ │ Store: │ │ Consume: │
|
||||
│ - Layer frags │ │ - RustFS/S3 │ │ - Report API │
|
||||
│ - EntryTrace │ │ - Local disk │ │ - Drift detect │
|
||||
│ - SBOM frags │ │ - Offline kit │ │ - Rescan plan │
|
||||
└─────────────────┘ └─────────────────┘ └─────────────────┘
|
||||
```
|
||||
|
||||
## Stage 1: Artefact Generation (Scanner Worker)
|
||||
|
||||
### 1.1 Configure Surface.FS
|
||||
|
||||
```csharp
|
||||
// In Scanner Worker startup
|
||||
builder.Services.AddSurfaceFileCache();
|
||||
builder.Services.AddSurfaceManifestStore();
|
||||
```
|
||||
|
||||
Environment variables (see [Surface.Env guide](../design/surface-env.md)):
|
||||
```bash
|
||||
SCANNER_SURFACE_FS_ENDPOINT=http://rustfs:8080
|
||||
SCANNER_SURFACE_FS_BUCKET=surface-cache
|
||||
SCANNER_SURFACE_CACHE_ROOT=/var/lib/stellaops/surface
|
||||
SCANNER_SURFACE_TENANT=default
|
||||
```
|
||||
|
||||
### 1.2 Generate and Publish Artefacts
|
||||
|
||||
```csharp
|
||||
public async Task<ScanResult> ExecuteScanAsync(ScanJob job, CancellationToken ct)
|
||||
{
|
||||
// 1. Run analyzers to generate artefacts
|
||||
var layerFragments = await AnalyzeLayersAsync(job.Image, ct);
|
||||
var entryTrace = await AnalyzeEntryPointsAsync(job.Image, ct);
|
||||
var sbomFragments = await GenerateSbomAsync(job.Image, ct);
|
||||
|
||||
// 2. Create manifest document
|
||||
var manifest = new SurfaceManifestDocument
|
||||
{
|
||||
Schema = "stellaops.surface.manifest@1",
|
||||
Tenant = _environment.Settings.Tenant,
|
||||
ImageDigest = job.Image.Digest,
|
||||
ScanId = job.Id,
|
||||
GeneratedAt = DateTimeOffset.UtcNow,
|
||||
Source = new SurfaceManifestSource
|
||||
{
|
||||
Component = "scanner.worker",
|
||||
Version = _version,
|
||||
WorkerInstance = Environment.MachineName,
|
||||
Attempt = job.Attempt
|
||||
},
|
||||
Artifacts = new List<SurfaceManifestArtifact>()
|
||||
};
|
||||
|
||||
// 3. Add artefacts to manifest
|
||||
foreach (var fragment in layerFragments)
|
||||
{
|
||||
var payloadUri = await _manifestWriter.StorePayloadAsync(
|
||||
fragment.Content,
|
||||
"layer.fragments",
|
||||
ct);
|
||||
|
||||
manifest.Artifacts.Add(new SurfaceManifestArtifact
|
||||
{
|
||||
Kind = "layer.fragments",
|
||||
Uri = payloadUri,
|
||||
Digest = fragment.Digest,
|
||||
MediaType = "application/vnd.stellaops.layer-fragments+json",
|
||||
Format = "json",
|
||||
SizeBytes = fragment.Content.Length
|
||||
});
|
||||
}
|
||||
|
||||
// 4. Publish manifest
|
||||
var result = await _manifestWriter.PublishAsync(manifest, ct);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Published manifest {Digest} with {Count} artefacts",
|
||||
result.Digest,
|
||||
manifest.Artifacts.Count);
|
||||
|
||||
return new ScanResult
|
||||
{
|
||||
ManifestUri = result.Uri,
|
||||
ManifestDigest = result.Digest
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
### 1.3 Cache EntryTrace Results
|
||||
|
||||
```csharp
|
||||
public async Task<EntryTraceGraph?> GetOrComputeEntryTraceAsync(
|
||||
ImageReference image,
|
||||
EntryTraceOptions options,
|
||||
CancellationToken ct)
|
||||
{
|
||||
// Create deterministic cache key
|
||||
var cacheKey = new SurfaceCacheKey(
|
||||
@namespace: "entrytrace.graph",
|
||||
tenant: _environment.Settings.Tenant,
|
||||
digest: ComputeOptionsHash(options, image.Digest));
|
||||
|
||||
// Try cache first
|
||||
var cached = await _cache.TryGetAsync<EntryTraceGraph>(cacheKey, ct);
|
||||
if (cached is not null)
|
||||
{
|
||||
_logger.LogDebug("EntryTrace cache hit for {Key}", cacheKey);
|
||||
return cached;
|
||||
}
|
||||
|
||||
// Compute and cache
|
||||
var graph = await ComputeEntryTraceAsync(image, options, ct);
|
||||
await _cache.SetAsync(cacheKey, graph, ct);
|
||||
|
||||
return graph;
|
||||
}
|
||||
```
|
||||
|
||||
## Stage 2: Storage (Surface.FS)
|
||||
|
||||
### 2.1 Manifest Storage Layout
|
||||
|
||||
```
|
||||
<bucket>/
|
||||
├── manifests/
|
||||
│ └── <tenant>/
|
||||
│ └── <digest[0..1]>/
|
||||
│ └── <digest[2..3]>/
|
||||
│ └── <digest>.json
|
||||
└── payloads/
|
||||
└── <tenant>/
|
||||
└── <kind>/
|
||||
└── sha256/
|
||||
└── <digest[0..1]>/
|
||||
└── <digest[2..3]>/
|
||||
└── <digest>.json.zst
|
||||
```
|
||||
|
||||
### 2.2 Local Cache Layout
|
||||
|
||||
```
|
||||
<cache_root>/
|
||||
├── manifests/ # Manifest JSON files
|
||||
│ └── <tenant>/...
|
||||
├── cache/ # Hot artefacts
|
||||
│ └── <namespace>/
|
||||
│ └── <tenant>/
|
||||
│ └── <digest>
|
||||
└── temp/ # In-progress writes
|
||||
```
|
||||
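A quick way to inspect cache usage on a worker node, assuming the cache root matches the `SCANNER_SURFACE_CACHE_ROOT` value shown earlier:

```bash
CACHE_ROOT=/var/lib/stellaops/surface

du -sh "$CACHE_ROOT"/cache/*/                # per-namespace hot-artefact usage
find "$CACHE_ROOT"/temp -type f -mmin +60    # in-progress writes older than an hour (likely stale)
```
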
|
||||
### 2.3 Manifest URI Format
|
||||
|
||||
```
|
||||
cas://<bucket>/<prefix>/<tenant>/<digest[0..1]>/<digest[2..3]>/<digest>.json
|
||||
```
|
||||
|
||||
Example:
|
||||
```
|
||||
cas://surface-cache/manifests/acme/ab/cd/abcdef0123456789...json
|
||||
```
|
||||
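A helper sketch for composing the sharded manifest URI; the method is illustrative and the real Surface.FS library may already expose an equivalent:

```csharp
static string BuildManifestUri(string bucket, string prefix, string tenant, string digestHex)
{
    // Shard by the first two hex pairs of the digest: <digest[0..1]>/<digest[2..3]>/<digest>.json
    var shard1 = digestHex[..2];
    var shard2 = digestHex[2..4];
    return $"cas://{bucket}/{prefix}/{tenant}/{shard1}/{shard2}/{digestHex}.json";
}

// BuildManifestUri("surface-cache", "manifests", "acme", "abcdef0123456789")
// => "cas://surface-cache/manifests/acme/ab/cd/abcdef0123456789.json"
```
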
|
||||
## Stage 3: Consumption
|
||||
|
||||
### 3.1 WebService API
|
||||
|
||||
```http
|
||||
GET /api/v1/scans/{id}
|
||||
```
|
||||
|
||||
Response includes Surface manifest pointer:
|
||||
```json
|
||||
{
|
||||
"id": "scan-1234",
|
||||
"status": "completed",
|
||||
"surface": {
|
||||
"manifestUri": "cas://surface-cache/manifests/acme/ab/cd/...",
|
||||
"manifestDigest": "sha256:abcdef...",
|
||||
"artifacts": [
|
||||
{
|
||||
"kind": "layer.fragments",
|
||||
"uri": "cas://surface-cache/payloads/acme/layer.fragments/...",
|
||||
"digest": "sha256:123456...",
|
||||
"mediaType": "application/vnd.stellaops.layer-fragments+json"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 3.2 Zastava Drift Detection
|
||||
|
||||
```csharp
|
||||
public async Task<DriftResult> DetectDriftAsync(
|
||||
string imageDigest,
|
||||
CancellationToken ct)
|
||||
{
|
||||
// 1. Fetch baseline manifest
|
||||
var manifestUri = await _surfacePointerService.GetManifestUriAsync(imageDigest, ct);
|
||||
var manifest = await _manifestReader.TryGetByUriAsync(manifestUri, ct);
|
||||
|
||||
if (manifest is null)
|
||||
{
|
||||
return DriftResult.NoBaseline();
|
||||
}
|
||||
|
||||
// 2. Get EntryTrace artefact
|
||||
var entryTraceArtifact = manifest.Artifacts
|
||||
.FirstOrDefault(a => a.Kind == "entrytrace.graph");
|
||||
|
||||
if (entryTraceArtifact is null)
|
||||
{
|
||||
return DriftResult.NoEntryTrace();
|
||||
}
|
||||
|
||||
// 3. Compare with runtime
|
||||
var baseline = await _payloadStore.GetAsync<EntryTraceGraph>(
|
||||
entryTraceArtifact.Uri, ct);
|
||||
|
||||
var runtime = await _runtimeCollector.CollectAsync(ct);
|
||||
|
||||
return CompareGraphs(baseline, runtime);
|
||||
}
|
||||
```
|
||||
|
||||
### 3.3 Scheduler Rescan Planning
|
||||
|
||||
```csharp
|
||||
public async Task<RescanPlan> CreateRescanPlanAsync(
|
||||
string imageDigest,
|
||||
CancellationToken ct)
|
||||
{
|
||||
// 1. Read manifest to understand what was scanned
|
||||
var manifest = await _manifestReader.TryGetByDigestAsync(imageDigest, ct);
|
||||
|
||||
if (manifest is null || IsExpired(manifest))
|
||||
{
|
||||
return RescanPlan.FullRescan();
|
||||
}
|
||||
|
||||
// 2. Check for layer changes
|
||||
var layerArtifact = manifest.Artifacts
|
||||
.FirstOrDefault(a => a.Kind == "layer.fragments");
|
||||
|
||||
if (layerArtifact is not null)
|
||||
{
|
||||
var layers = await _payloadStore.GetAsync<LayerFragments>(
|
||||
layerArtifact.Uri, ct);
|
||||
|
||||
var changedLayers = await DetectChangedLayersAsync(layers, ct);
|
||||
|
||||
if (changedLayers.Any())
|
||||
{
|
||||
return RescanPlan.IncrementalRescan(changedLayers);
|
||||
}
|
||||
}
|
||||
|
||||
return RescanPlan.NoRescanNeeded();
|
||||
}
|
||||
```
|
||||
|
||||
## Offline Kit Workflow
|
||||
|
||||
### Export (Online Environment)
|
||||
|
||||
```bash
|
||||
# 1. Build offline kit with Surface manifests
|
||||
python ops/offline-kit/build_offline_kit.py \
|
||||
--version 2025.10.0 \
|
||||
--include-surface-manifests \
|
||||
--output-dir out/offline-kit
|
||||
|
||||
# 2. Kit structure includes:
|
||||
# offline/
|
||||
# surface/
|
||||
# manifests/
|
||||
# <tenant>/<digest[0..1]>/<digest[2..3]>/<digest>.json
|
||||
# payloads/
|
||||
# <tenant>/<kind>/sha256/<digest[0..1]>/<digest[2..3]>/<digest>.json.zst
|
||||
# manifest-index.json
|
||||
```
|
||||
|
||||
### Import (Air-Gapped Environment)
|
||||
|
||||
```csharp
|
||||
public async Task ImportOfflineKitAsync(
|
||||
string kitPath,
|
||||
CancellationToken ct)
|
||||
{
|
||||
var surfacePath = Path.Combine(kitPath, "surface");
|
||||
var indexPath = Path.Combine(surfacePath, "manifest-index.json");
|
||||
|
||||
var index = await LoadIndexAsync(indexPath, ct);
|
||||
|
||||
foreach (var entry in index.Manifests)
|
||||
{
|
||||
// 1. Load and verify manifest
|
||||
var manifestPath = Path.Combine(surfacePath, entry.RelativePath);
|
||||
var manifest = await LoadManifestAsync(manifestPath, ct);
|
||||
|
||||
// 2. Verify digest
|
||||
var computedDigest = ComputeDigest(manifest);
|
||||
if (computedDigest != entry.Digest)
|
||||
{
|
||||
throw new InvalidOperationException(
|
||||
$"Manifest digest mismatch: expected {entry.Digest}, got {computedDigest}");
|
||||
}
|
||||
|
||||
// 3. Import via Surface.FS API
|
||||
await _manifestWriter.PublishAsync(manifest, ct);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Imported manifest {Digest} for image {Image}",
|
||||
entry.Digest,
|
||||
manifest.ImageDigest);
|
||||
}
|
||||
|
||||
// 4. Import payloads
|
||||
foreach (var payload in index.Payloads)
|
||||
{
|
||||
var payloadPath = Path.Combine(surfacePath, payload.RelativePath);
|
||||
await _payloadStore.ImportAsync(payloadPath, payload.Uri, ct);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Offline Operation
|
||||
|
||||
Once imported, Surface.FS consumers operate normally:
|
||||
|
||||
```csharp
|
||||
// Same code works online and offline
|
||||
var manifest = await _manifestReader.TryGetByUriAsync(manifestUri, ct);
|
||||
var payload = await _payloadStore.GetAsync(artifact.Uri, ct);
|
||||
```
|
||||
|
||||
## Configuration Reference
|
||||
|
||||
### SurfaceManifestStoreOptions
|
||||
|
||||
| Option | Default | Description |
|
||||
|--------|---------|-------------|
|
||||
| `Bucket` | `surface-cache` | Object store bucket |
|
||||
| `ManifestPrefix` | `manifests` | Prefix for manifest objects |
|
||||
| `PayloadPrefix` | `payloads` | Prefix for payload objects |
|
||||
| `LocalManifestRoot` | `<cache>/manifests` | Local manifest directory |
|
||||
|
||||
### SurfaceCacheOptions
|
||||
|
||||
| Option | Default | Description |
|
||||
|--------|---------|-------------|
|
||||
| `Root` | `<temp>/stellaops/surface` | Cache root directory |
|
||||
| `QuotaMegabytes` | `4096` | Cache size limit |
|
||||
| `EvictionThreshold` | `0.9` | Trigger eviction at 90% quota |
|
||||
|
||||
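A programmatic override sketch for these options; the property names follow the tables above, while the `Configure<T>` binding shown is an assumption rather than the library's documented pattern:

```csharp
builder.Services.AddSurfaceFileCache();
builder.Services.AddSurfaceManifestStore();

builder.Services.Configure<SurfaceManifestStoreOptions>(options =>
{
    options.Bucket = "surface-cache";      // object store bucket
    options.ManifestPrefix = "manifests";  // prefix for manifest objects
    options.PayloadPrefix = "payloads";    // prefix for payload objects
});

builder.Services.Configure<SurfaceCacheOptions>(options =>
{
    options.QuotaMegabytes = 8192;         // raise the 4 GiB default for busy workers
    options.EvictionThreshold = 0.9;       // evict once 90% of the quota is used
});
```
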
## Metrics
|
||||
|
||||
| Metric | Labels | Description |
|
||||
|--------|--------|-------------|
|
||||
| `surface_manifest_published_total` | `tenant`, `kind` | Manifests published |
|
||||
| `surface_manifest_cache_hit_total` | `namespace`, `tenant` | Cache hits |
|
||||
| `surface_manifest_publish_duration_ms` | `tenant` | Publish latency |
|
||||
| `surface_payload_persisted_total` | `kind` | Payloads stored |
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Manifest Not Found
|
||||
|
||||
1. Check tenant matches between writer and reader
|
||||
2. Verify Surface.FS endpoint is reachable
|
||||
3. Check bucket permissions
|
||||
4. Review `surface_manifest_published_total` metric
|
||||
|
||||
### Cache Miss Despite Expected Hit
|
||||
|
||||
1. Verify cache key components match (namespace, tenant, digest)
|
||||
2. Check cache quota - eviction may have occurred
|
||||
3. Review `surface_manifest_cache_hit_total` metric
|
||||
|
||||
### Offline Import Failures
|
||||
|
||||
1. Verify manifest digest matches index
|
||||
2. Check file permissions on import path
|
||||
3. Ensure Surface.FS endpoint is writable
|
||||
4. Review import logs for specific errors
|
||||
|
||||
## References
|
||||
|
||||
- [Surface.FS Design](../design/surface-fs.md)
|
||||
- [Surface.Env Design](../design/surface-env.md)
|
||||
- [Surface.Validation Guide](./surface-validation-extensibility.md)
|
||||
- [Offline Kit Documentation](../../../../24_OFFLINE_KIT.md)
|
||||
docs/modules/scanner/guides/surface-validation-extensibility.md (new file, 455 lines)
@@ -0,0 +1,455 @@
|
||||
# Surface.Validation Extensibility Guide
|
||||
|
||||
> **Version:** 1.0 (2025-11-28)
|
||||
>
|
||||
> **Audience:** Scanner Worker/WebService integrators, custom analyzer developers, Zastava contributors
|
||||
|
||||
## Overview
|
||||
|
||||
Surface.Validation provides a pluggable validator framework that checks configuration and data preconditions before scanner work runs. This guide covers how to extend the validation system with custom validators, customize reporting, and integrate validation into your components.
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Basic Registration
|
||||
|
||||
```csharp
|
||||
// In Program.cs or your DI configuration
|
||||
builder.Services.AddSurfaceValidation();
|
||||
```
|
||||
|
||||
This registers the default validators:
|
||||
- `SurfaceEndpointValidator` - Validates Surface.FS endpoint and bucket
|
||||
- `SurfaceCacheValidator` - Validates cache directory writability and quota
|
||||
- `SurfaceSecretsValidator` - Validates secrets provider configuration
|
||||
|
||||
### Adding Custom Validators
|
||||
|
||||
```csharp
|
||||
builder.Services.AddSurfaceValidation(builder =>
|
||||
{
|
||||
builder.AddValidator<MyCustomValidator>();
|
||||
builder.AddValidator<AnotherValidator>();
|
||||
});
|
||||
```
|
||||
|
||||
## Writing Custom Validators
|
||||
|
||||
### Validator Interface
|
||||
|
||||
Implement `ISurfaceValidator` to create a custom validator:
|
||||
|
||||
```csharp
|
||||
public interface ISurfaceValidator
|
||||
{
|
||||
ValueTask<SurfaceValidationResult> ValidateAsync(
|
||||
SurfaceValidationContext context,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
```
|
||||
|
||||
### Example: Registry Credentials Validator
|
||||
|
||||
```csharp
|
||||
public sealed class RegistryCredentialsValidator : ISurfaceValidator
|
||||
{
|
||||
private readonly IHttpClientFactory _httpClientFactory;
|
||||
|
||||
public RegistryCredentialsValidator(IHttpClientFactory httpClientFactory)
|
||||
{
|
||||
_httpClientFactory = httpClientFactory;
|
||||
}
|
||||
|
||||
public async ValueTask<SurfaceValidationResult> ValidateAsync(
|
||||
SurfaceValidationContext context,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(context);
|
||||
|
||||
var issues = new List<SurfaceValidationIssue>();
|
||||
|
||||
// Access secrets configuration from context
|
||||
var secrets = context.Environment.Secrets;
|
||||
if (secrets.Provider == "file" && string.IsNullOrEmpty(secrets.Root))
|
||||
{
|
||||
issues.Add(SurfaceValidationIssue.Error(
|
||||
"REGISTRY_SECRETS_ROOT_MISSING",
|
||||
"Registry secrets root path is not configured.",
|
||||
"Set SCANNER_SURFACE_SECRETS_ROOT to the secrets directory."));
|
||||
}
|
||||
|
||||
// Access custom properties passed during validation
|
||||
if (context.Properties.TryGetValue("registryEndpoint", out var endpoint))
|
||||
{
|
||||
var reachable = await CheckEndpointAsync(endpoint?.ToString(), cancellationToken);
|
||||
if (!reachable)
|
||||
{
|
||||
issues.Add(SurfaceValidationIssue.Warning(
|
||||
"REGISTRY_ENDPOINT_UNREACHABLE",
|
||||
$"Registry endpoint {endpoint} is not reachable.",
|
||||
"Verify network connectivity to the container registry."));
|
||||
}
|
||||
}
|
||||
|
||||
return issues.Count == 0
|
||||
? SurfaceValidationResult.Success()
|
||||
: SurfaceValidationResult.FromIssues(issues);
|
||||
}
|
||||
|
||||
private async Task<bool> CheckEndpointAsync(string? endpoint, CancellationToken ct)
|
||||
{
|
||||
if (string.IsNullOrEmpty(endpoint)) return true;
|
||||
|
||||
try
|
||||
{
|
||||
var client = _httpClientFactory.CreateClient();
|
||||
client.Timeout = TimeSpan.FromMilliseconds(500); // Keep validations fast
|
||||
var response = await client.GetAsync(endpoint, ct);
|
||||
return response.IsSuccessStatusCode;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Best Practices for Validators
|
||||
|
||||
1. **Keep validations fast** - Target < 500ms per validator to avoid blocking startup
|
||||
2. **Use appropriate severity levels** (see the sketch after this list):
|
||||
- `Error` - Fatal misconfiguration that prevents operation
|
||||
- `Warning` - Suboptimal configuration that may cause issues
|
||||
- `Info` - Informational notices
|
||||
3. **Provide actionable hints** - Include remediation steps in the hint parameter
|
||||
4. **Access services via context** - Use `context.Services.GetService<T>()` for DI
|
||||
5. **Check cancellation tokens** - Honor cancellation for async operations
|
||||
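A sketch of emitting issues at each severity; the `Error` and `Warning` factories appear in the example above, while the `Info` overload and the specific codes below are assumptions used for illustration:

```csharp
// Fatal: the component cannot operate without this setting.
issues.Add(SurfaceValidationIssue.Error(
    "SURFACE_ENV_MISSING_ENDPOINT",
    "Surface.FS endpoint is not configured.",
    "Set SCANNER_SURFACE_FS_ENDPOINT to the Surface.FS base URL."));

// Degraded but usable: work continues, possibly slower or with reduced coverage.
issues.Add(SurfaceValidationIssue.Warning(
    "CACHE_WARMUP_FAILED",
    "Surface cache warm-up did not complete before startup.",
    "Scans will still run; the first requests may be slower."));

// Informational notice only (Info factory assumed to mirror Error/Warning).
issues.Add(SurfaceValidationIssue.Info(
    "SURFACE_ENV_TENANT_DEFAULTED",
    "No tenant configured; falling back to the default tenant.",
    "Set SCANNER_SURFACE_TENANT to silence this notice."));
```
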
|
||||
## Validation Context
|
||||
|
||||
### Creating Context with Properties
|
||||
|
||||
```csharp
|
||||
var context = SurfaceValidationContext.Create(
|
||||
serviceProvider,
|
||||
componentName: "Scanner.Worker",
|
||||
environment: surfaceEnvironment,
|
||||
properties: new Dictionary<string, object?>
|
||||
{
|
||||
["jobId"] = currentJob.Id,
|
||||
["imageDigest"] = image.Digest,
|
||||
["configPath"] = "/etc/scanner/config.yaml"
|
||||
});
|
||||
```
|
||||
|
||||
### Accessing Context in Validators
|
||||
|
||||
```csharp
|
||||
public ValueTask<SurfaceValidationResult> ValidateAsync(
|
||||
SurfaceValidationContext context,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
// Access environment settings
|
||||
var endpoint = context.Environment.SurfaceFsEndpoint;
|
||||
var bucket = context.Environment.SurfaceFsBucket;
|
||||
var tenant = context.Environment.Tenant;
|
||||
|
||||
// Access custom properties
|
||||
if (context.Properties.TryGetValue("imageDigest", out var digest))
|
||||
{
|
||||
// Validate specific to this image
|
||||
}
|
||||
|
||||
// Access DI services
|
||||
var logger = context.Services.GetService<ILogger<MyValidator>>();
|
||||
}
|
||||
```
|
||||
|
||||
## Running Validators
|
||||
|
||||
### Using the Validator Runner
|
||||
|
||||
```csharp
|
||||
public class MyService
|
||||
{
|
||||
private readonly ISurfaceValidatorRunner _runner;
|
||||
private readonly ISurfaceEnvironment _environment;
|
||||
|
||||
public MyService(ISurfaceValidatorRunner runner, ISurfaceEnvironment environment)
|
||||
{
|
||||
_runner = runner;
|
||||
_environment = environment;
|
||||
}
|
||||
|
||||
public async Task ExecuteAsync(CancellationToken ct)
|
||||
{
|
||||
var context = SurfaceValidationContext.Create(
|
||||
_serviceProvider,
|
||||
"MyService",
|
||||
_environment.Settings);
|
||||
|
||||
// Option 1: Get results and handle manually
|
||||
var result = await _runner.RunAllAsync(context, ct);
|
||||
if (!result.IsSuccess)
|
||||
{
|
||||
foreach (var issue in result.Issues.Where(i => i.Severity == SurfaceValidationSeverity.Error))
|
||||
{
|
||||
_logger.LogError("Validation failed: {Code} - {Message}", issue.Code, issue.Message);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Option 2: Throw on failure (respects options)
|
||||
await _runner.EnsureAsync(context, ct);
|
||||
|
||||
// Continue with work...
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Custom Reporting
|
||||
|
||||
### Implementing a Reporter
|
||||
|
||||
```csharp
|
||||
public sealed class MetricsSurfaceValidationReporter : ISurfaceValidationReporter
|
||||
{
|
||||
private readonly IMetricsFactory _metrics;
|
||||
|
||||
public MetricsSurfaceValidationReporter(IMetricsFactory metrics)
|
||||
{
|
||||
_metrics = metrics;
|
||||
}
|
||||
|
||||
public void Report(SurfaceValidationContext context, SurfaceValidationResult result)
|
||||
{
|
||||
var counter = _metrics.CreateCounter<long>("surface_validation_issues_total");
|
||||
|
||||
foreach (var issue in result.Issues)
|
||||
{
|
||||
counter.Add(1, new KeyValuePair<string, object?>[]
|
||||
{
|
||||
new("code", issue.Code),
|
||||
new("severity", issue.Severity.ToString().ToLowerInvariant()),
|
||||
new("component", context.ComponentName)
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Registering Custom Reporters
|
||||
|
||||
```csharp
|
||||
// Replace default reporter
|
||||
builder.Services.AddSingleton<ISurfaceValidationReporter, MetricsSurfaceValidationReporter>();
|
||||
|
||||
// Or add alongside the default reporter (composite pattern; `Decorate` comes from a decorator helper such as Scrutor)
|
||||
builder.Services.Decorate<ISurfaceValidationReporter>((inner, sp) =>
|
||||
new CompositeSurfaceValidationReporter(
|
||||
inner,
|
||||
sp.GetRequiredService<MetricsSurfaceValidationReporter>()));
|
||||
```
|
||||
|
||||
## Configuration Options
|
||||
|
||||
### SurfaceValidationOptions
|
||||
|
||||
| Option | Default | Description |
|
||||
|--------|---------|-------------|
|
||||
| `ThrowOnFailure` | `true` | Whether `EnsureAsync()` throws on validation failure |
|
||||
| `ContinueOnError` | `false` | Whether to continue running validators after first error |
|
||||
|
||||
Configure via `IConfiguration`:
|
||||
|
||||
```json
|
||||
{
|
||||
"Surface": {
|
||||
"Validation": {
|
||||
"ThrowOnFailure": true,
|
||||
"ContinueOnError": false
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Or programmatically:
|
||||
|
||||
```csharp
|
||||
builder.Services.Configure<SurfaceValidationOptions>(options =>
|
||||
{
|
||||
options.ThrowOnFailure = true;
|
||||
options.ContinueOnError = true; // Useful for diagnostics
|
||||
});
|
||||
```
|
||||
|
||||
## Issue Codes
|
||||
|
||||
### Standard Codes
|
||||
|
||||
| Code | Severity | Validator |
|
||||
|------|----------|-----------|
|
||||
| `SURFACE_ENV_MISSING_ENDPOINT` | Error | SurfaceEndpointValidator |
|
||||
| `SURFACE_FS_BUCKET_MISSING` | Error | SurfaceEndpointValidator |
|
||||
| `SURFACE_ENV_CACHE_DIR_UNWRITABLE` | Error | SurfaceCacheValidator |
|
||||
| `SURFACE_ENV_CACHE_QUOTA_INVALID` | Error | SurfaceCacheValidator |
|
||||
| `SURFACE_SECRET_PROVIDER_UNKNOWN` | Error | SurfaceSecretsValidator |
|
||||
| `SURFACE_SECRET_CONFIGURATION_MISSING` | Error | SurfaceSecretsValidator |
|
||||
| `SURFACE_ENV_TENANT_MISSING` | Error | SurfaceSecretsValidator |
|
||||
|
||||
### Custom Issue Codes
|
||||
|
||||
Follow the naming convention: `<SUBSYSTEM>_<COMPONENT>_<ISSUE>`
|
||||
|
||||
```csharp
|
||||
public static class MyValidationCodes
|
||||
{
|
||||
public const string RegistrySecretsRootMissing = "REGISTRY_SECRETS_ROOT_MISSING";
|
||||
public const string RegistryEndpointUnreachable = "REGISTRY_ENDPOINT_UNREACHABLE";
|
||||
public const string CacheWarmupFailed = "CACHE_WARMUP_FAILED";
|
||||
}
|
||||
```
|
||||
|
||||
## Integration Examples
|
||||
|
||||
### Scanner Worker Startup
|
||||
|
||||
```csharp
|
||||
// In hosted service
|
||||
public async Task StartAsync(CancellationToken ct)
|
||||
{
|
||||
var context = SurfaceValidationContext.Create(
|
||||
_services,
|
||||
"Scanner.Worker",
|
||||
_surfaceEnv.Settings);
|
||||
|
||||
try
|
||||
{
|
||||
await _validatorRunner.EnsureAsync(context, ct);
|
||||
_logger.LogInformation("Surface validation passed");
|
||||
}
|
||||
catch (SurfaceValidationException ex)
|
||||
{
|
||||
_logger.LogCritical(ex, "Surface validation failed; worker cannot start");
|
||||
throw;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Per-Scan Validation
|
||||
|
||||
```csharp
|
||||
public async Task<ScanResult> ScanImageAsync(ImageReference image, CancellationToken ct)
|
||||
{
|
||||
var context = SurfaceValidationContext.Create(
|
||||
_services,
|
||||
"Scanner.Analyzer",
|
||||
_surfaceEnv.Settings,
|
||||
new Dictionary<string, object?>
|
||||
{
|
||||
["imageDigest"] = image.Digest,
|
||||
["imageReference"] = image.Reference
|
||||
});
|
||||
|
||||
var result = await _validatorRunner.RunAllAsync(context, ct);
|
||||
|
||||
if (result.HasErrors)
|
||||
{
|
||||
return ScanResult.Failed(result.Issues.Select(i => i.Message));
|
||||
}
|
||||
|
||||
// Proceed with scan...
|
||||
}
|
||||
```
|
||||
|
||||
### Zastava Webhook Readiness
|
||||
|
||||
```csharp
|
||||
app.MapGet("/readyz", async (ISurfaceValidatorRunner runner, ISurfaceEnvironment env) =>
|
||||
{
|
||||
var context = SurfaceValidationContext.Create(
|
||||
app.Services,
|
||||
"Zastava.Webhook",
|
||||
env.Settings);
|
||||
|
||||
var result = await runner.RunAllAsync(context);
|
||||
|
||||
if (!result.IsSuccess)
|
||||
{
|
||||
return Results.Json(new
|
||||
{
|
||||
status = "unhealthy",
|
||||
issues = result.Issues.Select(i => new { i.Code, i.Message, i.Hint })
|
||||
}, statusCode: 503);
|
||||
}
|
||||
|
||||
return Results.Ok(new { status = "healthy" });
|
||||
});
|
||||
```
|
||||
|
||||
## Testing Validators
|
||||
|
||||
### Unit Testing
|
||||
|
||||
```csharp
|
||||
[Fact]
|
||||
public async Task Validator_MissingEndpoint_ReturnsError()
|
||||
{
|
||||
// Arrange
|
||||
var settings = new SurfaceEnvironmentSettings(
|
||||
SurfaceFsEndpoint: new Uri("https://surface.invalid"),
|
||||
SurfaceFsBucket: "",
|
||||
// ... other settings
|
||||
);
|
||||
|
||||
var context = SurfaceValidationContext.Create(
|
||||
new ServiceCollection().BuildServiceProvider(),
|
||||
"Test",
|
||||
settings);
|
||||
|
||||
var validator = new SurfaceEndpointValidator();
|
||||
|
||||
// Act
|
||||
var result = await validator.ValidateAsync(context);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.IsSuccess);
|
||||
Assert.Contains(result.Issues, i => i.Code == SurfaceValidationIssueCodes.SurfaceEndpointMissing);
|
||||
}
|
||||
```
|
||||
|
||||
### Integration Testing
|
||||
|
||||
```csharp
|
||||
[Fact]
|
||||
public async Task ValidationRunner_AllValidatorsExecute()
|
||||
{
|
||||
// Arrange
|
||||
var services = new ServiceCollection();
|
||||
services.AddSurfaceValidation(builder =>
|
||||
{
|
||||
builder.AddValidator<TestValidator1>();
|
||||
builder.AddValidator<TestValidator2>();
|
||||
});
|
||||
|
||||
var provider = services.BuildServiceProvider();
|
||||
var runner = provider.GetRequiredService<ISurfaceValidatorRunner>();
|
||||
|
||||
var context = SurfaceValidationContext.Create(
|
||||
provider,
|
||||
"IntegrationTest",
|
||||
CreateValidSettings());
|
||||
|
||||
// Act
|
||||
var result = await runner.RunAllAsync(context);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsSuccess);
|
||||
}
|
||||
```
|
||||
|
||||
## References
|
||||
|
||||
- [Surface.Validation Design](../design/surface-validation.md)
|
||||
- [Surface.Env Design](../design/surface-env.md)
|
||||
- [Surface.Secrets Schema](../design/surface-secrets-schema.md)
|
||||
ops/devops/scripts/rollback-lnm-backfill.js (new file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
/**
|
||||
* Rollback script for LNM-21-102-DEV legacy advisory backfill migration.
|
||||
* Removes backfilled observations and linksets by querying the backfill_marker field,
|
||||
* then clears the tombstone markers from advisory_raw.
|
||||
*
|
||||
 * Usage:
 *   mongo concelier ops/devops/scripts/rollback-lnm-backfill.js
 *   mongo --eval 'var DRY_RUN="1"' concelier ops/devops/scripts/rollback-lnm-backfill.js
 *
 * Script variables (pass via --eval as shell globals; the script does not read OS environment variables):
 *   DRY_RUN    - if set to "1", only reports what would be deleted without making changes.
 *   BATCH_SIZE - optional batch size for deletions (default 500); currently informational, as deletions run as a single deleteMany.
|
||||
*
|
||||
* After running this script, delete the migration record:
|
||||
* db.schema_migrations.deleteOne({ _id: "20251127_lnm_legacy_backfill" })
|
||||
*
|
||||
* Then restart the Concelier service.
|
||||
*/
(function () {
  var BACKFILL_MARKER = "lnm_21_102_dev";

  function toInt(value, fallback) {
    var parsed = parseInt(value, 10);
    return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback;
  }

  function toBool(value) {
    return value === "1" || value === "true" || value === true;
  }

  var dryRun = typeof DRY_RUN !== "undefined" ? toBool(DRY_RUN) : false;
  var batchSize = typeof BATCH_SIZE !== "undefined" ? toInt(BATCH_SIZE, 500) : 500;
  var database = db.getName ? db.getSiblingDB(db.getName()) : db;
  if (!database) {
    throw new Error("Unable to resolve database handle");
  }

  print("");
  print("== LNM-21-102-DEV Backfill Rollback ==");
  print("Database  : " + database.getName());
  print("Dry Run   : " + dryRun);
  print("Batch Size: " + batchSize);
  print("");

  // Step 1: Count and delete backfilled observations
  var observationsCollection = database.getCollection("advisory_observations");
  var observationsFilter = { backfill_marker: BACKFILL_MARKER };
  var observationsCount = observationsCollection.countDocuments(observationsFilter);

  print("Found " + observationsCount + " backfilled observations to remove.");

  if (!dryRun && observationsCount > 0) {
    var obsResult = observationsCollection.deleteMany(observationsFilter);
    print("Deleted " + obsResult.deletedCount + " observations.");
  }

  // Step 2: Count and delete backfilled linksets
  var linksetsCollection = database.getCollection("advisory_linksets");
  var linksetsFilter = { backfill_marker: BACKFILL_MARKER };
  var linksetsCount = linksetsCollection.countDocuments(linksetsFilter);

  print("Found " + linksetsCount + " backfilled linksets to remove.");

  if (!dryRun && linksetsCount > 0) {
    var linkResult = linksetsCollection.deleteMany(linksetsFilter);
    print("Deleted " + linkResult.deletedCount + " linksets.");
  }

  // Step 3: Clear tombstone markers from advisory_raw
  var rawCollection = database.getCollection("advisory_raw");
  var rawFilter = { backfill_marker: BACKFILL_MARKER };
  var rawCount = rawCollection.countDocuments(rawFilter);

  print("Found " + rawCount + " advisory_raw documents with tombstone markers to clear.");

  if (!dryRun && rawCount > 0) {
    var rawResult = rawCollection.updateMany(rawFilter, { $unset: { backfill_marker: "" } });
    print("Cleared tombstone markers from " + rawResult.modifiedCount + " advisory_raw documents.");
  }

  // Step 4: Summary
  print("");
  print("== Rollback Summary ==");
  if (dryRun) {
    print("DRY RUN - No changes were made.");
    print("Would delete " + observationsCount + " observations.");
    print("Would delete " + linksetsCount + " linksets.");
    print("Would clear " + rawCount + " tombstone markers.");
  } else {
    print("Observations deleted: " + observationsCount);
    print("Linksets deleted    : " + linksetsCount);
    print("Tombstones cleared  : " + rawCount);
  }

  print("");
  print("Next steps:");
  print("1. Delete the migration record:");
  print('   db.schema_migrations.deleteOne({ _id: "20251127_lnm_legacy_backfill" })');
  print("2. Restart the Concelier service.");
  print("");
})();
@@ -1,12 +1,12 @@
using System.Collections.Generic;
using System.Net;
using System.Net.Http.Headers;
using System.Security.Claims;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Linq;
using System.Net.Http.Json;
using Microsoft.AspNetCore.Authentication;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.TestHost;
@@ -54,11 +54,11 @@ public sealed class ConsoleEndpointsTests
        Assert.Equal(1, tenants.GetArrayLength());
        Assert.Equal("tenant-default", tenants[0].GetProperty("id").GetString());

        var events = sink.Events;
        var authorizeEvent = Assert.Single(events, evt => evt.EventType == "authority.resource.authorize");
        Assert.Equal(AuthEventOutcome.Success, authorizeEvent.Outcome);

        var consoleEvent = Assert.Single(events, evt => evt.EventType == "authority.console.tenants.read");
        Assert.Equal(AuthEventOutcome.Success, consoleEvent.Outcome);
        Assert.Contains("tenant.resolved", consoleEvent.Properties.Select(property => property.Name));
        Assert.Equal(2, events.Count);
@@ -148,17 +148,17 @@ public sealed class ConsoleEndpointsTests
        Assert.Equal("tenant-default", json.RootElement.GetProperty("tenant").GetString());

        var events = sink.Events;
        var authorizeEvent = Assert.Single(events, evt => evt.EventType == "authority.resource.authorize");
        Assert.Equal(AuthEventOutcome.Success, authorizeEvent.Outcome);

        var consoleEvent = Assert.Single(events, evt => evt.EventType == "authority.console.profile.read");
        Assert.Equal(AuthEventOutcome.Success, consoleEvent.Outcome);
        Assert.Equal(2, events.Count);
    }

    [Fact]
    public async Task TokenIntrospect_FlagsInactive_WhenExpired()
    {
        var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-10-31T12:00:00Z"));
        var sink = new RecordingAuthEventSink();
        await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty<string>(), Array.Empty<string>()));
@@ -186,123 +186,340 @@ public sealed class ConsoleEndpointsTests
        Assert.Equal("token-abc", json.RootElement.GetProperty("tokenId").GetString());

        var events = sink.Events;
        var authorizeEvent = Assert.Single(events, evt => evt.EventType == "authority.resource.authorize");
        Assert.Equal(AuthEventOutcome.Success, authorizeEvent.Outcome);

        var consoleEvent = Assert.Single(events, evt => evt.EventType == "authority.console.token.introspect");
        Assert.Equal(AuthEventOutcome.Success, consoleEvent.Outcome);
        Assert.Equal(2, events.Count);
    }

    [Fact]
    public async Task VulnerabilityFindings_ReturnsSamplePayload()
    {
        var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z"));
        var sink = new RecordingAuthEventSink();
        await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty<string>(), Array.Empty<string>()));

        var accessor = app.Services.GetRequiredService<TestPrincipalAccessor>();
        accessor.Principal = CreatePrincipal(
            tenant: "tenant-default",
            scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AdvisoryRead, StellaOpsScopes.VexRead },
            expiresAt: timeProvider.GetUtcNow().AddMinutes(30));

        var client = app.CreateTestClient();
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme);
        client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

        var response = await client.GetAsync("/console/vuln/findings?severity=high");
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        var items = json.RootElement.GetProperty("items");
        Assert.True(items.GetArrayLength() >= 1);
        Assert.Equal("CVE-2024-12345", items[0].GetProperty("coordinates").GetProperty("advisoryId").GetString());
    }

    [Fact]
    public async Task VulnerabilityFindingDetail_ReturnsExpandedDocument()
    {
        var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z"));
        var sink = new RecordingAuthEventSink();
        await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty<string>(), Array.Empty<string>()));

        var accessor = app.Services.GetRequiredService<TestPrincipalAccessor>();
        accessor.Principal = CreatePrincipal(
            tenant: "tenant-default",
            scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AdvisoryRead, StellaOpsScopes.VexRead },
            expiresAt: timeProvider.GetUtcNow().AddMinutes(30));

        var client = app.CreateTestClient();
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme);
        client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

        var response = await client.GetAsync("/console/vuln/tenant-default:advisory-ai:sha256:5d1a");
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        var summary = json.RootElement.GetProperty("summary");
        Assert.Equal("tenant-default:advisory-ai:sha256:5d1a", summary.GetProperty("findingId").GetString());
        Assert.Equal("reachable", summary.GetProperty("reachability").GetProperty("status").GetString());
        var detailReachability = json.RootElement.GetProperty("reachability");
        Assert.Equal("reachable", detailReachability.GetProperty("status").GetString());
    }

    [Fact]
    public async Task VulnerabilityTicket_ReturnsDeterministicPayload()
    {
        var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z"));
        var sink = new RecordingAuthEventSink();
        await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty<string>(), Array.Empty<string>()));

        var accessor = app.Services.GetRequiredService<TestPrincipalAccessor>();
        accessor.Principal = CreatePrincipal(
            tenant: "tenant-default",
            scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AdvisoryRead, StellaOpsScopes.VexRead },
            expiresAt: timeProvider.GetUtcNow().AddMinutes(30));

        var client = app.CreateTestClient();
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme);
        client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

        var payload = new ConsoleVulnerabilityTicketRequest(
            Selection: new[] { "tenant-default:advisory-ai:sha256:5d1a" },
            TargetSystem: "servicenow",
            Metadata: new Dictionary<string, string> { ["assignmentGroup"] = "runtime-security" });

        var response = await client.PostAsJsonAsync("/console/vuln/tickets", payload);
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        Assert.StartsWith("console-ticket::tenant-default::", json.RootElement.GetProperty("ticketId").GetString());
        Assert.Equal("servicenow", payload.TargetSystem);
    }

    [Fact]
    public async Task VexStatements_ReturnsSampleRows()
    {
        var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z"));
        var sink = new RecordingAuthEventSink();
        await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty<string>(), Array.Empty<string>()));

        var accessor = app.Services.GetRequiredService<TestPrincipalAccessor>();
        accessor.Principal = CreatePrincipal(
            tenant: "tenant-default",
            scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.VexRead },
            expiresAt: timeProvider.GetUtcNow().AddMinutes(30));

        var client = app.CreateTestClient();
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme);
        client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

        var response = await client.GetAsync("/console/vex/statements?advisoryId=CVE-2024-12345");
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        var items = json.RootElement.GetProperty("items");
        Assert.True(items.GetArrayLength() >= 1);
        Assert.Equal("CVE-2024-12345", items[0].GetProperty("advisoryId").GetString());
    }

    [Fact]
    public async Task Dashboard_ReturnsTenantScopedAggregates()
    {
        var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z"));
        var sink = new RecordingAuthEventSink();
        await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty<string>(), Array.Empty<string>()));

        var accessor = app.Services.GetRequiredService<TestPrincipalAccessor>();
        accessor.Principal = CreatePrincipal(
            tenant: "tenant-default",
            scopes: new[] { StellaOpsScopes.UiRead },
            expiresAt: timeProvider.GetUtcNow().AddMinutes(30));

        var client = app.CreateTestClient();
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme);
        client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

        var response = await client.GetAsync("/console/dashboard");
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        Assert.Equal("tenant-default", json.RootElement.GetProperty("tenant").GetString());
        Assert.True(json.RootElement.TryGetProperty("generatedAt", out _));
        Assert.True(json.RootElement.TryGetProperty("findings", out var findings));
        Assert.True(findings.TryGetProperty("totalFindings", out _));
        Assert.True(json.RootElement.TryGetProperty("vexOverrides", out _));
        Assert.True(json.RootElement.TryGetProperty("advisoryDeltas", out _));
        Assert.True(json.RootElement.TryGetProperty("runHealth", out _));
        Assert.True(json.RootElement.TryGetProperty("policyChanges", out _));

        var events = sink.Events;
        var consoleEvent = Assert.Single(events, evt => evt.EventType == "authority.console.dashboard");
        Assert.Equal(AuthEventOutcome.Success, consoleEvent.Outcome);
    }

    [Fact]
    public async Task Dashboard_ReturnsBadRequest_WhenTenantHeaderMissing()
    {
        var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z"));
        var sink = new RecordingAuthEventSink();
        await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty<string>(), Array.Empty<string>()));

        var accessor = app.Services.GetRequiredService<TestPrincipalAccessor>();
        accessor.Principal = CreatePrincipal(
            tenant: "tenant-default",
            scopes: new[] { StellaOpsScopes.UiRead },
            expiresAt: timeProvider.GetUtcNow().AddMinutes(30));

        var client = app.CreateTestClient();
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme);

        var response = await client.GetAsync("/console/dashboard");
        Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
    }

    [Fact]
    public async Task Dashboard_ContainsFindingsTrendData()
    {
        var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z"));
        var sink = new RecordingAuthEventSink();
        await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty<string>(), Array.Empty<string>()));

        var accessor = app.Services.GetRequiredService<TestPrincipalAccessor>();
        accessor.Principal = CreatePrincipal(
            tenant: "tenant-default",
            scopes: new[] { StellaOpsScopes.UiRead },
            expiresAt: timeProvider.GetUtcNow().AddMinutes(30));

        var client = app.CreateTestClient();
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme);
        client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

        var response = await client.GetAsync("/console/dashboard");
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        var findings = json.RootElement.GetProperty("findings");
        var trend = findings.GetProperty("trendLast30Days");
        Assert.True(trend.GetArrayLength() > 0);
    }

    [Fact]
    public async Task Filters_ReturnsFilterCategories()
    {
        var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z"));
        var sink = new RecordingAuthEventSink();
        await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty<string>(), Array.Empty<string>()));

        var accessor = app.Services.GetRequiredService<TestPrincipalAccessor>();
        accessor.Principal = CreatePrincipal(
            tenant: "tenant-default",
            scopes: new[] { StellaOpsScopes.UiRead },
            expiresAt: timeProvider.GetUtcNow().AddMinutes(30));

        var client = app.CreateTestClient();
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme);
        client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

        var response = await client.GetAsync("/console/filters");
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        Assert.Equal("tenant-default", json.RootElement.GetProperty("tenant").GetString());
        Assert.True(json.RootElement.TryGetProperty("generatedAt", out _));
        Assert.True(json.RootElement.TryGetProperty("filtersHash", out _));
        var categories = json.RootElement.GetProperty("categories");
        Assert.True(categories.GetArrayLength() >= 5);

        var events = sink.Events;
        var consoleEvent = Assert.Single(events, evt => evt.EventType == "authority.console.filters");
        Assert.Equal(AuthEventOutcome.Success, consoleEvent.Outcome);
    }

    [Fact]
    public async Task Filters_ReturnsExpectedCategoryIds()
    {
        var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z"));
        var sink = new RecordingAuthEventSink();
        await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty<string>(), Array.Empty<string>()));

        var accessor = app.Services.GetRequiredService<TestPrincipalAccessor>();
        accessor.Principal = CreatePrincipal(
            tenant: "tenant-default",
            scopes: new[] { StellaOpsScopes.UiRead },
            expiresAt: timeProvider.GetUtcNow().AddMinutes(30));

        var client = app.CreateTestClient();
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme);
        client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

        var response = await client.GetAsync("/console/filters");
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        var categories = json.RootElement.GetProperty("categories");
        var categoryIds = categories.EnumerateArray()
            .Select(c => c.GetProperty("categoryId").GetString())
            .ToList();

        Assert.Contains("severity", categoryIds);
        Assert.Contains("policyBadge", categoryIds);
        Assert.Contains("reachability", categoryIds);
        Assert.Contains("vexState", categoryIds);
        Assert.Contains("kev", categoryIds);
    }

    [Fact]
    public async Task Filters_FiltersByScopeParameter()
    {
        var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z"));
        var sink = new RecordingAuthEventSink();
        await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty<string>(), Array.Empty<string>()));

        var accessor = app.Services.GetRequiredService<TestPrincipalAccessor>();
        accessor.Principal = CreatePrincipal(
            tenant: "tenant-default",
            scopes: new[] { StellaOpsScopes.UiRead },
            expiresAt: timeProvider.GetUtcNow().AddMinutes(30));

        var client = app.CreateTestClient();
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme);
        client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

        var response = await client.GetAsync("/console/filters?scope=severity");
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        var categories = json.RootElement.GetProperty("categories");
        Assert.Equal(1, categories.GetArrayLength());
        Assert.Equal("severity", categories[0].GetProperty("categoryId").GetString());
    }

    [Fact]
    public async Task Filters_ReturnsBadRequest_WhenTenantHeaderMissing()
    {
        var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z"));
        var sink = new RecordingAuthEventSink();
        await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty<string>(), Array.Empty<string>()));

        var accessor = app.Services.GetRequiredService<TestPrincipalAccessor>();
        accessor.Principal = CreatePrincipal(
            tenant: "tenant-default",
            scopes: new[] { StellaOpsScopes.UiRead },
            expiresAt: timeProvider.GetUtcNow().AddMinutes(30));

        var client = app.CreateTestClient();
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme);

        var response = await client.GetAsync("/console/filters");
        Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
    }

    [Fact]
    public async Task Filters_ReturnsHashForCacheValidation()
    {
        var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-08T12:00:00Z"));
        var sink = new RecordingAuthEventSink();
        await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty<string>(), Array.Empty<string>()));

        var accessor = app.Services.GetRequiredService<TestPrincipalAccessor>();
        accessor.Principal = CreatePrincipal(
            tenant: "tenant-default",
            scopes: new[] { StellaOpsScopes.UiRead },
            expiresAt: timeProvider.GetUtcNow().AddMinutes(30));

        var client = app.CreateTestClient();
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme);
        client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

        var response = await client.GetAsync("/console/filters");
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        using var json = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        var filtersHash = json.RootElement.GetProperty("filtersHash").GetString();
        Assert.StartsWith("sha256:", filtersHash);
    }

    private static ClaimsPrincipal CreatePrincipal(
        string tenant,
@@ -371,10 +588,10 @@ public sealed class ConsoleEndpointsTests
        builder.Services.AddSingleton<TimeProvider>(timeProvider);
        builder.Services.AddSingleton<IAuthEventSink>(sink);
        builder.Services.AddSingleton<IAuthorityTenantCatalog>(new FakeTenantCatalog(tenants));
        builder.Services.AddSingleton<TestPrincipalAccessor>();
        builder.Services.AddHttpContextAccessor();
        builder.Services.AddSingleton<StellaOpsBypassEvaluator>();
        builder.Services.AddSingleton<IConsoleWorkspaceService, ConsoleWorkspaceSampleService>();

        var authBuilder = builder.Services.AddAuthentication(options =>
        {
@@ -400,7 +617,7 @@ public sealed class ConsoleEndpointsTests
        app.UseAuthorization();
        app.MapConsoleEndpoints();

        await app.StartAsync();
        return app;
    }

@@ -434,11 +651,11 @@ public sealed class ConsoleEndpointsTests

    private sealed class TestAuthenticationHandler : AuthenticationHandler<AuthenticationSchemeOptions>
    {
        public TestAuthenticationHandler(
            IOptionsMonitor<AuthenticationSchemeOptions> options,
            ILoggerFactory logger,
            UrlEncoder encoder)
            : base(options, logger, encoder)
        {
        }

@@ -468,4 +685,4 @@ internal static class HostTestClientExtensions
internal static class TestAuthenticationDefaults
{
    public const string AuthenticationScheme = "AuthorityConsoleTests";
}
}

@@ -61,7 +61,7 @@ internal sealed class AuthoritySealedModeEvidenceValidator : IAuthoritySealedMod
        }

        var cacheKey = $"authority:sealed-mode:{sealedOptions.EvidencePath}";
        if (memoryCache.TryGetValue(cacheKey, out AuthoritySealedModeValidationResult cached))
        if (memoryCache.TryGetValue(cacheKey, out AuthoritySealedModeValidationResult? cached) && cached is not null)
        {
            return cached;
        }

@@ -37,41 +37,54 @@ internal static class ConsoleEndpointExtensions
            .WithName("ConsoleProfile")
            .WithSummary("Return the authenticated principal profile metadata.");

        group.MapPost("/token/introspect", IntrospectToken)
            .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.UiRead))
            .WithName("ConsoleTokenIntrospect")
            .WithSummary("Introspect the current access token and return expiry, scope, and tenant metadata.");

        var vulnGroup = group.MapGroup("/vuln")
            .RequireAuthorization(policy => policy.RequireStellaOpsScopes(
                StellaOpsScopes.UiRead,
                StellaOpsScopes.AdvisoryRead,
                StellaOpsScopes.VexRead));

        vulnGroup.MapGet("/findings", GetVulnerabilityFindings)
            .WithName("ConsoleVulnerabilityFindings")
            .WithSummary("List tenant-scoped vulnerability findings with policy/VEX metadata.");

        vulnGroup.MapGet("/{findingId}", GetVulnerabilityFindingById)
            .WithName("ConsoleVulnerabilityFindingDetail")
            .WithSummary("Return the full finding document, including evidence and policy overlays.");

        vulnGroup.MapPost("/tickets", CreateVulnerabilityTicket)
            .WithName("ConsoleVulnerabilityTickets")
            .WithSummary("Generate a signed payload for external ticketing workflows.");

        var vexGroup = group.MapGroup("/vex")
            .RequireAuthorization(policy => policy.RequireStellaOpsScopes(
                StellaOpsScopes.UiRead,
                StellaOpsScopes.VexRead));

        vexGroup.MapGet("/statements", GetVexStatements)
            .WithName("ConsoleVexStatements")
            .WithSummary("List VEX statements impacting the tenant.");

        vexGroup.MapGet("/events", StreamVexEvents)
            .WithName("ConsoleVexEvents")
            .WithSummary("Server-sent events feed for live VEX updates (placeholder).");

        // Dashboard and filters endpoints (WEB-CONSOLE-23-001)
        group.MapGet("/dashboard", GetDashboard)
            .RequireAuthorization(policy => policy.RequireStellaOpsScopes(
                StellaOpsScopes.UiRead))
            .WithName("ConsoleDashboard")
            .WithSummary("Tenant-scoped aggregates for findings, VEX overrides, advisory deltas, run health, and policy change log.");

        group.MapGet("/filters", GetFilters)
            .RequireAuthorization(policy => policy.RequireStellaOpsScopes(
                StellaOpsScopes.UiRead))
            .WithName("ConsoleFilters")
            .WithSummary("Available filter categories with options and counts for deterministic console queries.");
    }

    private static async Task<IResult> GetTenants(
@@ -165,11 +178,11 @@ internal static class ConsoleEndpointExtensions
        return Results.Ok(profile);
    }

    private static async Task<IResult> IntrospectToken(
        HttpContext httpContext,
        TimeProvider timeProvider,
        IAuthEventSink auditSink,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(httpContext);
        ArgumentNullException.ThrowIfNull(timeProvider);
@@ -183,214 +196,311 @@ internal static class ConsoleEndpointExtensions

        var introspection = BuildTokenIntrospection(principal, timeProvider);

        await WriteAuditAsync(
            httpContext,
            auditSink,
            timeProvider,
            "authority.console.token.introspect",
            AuthEventOutcome.Success,
            null,
            BuildProperties(
                ("token.active", introspection.Active ? "true" : "false"),
                ("token.expires_at", FormatInstant(introspection.ExpiresAt)),
                ("tenant.resolved", introspection.Tenant)),
            cancellationToken).ConfigureAwait(false);

        return Results.Ok(introspection);
    }

    private static async Task<IResult> GetVulnerabilityFindings(
        HttpContext httpContext,
        IConsoleWorkspaceService workspaceService,
        TimeProvider timeProvider,
        IAuthEventSink auditSink,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(httpContext);
        ArgumentNullException.ThrowIfNull(workspaceService);

        var tenant = TenantHeaderFilter.GetTenant(httpContext);
        if (string.IsNullOrWhiteSpace(tenant))
        {
            await WriteAuditAsync(
                httpContext,
                auditSink,
                timeProvider,
                "authority.console.vuln.findings",
                AuthEventOutcome.Failure,
                "tenant_header_missing",
                BuildProperties(("tenant.header", null)),
                cancellationToken).ConfigureAwait(false);

            return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." });
        }

        var query = BuildVulnerabilityQuery(httpContext.Request);
        var response = await workspaceService.SearchFindingsAsync(tenant, query, cancellationToken).ConfigureAwait(false);

        await WriteAuditAsync(
            httpContext,
            auditSink,
            timeProvider,
            "authority.console.vuln.findings",
            AuthEventOutcome.Success,
            null,
            BuildProperties(("tenant.resolved", tenant), ("pagination.next_token", response.NextPageToken)),
            cancellationToken).ConfigureAwait(false);

        return Results.Ok(response);
    }

    private static async Task<IResult> GetVulnerabilityFindingById(
        HttpContext httpContext,
        string findingId,
        IConsoleWorkspaceService workspaceService,
        TimeProvider timeProvider,
        IAuthEventSink auditSink,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(httpContext);
        ArgumentNullException.ThrowIfNull(workspaceService);

        var tenant = TenantHeaderFilter.GetTenant(httpContext);
        if (string.IsNullOrWhiteSpace(tenant))
        {
            await WriteAuditAsync(
                httpContext,
                auditSink,
                timeProvider,
                "authority.console.vuln.finding",
                AuthEventOutcome.Failure,
                "tenant_header_missing",
                BuildProperties(("tenant.header", null)),
                cancellationToken).ConfigureAwait(false);

            return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." });
        }

        var detail = await workspaceService.GetFindingAsync(tenant, findingId, cancellationToken).ConfigureAwait(false);
        if (detail is null)
        {
            await WriteAuditAsync(
                httpContext,
                auditSink,
                timeProvider,
                "authority.console.vuln.finding",
                AuthEventOutcome.Failure,
                "finding_not_found",
                BuildProperties(("tenant.resolved", tenant), ("finding.id", findingId)),
                cancellationToken).ConfigureAwait(false);

            return Results.NotFound(new { error = "finding_not_found", message = $"Finding '{findingId}' not found." });
        }

        await WriteAuditAsync(
            httpContext,
            auditSink,
            timeProvider,
            "authority.console.vuln.finding",
            AuthEventOutcome.Success,
            null,
            BuildProperties(("tenant.resolved", tenant), ("finding.id", findingId)),
            cancellationToken).ConfigureAwait(false);

        return Results.Ok(detail);
    }

    private static async Task<IResult> CreateVulnerabilityTicket(
        HttpContext httpContext,
        ConsoleVulnerabilityTicketRequest request,
        IConsoleWorkspaceService workspaceService,
        TimeProvider timeProvider,
        IAuthEventSink auditSink,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(httpContext);
        ArgumentNullException.ThrowIfNull(workspaceService);

        if (request is null || request.Selection.Count == 0)
        {
            return Results.BadRequest(new { error = "invalid_request", message = "At least one finding must be selected." });
        }

        var tenant = TenantHeaderFilter.GetTenant(httpContext);
        if (string.IsNullOrWhiteSpace(tenant))
        {
            await WriteAuditAsync(
                httpContext,
                auditSink,
                timeProvider,
                "authority.console.vuln.ticket",
                AuthEventOutcome.Failure,
                "tenant_header_missing",
                BuildProperties(("tenant.header", null)),
                cancellationToken).ConfigureAwait(false);

            return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." });
        }

        var ticket = await workspaceService.CreateTicketAsync(tenant, request, cancellationToken).ConfigureAwait(false);

        await WriteAuditAsync(
            httpContext,
            auditSink,
            timeProvider,
            "authority.console.vuln.ticket",
            AuthEventOutcome.Success,
            null,
            BuildProperties(
                ("tenant.resolved", tenant),
                ("ticket.id", ticket.TicketId),
                ("ticket.selection.count", request.Selection.Count.ToString(CultureInfo.InvariantCulture))),
            cancellationToken).ConfigureAwait(false);

        return Results.Ok(ticket);
    }

    private static async Task<IResult> GetVexStatements(
        HttpContext httpContext,
        IConsoleWorkspaceService workspaceService,
        TimeProvider timeProvider,
        IAuthEventSink auditSink,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(httpContext);
        ArgumentNullException.ThrowIfNull(workspaceService);

        var tenant = TenantHeaderFilter.GetTenant(httpContext);
        if (string.IsNullOrWhiteSpace(tenant))
        {
            await WriteAuditAsync(
                httpContext,
                auditSink,
                timeProvider,
                "authority.console.vex.statements",
                AuthEventOutcome.Failure,
                "tenant_header_missing",
                BuildProperties(("tenant.header", null)),
                cancellationToken).ConfigureAwait(false);

            return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." });
        }

        var query = BuildVexQuery(httpContext.Request);
        var response = await workspaceService.GetVexStatementsAsync(tenant, query, cancellationToken).ConfigureAwait(false);

        await WriteAuditAsync(
            httpContext,
            auditSink,
            timeProvider,
            "authority.console.vex.statements",
            AuthEventOutcome.Success,
            null,
            BuildProperties(("tenant.resolved", tenant), ("pagination.next_token", response.NextPageToken)),
            cancellationToken).ConfigureAwait(false);

        return Results.Ok(response);
    }

    private static IResult StreamVexEvents() =>
        Results.StatusCode(StatusCodes.Status501NotImplemented);

    private static async Task<IResult> GetDashboard(
        HttpContext httpContext,
        IConsoleWorkspaceService workspaceService,
        TimeProvider timeProvider,
        IAuthEventSink auditSink,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(httpContext);
        ArgumentNullException.ThrowIfNull(workspaceService);

        var tenant = TenantHeaderFilter.GetTenant(httpContext);
        if (string.IsNullOrWhiteSpace(tenant))
        {
            await WriteAuditAsync(
                httpContext,
                auditSink,
                timeProvider,
                "authority.console.dashboard",
                AuthEventOutcome.Failure,
                "tenant_header_missing",
                BuildProperties(("tenant.header", null)),
                cancellationToken).ConfigureAwait(false);

            return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." });
        }

        var dashboard = await workspaceService.GetDashboardAsync(tenant, cancellationToken).ConfigureAwait(false);

        await WriteAuditAsync(
            httpContext,
            auditSink,
            timeProvider,
            "authority.console.dashboard",
            AuthEventOutcome.Success,
            null,
            BuildProperties(
                ("tenant.resolved", tenant),
                ("dashboard.findings_count", dashboard.Findings.TotalFindings.ToString(CultureInfo.InvariantCulture))),
            cancellationToken).ConfigureAwait(false);

        return Results.Ok(dashboard);
    }

    private static async Task<IResult> GetFilters(
        HttpContext httpContext,
        IConsoleWorkspaceService workspaceService,
        TimeProvider timeProvider,
        IAuthEventSink auditSink,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(httpContext);
        ArgumentNullException.ThrowIfNull(workspaceService);

        var tenant = TenantHeaderFilter.GetTenant(httpContext);
        if (string.IsNullOrWhiteSpace(tenant))
        {
            await WriteAuditAsync(
                httpContext,
                auditSink,
                timeProvider,
                "authority.console.filters",
                AuthEventOutcome.Failure,
                "tenant_header_missing",
                BuildProperties(("tenant.header", null)),
                cancellationToken).ConfigureAwait(false);

            return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." });
        }

        var query = BuildFiltersQuery(httpContext.Request);
        var filters = await workspaceService.GetFiltersAsync(tenant, query, cancellationToken).ConfigureAwait(false);

        await WriteAuditAsync(
            httpContext,
            auditSink,
            timeProvider,
            "authority.console.filters",
            AuthEventOutcome.Success,
            null,
            BuildProperties(
                ("tenant.resolved", tenant),
                ("filters.hash", filters.FiltersHash),
                ("filters.categories_count", filters.Categories.Count.ToString(CultureInfo.InvariantCulture))),
            cancellationToken).ConfigureAwait(false);

        return Results.Ok(filters);
    }

    private static ConsoleFiltersQuery BuildFiltersQuery(HttpRequest request)
    {
        var scope = request.Query.TryGetValue("scope", out var scopeValues) ? scopeValues.FirstOrDefault() : null;
        var includeEmpty = request.Query.TryGetValue("includeEmpty", out var includeValues) &&
                           bool.TryParse(includeValues.FirstOrDefault(), out var include) && include;

        return new ConsoleFiltersQuery(scope, includeEmpty);
    }

    private static ConsoleProfileResponse BuildProfile(ClaimsPrincipal principal, TimeProvider timeProvider)
    {
@@ -455,9 +565,9 @@ internal static class ConsoleEndpointExtensions
            FreshAuth: freshAuth);
    }

    private static bool DetermineFreshAuth(ClaimsPrincipal principal, DateTimeOffset now)
    {
        var flag = principal.FindFirst("stellaops:fresh_auth") ?? principal.FindFirst("fresh_auth");
        if (flag is not null && bool.TryParse(flag.Value, out var freshFlag))
        {
            if (freshFlag)
@@ -478,67 +588,67 @@ internal static class ConsoleEndpointExtensions
        return authTime.Value.Add(ttl) > now;
    }

        const int defaultFreshAuthWindowSeconds = 300;
        return authTime.Value.AddSeconds(defaultFreshAuthWindowSeconds) > now;
    }

    private static ConsoleVulnerabilityQuery BuildVulnerabilityQuery(HttpRequest request)
    {
        var builder = new ConsoleVulnerabilityQueryBuilder()
            .SetPageSize(ParseInt(request.Query["pageSize"], 50))
            .SetPageToken(request.Query.TryGetValue("pageToken", out var tokenValues) ? tokenValues.FirstOrDefault() : null)
            .AddSeverity(ReadMulti(request, "severity"))
            .AddPolicyBadges(ReadMulti(request, "policyBadge"))
            .AddReachability(ReadMulti(request, "reachability"))
            .AddProducts(ReadMulti(request, "product"))
            .AddVexStates(ReadMulti(request, "vexState"));

        var search = request.Query.TryGetValue("search", out var searchValues)
            ? searchValues
                .Where(value => !string.IsNullOrWhiteSpace(value))
                .SelectMany(value => value!.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries))
            : Array.Empty<string>();

        builder.AddSearchTerms(search);
        return builder.Build();
    }

    private static ConsoleVexQuery BuildVexQuery(HttpRequest request)
    {
        var builder = new ConsoleVexQueryBuilder()
            .SetPageSize(ParseInt(request.Query["pageSize"], 50))
            .SetPageToken(request.Query.TryGetValue("pageToken", out var pageValues) ? pageValues.FirstOrDefault() : null)
            .AddAdvisories(ReadMulti(request, "advisoryId"))
            .AddTypes(ReadMulti(request, "statementType"))
            .AddStates(ReadMulti(request, "state"));

        return builder.Build();
    }

    private static IEnumerable<string> ReadMulti(HttpRequest request, string key)
    {
        if (!request.Query.TryGetValue(key, out var values))
        {
            return Array.Empty<string>();
        }

        return values
            .Where(value => !string.IsNullOrWhiteSpace(value))
            .SelectMany(value => value!.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries))
            .Where(value => value.Length > 0);
    }

    private static int ParseInt(StringValues values, int fallback)
    {
        if (values.Count == 0)
        {
            return fallback;
        }

        return int.TryParse(values[0], NumberStyles.Integer, CultureInfo.InvariantCulture, out var number)
            ? number
            : fallback;
    }

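    // Worked example for ReadMulti/ParseInt above (illustrative values only):
    //   "?severity=critical,high&severity=medium"  => ReadMulti(request, "severity") yields "critical", "high", "medium"
    //   "?pageSize=abc"                            => ParseInt falls back to the default (50 at the call sites above)
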
    private static IReadOnlyList<string> ExtractRoles(ClaimsPrincipal principal)
    {

@@ -183,6 +183,22 @@ internal interface IConsoleWorkspaceService
        string tenant,
        ConsoleVexQuery query,
        CancellationToken cancellationToken);

    /// <summary>
    /// Get tenant-scoped dashboard aggregates including findings, VEX overrides,
    /// advisory deltas, run health, and policy change log.
    /// </summary>
    Task<ConsoleDashboardResponse> GetDashboardAsync(
        string tenant,
        CancellationToken cancellationToken);

    /// <summary>
    /// Get available filter categories with options and counts for console queries.
    /// </summary>
    Task<ConsoleFiltersResponse> GetFiltersAsync(
        string tenant,
        ConsoleFiltersQuery query,
        CancellationToken cancellationToken);
}

internal sealed class ConsoleVulnerabilityQueryBuilder
@@ -302,3 +318,167 @@ internal sealed class ConsoleVexQueryBuilder
            _pageSize,
            _pageToken);
}

// ============================================================================
// Dashboard Models (WEB-CONSOLE-23-001)
// ============================================================================

/// <summary>
/// Dashboard response containing tenant-scoped aggregates for findings, VEX overrides,
/// advisory deltas, run health, and policy change log.
/// </summary>
internal sealed record ConsoleDashboardResponse(
    string Tenant,
    DateTimeOffset GeneratedAt,
    ConsoleDashboardFindingsSummary Findings,
    ConsoleDashboardVexSummary VexOverrides,
    ConsoleDashboardAdvisorySummary AdvisoryDeltas,
    ConsoleDashboardRunHealth RunHealth,
    ConsoleDashboardPolicyChangeLog PolicyChanges);

/// <summary>
/// Aggregated findings summary for dashboard.
/// </summary>
internal sealed record ConsoleDashboardFindingsSummary(
    int TotalFindings,
    int CriticalCount,
    int HighCount,
    int MediumCount,
    int LowCount,
    int InformationalCount,
    int NewLastDay,
    int NewLastWeek,
    int ResolvedLastWeek,
    IReadOnlyList<ConsoleDashboardTrendPoint> TrendLast30Days);

/// <summary>
/// A single trend data point.
/// </summary>
internal sealed record ConsoleDashboardTrendPoint(
    DateTimeOffset Date,
    int Open,
    int Resolved,
    int New);

/// <summary>
/// VEX overrides summary for dashboard.
/// </summary>
internal sealed record ConsoleDashboardVexSummary(
    int TotalStatements,
    int NotAffectedCount,
    int FixedCount,
    int UnderInvestigationCount,
    int AffectedCount,
    int AutomatedCount,
    int ManualCount,
    DateTimeOffset? LastStatementUpdated);

/// <summary>
/// Advisory delta summary for dashboard.
/// </summary>
internal sealed record ConsoleDashboardAdvisorySummary(
    int TotalAdvisories,
    int NewLastDay,
    int NewLastWeek,
    int UpdatedLastWeek,
    int KevCount,
    IReadOnlyList<ConsoleDashboardAdvisoryItem> RecentAdvisories);

/// <summary>
/// A recent advisory item for dashboard display.
/// </summary>
internal sealed record ConsoleDashboardAdvisoryItem(
    string AdvisoryId,
    string Severity,
    string Summary,
    bool Kev,
    int AffectedFindings,
    DateTimeOffset PublishedAt);

/// <summary>
/// Run health summary for dashboard.
/// </summary>
internal sealed record ConsoleDashboardRunHealth(
    int TotalRuns,
    int SuccessfulRuns,
    int FailedRuns,
    int RunningRuns,
    int PendingRuns,
    double SuccessRatePercent,
    TimeSpan? AverageRunDuration,
    DateTimeOffset? LastRunCompletedAt,
    IReadOnlyList<ConsoleDashboardRecentRun> RecentRuns);

/// <summary>
/// A recent run item for dashboard display.
/// </summary>
internal sealed record ConsoleDashboardRecentRun(
    string RunId,
    string RunType,
    string Status,
    DateTimeOffset StartedAt,
    DateTimeOffset? CompletedAt,
    TimeSpan? Duration,
    int FindingsProcessed);

/// <summary>
/// Policy change log summary for dashboard.
/// </summary>
internal sealed record ConsoleDashboardPolicyChangeLog(
    int TotalPolicies,
    int ActivePolicies,
    int ChangesLastWeek,
    DateTimeOffset? LastPolicyUpdated,
    IReadOnlyList<ConsoleDashboardPolicyChange> RecentChanges);

/// <summary>
/// A recent policy change for dashboard display.
/// </summary>
internal sealed record ConsoleDashboardPolicyChange(
    string PolicyId,
    string PolicyName,
    string ChangeType,
    string ChangedBy,
    DateTimeOffset ChangedAt,
    string? Description);

// ============================================================================
// Filters Models (WEB-CONSOLE-23-001)
// ============================================================================

/// <summary>
/// Available filters for console queries with counts and deterministic ordering.
/// </summary>
internal sealed record ConsoleFiltersResponse(
    string Tenant,
    DateTimeOffset GeneratedAt,
    string FiltersHash,
    IReadOnlyList<ConsoleFilterCategory> Categories);

/// <summary>
/// A filter category with available options.
/// </summary>
internal sealed record ConsoleFilterCategory(
    string CategoryId,
    string DisplayName,
    string FilterType,
    bool MultiSelect,
    IReadOnlyList<ConsoleFilterOption> Options);

/// <summary>
/// A single filter option with count and metadata.
/// </summary>
internal sealed record ConsoleFilterOption(
    string Value,
    string DisplayName,
    int Count,
    bool IsDefault,
    string? Description,
    string? IconHint);

/// <summary>
/// Query for filters endpoint.
/// </summary>
internal sealed record ConsoleFiltersQuery(
    string? Scope,
    bool IncludeEmptyCategories);

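// Minimal construction sketch for the filter records above (illustrative values, not taken from this diff):
//   var option   = new ConsoleFilterOption("critical", "Critical", Count: 3, IsDefault: false, Description: null, IconHint: "critical_icon");
//   var category = new ConsoleFilterCategory("severity", "Severity", "enum", MultiSelect: true, Options: new[] { option });
//   var response = new ConsoleFiltersResponse("tenant-default", DateTimeOffset.UtcNow, "sha256:<16 hex chars>", new[] { category });
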
@@ -277,6 +277,265 @@ internal sealed class ConsoleWorkspaceSampleService : IConsoleWorkspaceService
        return Task.FromResult(page);
    }

    public Task<ConsoleDashboardResponse> GetDashboardAsync(
        string tenant,
        CancellationToken cancellationToken)
    {
        var findings = SampleFindings.Where(detail => IsTenantMatch(tenant, detail.Summary)).ToList();
        var statements = SampleStatements.Where(s => string.Equals(s.Tenant, tenant, StringComparison.OrdinalIgnoreCase)).ToList();

        // Build findings summary
        var findingsSummary = new ConsoleDashboardFindingsSummary(
            TotalFindings: findings.Count,
            CriticalCount: findings.Count(f => string.Equals(f.Summary.Severity, "critical", StringComparison.OrdinalIgnoreCase)),
            HighCount: findings.Count(f => string.Equals(f.Summary.Severity, "high", StringComparison.OrdinalIgnoreCase)),
            MediumCount: findings.Count(f => string.Equals(f.Summary.Severity, "medium", StringComparison.OrdinalIgnoreCase)),
            LowCount: findings.Count(f => string.Equals(f.Summary.Severity, "low", StringComparison.OrdinalIgnoreCase)),
            InformationalCount: findings.Count(f => string.Equals(f.Summary.Severity, "info", StringComparison.OrdinalIgnoreCase)),
            NewLastDay: 1,
            NewLastWeek: 2,
            ResolvedLastWeek: 0,
            TrendLast30Days: GenerateSampleTrend());

        // Build VEX summary
        var vexSummary = new ConsoleDashboardVexSummary(
            TotalStatements: statements.Count,
            NotAffectedCount: statements.Count(s => string.Equals(s.State, "not_affected", StringComparison.OrdinalIgnoreCase)),
            FixedCount: statements.Count(s => string.Equals(s.State, "fixed", StringComparison.OrdinalIgnoreCase)),
            UnderInvestigationCount: statements.Count(s => string.Equals(s.State, "under_investigation", StringComparison.OrdinalIgnoreCase)),
            AffectedCount: statements.Count(s => string.Equals(s.State, "affected", StringComparison.OrdinalIgnoreCase)),
            AutomatedCount: statements.Count(s => string.Equals(s.Source.Type, "advisory_ai", StringComparison.OrdinalIgnoreCase)),
            ManualCount: statements.Count(s => !string.Equals(s.Source.Type, "advisory_ai", StringComparison.OrdinalIgnoreCase)),
            LastStatementUpdated: statements.OrderByDescending(s => s.LastUpdated).FirstOrDefault()?.LastUpdated);

        // Build advisory summary
        var advisorySummary = new ConsoleDashboardAdvisorySummary(
            TotalAdvisories: findings.Select(f => f.Summary.Coordinates.AdvisoryId).Distinct().Count(),
            NewLastDay: 1,
            NewLastWeek: 2,
            UpdatedLastWeek: 1,
            KevCount: findings.Count(f => f.Summary.Kev),
            RecentAdvisories: findings
                .Select(f => new ConsoleDashboardAdvisoryItem(
                    AdvisoryId: f.Summary.Coordinates.AdvisoryId,
                    Severity: f.Summary.Severity,
                    Summary: f.Summary.Summary,
                    Kev: f.Summary.Kev,
                    AffectedFindings: 1,
                    PublishedAt: f.Summary.Timestamps.FirstSeen))
                .DistinctBy(a => a.AdvisoryId)
                .OrderByDescending(a => a.PublishedAt)
                .Take(5)
                .ToImmutableArray());

        // Build run health
        var runHealth = new ConsoleDashboardRunHealth(
            TotalRuns: 10,
            SuccessfulRuns: 8,
            FailedRuns: 1,
            RunningRuns: 1,
            PendingRuns: 0,
            SuccessRatePercent: 80.0,
            AverageRunDuration: TimeSpan.FromMinutes(5),
            LastRunCompletedAt: DateTimeOffset.Parse("2025-11-08T12:00:00Z"),
            RecentRuns: ImmutableArray.Create(
                new ConsoleDashboardRecentRun(
                    RunId: "run::2025-11-08::001",
                    RunType: "scan",
                    Status: "completed",
                    StartedAt: DateTimeOffset.Parse("2025-11-08T11:55:00Z"),
                    CompletedAt: DateTimeOffset.Parse("2025-11-08T12:00:00Z"),
                    Duration: TimeSpan.FromMinutes(5),
                    FindingsProcessed: 150),
                new ConsoleDashboardRecentRun(
                    RunId: "run::2025-11-08::002",
                    RunType: "policy_eval",
                    Status: "running",
                    StartedAt: DateTimeOffset.Parse("2025-11-08T12:05:00Z"),
                    CompletedAt: null,
                    Duration: null,
                    FindingsProcessed: 75)));

        // Build policy change log
        var policyChangeLog = new ConsoleDashboardPolicyChangeLog(
            TotalPolicies: 5,
            ActivePolicies: 4,
            ChangesLastWeek: 2,
            LastPolicyUpdated: DateTimeOffset.Parse("2025-11-07T15:30:00Z"),
            RecentChanges: ImmutableArray.Create(
                new ConsoleDashboardPolicyChange(
                    PolicyId: "policy://tenant-default/runtime-hardening",
                    PolicyName: "Runtime Hardening",
                    ChangeType: "updated",
                    ChangedBy: "admin@stella-ops.org",
                    ChangedAt: DateTimeOffset.Parse("2025-11-07T15:30:00Z"),
                    Description: "Added KEV check rule"),
                new ConsoleDashboardPolicyChange(
                    PolicyId: "policy://tenant-default/network-hardening",
                    PolicyName: "Network Hardening",
                    ChangeType: "activated",
                    ChangedBy: "admin@stella-ops.org",
                    ChangedAt: DateTimeOffset.Parse("2025-11-06T10:00:00Z"),
                    Description: null)));

        var dashboard = new ConsoleDashboardResponse(
            Tenant: tenant,
            GeneratedAt: DateTimeOffset.UtcNow,
            Findings: findingsSummary,
            VexOverrides: vexSummary,
            AdvisoryDeltas: advisorySummary,
            RunHealth: runHealth,
            PolicyChanges: policyChangeLog);

        return Task.FromResult(dashboard);
    }

    public Task<ConsoleFiltersResponse> GetFiltersAsync(
        string tenant,
        ConsoleFiltersQuery query,
        CancellationToken cancellationToken)
    {
        var findings = SampleFindings.Where(detail => IsTenantMatch(tenant, detail.Summary)).ToList();

        var categories = new List<ConsoleFilterCategory>
        {
            new ConsoleFilterCategory(
                CategoryId: "severity",
                DisplayName: "Severity",
                FilterType: "enum",
                MultiSelect: true,
                Options: BuildFilterOptions(findings, f => f.Summary.Severity, new (string, string, string?)[]
                {
                    ("critical", "Critical", "critical_icon"),
                    ("high", "High", "high_icon"),
                    ("medium", "Medium", "medium_icon"),
                    ("low", "Low", "low_icon"),
                    ("info", "Informational", "info_icon")
                }, query.IncludeEmptyCategories)),

            new ConsoleFilterCategory(
                CategoryId: "policyBadge",
                DisplayName: "Policy Status",
                FilterType: "enum",
                MultiSelect: true,
                Options: BuildFilterOptions(findings, f => f.Summary.PolicyBadge, new (string, string, string?)[]
                {
                    ("fail", "Fail", "fail_icon"),
                    ("warn", "Warning", "warn_icon"),
                    ("pass", "Pass", "pass_icon"),
                    ("waived", "Waived", "waived_icon")
                }, query.IncludeEmptyCategories)),

            new ConsoleFilterCategory(
                CategoryId: "reachability",
                DisplayName: "Reachability",
                FilterType: "enum",
                MultiSelect: true,
                Options: BuildFilterOptions(findings, f => f.Summary.Reachability?.Status ?? "unknown", new (string, string, string?)[]
                {
                    ("reachable", "Reachable", "reachable_icon"),
                    ("unreachable", "Unreachable", "unreachable_icon"),
                    ("unknown", "Unknown", "unknown_icon")
                }, query.IncludeEmptyCategories)),

            new ConsoleFilterCategory(
                CategoryId: "vexState",
                DisplayName: "VEX State",
                FilterType: "enum",
                MultiSelect: true,
                Options: BuildFilterOptions(findings, f => f.Summary.Vex?.State ?? "none", new (string, string, string?)[]
                {
                    ("not_affected", "Not Affected", "not_affected_icon"),
                    ("fixed", "Fixed", "fixed_icon"),
                    ("under_investigation", "Under Investigation", "investigating_icon"),
                    ("affected", "Affected", "affected_icon"),
                    ("none", "No VEX", null)
                }, query.IncludeEmptyCategories)),

            new ConsoleFilterCategory(
                CategoryId: "kev",
                DisplayName: "Known Exploited",
                FilterType: "boolean",
                MultiSelect: false,
                Options: ImmutableArray.Create(
                    new ConsoleFilterOption("true", "KEV Listed", findings.Count(f => f.Summary.Kev), false, "Known Exploited Vulnerability", "kev_icon"),
                    new ConsoleFilterOption("false", "Not KEV", findings.Count(f => !f.Summary.Kev), true, null, null)))
        };

        // Filter by scope if specified
        if (!string.IsNullOrWhiteSpace(query.Scope))
        {
            categories = categories
                .Where(c => string.Equals(c.CategoryId, query.Scope, StringComparison.OrdinalIgnoreCase))
                .ToList();
        }

        var filtersHash = ComputeFiltersHash(categories);

        var response = new ConsoleFiltersResponse(
            Tenant: tenant,
            GeneratedAt: DateTimeOffset.UtcNow,
            FiltersHash: filtersHash,
            Categories: categories.ToImmutableArray());

        return Task.FromResult(response);
    }

    private static ImmutableArray<ConsoleDashboardTrendPoint> GenerateSampleTrend()
    {
        var points = new List<ConsoleDashboardTrendPoint>();
        var baseDate = DateTimeOffset.Parse("2025-10-09T00:00:00Z");

        for (int i = 0; i < 30; i++)
        {
            points.Add(new ConsoleDashboardTrendPoint(
                Date: baseDate.AddDays(i),
                Open: 2 + (i % 3),
                Resolved: i % 5 == 0 ? 1 : 0,
                New: i % 7 == 0 ? 1 : 0));
        }

        return points.ToImmutableArray();
    }

    private static ImmutableArray<ConsoleFilterOption> BuildFilterOptions(
        List<ConsoleVulnerabilityFindingDetail> findings,
        Func<ConsoleVulnerabilityFindingDetail, string> selector,
        (string value, string displayName, string? icon)[] definitions,
        bool includeEmpty)
    {
        var counts = findings
            .GroupBy(selector, StringComparer.OrdinalIgnoreCase)
            .ToDictionary(g => g.Key, g => g.Count(), StringComparer.OrdinalIgnoreCase);

        var options = new List<ConsoleFilterOption>();
        foreach (var (value, displayName, icon) in definitions)
        {
            var count = counts.TryGetValue(value, out var c) ? c : 0;
            if (count > 0 || includeEmpty)
            {
                options.Add(new ConsoleFilterOption(
                    Value: value,
                    DisplayName: displayName,
                    Count: count,
                    IsDefault: false,
                    Description: null,
                    IconHint: icon));
            }
        }

        return options.ToImmutableArray();
    }

    private static string ComputeFiltersHash(List<ConsoleFilterCategory> categories)
    {
        using var sha256 = SHA256.Create();
        var joined = string.Join("|", categories.SelectMany(c =>
            c.Options.Select(o => $"{c.CategoryId}:{o.Value}:{o.Count}")));
        var hash = sha256.ComputeHash(Encoding.UTF8.GetBytes(joined));
        return $"sha256:{Convert.ToHexString(hash[..8]).ToLowerInvariant()}";
    }

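    // Worked example for ComputeFiltersHash above (illustrative values, not from this diff): two categories
    // with one option each are joined as "severity:critical:3|kev:true:1" before hashing. Only the first
    // 8 bytes of the SHA-256 digest are kept, so the result is always "sha256:" followed by 16 lowercase
    // hex characters, e.g. "sha256:3f1a9c0d2b44e7aa" (digest value shown for shape only).
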
    private static bool MatchesSeverity(ConsoleVulnerabilityFindingDetail detail, ConsoleVulnerabilityQuery query) =>
        query.Severity.Count == 0 ||
        query.Severity.Any(sev => string.Equals(sev, detail.Summary.Severity, StringComparison.OrdinalIgnoreCase));

File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Net.Http.Json;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Cli.Services.Models;
@@ -9,11 +10,12 @@ using StellaOps.Cli.Services.Models;
namespace StellaOps.Cli.Services;

/// <summary>
/// HTTP client for Authority console endpoints (CLI-TEN-47-001).
/// HTTP client for Authority console endpoints (CLI-TEN-47-001, CLI-TEN-49-001).
/// </summary>
internal sealed class AuthorityConsoleClient : IAuthorityConsoleClient
{
    private readonly HttpClient _httpClient;
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web);

    public AuthorityConsoleClient(HttpClient httpClient)
    {
@@ -38,4 +40,73 @@ internal sealed class AuthorityConsoleClient : IAuthorityConsoleClient

        return result?.Tenants ?? Array.Empty<TenantInfo>();
    }

    public async Task<TokenMintResponse> MintTokenAsync(TokenMintRequest request, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);

        using var httpRequest = new HttpRequestMessage(HttpMethod.Post, "console/token/mint")
        {
            Content = JsonContent.Create(request, options: JsonOptions)
        };

        if (!string.IsNullOrWhiteSpace(request.Tenant))
        {
            httpRequest.Headers.Add("X-StellaOps-Tenant", request.Tenant.Trim().ToLowerInvariant());
        }

        using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        response.EnsureSuccessStatusCode();

        var result = await response.Content
            .ReadFromJsonAsync<TokenMintResponse>(JsonOptions, cancellationToken)
            .ConfigureAwait(false);

        return result ?? throw new InvalidOperationException("Token mint response was empty.");
    }

    public async Task<TokenDelegateResponse> DelegateTokenAsync(TokenDelegateRequest request, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);

        using var httpRequest = new HttpRequestMessage(HttpMethod.Post, "console/token/delegate")
        {
            Content = JsonContent.Create(request, options: JsonOptions)
        };

        if (!string.IsNullOrWhiteSpace(request.Tenant))
        {
            httpRequest.Headers.Add("X-StellaOps-Tenant", request.Tenant.Trim().ToLowerInvariant());
        }

        using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        response.EnsureSuccessStatusCode();

        var result = await response.Content
            .ReadFromJsonAsync<TokenDelegateResponse>(JsonOptions, cancellationToken)
            .ConfigureAwait(false);

        return result ?? throw new InvalidOperationException("Token delegation response was empty.");
    }

    public async Task<TokenIntrospectionResponse?> IntrospectTokenAsync(string? tenant, CancellationToken cancellationToken)
    {
        using var httpRequest = new HttpRequestMessage(HttpMethod.Post, "console/token/introspect");

        if (!string.IsNullOrWhiteSpace(tenant))
        {
            httpRequest.Headers.Add("X-StellaOps-Tenant", tenant.Trim().ToLowerInvariant());
        }

        using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);

        if (!response.IsSuccessStatusCode)
        {
            return null;
        }

        return await response.Content
            .ReadFromJsonAsync<TokenIntrospectionResponse>(JsonOptions, cancellationToken)
            .ConfigureAwait(false);
    }
}

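// Usage sketch for the client above (illustrative; assumes the HttpClient base address is configured
// elsewhere so that relative routes such as "console/token/introspect" resolve):
//   var client = new AuthorityConsoleClient(httpClient);
//   var info = await client.IntrospectTokenAsync("Tenant-A", cancellationToken);
//   // The tenant value is trimmed and lower-cased before it is sent, i.e. the request carries
//   // "X-StellaOps-Tenant: tenant-a"; a non-success status code yields null instead of throwing.
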
@@ -18,10 +18,10 @@ using Microsoft.Extensions.Logging;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.Client;
using StellaOps.Cli.Configuration;
using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Services.Models.AdvisoryAi;
using StellaOps.Cli.Services.Models.Ruby;
using StellaOps.Cli.Services.Models.Transport;

namespace StellaOps.Cli.Services;

@@ -32,12 +32,12 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
    private static readonly IReadOnlyDictionary<string, object?> EmptyMetadata =
        new ReadOnlyDictionary<string, object?>(new Dictionary<string, object?>(0, StringComparer.OrdinalIgnoreCase));

    private const string OperatorReasonParameterName = "operator_reason";
    private const string OperatorTicketParameterName = "operator_ticket";
    private const string BackfillReasonParameterName = "backfill_reason";
    private const string BackfillTicketParameterName = "backfill_ticket";
    private const string AdvisoryScopesHeader = "X-StellaOps-Scopes";
    private const string AdvisoryRunScope = "advisory:run";

    private readonly HttpClient _httpClient;
    private readonly StellaOpsCliOptions _options;
@@ -859,9 +859,9 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
        return MapPolicyFindingExplain(document);
    }

    public async Task<EntryTraceResponseModel?> GetEntryTraceAsync(string scanId, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        if (string.IsNullOrWhiteSpace(scanId))
        {
@@ -883,174 +883,174 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
            throw new InvalidOperationException(failure);
        }

        var result = await response.Content.ReadFromJsonAsync<EntryTraceResponseModel>(SerializerOptions, cancellationToken).ConfigureAwait(false);
        if (result is null)
        {
            throw new InvalidOperationException("EntryTrace response payload was empty.");
        }

        return result;
    }

    public async Task<RubyPackageInventoryModel?> GetRubyPackagesAsync(string scanId, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        if (string.IsNullOrWhiteSpace(scanId))
        {
            throw new ArgumentException("Scan identifier is required.", nameof(scanId));
        }

        var encodedScanId = Uri.EscapeDataString(scanId);
        using var request = CreateRequest(HttpMethod.Get, $"api/scans/{encodedScanId}/ruby-packages");
        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);

        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
        if (response.StatusCode == HttpStatusCode.NotFound)
        {
            return null;
        }

        if (!response.IsSuccessStatusCode)
        {
            var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException(failure);
        }

        var inventory = await response.Content
            .ReadFromJsonAsync<RubyPackageInventoryModel>(SerializerOptions, cancellationToken)
            .ConfigureAwait(false);

        if (inventory is null)
        {
            throw new InvalidOperationException("Ruby package response payload was empty.");
        }

        var normalizedScanId = string.IsNullOrWhiteSpace(inventory.ScanId) ? scanId : inventory.ScanId;
        var normalizedDigest = inventory.ImageDigest ?? string.Empty;
        var packages = inventory.Packages ?? Array.Empty<RubyPackageArtifactModel>();

        return inventory with
        {
            ScanId = normalizedScanId,
            ImageDigest = normalizedDigest,
            Packages = packages
        };
    }

    public async Task<AdvisoryPipelinePlanResponseModel> CreateAdvisoryPipelinePlanAsync(
        AdvisoryAiTaskType taskType,
        AdvisoryPipelinePlanRequestModel request,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);

        var taskSegment = taskType.ToString().ToLowerInvariant();
        var relative = $"v1/advisory-ai/pipeline/{taskSegment}";

        var payload = new AdvisoryPipelinePlanRequestModel
        {
            TaskType = taskType,
            AdvisoryKey = string.IsNullOrWhiteSpace(request.AdvisoryKey) ? string.Empty : request.AdvisoryKey.Trim(),
            ArtifactId = string.IsNullOrWhiteSpace(request.ArtifactId) ? null : request.ArtifactId!.Trim(),
            ArtifactPurl = string.IsNullOrWhiteSpace(request.ArtifactPurl) ? null : request.ArtifactPurl!.Trim(),
            PolicyVersion = string.IsNullOrWhiteSpace(request.PolicyVersion) ? null : request.PolicyVersion!.Trim(),
            Profile = string.IsNullOrWhiteSpace(request.Profile) ? "default" : request.Profile!.Trim(),
            PreferredSections = request.PreferredSections is null
                ? null
                : request.PreferredSections
                    .Where(static section => !string.IsNullOrWhiteSpace(section))
                    .Select(static section => section.Trim())
                    .ToArray(),
            ForceRefresh = request.ForceRefresh
        };

        using var httpRequest = CreateRequest(HttpMethod.Post, relative);
        ApplyAdvisoryAiEndpoint(httpRequest, taskType);
        await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        httpRequest.Content = JsonContent.Create(payload, options: SerializerOptions);

        using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException(failure);
        }

        try
        {
            var plan = await response.Content.ReadFromJsonAsync<AdvisoryPipelinePlanResponseModel>(SerializerOptions, cancellationToken).ConfigureAwait(false);
            if (plan is null)
            {
                throw new InvalidOperationException("Advisory AI plan response was empty.");
            }

            return plan;
        }
        catch (JsonException ex)
        {
            var raw = response.Content is null
                ? string.Empty
                : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException($"Failed to parse advisory plan response. {ex.Message}", ex)
            {
                Data = { ["payload"] = raw }
            };
        }
    }

    public async Task<AdvisoryPipelineOutputModel?> TryGetAdvisoryPipelineOutputAsync(
        string cacheKey,
        AdvisoryAiTaskType taskType,
        string profile,
        CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(cacheKey))
        {
            throw new ArgumentException("Cache key is required.", nameof(cacheKey));
        }

        var encodedKey = Uri.EscapeDataString(cacheKey);
        var taskSegment = Uri.EscapeDataString(taskType.ToString().ToLowerInvariant());
        var resolvedProfile = string.IsNullOrWhiteSpace(profile) ? "default" : profile.Trim();
        var relative = $"v1/advisory-ai/outputs/{encodedKey}?taskType={taskSegment}&profile={Uri.EscapeDataString(resolvedProfile)}";

        using var request = CreateRequest(HttpMethod.Get, relative);
        ApplyAdvisoryAiEndpoint(request, taskType);
        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);

        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
        if (response.StatusCode == HttpStatusCode.NotFound)
        {
            return null;
        }

        if (!response.IsSuccessStatusCode)
        {
            var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException(failure);
        }

        try
        {
            return await response.Content.ReadFromJsonAsync<AdvisoryPipelineOutputModel>(SerializerOptions, cancellationToken).ConfigureAwait(false);
        }
        catch (JsonException ex)
        {
            var raw = response.Content is null
                ? string.Empty
                : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException($"Failed to parse advisory output response. {ex.Message}", ex)
            {
                Data = { ["payload"] = raw }
            };
        }
    }

    public async Task<IReadOnlyList<ExcititorProviderSummary>> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken)
    {
        EnsureBackendConfigured();

        var query = includeDisabled ? "?includeDisabled=true" : string.Empty;
        using var request = CreateRequest(HttpMethod.Get, $"excititor/providers{query}");
        await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
@@ -1937,44 +1937,44 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
        return string.IsNullOrWhiteSpace(value) ? null : value.Trim();
    }

    private void ApplyAdvisoryAiEndpoint(HttpRequestMessage request, AdvisoryAiTaskType taskType)
    {
        if (request is null)
        {
            throw new ArgumentNullException(nameof(request));
        }

        var requestUri = request.RequestUri ?? throw new InvalidOperationException("Request URI was not initialized.");

        if (!string.IsNullOrWhiteSpace(_options.AdvisoryAiUrl) &&
            Uri.TryCreate(_options.AdvisoryAiUrl, UriKind.Absolute, out var advisoryBase))
        {
            if (!requestUri.IsAbsoluteUri)
            {
                request.RequestUri = new Uri(advisoryBase, requestUri.ToString());
            }
        }
        else if (!string.IsNullOrWhiteSpace(_options.AdvisoryAiUrl))
        {
            throw new InvalidOperationException($"Advisory AI URL '{_options.AdvisoryAiUrl}' is not a valid absolute URI.");
        }
        else
        {
            EnsureBackendConfigured();
        }

        var taskScope = $"advisory:{taskType.ToString().ToLowerInvariant()}";
        var combined = $"{AdvisoryRunScope} {taskScope}";

        if (request.Headers.Contains(AdvisoryScopesHeader))
        {
            request.Headers.Remove(AdvisoryScopesHeader);
        }

        request.Headers.TryAddWithoutValidation(AdvisoryScopesHeader, combined);
    }

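    // Worked example for ApplyAdvisoryAiEndpoint above (illustrative; "Summary" is an assumed
    // AdvisoryAiTaskType member, not confirmed by this diff):
    //   taskType = AdvisoryAiTaskType.Summary
    //     => taskScope == "advisory:summary"
    //     => header  X-StellaOps-Scopes: "advisory:run advisory:summary"
    // When AdvisoryAiUrl is set and valid, relative request URIs are rebased onto it; an invalid
    // absolute value throws, and an empty value keeps the request on the regular backend
    // (EnsureBackendConfigured() is called instead).
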
    private HttpRequestMessage CreateRequest(HttpMethod method, string relativeUri)
    {
        if (!Uri.TryCreate(relativeUri, UriKind.RelativeOrAbsolute, out var requestUri))
        {
@@ -2857,4 +2857,469 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
        var fallbackSeconds = Math.Min(60, Math.Pow(2, attempt));
        return TimeSpan.FromSeconds(fallbackSeconds);
    }

    // CLI-VEX-30-001: VEX consensus list
    public async Task<VexConsensusListResponse> ListVexConsensusAsync(VexConsensusListRequest request, string? tenant, CancellationToken cancellationToken)
    {
        if (request is null)
        {
            throw new ArgumentNullException(nameof(request));
        }

        EnsureBackendConfigured();

        var queryParams = new List<string>();
        if (!string.IsNullOrWhiteSpace(request.VulnerabilityId))
            queryParams.Add($"vulnerabilityId={Uri.EscapeDataString(request.VulnerabilityId)}");
        if (!string.IsNullOrWhiteSpace(request.ProductKey))
            queryParams.Add($"productKey={Uri.EscapeDataString(request.ProductKey)}");
        if (!string.IsNullOrWhiteSpace(request.Purl))
            queryParams.Add($"purl={Uri.EscapeDataString(request.Purl)}");
        if (!string.IsNullOrWhiteSpace(request.Status))
            queryParams.Add($"status={Uri.EscapeDataString(request.Status)}");
        if (!string.IsNullOrWhiteSpace(request.PolicyVersion))
            queryParams.Add($"policyVersion={Uri.EscapeDataString(request.PolicyVersion)}");
        if (request.Limit.HasValue)
            queryParams.Add($"limit={request.Limit.Value}");
        if (request.Offset.HasValue)
            queryParams.Add($"offset={request.Offset.Value}");

        var queryString = queryParams.Count > 0 ? "?" + string.Join("&", queryParams) : string.Empty;
        var relative = $"api/vex/consensus{queryString}";

        using var httpRequest = CreateRequest(HttpMethod.Get, relative);
        if (!string.IsNullOrWhiteSpace(tenant))
        {
            httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim());
        }

        await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

        using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException($"VEX consensus list failed: {message}");
        }

        VexConsensusListResponse? result;
        try
        {
            result = await response.Content.ReadFromJsonAsync<VexConsensusListResponse>(SerializerOptions, cancellationToken).ConfigureAwait(false);
        }
        catch (JsonException ex)
        {
            var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException($"Failed to parse VEX consensus list response: {ex.Message}", ex)
            {
                Data = { ["payload"] = raw }
            };
        }

        if (result is null)
        {
            throw new InvalidOperationException("VEX consensus list response was empty.");
        }

        return result;
    }

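    // Worked example for ListVexConsensusAsync above (illustrative values; assumes the request model
    // exposes settable VulnerabilityId/Status/Limit properties):
    //   new VexConsensusListRequest { VulnerabilityId = "CVE-2025-0001", Status = "affected", Limit = 20 }
    //     => GET api/vex/consensus?vulnerabilityId=CVE-2025-0001&status=affected&limit=20
    //   with "X-Tenant-Id: <tenant>" added when a tenant is supplied; unset filters are simply omitted.
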
// CLI-VEX-30-002: VEX consensus detail
|
||||
public async Task<VexConsensusDetailResponse?> GetVexConsensusAsync(string vulnerabilityId, string productKey, string? tenant, CancellationToken cancellationToken)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(vulnerabilityId))
|
||||
{
|
||||
throw new ArgumentException("Vulnerability ID must be provided.", nameof(vulnerabilityId));
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(productKey))
|
||||
{
|
||||
throw new ArgumentException("Product key must be provided.", nameof(productKey));
|
||||
}
|
||||
|
||||
EnsureBackendConfigured();
|
||||
|
||||
var encodedVulnId = Uri.EscapeDataString(vulnerabilityId.Trim());
|
||||
var encodedProductKey = Uri.EscapeDataString(productKey.Trim());
|
||||
var relative = $"api/vex/consensus/{encodedVulnId}/{encodedProductKey}";
|
||||
|
||||
using var httpRequest = CreateRequest(HttpMethod.Get, relative);
|
||||
if (!string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim());
|
||||
}
|
||||
|
||||
await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
if (response.StatusCode == HttpStatusCode.NotFound)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
|
||||
throw new InvalidOperationException($"VEX consensus get failed: {message}");
|
||||
}
|
||||
|
||||
VexConsensusDetailResponse? result;
|
||||
try
|
||||
{
|
||||
result = await response.Content.ReadFromJsonAsync<VexConsensusDetailResponse>(SerializerOptions, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
|
||||
throw new InvalidOperationException($"Failed to parse VEX consensus detail response: {ex.Message}", ex)
|
||||
{
|
||||
Data = { ["payload"] = raw }
|
||||
};
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// CLI-VEX-30-003: VEX simulation
|
||||
public async Task<VexSimulationResponse> SimulateVexConsensusAsync(VexSimulationRequest request, string? tenant, CancellationToken cancellationToken)
|
||||
{
|
||||
if (request is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(request));
|
||||
}
|
||||
|
||||
EnsureBackendConfigured();
|
||||
|
||||
var relative = "api/vex/consensus/simulate";
|
||||
|
||||
using var httpRequest = CreateRequest(HttpMethod.Post, relative);
|
||||
if (!string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim());
|
||||
}
|
||||
|
||||
var jsonContent = JsonSerializer.Serialize(request, SerializerOptions);
|
||||
httpRequest.Content = new StringContent(jsonContent, Encoding.UTF8, "application/json");
|
||||
|
||||
await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
|
||||
throw new InvalidOperationException($"VEX consensus simulation failed: {message}");
|
||||
}
|
||||
|
||||
VexSimulationResponse? result;
|
||||
try
|
||||
{
|
||||
result = await response.Content.ReadFromJsonAsync<VexSimulationResponse>(SerializerOptions, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
|
||||
throw new InvalidOperationException($"Failed to parse VEX simulation response: {ex.Message}", ex)
|
||||
{
|
||||
Data = { ["payload"] = raw }
|
||||
};
|
||||
}
|
||||
|
||||
if (result is null)
|
||||
{
|
||||
throw new InvalidOperationException("VEX simulation response was empty.");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// CLI-VEX-30-004: VEX export
|
||||
public async Task<VexExportResponse> ExportVexConsensusAsync(VexExportRequest request, string? tenant, CancellationToken cancellationToken)
|
||||
{
|
||||
if (request is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(request));
|
||||
}
|
||||
|
||||
EnsureBackendConfigured();
|
||||
|
||||
var relative = "api/vex/consensus/export";
|
||||
|
||||
using var httpRequest = CreateRequest(HttpMethod.Post, relative);
|
||||
if (!string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim());
|
||||
}
|
||||
|
||||
var jsonContent = JsonSerializer.Serialize(request, SerializerOptions);
|
||||
httpRequest.Content = new StringContent(jsonContent, Encoding.UTF8, "application/json");
|
||||
|
||||
await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
|
||||
throw new InvalidOperationException($"VEX consensus export failed: {message}");
|
||||
}
|
||||
|
||||
VexExportResponse? result;
|
||||
try
|
||||
{
|
||||
result = await response.Content.ReadFromJsonAsync<VexExportResponse>(SerializerOptions, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
|
||||
throw new InvalidOperationException($"Failed to parse VEX export response: {ex.Message}", ex)
|
||||
{
|
||||
Data = { ["payload"] = raw }
|
||||
};
|
||||
}
|
||||
|
||||
if (result is null)
|
||||
{
|
||||
throw new InvalidOperationException("VEX export response was empty.");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public async Task<Stream> DownloadVexExportAsync(string exportId, string? tenant, CancellationToken cancellationToken)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(exportId))
|
||||
{
|
||||
throw new ArgumentException("Export ID must be provided.", nameof(exportId));
|
||||
}
|
||||
|
||||
EnsureBackendConfigured();
|
||||
|
||||
var encodedExportId = Uri.EscapeDataString(exportId.Trim());
|
||||
var relative = $"api/vex/consensus/export/{encodedExportId}/download";
|
||||
|
||||
using var httpRequest = CreateRequest(HttpMethod.Get, relative);
|
||||
if (!string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim());
|
||||
}
|
||||
|
||||
await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var response = await _httpClient.SendAsync(httpRequest, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
|
||||
throw new InvalidOperationException($"VEX export download failed: {message}");
|
||||
}
|
||||
|
||||
return await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
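A minimal usage sketch for the export-and-download pair above, assuming an IBackendOperationsClient instance resolved from DI; the helper name, product key, and output path are illustrative and not part of this change (requires System.IO, System.Threading, and System.Threading.Tasks usings).

    // Illustrative only: request a signed NDJSON export, then stream the bundle to disk.
    private static async Task ExportAndSaveVexAsync(
        IBackendOperationsClient client, string tenant, string outputPath, CancellationToken ct)
    {
        var export = await client.ExportVexConsensusAsync(
            new VexExportRequest(
                ProductKeys: new[] { "pkg:docker/stellaops/concelier" }, // hypothetical product key
                Signed: true,
                Format: "ndjson"),
            tenant,
            ct);

        await using var download = await client.DownloadVexExportAsync(export.ExportId, tenant, ct);
        await using var file = File.Create(outputPath);
        await download.CopyToAsync(file, ct);
    }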
|
||||
|
||||
// CLI-VULN-29-001: Vulnerability explorer list
|
||||
public async Task<VulnListResponse> ListVulnerabilitiesAsync(VulnListRequest request, string? tenant, CancellationToken cancellationToken)
|
||||
{
|
||||
EnsureBackendConfigured();
|
||||
|
||||
var queryParams = new List<string>();
|
||||
if (!string.IsNullOrWhiteSpace(request.VulnerabilityId))
|
||||
queryParams.Add($"vulnerabilityId={Uri.EscapeDataString(request.VulnerabilityId)}");
|
||||
if (!string.IsNullOrWhiteSpace(request.Severity))
|
||||
queryParams.Add($"severity={Uri.EscapeDataString(request.Severity)}");
|
||||
if (!string.IsNullOrWhiteSpace(request.Status))
|
||||
queryParams.Add($"status={Uri.EscapeDataString(request.Status)}");
|
||||
if (!string.IsNullOrWhiteSpace(request.Purl))
|
||||
queryParams.Add($"purl={Uri.EscapeDataString(request.Purl)}");
|
||||
if (!string.IsNullOrWhiteSpace(request.Cpe))
|
||||
queryParams.Add($"cpe={Uri.EscapeDataString(request.Cpe)}");
|
||||
if (!string.IsNullOrWhiteSpace(request.SbomId))
|
||||
queryParams.Add($"sbomId={Uri.EscapeDataString(request.SbomId)}");
|
||||
if (!string.IsNullOrWhiteSpace(request.PolicyId))
|
||||
queryParams.Add($"policyId={Uri.EscapeDataString(request.PolicyId)}");
|
||||
if (request.PolicyVersion.HasValue)
|
||||
queryParams.Add($"policyVersion={request.PolicyVersion.Value}");
|
||||
if (!string.IsNullOrWhiteSpace(request.GroupBy))
|
||||
queryParams.Add($"groupBy={Uri.EscapeDataString(request.GroupBy)}");
|
||||
if (request.Limit.HasValue)
|
||||
queryParams.Add($"limit={request.Limit.Value}");
|
||||
if (request.Offset.HasValue)
|
||||
queryParams.Add($"offset={request.Offset.Value}");
|
||||
if (!string.IsNullOrWhiteSpace(request.Cursor))
|
||||
queryParams.Add($"cursor={Uri.EscapeDataString(request.Cursor)}");
|
||||
|
||||
var relative = "api/vuln";
|
||||
if (queryParams.Count > 0)
|
||||
relative += "?" + string.Join("&", queryParams);
|
||||
|
||||
using var httpRequest = CreateRequest(HttpMethod.Get, relative);
|
||||
if (!string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim());
|
||||
}
|
||||
|
||||
await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
|
||||
throw new InvalidOperationException($"Failed to list vulnerabilities: {message}");
|
||||
}
|
||||
|
||||
var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
|
||||
var result = JsonSerializer.Deserialize<VulnListResponse>(json, SerializerOptions);
|
||||
return result ?? new VulnListResponse(Array.Empty<VulnItem>(), 0, 0, 0, false);
|
||||
}
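A hedged paging sketch against the list endpoint above; the severity value and helper name are assumptions, and Items.Count drives the offset so a short final page terminates the loop.

    // Illustrative only: walk critical findings 100 at a time via offset paging.
    private static async Task<int> CountCriticalAsync(
        IBackendOperationsClient client, string tenant, CancellationToken ct)
    {
        var total = 0;
        var offset = 0;
        while (true)
        {
            var page = await client.ListVulnerabilitiesAsync(
                new VulnListRequest(Severity: "critical", Limit: 100, Offset: offset), tenant, ct);
            total += page.Items.Count;
            if (!page.HasMore || page.Items.Count == 0)
            {
                break;
            }
            offset += page.Items.Count;
        }
        return total;
    }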
|
||||
|
||||
// CLI-VULN-29-002: Vulnerability detail
|
||||
public async Task<VulnDetailResponse?> GetVulnerabilityAsync(string vulnerabilityId, string? tenant, CancellationToken cancellationToken)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(vulnerabilityId))
|
||||
{
|
||||
throw new ArgumentException("Vulnerability ID must be provided.", nameof(vulnerabilityId));
|
||||
}
|
||||
|
||||
EnsureBackendConfigured();
|
||||
|
||||
var encodedVulnId = Uri.EscapeDataString(vulnerabilityId.Trim());
|
||||
var relative = $"api/vuln/{encodedVulnId}";
|
||||
|
||||
using var httpRequest = CreateRequest(HttpMethod.Get, relative);
|
||||
if (!string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim());
|
||||
}
|
||||
|
||||
await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
if (response.StatusCode == System.Net.HttpStatusCode.NotFound)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
|
||||
throw new InvalidOperationException($"Failed to get vulnerability details: {message}");
|
||||
}
|
||||
|
||||
var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
|
||||
return JsonSerializer.Deserialize<VulnDetailResponse>(json, SerializerOptions);
|
||||
}
|
||||
|
||||
// CLI-VULN-29-003: Vulnerability workflow operations
|
||||
public async Task<VulnWorkflowResponse> ExecuteVulnWorkflowAsync(VulnWorkflowRequest request, string? tenant, CancellationToken cancellationToken)
|
||||
{
|
||||
EnsureBackendConfigured();
|
||||
|
||||
var relative = "api/vuln/workflow";
|
||||
var jsonPayload = JsonSerializer.Serialize(request, SerializerOptions);
|
||||
|
||||
using var httpRequest = CreateRequest(HttpMethod.Post, relative);
|
||||
httpRequest.Content = new StringContent(jsonPayload, Encoding.UTF8, "application/json");
|
||||
if (!string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim());
|
||||
}
|
||||
|
||||
await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
|
||||
throw new InvalidOperationException($"Workflow operation failed: {message}");
|
||||
}
|
||||
|
||||
var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
|
||||
var result = JsonSerializer.Deserialize<VulnWorkflowResponse>(json, SerializerOptions);
|
||||
return result ?? new VulnWorkflowResponse(false, request.Action, 0);
|
||||
}
|
||||
|
||||
// CLI-VULN-29-004: Vulnerability simulation
|
||||
public async Task<VulnSimulationResponse> SimulateVulnerabilitiesAsync(VulnSimulationRequest request, string? tenant, CancellationToken cancellationToken)
|
||||
{
|
||||
EnsureBackendConfigured();
|
||||
|
||||
var relative = "api/vuln/simulate";
|
||||
var jsonPayload = JsonSerializer.Serialize(request, SerializerOptions);
|
||||
|
||||
using var httpRequest = CreateRequest(HttpMethod.Post, relative);
|
||||
httpRequest.Content = new StringContent(jsonPayload, Encoding.UTF8, "application/json");
|
||||
if (!string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim());
|
||||
}
|
||||
|
||||
await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
|
||||
throw new InvalidOperationException($"Vulnerability simulation failed: {message}");
|
||||
}
|
||||
|
||||
var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
|
||||
var result = JsonSerializer.Deserialize<VulnSimulationResponse>(json, SerializerOptions);
|
||||
return result ?? new VulnSimulationResponse(Array.Empty<VulnSimulationDelta>(), new VulnSimulationSummary(0, 0, 0, 0, 0));
|
||||
}
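A sketch of consuming the simulation result, assuming a caller that only needs the summary counters and the changed deltas; the policy id and version are placeholders (requires System.Linq).

    // Illustrative only: simulate a policy bump and print what would change.
    private static async Task PrintSimulationSummaryAsync(
        IBackendOperationsClient client, string tenant, CancellationToken ct)
    {
        var response = await client.SimulateVulnerabilitiesAsync(
            new VulnSimulationRequest(PolicyId: "policy-default", PolicyVersion: 42), tenant, ct);

        var summary = response.Summary;
        Console.WriteLine(
            $"evaluated={summary.TotalEvaluated} changed={summary.TotalChanged} " +
            $"upgrades={summary.StatusUpgrades} downgrades={summary.StatusDowngrades}");

        foreach (var delta in response.Items.Where(item => item.Changed))
        {
            Console.WriteLine($"{delta.VulnerabilityId}: {delta.BeforeStatus} -> {delta.AfterStatus}");
        }
    }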
|
||||
|
||||
// CLI-VULN-29-005: Vulnerability export
|
||||
public async Task<VulnExportResponse> ExportVulnerabilitiesAsync(VulnExportRequest request, string? tenant, CancellationToken cancellationToken)
|
||||
{
|
||||
EnsureBackendConfigured();
|
||||
|
||||
var relative = "api/vuln/export";
|
||||
var jsonPayload = JsonSerializer.Serialize(request, SerializerOptions);
|
||||
|
||||
using var httpRequest = CreateRequest(HttpMethod.Post, relative);
|
||||
httpRequest.Content = new StringContent(jsonPayload, Encoding.UTF8, "application/json");
|
||||
if (!string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim());
|
||||
}
|
||||
|
||||
await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
|
||||
throw new InvalidOperationException($"Vulnerability export failed: {message}");
|
||||
}
|
||||
|
||||
var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
|
||||
var result = JsonSerializer.Deserialize<VulnExportResponse>(json, SerializerOptions);
|
||||
return result ?? throw new InvalidOperationException("Failed to parse export response.");
|
||||
}
|
||||
|
||||
public async Task<Stream> DownloadVulnExportAsync(string exportId, string? tenant, CancellationToken cancellationToken)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(exportId))
|
||||
{
|
||||
throw new ArgumentException("Export ID must be provided.", nameof(exportId));
|
||||
}
|
||||
|
||||
EnsureBackendConfigured();
|
||||
|
||||
var encodedExportId = Uri.EscapeDataString(exportId.Trim());
|
||||
var relative = $"api/vuln/export/{encodedExportId}/download";
|
||||
|
||||
using var httpRequest = CreateRequest(HttpMethod.Get, relative);
|
||||
if (!string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", tenant.Trim());
|
||||
}
|
||||
|
||||
await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var response = await _httpClient.SendAsync(httpRequest, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
|
||||
throw new InvalidOperationException($"Vulnerability export download failed: {message}");
|
||||
}
|
||||
|
||||
return await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ using StellaOps.Cli.Services.Models;
|
||||
namespace StellaOps.Cli.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Client for Authority console endpoints (CLI-TEN-47-001).
|
||||
/// Client for Authority console endpoints (CLI-TEN-47-001, CLI-TEN-49-001).
|
||||
/// </summary>
|
||||
internal interface IAuthorityConsoleClient
|
||||
{
|
||||
@@ -14,4 +14,19 @@ internal interface IAuthorityConsoleClient
|
||||
/// Lists available tenants for the authenticated principal.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<TenantInfo>> ListTenantsAsync(string tenant, CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Mints a service account token (CLI-TEN-49-001).
|
||||
/// </summary>
|
||||
Task<TokenMintResponse> MintTokenAsync(TokenMintRequest request, CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Delegates a token to another principal (CLI-TEN-49-001).
|
||||
/// </summary>
|
||||
Task<TokenDelegateResponse> DelegateTokenAsync(TokenDelegateRequest request, CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Introspects the current token for impersonation/delegation info (CLI-TEN-49-001).
|
||||
/// </summary>
|
||||
Task<TokenIntrospectionResponse?> IntrospectTokenAsync(string? tenant, CancellationToken cancellationToken);
|
||||
}
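A hedged mint-then-delegate sketch against the interface above; the service-account id, scope, and delegate subject are placeholders, and tenant defaults are left to the server.

    // Illustrative only: mint a short-lived service-account token, then delegate it onward.
    private static async Task<TokenDelegateResponse> MintAndDelegateAsync(
        IAuthorityConsoleClient console, CancellationToken ct)
    {
        var minted = await console.MintTokenAsync(
            new TokenMintRequest(
                ServiceAccountId: "svc-ci",
                Scopes: new[] { "concelier.read" },
                ExpiresInSeconds: 900,
                Reason: "nightly export"),
            ct);

        return await console.DelegateTokenAsync(
            new TokenDelegateRequest(
                DelegateTo: "user:release-bot",
                Scopes: minted.Scopes,
                ExpiresInSeconds: 300,
                Reason: "hand-off to release pipeline"),
            ct);
    }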
|
||||
|
||||
@@ -6,11 +6,11 @@ using StellaOps.Cli.Configuration;
|
||||
using StellaOps.Cli.Services.Models;
|
||||
using StellaOps.Cli.Services.Models.AdvisoryAi;
|
||||
using StellaOps.Cli.Services.Models.Ruby;
|
||||
|
||||
namespace StellaOps.Cli.Services;
|
||||
|
||||
internal interface IBackendOperationsClient
|
||||
{
|
||||
|
||||

Task<ScannerArtifactResult> DownloadScannerAsync(string channel, string outputPath, bool overwrite, bool verbose, CancellationToken cancellationToken);
|
||||
|
||||
Task UploadScanResultsAsync(string filePath, CancellationToken cancellationToken);
|
||||
@@ -54,4 +54,33 @@ internal interface IBackendOperationsClient
|
||||
Task<AdvisoryPipelinePlanResponseModel> CreateAdvisoryPipelinePlanAsync(AdvisoryAiTaskType taskType, AdvisoryPipelinePlanRequestModel request, CancellationToken cancellationToken);
|
||||
|
||||
Task<AdvisoryPipelineOutputModel?> TryGetAdvisoryPipelineOutputAsync(string cacheKey, AdvisoryAiTaskType taskType, string profile, CancellationToken cancellationToken);
|
||||
|
||||
// CLI-VEX-30-001: VEX consensus operations
|
||||
Task<VexConsensusListResponse> ListVexConsensusAsync(VexConsensusListRequest request, string? tenant, CancellationToken cancellationToken);
|
||||
|
||||
// CLI-VEX-30-002: VEX consensus detail
|
||||
Task<VexConsensusDetailResponse?> GetVexConsensusAsync(string vulnerabilityId, string productKey, string? tenant, CancellationToken cancellationToken);
|
||||
|
||||
// CLI-VEX-30-003: VEX simulation
|
||||
Task<VexSimulationResponse> SimulateVexConsensusAsync(VexSimulationRequest request, string? tenant, CancellationToken cancellationToken);
|
||||
|
||||
// CLI-VEX-30-004: VEX export
|
||||
Task<VexExportResponse> ExportVexConsensusAsync(VexExportRequest request, string? tenant, CancellationToken cancellationToken);
|
||||
Task<Stream> DownloadVexExportAsync(string exportId, string? tenant, CancellationToken cancellationToken);
|
||||
|
||||
// CLI-VULN-29-001: Vulnerability explorer list
|
||||
Task<VulnListResponse> ListVulnerabilitiesAsync(VulnListRequest request, string? tenant, CancellationToken cancellationToken);
|
||||
|
||||
// CLI-VULN-29-002: Vulnerability detail
|
||||
Task<VulnDetailResponse?> GetVulnerabilityAsync(string vulnerabilityId, string? tenant, CancellationToken cancellationToken);
|
||||
|
||||
// CLI-VULN-29-003: Vulnerability workflow operations
|
||||
Task<VulnWorkflowResponse> ExecuteVulnWorkflowAsync(VulnWorkflowRequest request, string? tenant, CancellationToken cancellationToken);
|
||||
|
||||
// CLI-VULN-29-004: Vulnerability simulation
|
||||
Task<VulnSimulationResponse> SimulateVulnerabilitiesAsync(VulnSimulationRequest request, string? tenant, CancellationToken cancellationToken);
|
||||
|
||||
// CLI-VULN-29-005: Vulnerability export
|
||||
Task<VulnExportResponse> ExportVulnerabilitiesAsync(VulnExportRequest request, string? tenant, CancellationToken cancellationToken);
|
||||
Task<Stream> DownloadVulnExportAsync(string exportId, string? tenant, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
@@ -35,3 +35,60 @@ internal sealed record TenantProfile
|
||||
[JsonPropertyName("lastUpdated")]
|
||||
public DateTimeOffset? LastUpdated { get; init; }
|
||||
}
|
||||
|
||||
// CLI-TEN-49-001: Token minting and delegation models
|
||||
|
||||
/// <summary>
|
||||
/// Request to mint a service account token.
|
||||
/// </summary>
|
||||
internal sealed record TokenMintRequest(
|
||||
[property: JsonPropertyName("serviceAccountId")] string ServiceAccountId,
|
||||
[property: JsonPropertyName("scopes")] IReadOnlyList<string> Scopes,
|
||||
[property: JsonPropertyName("expiresInSeconds")] int? ExpiresInSeconds = null,
|
||||
[property: JsonPropertyName("tenant")] string? Tenant = null,
|
||||
[property: JsonPropertyName("reason")] string? Reason = null);
|
||||
|
||||
/// <summary>
|
||||
/// Response from token minting.
|
||||
/// </summary>
|
||||
internal sealed record TokenMintResponse(
|
||||
[property: JsonPropertyName("accessToken")] string AccessToken,
|
||||
[property: JsonPropertyName("tokenType")] string TokenType,
|
||||
[property: JsonPropertyName("expiresAt")] DateTimeOffset ExpiresAt,
|
||||
[property: JsonPropertyName("scopes")] IReadOnlyList<string> Scopes,
|
||||
[property: JsonPropertyName("tokenId")] string? TokenId = null);
|
||||
|
||||
/// <summary>
|
||||
/// Request to delegate a token to another principal.
|
||||
/// </summary>
|
||||
internal sealed record TokenDelegateRequest(
|
||||
[property: JsonPropertyName("delegateTo")] string DelegateTo,
|
||||
[property: JsonPropertyName("scopes")] IReadOnlyList<string> Scopes,
|
||||
[property: JsonPropertyName("expiresInSeconds")] int? ExpiresInSeconds = null,
|
||||
[property: JsonPropertyName("tenant")] string? Tenant = null,
|
||||
[property: JsonPropertyName("reason")] string? Reason = null);
|
||||
|
||||
/// <summary>
|
||||
/// Response from token delegation.
|
||||
/// </summary>
|
||||
internal sealed record TokenDelegateResponse(
|
||||
[property: JsonPropertyName("accessToken")] string AccessToken,
|
||||
[property: JsonPropertyName("tokenType")] string TokenType,
|
||||
[property: JsonPropertyName("expiresAt")] DateTimeOffset ExpiresAt,
|
||||
[property: JsonPropertyName("delegationId")] string DelegationId,
|
||||
[property: JsonPropertyName("originalSubject")] string OriginalSubject,
|
||||
[property: JsonPropertyName("delegatedSubject")] string DelegatedSubject,
|
||||
[property: JsonPropertyName("scopes")] IReadOnlyList<string> Scopes);
|
||||
|
||||
/// <summary>
|
||||
/// Token introspection response for impersonation banner.
|
||||
/// </summary>
|
||||
internal sealed record TokenIntrospectionResponse(
|
||||
[property: JsonPropertyName("active")] bool Active,
|
||||
[property: JsonPropertyName("sub")] string? Subject = null,
|
||||
[property: JsonPropertyName("clientId")] string? ClientId = null,
|
||||
[property: JsonPropertyName("scope")] string? Scope = null,
|
||||
[property: JsonPropertyName("exp")] long? ExpiresAt = null,
|
||||
[property: JsonPropertyName("iat")] long? IssuedAt = null,
|
||||
[property: JsonPropertyName("delegatedBy")] string? DelegatedBy = null,
|
||||
[property: JsonPropertyName("delegationReason")] string? DelegationReason = null);
|
||||
|
||||
258 src/Cli/StellaOps.Cli/Services/Models/VexModels.cs (Normal file)
@@ -0,0 +1,258 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Cli.Services.Models;
|
||||
|
||||
// CLI-VEX-30-001: VEX consensus models for CLI
|
||||
|
||||
/// <summary>
|
||||
/// VEX consensus list request parameters.
|
||||
/// </summary>
|
||||
internal sealed record VexConsensusListRequest(
|
||||
[property: JsonPropertyName("vulnerabilityId")] string? VulnerabilityId = null,
|
||||
[property: JsonPropertyName("productKey")] string? ProductKey = null,
|
||||
[property: JsonPropertyName("purl")] string? Purl = null,
|
||||
[property: JsonPropertyName("status")] string? Status = null,
|
||||
[property: JsonPropertyName("policyVersion")] string? PolicyVersion = null,
|
||||
[property: JsonPropertyName("limit")] int? Limit = null,
|
||||
[property: JsonPropertyName("offset")] int? Offset = null);
|
||||
|
||||
/// <summary>
|
||||
/// Paginated VEX consensus list response.
|
||||
/// </summary>
|
||||
internal sealed record VexConsensusListResponse(
|
||||
[property: JsonPropertyName("items")] IReadOnlyList<VexConsensusItem> Items,
|
||||
[property: JsonPropertyName("total")] int Total,
|
||||
[property: JsonPropertyName("limit")] int Limit,
|
||||
[property: JsonPropertyName("offset")] int Offset,
|
||||
[property: JsonPropertyName("hasMore")] bool HasMore);
|
||||
|
||||
/// <summary>
|
||||
/// VEX consensus item from the API.
|
||||
/// </summary>
|
||||
internal sealed record VexConsensusItem(
|
||||
[property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId,
|
||||
[property: JsonPropertyName("product")] VexProductInfo Product,
|
||||
[property: JsonPropertyName("status")] string Status,
|
||||
[property: JsonPropertyName("calculatedAt")] DateTimeOffset CalculatedAt,
|
||||
[property: JsonPropertyName("sources")] IReadOnlyList<VexConsensusSourceInfo> Sources,
|
||||
[property: JsonPropertyName("conflicts")] IReadOnlyList<VexConsensusConflictInfo>? Conflicts = null,
|
||||
[property: JsonPropertyName("policyVersion")] string? PolicyVersion = null,
|
||||
[property: JsonPropertyName("policyDigest")] string? PolicyDigest = null,
|
||||
[property: JsonPropertyName("summary")] string? Summary = null);
|
||||
|
||||
/// <summary>
|
||||
/// VEX product information.
|
||||
/// </summary>
|
||||
internal sealed record VexProductInfo(
|
||||
[property: JsonPropertyName("key")] string Key,
|
||||
[property: JsonPropertyName("name")] string? Name = null,
|
||||
[property: JsonPropertyName("version")] string? Version = null,
|
||||
[property: JsonPropertyName("purl")] string? Purl = null,
|
||||
[property: JsonPropertyName("cpe")] string? Cpe = null);
|
||||
|
||||
/// <summary>
|
||||
/// VEX consensus source (accepted claim).
|
||||
/// </summary>
|
||||
internal sealed record VexConsensusSourceInfo(
|
||||
[property: JsonPropertyName("providerId")] string ProviderId,
|
||||
[property: JsonPropertyName("status")] string Status,
|
||||
[property: JsonPropertyName("documentDigest")] string? DocumentDigest = null,
|
||||
[property: JsonPropertyName("weight")] double Weight = 1.0,
|
||||
[property: JsonPropertyName("justification")] string? Justification = null,
|
||||
[property: JsonPropertyName("detail")] string? Detail = null,
|
||||
[property: JsonPropertyName("confidence")] VexConfidenceInfo? Confidence = null);
|
||||
|
||||
/// <summary>
|
||||
/// VEX consensus conflict (rejected claim).
|
||||
/// </summary>
|
||||
internal sealed record VexConsensusConflictInfo(
|
||||
[property: JsonPropertyName("providerId")] string ProviderId,
|
||||
[property: JsonPropertyName("status")] string Status,
|
||||
[property: JsonPropertyName("documentDigest")] string? DocumentDigest = null,
|
||||
[property: JsonPropertyName("justification")] string? Justification = null,
|
||||
[property: JsonPropertyName("detail")] string? Detail = null,
|
||||
[property: JsonPropertyName("reason")] string? Reason = null);
|
||||
|
||||
/// <summary>
|
||||
/// VEX confidence information.
|
||||
/// </summary>
|
||||
internal sealed record VexConfidenceInfo(
|
||||
[property: JsonPropertyName("level")] string? Level = null,
|
||||
[property: JsonPropertyName("score")] double? Score = null,
|
||||
[property: JsonPropertyName("method")] string? Method = null);
|
||||
|
||||
// CLI-VEX-30-002: VEX consensus detail models
|
||||
|
||||
/// <summary>
|
||||
/// Detailed VEX consensus response including quorum, evidence, rationale, and signature status.
|
||||
/// </summary>
|
||||
internal sealed record VexConsensusDetailResponse(
|
||||
[property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId,
|
||||
[property: JsonPropertyName("product")] VexProductInfo Product,
|
||||
[property: JsonPropertyName("status")] string Status,
|
||||
[property: JsonPropertyName("calculatedAt")] DateTimeOffset CalculatedAt,
|
||||
[property: JsonPropertyName("sources")] IReadOnlyList<VexConsensusSourceInfo> Sources,
|
||||
[property: JsonPropertyName("conflicts")] IReadOnlyList<VexConsensusConflictInfo>? Conflicts = null,
|
||||
[property: JsonPropertyName("policyVersion")] string? PolicyVersion = null,
|
||||
[property: JsonPropertyName("policyDigest")] string? PolicyDigest = null,
|
||||
[property: JsonPropertyName("summary")] string? Summary = null,
|
||||
[property: JsonPropertyName("quorum")] VexQuorumInfo? Quorum = null,
|
||||
[property: JsonPropertyName("rationale")] VexRationaleInfo? Rationale = null,
|
||||
[property: JsonPropertyName("signature")] VexSignatureInfo? Signature = null,
|
||||
[property: JsonPropertyName("evidence")] IReadOnlyList<VexEvidenceInfo>? Evidence = null);
|
||||
|
||||
/// <summary>
|
||||
/// VEX quorum information showing how consensus was reached.
|
||||
/// </summary>
|
||||
internal sealed record VexQuorumInfo(
|
||||
[property: JsonPropertyName("required")] int Required,
|
||||
[property: JsonPropertyName("achieved")] int Achieved,
|
||||
[property: JsonPropertyName("threshold")] double Threshold,
|
||||
[property: JsonPropertyName("totalWeight")] double TotalWeight,
|
||||
[property: JsonPropertyName("weightAchieved")] double WeightAchieved,
|
||||
[property: JsonPropertyName("participatingProviders")] IReadOnlyList<string>? ParticipatingProviders = null);
|
||||
|
||||
/// <summary>
|
||||
/// VEX rationale explaining the consensus decision.
|
||||
/// </summary>
|
||||
internal sealed record VexRationaleInfo(
|
||||
[property: JsonPropertyName("text")] string? Text = null,
|
||||
[property: JsonPropertyName("justifications")] IReadOnlyList<string>? Justifications = null,
|
||||
[property: JsonPropertyName("policyRules")] IReadOnlyList<string>? PolicyRules = null);
|
||||
|
||||
/// <summary>
|
||||
/// VEX signature status information.
|
||||
/// </summary>
|
||||
internal sealed record VexSignatureInfo(
|
||||
[property: JsonPropertyName("signed")] bool Signed,
|
||||
[property: JsonPropertyName("algorithm")] string? Algorithm = null,
|
||||
[property: JsonPropertyName("keyId")] string? KeyId = null,
|
||||
[property: JsonPropertyName("signedAt")] DateTimeOffset? SignedAt = null,
|
||||
[property: JsonPropertyName("verificationStatus")] string? VerificationStatus = null,
|
||||
[property: JsonPropertyName("certificateChain")] IReadOnlyList<string>? CertificateChain = null);
|
||||
|
||||
/// <summary>
|
||||
/// VEX evidence supporting the consensus decision.
|
||||
/// </summary>
|
||||
internal sealed record VexEvidenceInfo(
|
||||
[property: JsonPropertyName("type")] string Type,
|
||||
[property: JsonPropertyName("providerId")] string ProviderId,
|
||||
[property: JsonPropertyName("documentId")] string? DocumentId = null,
|
||||
[property: JsonPropertyName("documentDigest")] string? DocumentDigest = null,
|
||||
[property: JsonPropertyName("timestamp")] DateTimeOffset? Timestamp = null,
|
||||
[property: JsonPropertyName("content")] string? Content = null);
|
||||
|
||||
// CLI-VEX-30-003: VEX simulation models
|
||||
|
||||
/// <summary>
|
||||
/// VEX simulation request with trust/threshold overrides.
|
||||
/// </summary>
|
||||
internal sealed record VexSimulationRequest(
|
||||
[property: JsonPropertyName("vulnerabilityId")] string? VulnerabilityId = null,
|
||||
[property: JsonPropertyName("productKey")] string? ProductKey = null,
|
||||
[property: JsonPropertyName("purl")] string? Purl = null,
|
||||
[property: JsonPropertyName("trustOverrides")] IReadOnlyDictionary<string, double>? TrustOverrides = null,
|
||||
[property: JsonPropertyName("thresholdOverride")] double? ThresholdOverride = null,
|
||||
[property: JsonPropertyName("quorumOverride")] int? QuorumOverride = null,
|
||||
[property: JsonPropertyName("excludeProviders")] IReadOnlyList<string>? ExcludeProviders = null,
|
||||
[property: JsonPropertyName("includeOnly")] IReadOnlyList<string>? IncludeOnly = null);
|
||||
|
||||
/// <summary>
|
||||
/// VEX simulation response showing before/after comparison.
|
||||
/// </summary>
|
||||
internal sealed record VexSimulationResponse(
|
||||
[property: JsonPropertyName("items")] IReadOnlyList<VexSimulationResultItem> Items,
|
||||
[property: JsonPropertyName("parameters")] VexSimulationParameters Parameters,
|
||||
[property: JsonPropertyName("summary")] VexSimulationSummary Summary);
|
||||
|
||||
/// <summary>
|
||||
/// Individual VEX simulation result showing the delta.
|
||||
/// </summary>
|
||||
internal sealed record VexSimulationResultItem(
|
||||
[property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId,
|
||||
[property: JsonPropertyName("product")] VexProductInfo Product,
|
||||
[property: JsonPropertyName("before")] VexSimulationState Before,
|
||||
[property: JsonPropertyName("after")] VexSimulationState After,
|
||||
[property: JsonPropertyName("changed")] bool Changed,
|
||||
[property: JsonPropertyName("changeType")] string? ChangeType = null);
|
||||
|
||||
/// <summary>
|
||||
/// VEX state for simulation comparison.
|
||||
/// </summary>
|
||||
internal sealed record VexSimulationState(
|
||||
[property: JsonPropertyName("status")] string Status,
|
||||
[property: JsonPropertyName("quorumAchieved")] int QuorumAchieved,
|
||||
[property: JsonPropertyName("weightAchieved")] double WeightAchieved,
|
||||
[property: JsonPropertyName("sources")] IReadOnlyList<string>? Sources = null);
|
||||
|
||||
/// <summary>
|
||||
/// Parameters used in the simulation.
|
||||
/// </summary>
|
||||
internal sealed record VexSimulationParameters(
|
||||
[property: JsonPropertyName("threshold")] double Threshold,
|
||||
[property: JsonPropertyName("quorum")] int Quorum,
|
||||
[property: JsonPropertyName("trustWeights")] IReadOnlyDictionary<string, double>? TrustWeights = null,
|
||||
[property: JsonPropertyName("excludedProviders")] IReadOnlyList<string>? ExcludedProviders = null);
|
||||
|
||||
/// <summary>
|
||||
/// Summary of simulation results.
|
||||
/// </summary>
|
||||
internal sealed record VexSimulationSummary(
|
||||
[property: JsonPropertyName("totalEvaluated")] int TotalEvaluated,
|
||||
[property: JsonPropertyName("totalChanged")] int TotalChanged,
|
||||
[property: JsonPropertyName("statusUpgrades")] int StatusUpgrades,
|
||||
[property: JsonPropertyName("statusDowngrades")] int StatusDowngrades,
|
||||
[property: JsonPropertyName("noChange")] int NoChange);
|
||||
|
||||
// CLI-VEX-30-004: VEX export models
|
||||
|
||||
/// <summary>
|
||||
/// VEX export request parameters.
|
||||
/// </summary>
|
||||
internal sealed record VexExportRequest(
|
||||
[property: JsonPropertyName("vulnerabilityIds")] IReadOnlyList<string>? VulnerabilityIds = null,
|
||||
[property: JsonPropertyName("productKeys")] IReadOnlyList<string>? ProductKeys = null,
|
||||
[property: JsonPropertyName("purls")] IReadOnlyList<string>? Purls = null,
|
||||
[property: JsonPropertyName("statuses")] IReadOnlyList<string>? Statuses = null,
|
||||
[property: JsonPropertyName("policyVersion")] string? PolicyVersion = null,
|
||||
[property: JsonPropertyName("signed")] bool Signed = true,
|
||||
[property: JsonPropertyName("format")] string Format = "ndjson");
|
||||
|
||||
/// <summary>
|
||||
/// VEX export response with download information.
|
||||
/// </summary>
|
||||
internal sealed record VexExportResponse(
|
||||
[property: JsonPropertyName("exportId")] string ExportId,
|
||||
[property: JsonPropertyName("downloadUrl")] string? DownloadUrl = null,
|
||||
[property: JsonPropertyName("format")] string Format = "ndjson",
|
||||
[property: JsonPropertyName("itemCount")] int ItemCount = 0,
|
||||
[property: JsonPropertyName("signed")] bool Signed = false,
|
||||
[property: JsonPropertyName("signatureAlgorithm")] string? SignatureAlgorithm = null,
|
||||
[property: JsonPropertyName("signatureKeyId")] string? SignatureKeyId = null,
|
||||
[property: JsonPropertyName("digest")] string? Digest = null,
|
||||
[property: JsonPropertyName("digestAlgorithm")] string? DigestAlgorithm = null,
|
||||
[property: JsonPropertyName("expiresAt")] DateTimeOffset? ExpiresAt = null);
|
||||
|
||||
/// <summary>
|
||||
/// VEX export signature verification request.
|
||||
/// </summary>
|
||||
internal sealed record VexExportVerifyRequest(
|
||||
[property: JsonPropertyName("filePath")] string FilePath,
|
||||
[property: JsonPropertyName("signaturePath")] string? SignaturePath = null,
|
||||
[property: JsonPropertyName("expectedDigest")] string? ExpectedDigest = null,
|
||||
[property: JsonPropertyName("publicKeyPath")] string? PublicKeyPath = null);
|
||||
|
||||
/// <summary>
|
||||
/// VEX export signature verification result.
|
||||
/// </summary>
|
||||
internal sealed record VexExportVerifyResult(
|
||||
[property: JsonPropertyName("valid")] bool Valid,
|
||||
[property: JsonPropertyName("signatureStatus")] string SignatureStatus,
|
||||
[property: JsonPropertyName("digestMatch")] bool? DigestMatch = null,
|
||||
[property: JsonPropertyName("actualDigest")] string? ActualDigest = null,
|
||||
[property: JsonPropertyName("expectedDigest")] string? ExpectedDigest = null,
|
||||
[property: JsonPropertyName("keyId")] string? KeyId = null,
|
||||
[property: JsonPropertyName("signedAt")] DateTimeOffset? SignedAt = null,
|
||||
[property: JsonPropertyName("errors")] IReadOnlyList<string>? Errors = null);
|
||||
291 src/Cli/StellaOps.Cli/Services/Models/VulnModels.cs (Normal file)
@@ -0,0 +1,291 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Cli.Services.Models;
|
||||
|
||||
// CLI-VULN-29-001: Vulnerability Explorer models for CLI
|
||||
|
||||
/// <summary>
|
||||
/// Vulnerability list request parameters.
|
||||
/// </summary>
|
||||
internal sealed record VulnListRequest(
|
||||
[property: JsonPropertyName("vulnerabilityId")] string? VulnerabilityId = null,
|
||||
[property: JsonPropertyName("severity")] string? Severity = null,
|
||||
[property: JsonPropertyName("status")] string? Status = null,
|
||||
[property: JsonPropertyName("purl")] string? Purl = null,
|
||||
[property: JsonPropertyName("cpe")] string? Cpe = null,
|
||||
[property: JsonPropertyName("sbomId")] string? SbomId = null,
|
||||
[property: JsonPropertyName("policyId")] string? PolicyId = null,
|
||||
[property: JsonPropertyName("policyVersion")] int? PolicyVersion = null,
|
||||
[property: JsonPropertyName("groupBy")] string? GroupBy = null,
|
||||
[property: JsonPropertyName("limit")] int? Limit = null,
|
||||
[property: JsonPropertyName("offset")] int? Offset = null,
|
||||
[property: JsonPropertyName("cursor")] string? Cursor = null);
|
||||
|
||||
/// <summary>
|
||||
/// Paginated vulnerability list response.
|
||||
/// </summary>
|
||||
internal sealed record VulnListResponse(
|
||||
[property: JsonPropertyName("items")] IReadOnlyList<VulnItem> Items,
|
||||
[property: JsonPropertyName("total")] int Total,
|
||||
[property: JsonPropertyName("limit")] int Limit,
|
||||
[property: JsonPropertyName("offset")] int Offset,
|
||||
[property: JsonPropertyName("hasMore")] bool HasMore,
|
||||
[property: JsonPropertyName("nextCursor")] string? NextCursor = null,
|
||||
[property: JsonPropertyName("grouping")] VulnGroupingInfo? Grouping = null);
|
||||
|
||||
/// <summary>
|
||||
/// Individual vulnerability item from the explorer.
|
||||
/// </summary>
|
||||
internal sealed record VulnItem(
|
||||
[property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId,
|
||||
[property: JsonPropertyName("status")] string Status,
|
||||
[property: JsonPropertyName("severity")] VulnSeverityInfo Severity,
|
||||
[property: JsonPropertyName("affectedPackages")] IReadOnlyList<VulnAffectedPackage> AffectedPackages,
|
||||
[property: JsonPropertyName("vexStatus")] string? VexStatus = null,
|
||||
[property: JsonPropertyName("policyFindingId")] string? PolicyFindingId = null,
|
||||
[property: JsonPropertyName("aliases")] IReadOnlyList<string>? Aliases = null,
|
||||
[property: JsonPropertyName("summary")] string? Summary = null,
|
||||
[property: JsonPropertyName("publishedAt")] DateTimeOffset? PublishedAt = null,
|
||||
[property: JsonPropertyName("updatedAt")] DateTimeOffset? UpdatedAt = null,
|
||||
[property: JsonPropertyName("assignee")] string? Assignee = null,
|
||||
[property: JsonPropertyName("dueDate")] DateTimeOffset? DueDate = null,
|
||||
[property: JsonPropertyName("tags")] IReadOnlyList<string>? Tags = null);
|
||||
|
||||
/// <summary>
|
||||
/// Vulnerability severity information.
|
||||
/// </summary>
|
||||
internal sealed record VulnSeverityInfo(
|
||||
[property: JsonPropertyName("level")] string Level,
|
||||
[property: JsonPropertyName("score")] double? Score = null,
|
||||
[property: JsonPropertyName("vector")] string? Vector = null,
|
||||
[property: JsonPropertyName("source")] string? Source = null);
|
||||
|
||||
/// <summary>
|
||||
/// Affected package information.
|
||||
/// </summary>
|
||||
internal sealed record VulnAffectedPackage(
|
||||
[property: JsonPropertyName("purl")] string? Purl = null,
|
||||
[property: JsonPropertyName("cpe")] string? Cpe = null,
|
||||
[property: JsonPropertyName("name")] string? Name = null,
|
||||
[property: JsonPropertyName("version")] string? Version = null,
|
||||
[property: JsonPropertyName("fixedIn")] string? FixedIn = null,
|
||||
[property: JsonPropertyName("sbomId")] string? SbomId = null,
|
||||
[property: JsonPropertyName("pathCount")] int? PathCount = null);
|
||||
|
||||
/// <summary>
|
||||
/// Grouping information for aggregated results.
|
||||
/// </summary>
|
||||
internal sealed record VulnGroupingInfo(
|
||||
[property: JsonPropertyName("field")] string Field,
|
||||
[property: JsonPropertyName("groups")] IReadOnlyList<VulnGroup> Groups);
|
||||
|
||||
/// <summary>
|
||||
/// A group in aggregated results.
|
||||
/// </summary>
|
||||
internal sealed record VulnGroup(
|
||||
[property: JsonPropertyName("key")] string Key,
|
||||
[property: JsonPropertyName("count")] int Count,
|
||||
[property: JsonPropertyName("criticalCount")] int? CriticalCount = null,
|
||||
[property: JsonPropertyName("highCount")] int? HighCount = null,
|
||||
[property: JsonPropertyName("mediumCount")] int? MediumCount = null,
|
||||
[property: JsonPropertyName("lowCount")] int? LowCount = null);
|
||||
|
||||
// CLI-VULN-29-002: Vulnerability detail models
|
||||
|
||||
/// <summary>
|
||||
/// Detailed vulnerability response including evidence, rationale, paths, and ledger.
|
||||
/// </summary>
|
||||
internal sealed record VulnDetailResponse(
|
||||
[property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId,
|
||||
[property: JsonPropertyName("status")] string Status,
|
||||
[property: JsonPropertyName("severity")] VulnSeverityInfo Severity,
|
||||
[property: JsonPropertyName("affectedPackages")] IReadOnlyList<VulnAffectedPackage> AffectedPackages,
|
||||
[property: JsonPropertyName("vexStatus")] string? VexStatus = null,
|
||||
[property: JsonPropertyName("policyFindingId")] string? PolicyFindingId = null,
|
||||
[property: JsonPropertyName("aliases")] IReadOnlyList<string>? Aliases = null,
|
||||
[property: JsonPropertyName("summary")] string? Summary = null,
|
||||
[property: JsonPropertyName("description")] string? Description = null,
|
||||
[property: JsonPropertyName("publishedAt")] DateTimeOffset? PublishedAt = null,
|
||||
[property: JsonPropertyName("updatedAt")] DateTimeOffset? UpdatedAt = null,
|
||||
[property: JsonPropertyName("assignee")] string? Assignee = null,
|
||||
[property: JsonPropertyName("dueDate")] DateTimeOffset? DueDate = null,
|
||||
[property: JsonPropertyName("tags")] IReadOnlyList<string>? Tags = null,
|
||||
[property: JsonPropertyName("evidence")] IReadOnlyList<VulnEvidenceInfo>? Evidence = null,
|
||||
[property: JsonPropertyName("policyRationale")] VulnPolicyRationale? PolicyRationale = null,
|
||||
[property: JsonPropertyName("dependencyPaths")] IReadOnlyList<VulnDependencyPath>? DependencyPaths = null,
|
||||
[property: JsonPropertyName("ledger")] IReadOnlyList<VulnLedgerEntry>? Ledger = null,
|
||||
[property: JsonPropertyName("references")] IReadOnlyList<VulnReference>? References = null);
|
||||
|
||||
/// <summary>
|
||||
/// Evidence supporting the vulnerability assessment.
|
||||
/// </summary>
|
||||
internal sealed record VulnEvidenceInfo(
|
||||
[property: JsonPropertyName("type")] string Type,
|
||||
[property: JsonPropertyName("source")] string Source,
|
||||
[property: JsonPropertyName("documentId")] string? DocumentId = null,
|
||||
[property: JsonPropertyName("documentDigest")] string? DocumentDigest = null,
|
||||
[property: JsonPropertyName("timestamp")] DateTimeOffset? Timestamp = null,
|
||||
[property: JsonPropertyName("content")] string? Content = null);
|
||||
|
||||
/// <summary>
|
||||
/// Policy rationale explaining the status decision.
|
||||
/// </summary>
|
||||
internal sealed record VulnPolicyRationale(
|
||||
[property: JsonPropertyName("policyId")] string PolicyId,
|
||||
[property: JsonPropertyName("policyVersion")] int PolicyVersion,
|
||||
[property: JsonPropertyName("rules")] IReadOnlyList<VulnPolicyRuleResult>? Rules = null,
|
||||
[property: JsonPropertyName("summary")] string? Summary = null);
|
||||
|
||||
/// <summary>
|
||||
/// Result of a policy rule evaluation.
|
||||
/// </summary>
|
||||
internal sealed record VulnPolicyRuleResult(
|
||||
[property: JsonPropertyName("rule")] string Rule,
|
||||
[property: JsonPropertyName("result")] string Result,
|
||||
[property: JsonPropertyName("weight")] double? Weight = null,
|
||||
[property: JsonPropertyName("reason")] string? Reason = null);
|
||||
|
||||
/// <summary>
|
||||
/// Dependency path showing how the vulnerable package is included.
|
||||
/// </summary>
|
||||
internal sealed record VulnDependencyPath(
|
||||
[property: JsonPropertyName("path")] IReadOnlyList<string> Path,
|
||||
[property: JsonPropertyName("sbomId")] string? SbomId = null,
|
||||
[property: JsonPropertyName("depth")] int? Depth = null);
|
||||
|
||||
/// <summary>
|
||||
/// Ledger entry tracking vulnerability workflow history.
|
||||
/// </summary>
|
||||
internal sealed record VulnLedgerEntry(
|
||||
[property: JsonPropertyName("timestamp")] DateTimeOffset Timestamp,
|
||||
[property: JsonPropertyName("action")] string Action,
|
||||
[property: JsonPropertyName("actor")] string? Actor = null,
|
||||
[property: JsonPropertyName("fromStatus")] string? FromStatus = null,
|
||||
[property: JsonPropertyName("toStatus")] string? ToStatus = null,
|
||||
[property: JsonPropertyName("comment")] string? Comment = null,
|
||||
[property: JsonPropertyName("metadata")] IReadOnlyDictionary<string, string>? Metadata = null);
|
||||
|
||||
/// <summary>
|
||||
/// Reference link for the vulnerability.
|
||||
/// </summary>
|
||||
internal sealed record VulnReference(
|
||||
[property: JsonPropertyName("type")] string Type,
|
||||
[property: JsonPropertyName("url")] string Url,
|
||||
[property: JsonPropertyName("title")] string? Title = null);
|
||||
|
||||
// CLI-VULN-29-003: Vulnerability workflow models
|
||||
|
||||
/// <summary>
|
||||
/// Workflow action request for vulnerability operations.
|
||||
/// </summary>
|
||||
internal sealed record VulnWorkflowRequest(
|
||||
[property: JsonPropertyName("action")] string Action,
|
||||
[property: JsonPropertyName("vulnerabilityIds")] IReadOnlyList<string>? VulnerabilityIds = null,
|
||||
[property: JsonPropertyName("filter")] VulnFilterSpec? Filter = null,
|
||||
[property: JsonPropertyName("assignee")] string? Assignee = null,
|
||||
[property: JsonPropertyName("comment")] string? Comment = null,
|
||||
[property: JsonPropertyName("dueDate")] DateTimeOffset? DueDate = null,
|
||||
[property: JsonPropertyName("justification")] string? Justification = null,
|
||||
[property: JsonPropertyName("fixVersion")] string? FixVersion = null,
|
||||
[property: JsonPropertyName("idempotencyKey")] string? IdempotencyKey = null);
|
||||
|
||||
/// <summary>
|
||||
/// Filter specification for bulk workflow operations.
|
||||
/// </summary>
|
||||
internal sealed record VulnFilterSpec(
|
||||
[property: JsonPropertyName("severity")] string? Severity = null,
|
||||
[property: JsonPropertyName("status")] string? Status = null,
|
||||
[property: JsonPropertyName("purl")] string? Purl = null,
|
||||
[property: JsonPropertyName("sbomId")] string? SbomId = null,
|
||||
[property: JsonPropertyName("policyId")] string? PolicyId = null);
|
||||
|
||||
/// <summary>
|
||||
/// Workflow action response with affected items.
|
||||
/// </summary>
|
||||
internal sealed record VulnWorkflowResponse(
|
||||
[property: JsonPropertyName("success")] bool Success,
|
||||
[property: JsonPropertyName("action")] string Action,
|
||||
[property: JsonPropertyName("affectedCount")] int AffectedCount,
|
||||
[property: JsonPropertyName("affectedIds")] IReadOnlyList<string>? AffectedIds = null,
|
||||
[property: JsonPropertyName("errors")] IReadOnlyList<VulnWorkflowError>? Errors = null,
|
||||
[property: JsonPropertyName("idempotencyKey")] string? IdempotencyKey = null);
|
||||
|
||||
/// <summary>
|
||||
/// Error detail for workflow operations.
|
||||
/// </summary>
|
||||
internal sealed record VulnWorkflowError(
|
||||
[property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId,
|
||||
[property: JsonPropertyName("code")] string Code,
|
||||
[property: JsonPropertyName("message")] string Message);
|
||||
|
||||
// CLI-VULN-29-004: Vulnerability simulation models
|
||||
|
||||
/// <summary>
|
||||
/// Simulation request for policy/VEX changes.
|
||||
/// </summary>
|
||||
internal sealed record VulnSimulationRequest(
|
||||
[property: JsonPropertyName("policyId")] string? PolicyId = null,
|
||||
[property: JsonPropertyName("policyVersion")] int? PolicyVersion = null,
|
||||
[property: JsonPropertyName("vexOverrides")] IReadOnlyDictionary<string, string>? VexOverrides = null,
|
||||
[property: JsonPropertyName("severityThreshold")] string? SeverityThreshold = null,
|
||||
[property: JsonPropertyName("sbomIds")] IReadOnlyList<string>? SbomIds = null,
|
||||
[property: JsonPropertyName("outputMarkdown")] bool OutputMarkdown = false);
|
||||
|
||||
/// <summary>
|
||||
/// Simulation response showing deltas.
|
||||
/// </summary>
|
||||
internal sealed record VulnSimulationResponse(
|
||||
[property: JsonPropertyName("items")] IReadOnlyList<VulnSimulationDelta> Items,
|
||||
[property: JsonPropertyName("summary")] VulnSimulationSummary Summary,
|
||||
[property: JsonPropertyName("markdownReport")] string? MarkdownReport = null);
|
||||
|
||||
/// <summary>
|
||||
/// Individual delta in simulation results.
|
||||
/// </summary>
|
||||
internal sealed record VulnSimulationDelta(
|
||||
[property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId,
|
||||
[property: JsonPropertyName("beforeStatus")] string BeforeStatus,
|
||||
[property: JsonPropertyName("afterStatus")] string AfterStatus,
|
||||
[property: JsonPropertyName("changed")] bool Changed,
|
||||
[property: JsonPropertyName("changeReason")] string? ChangeReason = null);
|
||||
|
||||
/// <summary>
|
||||
/// Summary of simulation results.
|
||||
/// </summary>
|
||||
internal sealed record VulnSimulationSummary(
|
||||
[property: JsonPropertyName("totalEvaluated")] int TotalEvaluated,
|
||||
[property: JsonPropertyName("totalChanged")] int TotalChanged,
|
||||
[property: JsonPropertyName("statusUpgrades")] int StatusUpgrades,
|
||||
[property: JsonPropertyName("statusDowngrades")] int StatusDowngrades,
|
||||
[property: JsonPropertyName("noChange")] int NoChange);
|
||||
|
||||
// CLI-VULN-29-005: Vulnerability export models
|
||||
|
||||
/// <summary>
|
||||
/// Export request for vulnerability evidence bundles.
|
||||
/// </summary>
|
||||
internal sealed record VulnExportRequest(
|
||||
[property: JsonPropertyName("vulnerabilityIds")] IReadOnlyList<string>? VulnerabilityIds = null,
|
||||
[property: JsonPropertyName("sbomIds")] IReadOnlyList<string>? SbomIds = null,
|
||||
[property: JsonPropertyName("policyId")] string? PolicyId = null,
|
||||
[property: JsonPropertyName("format")] string Format = "ndjson",
|
||||
[property: JsonPropertyName("includeEvidence")] bool IncludeEvidence = true,
|
||||
[property: JsonPropertyName("includeLedger")] bool IncludeLedger = true,
|
||||
[property: JsonPropertyName("signed")] bool Signed = true);
|
||||
|
||||
/// <summary>
|
||||
/// Export response with download information.
|
||||
/// </summary>
|
||||
internal sealed record VulnExportResponse(
|
||||
[property: JsonPropertyName("exportId")] string ExportId,
|
||||
[property: JsonPropertyName("downloadUrl")] string? DownloadUrl = null,
|
||||
[property: JsonPropertyName("format")] string Format = "ndjson",
|
||||
[property: JsonPropertyName("itemCount")] int ItemCount = 0,
|
||||
[property: JsonPropertyName("signed")] bool Signed = false,
|
||||
[property: JsonPropertyName("signatureAlgorithm")] string? SignatureAlgorithm = null,
|
||||
[property: JsonPropertyName("signatureKeyId")] string? SignatureKeyId = null,
|
||||
[property: JsonPropertyName("digest")] string? Digest = null,
|
||||
[property: JsonPropertyName("digestAlgorithm")] string? DigestAlgorithm = null,
|
||||
[property: JsonPropertyName("expiresAt")] DateTimeOffset? ExpiresAt = null);
|
||||
@@ -27,6 +27,7 @@ internal static class CliMetrics
    private static readonly Counter<long> RubyInspectCounter = Meter.CreateCounter<long>("stellaops.cli.ruby.inspect.count");
    private static readonly Counter<long> RubyResolveCounter = Meter.CreateCounter<long>("stellaops.cli.ruby.resolve.count");
    private static readonly Counter<long> PhpInspectCounter = Meter.CreateCounter<long>("stellaops.cli.php.inspect.count");
    private static readonly Counter<long> PythonInspectCounter = Meter.CreateCounter<long>("stellaops.cli.python.inspect.count");
    private static readonly Histogram<double> CommandDurationHistogram = Meter.CreateHistogram<double>("stellaops.cli.command.duration.ms");

    public static void RecordScannerDownload(string channel, bool fromCache)
@@ -150,6 +151,12 @@ internal static class CliMetrics
            new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome)
        });

    public static void RecordPythonInspect(string outcome)
        => PythonInspectCounter.Add(1, new KeyValuePair<string, object?>[]
        {
            new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome)
        });

    public static IDisposable MeasureCommandDuration(string command)
    {
        var start = DateTime.UtcNow;
@@ -3284,6 +3284,20 @@ private readonly record struct LinksetObservationSummary(

static async Task InitializeMongoAsync(WebApplication app)
{
    // Skip Mongo initialization in testing/bypass mode.
    var isTesting = string.Equals(
        Environment.GetEnvironmentVariable("DOTNET_ENVIRONMENT"),
        "Testing",
        StringComparison.OrdinalIgnoreCase);
    var bypass = string.Equals(
        Environment.GetEnvironmentVariable("CONCELIER_BYPASS_MONGO"),
        "1",
        StringComparison.OrdinalIgnoreCase);
    if (isTesting || bypass)
    {
        return;
    }

    await using var scope = app.Services.CreateAsyncScope();
    var bootstrapper = scope.ServiceProvider.GetRequiredService<MongoBootstrapper>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("MongoBootstrapper");
@@ -10,16 +10,20 @@ namespace StellaOps.Concelier.Core.Linksets;
|
||||
/// <summary>
|
||||
/// Contract-matching payload for <c>advisory.linkset.updated@1</c> events.
|
||||
/// Per LNM-21-005, emits delta descriptions + observation ids (tenant + provenance only).
|
||||
/// Enhanced per CONCELIER-POLICY-23-002 with idempotent IDs, confidence summaries, and tenant metadata.
|
||||
/// </summary>
|
||||
public sealed record AdvisoryLinksetUpdatedEvent(
|
||||
Guid EventId,
|
||||
string IdempotencyKey,
|
||||
string TenantId,
|
||||
AdvisoryLinksetTenantMetadata TenantMetadata,
|
||||
string LinksetId,
|
||||
string AdvisoryId,
|
||||
string Source,
|
||||
ImmutableArray<string> ObservationIds,
|
||||
AdvisoryLinksetDelta Delta,
|
||||
double? Confidence,
|
||||
AdvisoryLinksetConfidenceSummary ConfidenceSummary,
|
||||
ImmutableArray<AdvisoryLinksetConflictSummary> Conflicts,
|
||||
AdvisoryLinksetProvenanceSummary Provenance,
|
||||
DateTimeOffset CreatedAt,
|
||||
@@ -43,16 +47,22 @@ public sealed record AdvisoryLinksetUpdatedEvent(
|
||||
var delta = ComputeDelta(linkset, previousLinkset);
|
||||
var conflicts = BuildConflictSummaries(linkset.Conflicts);
|
||||
var provenance = BuildProvenance(linkset.Provenance);
|
||||
var tenantMetadata = BuildTenantMetadata(linkset.TenantId, tenantUrn);
|
||||
var confidenceSummary = BuildConfidenceSummary(linkset.Confidence, conflicts.Length);
|
||||
var idempotencyKey = ComputeIdempotencyKey(linksetId, linkset, delta);
|
||||
|
||||
return new AdvisoryLinksetUpdatedEvent(
|
||||
EventId: Guid.NewGuid(),
|
||||
IdempotencyKey: idempotencyKey,
|
||||
TenantId: tenantUrn,
|
||||
TenantMetadata: tenantMetadata,
|
||||
LinksetId: linksetId,
|
||||
AdvisoryId: linkset.AdvisoryId,
|
||||
Source: linkset.Source,
|
||||
ObservationIds: linkset.ObservationIds,
|
||||
Delta: delta,
|
||||
Confidence: linkset.Confidence,
|
||||
ConfidenceSummary: confidenceSummary,
|
||||
Conflicts: conflicts,
|
||||
Provenance: provenance,
|
||||
CreatedAt: linkset.CreatedAt,
|
||||
@@ -61,6 +71,139 @@ public sealed record AdvisoryLinksetUpdatedEvent(
|
||||
TraceId: traceId);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Computes a deterministic idempotency key for safe replay.
|
||||
/// The key is derived from linkset identity + content hash so replaying the same change yields the same key.
|
||||
/// </summary>
|
||||
private static string ComputeIdempotencyKey(string linksetId, AdvisoryLinkset linkset, AdvisoryLinksetDelta delta)
|
||||
{
|
||||
var sb = new StringBuilder(256);
|
||||
sb.Append(linksetId);
|
||||
sb.Append('|');
|
||||
sb.Append(linkset.TenantId);
|
||||
sb.Append('|');
|
||||
sb.Append(linkset.AdvisoryId);
|
||||
sb.Append('|');
|
||||
sb.Append(linkset.Source);
|
||||
sb.Append('|');
|
||||
sb.Append(linkset.CreatedAt.ToUniversalTime().Ticks);
|
||||
sb.Append('|');
|
||||
sb.Append(delta.Type);
|
||||
sb.Append('|');
|
||||
|
||||
// Include observation IDs in sorted order for determinism
|
||||
foreach (var obsId in linkset.ObservationIds.OrderBy(id => id, StringComparer.Ordinal))
|
||||
{
|
||||
sb.Append(obsId);
|
||||
sb.Append(',');
|
||||
}
|
||||
|
||||
// Include provenance hash if available
|
||||
if (linkset.Provenance?.PolicyHash is not null)
|
||||
{
|
||||
sb.Append('|');
|
||||
sb.Append(linkset.Provenance.PolicyHash);
|
||||
}
|
||||
|
||||
var input = Encoding.UTF8.GetBytes(sb.ToString());
|
||||
var hash = SHA256.HashData(input);
|
||||
return Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
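Because the key is derived from linkset identity and content rather than EventId, consumers can safely dedupe replays on it; a hedged consumer-side sketch follows (the HashSet stands in for a durable store).

    // Illustrative only: process each linkset change at most once, keyed on IdempotencyKey.
    private static readonly HashSet<string> SeenKeys = new(StringComparer.Ordinal);

    private static bool ShouldProcess(AdvisoryLinksetUpdatedEvent evt)
        => SeenKeys.Add(evt.IdempotencyKey);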
|
||||
|
||||
/// <summary>
|
||||
/// Builds tenant metadata for policy consumers.
|
||||
/// </summary>
|
||||
private static AdvisoryLinksetTenantMetadata BuildTenantMetadata(string tenantId, string tenantUrn)
|
||||
{
|
||||
// Extract tenant identifier from URN if present
|
||||
var rawId = tenantUrn.StartsWith("urn:tenant:", StringComparison.Ordinal)
|
||||
? tenantUrn["urn:tenant:".Length..]
|
||||
: tenantId;
|
||||
|
||||
return new AdvisoryLinksetTenantMetadata(
|
||||
TenantUrn: tenantUrn,
|
||||
TenantId: rawId,
|
||||
Namespace: ExtractNamespace(rawId));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extracts namespace prefix from tenant ID (e.g., "org:acme" → "org").
|
||||
/// </summary>
|
||||
private static string? ExtractNamespace(string tenantId)
|
||||
{
|
||||
var colonIndex = tenantId.IndexOf(':');
|
||||
return colonIndex > 0 ? tenantId[..colonIndex] : null;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Builds confidence summary with tier classification and contributing factors.
|
||||
/// </summary>
|
||||
private static AdvisoryLinksetConfidenceSummary BuildConfidenceSummary(double? confidence, int conflictCount)
|
||||
{
|
||||
var tier = ClassifyConfidenceTier(confidence);
|
||||
var factors = BuildConfidenceFactors(confidence, conflictCount);
|
||||
|
||||
return new AdvisoryLinksetConfidenceSummary(
|
||||
Value: confidence,
|
||||
Tier: tier,
|
||||
ConflictCount: conflictCount,
|
||||
Factors: factors);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Classifies confidence into tiers for policy rules.
|
||||
/// </summary>
|
||||
private static string ClassifyConfidenceTier(double? confidence) => confidence switch
|
||||
{
|
||||
null => "unknown",
|
||||
>= 0.9 => "high",
|
||||
>= 0.7 => "medium",
|
||||
>= 0.5 => "low",
|
||||
_ => "very-low"
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Builds human-readable factors contributing to confidence score.
|
||||
/// </summary>
|
||||
private static ImmutableArray<string> BuildConfidenceFactors(double? confidence, int conflictCount)
|
||||
{
|
||||
var factors = ImmutableArray.CreateBuilder<string>();
|
||||
|
||||
if (confidence is null)
|
||||
{
|
||||
factors.Add("no-confidence-data");
|
||||
return factors.ToImmutable();
|
||||
}
|
||||
|
||||
if (confidence >= 0.9)
|
||||
{
|
||||
factors.Add("strong-alias-correlation");
|
||||
}
|
||||
else if (confidence >= 0.7)
|
||||
{
|
||||
factors.Add("moderate-alias-correlation");
|
||||
}
|
||||
else if (confidence >= 0.5)
|
||||
{
|
||||
factors.Add("weak-alias-correlation");
|
||||
}
|
||||
else
|
||||
{
|
||||
factors.Add("minimal-correlation");
|
||||
}
|
||||
|
||||
if (conflictCount > 0)
|
||||
{
|
||||
factors.Add($"has-{conflictCount}-conflict{(conflictCount > 1 ? "s" : "")}");
|
||||
}
|
||||
else
|
||||
{
|
||||
factors.Add("no-conflicts");
|
||||
}
|
||||
|
||||
return factors.ToImmutable();
|
||||
}
|
||||
|
||||
private static AdvisoryLinksetDelta ComputeDelta(AdvisoryLinkset current, AdvisoryLinkset? previous)
|
||||
{
|
||||
if (previous is null)
|
||||
@@ -166,3 +309,26 @@ public sealed record AdvisoryLinksetProvenanceSummary(
    ImmutableArray<string> ObservationHashes,
    string? ToolVersion,
    string? PolicyHash);

/// <summary>
/// Tenant metadata for policy replay and multi-tenant filtering.
/// Per CONCELIER-POLICY-23-002.
/// </summary>
public sealed record AdvisoryLinksetTenantMetadata(
    string TenantUrn,
    string TenantId,
    string? Namespace);

/// <summary>
/// Confidence summary with tier classification for policy rules.
/// Per CONCELIER-POLICY-23-002.
/// </summary>
/// <param name="Value">Raw confidence score (0.0 - 1.0).</param>
/// <param name="Tier">Confidence tier: high (≥0.9), medium (≥0.7), low (≥0.5), very-low (&lt;0.5), unknown (null).</param>
/// <param name="ConflictCount">Number of conflicts detected in the linkset.</param>
/// <param name="Factors">Human-readable factors contributing to confidence score.</param>
public sealed record AdvisoryLinksetConfidenceSummary(
    double? Value,
    string Tier,
    int ConflictCount,
    ImmutableArray<string> Factors);

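For illustration, following the tier thresholds and factor rules defined above, a linkset with confidence 0.85 and a single conflict would summarize roughly as in this sketch (the literal values are derived from the switch arms and factor strings in this hunk, not from a test fixture):

```csharp
// Illustrative only: expected summary for confidence = 0.85 with one conflict.
var example = new AdvisoryLinksetConfidenceSummary(
    Value: 0.85,
    Tier: "medium",          // 0.7 <= 0.85 < 0.9 per ClassifyConfidenceTier
    ConflictCount: 1,
    Factors: ["moderate-alias-correlation", "has-1-conflict"]);
```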
@@ -0,0 +1,48 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

namespace StellaOps.Concelier.Core.Linksets;

/// <summary>
/// Stores and retrieves policy delta checkpoints for deterministic replay.
/// Consumers use checkpoints to track their position in the linkset stream.
/// </summary>
public interface IPolicyDeltaCheckpointStore
{
    /// <summary>
    /// Gets a checkpoint by consumer and tenant, creating one if it does not exist.
    /// </summary>
    Task<PolicyDeltaCheckpoint> GetOrCreateAsync(
        string tenantId,
        string consumerId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets a checkpoint by its unique ID.
    /// </summary>
    Task<PolicyDeltaCheckpoint?> GetAsync(
        string checkpointId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Updates a checkpoint after processing a batch of linksets.
    /// </summary>
    Task<PolicyDeltaCheckpoint> UpdateAsync(
        PolicyDeltaCheckpoint checkpoint,
        CancellationToken cancellationToken);

    /// <summary>
    /// Lists all checkpoints for a given tenant.
    /// </summary>
    Task<IReadOnlyList<PolicyDeltaCheckpoint>> ListByTenantAsync(
        string tenantId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Deletes a checkpoint (for cleanup or reset scenarios).
    /// </summary>
    Task<bool> DeleteAsync(
        string checkpointId,
        CancellationToken cancellationToken);
}

@@ -0,0 +1,86 @@
using System;

namespace StellaOps.Concelier.Core.Linksets;

/// <summary>
/// Represents a checkpoint for tracking policy delta consumption.
/// Enables deterministic replay by persisting the last processed position.
/// </summary>
public sealed record PolicyDeltaCheckpoint(
    /// <summary>Unique identifier for this checkpoint (typically consumerId + tenant).</summary>
    string CheckpointId,

    /// <summary>Tenant scope for this checkpoint.</summary>
    string TenantId,

    /// <summary>Consumer identifier (e.g., "policy-engine", "vuln-explorer").</summary>
    string ConsumerId,

    /// <summary>Last processed linkset CreatedAt timestamp for cursor-based pagination.</summary>
    DateTimeOffset? LastCreatedAt,

    /// <summary>Last processed advisory ID (tie-breaker when CreatedAt matches).</summary>
    string? LastAdvisoryId,

    /// <summary>MongoDB change-stream resume token for real-time delta subscriptions.</summary>
    string? ResumeToken,

    /// <summary>Sequence number for ordering events within the same timestamp.</summary>
    long SequenceNumber,

    /// <summary>When this checkpoint was last updated.</summary>
    DateTimeOffset UpdatedAt,

    /// <summary>Count of linksets processed since checkpoint creation.</summary>
    long ProcessedCount,

    /// <summary>Hash of the last processed batch for integrity verification.</summary>
    string? LastBatchHash)
{
    public static PolicyDeltaCheckpoint CreateNew(string tenantId, string consumerId, DateTimeOffset now) =>
        new(
            CheckpointId: $"{consumerId}:{tenantId}",
            TenantId: tenantId,
            ConsumerId: consumerId,
            LastCreatedAt: null,
            LastAdvisoryId: null,
            ResumeToken: null,
            SequenceNumber: 0,
            UpdatedAt: now,
            ProcessedCount: 0,
            LastBatchHash: null);

    /// <summary>
    /// Creates an <see cref="AdvisoryLinksetCursor"/> from this checkpoint for pagination.
    /// Returns null if no position has been recorded yet.
    /// </summary>
    public AdvisoryLinksetCursor? ToCursor() =>
        LastCreatedAt.HasValue && !string.IsNullOrEmpty(LastAdvisoryId)
            ? new AdvisoryLinksetCursor(LastCreatedAt.Value, LastAdvisoryId)
            : null;

    /// <summary>
    /// Advances the checkpoint to a new position after processing a batch.
    /// </summary>
    public PolicyDeltaCheckpoint Advance(
        DateTimeOffset lastCreatedAt,
        string lastAdvisoryId,
        long batchCount,
        string? batchHash,
        DateTimeOffset now) =>
        this with
        {
            LastCreatedAt = lastCreatedAt,
            LastAdvisoryId = lastAdvisoryId,
            SequenceNumber = SequenceNumber + batchCount,
            UpdatedAt = now,
            ProcessedCount = ProcessedCount + batchCount,
            LastBatchHash = batchHash
        };

    /// <summary>
    /// Updates the resume token for change-stream subscriptions.
    /// </summary>
    public PolicyDeltaCheckpoint WithResumeToken(string resumeToken, DateTimeOffset now) =>
        this with { ResumeToken = resumeToken, UpdatedAt = now };
}

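A minimal consumption loop, sketched under stated assumptions, shows how `GetOrCreateAsync`, `ToCursor`, `Advance`, and `UpdateAsync` are intended to fit together. `store`, `timeProvider`, and `ct` are assumed to be in scope, and `FetchLinksetPageAsync`, `ProcessAsync`, and `ComputeBatchHash` are hypothetical consumer-side helpers, not APIs introduced by this change:

```csharp
// Sketch only: a policy consumer draining linkset deltas with checkpointed replay.
var checkpoint = await store.GetOrCreateAsync("tenant-a", "policy-engine", ct);

while (!ct.IsCancellationRequested)
{
    var cursor = checkpoint.ToCursor();                    // null on first run => start from the beginning
    var page = await FetchLinksetPageAsync(cursor, 500, ct);
    if (page.Count == 0)
    {
        break;                                             // caught up
    }

    await ProcessAsync(page, ct);                          // consumer-specific work

    var last = page[^1];
    checkpoint = checkpoint.Advance(
        lastCreatedAt: last.CreatedAt,
        lastAdvisoryId: last.AdvisoryId,
        batchCount: page.Count,
        batchHash: ComputeBatchHash(page),
        now: timeProvider.GetUtcNow());

    checkpoint = await store.UpdateAsync(checkpoint, ct);  // persist position for deterministic replay
}
```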
@@ -0,0 +1,111 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

namespace StellaOps.Concelier.Core.Risk;

/// <summary>
/// Provider interface for extracting vendor risk signals from observations.
/// Per CONCELIER-RISK-66-001, surfaces fact-only CVSS/KEV/fix data with provenance.
/// </summary>
public interface IVendorRiskSignalProvider
{
    /// <summary>
    /// Extracts risk signals from a specific observation.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="observationId">Observation identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Risk signal with CVSS, KEV, and fix data.</returns>
    Task<VendorRiskSignal?> GetByObservationAsync(
        string tenantId,
        string observationId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Extracts risk signals from all observations for an advisory.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="advisoryId">Advisory identifier (e.g., CVE-2024-1234).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Collection of risk signals from all vendor observations.</returns>
    Task<IReadOnlyList<VendorRiskSignal>> GetByAdvisoryAsync(
        string tenantId,
        string advisoryId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Extracts aggregated risk signals for a linkset.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="linksetId">Linkset identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Collection of risk signals from linked observations.</returns>
    Task<IReadOnlyList<VendorRiskSignal>> GetByLinksetAsync(
        string tenantId,
        string linksetId,
        CancellationToken cancellationToken);
}

/// <summary>
/// Aggregated risk signal view combining multiple vendor observations.
/// </summary>
public sealed record AggregatedRiskView(
    string TenantId,
    string AdvisoryId,
    IReadOnlyList<VendorRiskSignal> VendorSignals)
{
    /// <summary>
    /// Gets all unique CVSS scores across vendors with their provenance.
    /// </summary>
    public IReadOnlyList<VendorCvssScore> AllCvssScores =>
        VendorSignals
            .SelectMany(s => s.CvssScores)
            .OrderByDescending(c => c.Score)
            .ToList();

    /// <summary>
    /// Gets the highest CVSS score from any vendor.
    /// </summary>
    public VendorCvssScore? HighestCvssScore =>
        AllCvssScores.FirstOrDefault();

    /// <summary>
    /// Indicates if any vendor reports KEV status.
    /// </summary>
    public bool IsKnownExploited =>
        VendorSignals.Any(s => s.IsKnownExploited);

    /// <summary>
    /// Gets all KEV status entries from vendors.
    /// </summary>
    public IReadOnlyList<VendorKevStatus> KevStatuses =>
        VendorSignals
            .Where(s => s.KevStatus is not null)
            .Select(s => s.KevStatus!)
            .ToList();

    /// <summary>
    /// Indicates if any vendor reports a fix available.
    /// </summary>
    public bool HasFixAvailable =>
        VendorSignals.Any(s => s.HasFixAvailable);

    /// <summary>
    /// Gets all fix availability entries from vendors.
    /// </summary>
    public IReadOnlyList<VendorFixAvailability> AllFixAvailability =>
        VendorSignals
            .SelectMany(s => s.FixAvailability)
            .ToList();

    /// <summary>
    /// Gets vendors that provided risk data.
    /// </summary>
    public IReadOnlyList<string> ContributingVendors =>
        VendorSignals
            .Select(s => s.Provenance.Vendor)
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .OrderBy(v => v, StringComparer.OrdinalIgnoreCase)
            .ToList();
}

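A short usage sketch of how a caller might roll up per-vendor signals through this view. The `provider` instance, tenant/advisory values, and `ct` token are assumed to be in scope; they are placeholders, not part of this change:

```csharp
// Sketch: aggregate per-vendor signals for one advisory and read the rolled-up facts.
var signals = await provider.GetByAdvisoryAsync("tenant-a", "CVE-2024-1234", ct);
var view = new AggregatedRiskView("tenant-a", "CVE-2024-1234", signals);

if (view.IsKnownExploited)
{
    // Fact-only surface: each KEV entry keeps its vendor provenance.
    foreach (var kev in view.KevStatuses)
    {
        Console.WriteLine($"{kev.Provenance.Vendor}: KEV since {kev.DateAdded:O}");
    }
}

var top = view.HighestCvssScore;
Console.WriteLine(
    $"Highest CVSS {top?.Score} ({top?.NormalizedSystem}) from {top?.Provenance.Vendor}; fix available: {view.HasFixAvailable}");
```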
@@ -0,0 +1,169 @@
using System;
using System.Collections.Immutable;
using System.Linq;

namespace StellaOps.Concelier.Core.Risk;

/// <summary>
/// Vendor-provided risk signal for an advisory observation.
/// Per CONCELIER-RISK-66-001, surfaces CVSS/KEV/fix data exactly as published with provenance anchors.
/// </summary>
/// <remarks>
/// This model is fact-only: no inference, weighting, or prioritization.
/// All data traces back to a specific vendor observation with provenance.
/// </remarks>
public sealed record VendorRiskSignal(
    string TenantId,
    string AdvisoryId,
    string ObservationId,
    VendorRiskProvenance Provenance,
    ImmutableArray<VendorCvssScore> CvssScores,
    VendorKevStatus? KevStatus,
    ImmutableArray<VendorFixAvailability> FixAvailability,
    DateTimeOffset ExtractedAt)
{
    /// <summary>
    /// Creates a risk signal with no data (for observations without risk metadata).
    /// </summary>
    public static VendorRiskSignal Empty(
        string tenantId,
        string advisoryId,
        string observationId,
        VendorRiskProvenance provenance,
        DateTimeOffset extractedAt)
    {
        return new VendorRiskSignal(
            TenantId: tenantId,
            AdvisoryId: advisoryId,
            ObservationId: observationId,
            Provenance: provenance,
            CvssScores: ImmutableArray<VendorCvssScore>.Empty,
            KevStatus: null,
            FixAvailability: ImmutableArray<VendorFixAvailability>.Empty,
            ExtractedAt: extractedAt);
    }

    /// <summary>
    /// Gets the highest severity CVSS score if any.
    /// </summary>
    public VendorCvssScore? HighestCvssScore => CvssScores.IsDefaultOrEmpty
        ? null
        : CvssScores.MaxBy(s => s.Score);

    /// <summary>
    /// Indicates if any fix is available from any vendor.
    /// </summary>
    public bool HasFixAvailable => !FixAvailability.IsDefaultOrEmpty &&
        FixAvailability.Any(f => f.Status == FixStatus.Available);

    /// <summary>
    /// Indicates if this advisory is in the KEV list.
    /// </summary>
    public bool IsKnownExploited => KevStatus?.InKev == true;
}

/// <summary>
/// Provenance anchor for vendor risk data.
/// </summary>
public sealed record VendorRiskProvenance(
    string Vendor,
    string Source,
    string ObservationHash,
    DateTimeOffset FetchedAt,
    string? IngestJobId,
    string? UpstreamId);

/// <summary>
/// Vendor-provided CVSS score with version information.
/// </summary>
public sealed record VendorCvssScore(
    string System,
    double Score,
    string? Vector,
    string? Severity,
    VendorRiskProvenance Provenance)
{
    /// <summary>
    /// Normalizes the system name to a standard format.
    /// </summary>
    public string NormalizedSystem => System?.ToLowerInvariant() switch
    {
        "cvss_v2" or "cvssv2" or "cvss2" => "cvss_v2",
        "cvss_v30" or "cvssv30" or "cvss30" or "cvss_v3" or "cvssv3" or "cvss3" => "cvss_v30",
        "cvss_v31" or "cvssv31" or "cvss31" => "cvss_v31",
        "cvss_v40" or "cvssv40" or "cvss40" or "cvss_v4" or "cvssv4" or "cvss4" => "cvss_v40",
        var s => s ?? "unknown"
    };

    /// <summary>
    /// Derives severity tier from score (if not provided by vendor).
    /// </summary>
    public string EffectiveSeverity => Severity ?? DeriveFromScore(Score, NormalizedSystem);

    private static string DeriveFromScore(double score, string system)
    {
        // CVSS v2 uses different thresholds
        if (system == "cvss_v2")
        {
            return score switch
            {
                >= 7.0 => "high",
                >= 4.0 => "medium",
                _ => "low"
            };
        }

        // CVSS v3.x and v4.x thresholds
        return score switch
        {
            >= 9.0 => "critical",
            >= 7.0 => "high",
            >= 4.0 => "medium",
            >= 0.1 => "low",
            _ => "none"
        };
    }
}

/// <summary>
/// KEV (Known Exploited Vulnerabilities) status from vendor data.
/// </summary>
public sealed record VendorKevStatus(
    bool InKev,
    DateTimeOffset? DateAdded,
    DateTimeOffset? DueDate,
    string? KnownRansomwareCampaignUse,
    string? Notes,
    VendorRiskProvenance Provenance);

/// <summary>
/// Fix availability information from vendor.
/// </summary>
public sealed record VendorFixAvailability(
    FixStatus Status,
    string? FixedVersion,
    string? AdvisoryUrl,
    DateTimeOffset? FixReleasedAt,
    string? Package,
    string? Ecosystem,
    VendorRiskProvenance Provenance);

/// <summary>
/// Fix availability status.
/// </summary>
public enum FixStatus
{
    /// <summary>Fix status unknown.</summary>
    Unknown,

    /// <summary>Fix is available.</summary>
    Available,

    /// <summary>No fix available yet.</summary>
    NotAvailable,

    /// <summary>Will not be fixed (end of life, etc.).</summary>
    WillNotFix,

    /// <summary>Fix is in progress.</summary>
    InProgress
}

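For illustration, the normalization and derived-severity behavior above works out as in this sketch (`prov` is an assumed `VendorRiskProvenance` instance; the expected values follow directly from `NormalizedSystem` and `DeriveFromScore`):

```csharp
// Illustrative expectations only.
var v2 = new VendorCvssScore(System: "CVSSv2", Score: 6.8, Vector: null, Severity: null, Provenance: prov);
Console.WriteLine(v2.NormalizedSystem);   // "cvss_v2"
Console.WriteLine(v2.EffectiveSeverity);  // "medium" (v2 thresholds: >=7.0 high, >=4.0 medium)

var v31 = new VendorCvssScore(System: "cvss31", Score: 9.8, Vector: null, Severity: null, Provenance: prov);
Console.WriteLine(v31.NormalizedSystem);  // "cvss_v31"
Console.WriteLine(v31.EffectiveSeverity); // "critical" (v3.x/v4.x: >=9.0 critical)
```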
@@ -0,0 +1,263 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Text.Json;

namespace StellaOps.Concelier.Core.Risk;

/// <summary>
/// Extracts vendor risk signals from observation data.
/// Per CONCELIER-RISK-66-001, extracts fact-only CVSS/KEV/fix data with provenance.
/// </summary>
public static class VendorRiskSignalExtractor
{
    /// <summary>
    /// Extracts a vendor risk signal from observation data.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="advisoryId">Advisory identifier.</param>
    /// <param name="observationId">Observation identifier.</param>
    /// <param name="vendor">Vendor name.</param>
    /// <param name="source">Source identifier.</param>
    /// <param name="observationHash">Content hash for provenance.</param>
    /// <param name="fetchedAt">When the data was fetched.</param>
    /// <param name="ingestJobId">Optional ingest job ID.</param>
    /// <param name="upstreamId">Optional upstream ID.</param>
    /// <param name="severities">Severity data from observation.</param>
    /// <param name="rawContent">Raw JSON content for KEV/fix extraction.</param>
    /// <param name="now">Current timestamp.</param>
    /// <returns>Extracted vendor risk signal.</returns>
    public static VendorRiskSignal Extract(
        string tenantId,
        string advisoryId,
        string observationId,
        string vendor,
        string source,
        string observationHash,
        DateTimeOffset fetchedAt,
        string? ingestJobId,
        string? upstreamId,
        IReadOnlyList<SeverityInput>? severities,
        JsonElement? rawContent,
        DateTimeOffset now)
    {
        var provenance = new VendorRiskProvenance(
            Vendor: vendor,
            Source: source,
            ObservationHash: observationHash,
            FetchedAt: fetchedAt,
            IngestJobId: ingestJobId,
            UpstreamId: upstreamId);

        var cvssScores = ExtractCvssScores(severities, provenance);
        var kevStatus = ExtractKevStatus(rawContent, provenance);
        var fixAvailability = ExtractFixAvailability(rawContent, provenance);

        return new VendorRiskSignal(
            TenantId: tenantId,
            AdvisoryId: advisoryId,
            ObservationId: observationId,
            Provenance: provenance,
            CvssScores: cvssScores,
            KevStatus: kevStatus,
            FixAvailability: fixAvailability,
            ExtractedAt: now);
    }

    private static ImmutableArray<VendorCvssScore> ExtractCvssScores(
        IReadOnlyList<SeverityInput>? severities,
        VendorRiskProvenance provenance)
    {
        if (severities is null || severities.Count == 0)
        {
            return ImmutableArray<VendorCvssScore>.Empty;
        }

        var builder = ImmutableArray.CreateBuilder<VendorCvssScore>(severities.Count);

        foreach (var severity in severities)
        {
            if (string.IsNullOrWhiteSpace(severity.System))
            {
                continue;
            }

            builder.Add(new VendorCvssScore(
                System: severity.System,
                Score: severity.Score,
                Vector: severity.Vector,
                Severity: severity.Severity,
                Provenance: provenance));
        }

        return builder.ToImmutable();
    }

    private static VendorKevStatus? ExtractKevStatus(
        JsonElement? rawContent,
        VendorRiskProvenance provenance)
    {
        if (rawContent is null || rawContent.Value.ValueKind != JsonValueKind.Object)
        {
            return null;
        }

        var content = rawContent.Value;

        // Try common KEV data locations in raw content
        // NVD format: cisa_exploit_add, cisa_required_action, cisa_vulnerability_name
        if (TryGetProperty(content, "cisa_exploit_add", out var cisaAdd) ||
            (TryGetProperty(content, "database_specific", out var dbSpecific) && TryGetProperty(dbSpecific, "cisa", out cisaAdd)))
        {
            return new VendorKevStatus(
                InKev: true,
                DateAdded: TryParseDate(cisaAdd),
                DueDate: TryGetDateProperty(content, "cisa_action_due"),
                KnownRansomwareCampaignUse: TryGetStringProperty(content, "cisa_ransomware"),
                Notes: TryGetStringProperty(content, "cisa_vulnerability_name"),
                Provenance: provenance);
        }

        // OSV/GitHub format: database_specific.kev
        if (TryGetProperty(content, "database_specific", out var osv) &&
            TryGetProperty(osv, "kev", out var kev))
        {
            var inKev = kev.ValueKind == JsonValueKind.True ||
                (kev.ValueKind == JsonValueKind.Object && TryGetProperty(kev, "in_kev", out var inKevProp) && inKevProp.ValueKind == JsonValueKind.True);

            if (inKev)
            {
                return new VendorKevStatus(
                    InKev: true,
                    DateAdded: kev.ValueKind == JsonValueKind.Object ? TryGetDateProperty(kev, "date_added") : null,
                    DueDate: kev.ValueKind == JsonValueKind.Object ? TryGetDateProperty(kev, "due_date") : null,
                    KnownRansomwareCampaignUse: kev.ValueKind == JsonValueKind.Object ? TryGetStringProperty(kev, "ransomware") : null,
                    Notes: null,
                    Provenance: provenance);
            }
        }

        return null;
    }

    private static ImmutableArray<VendorFixAvailability> ExtractFixAvailability(
        JsonElement? rawContent,
        VendorRiskProvenance provenance)
    {
        if (rawContent is null || rawContent.Value.ValueKind != JsonValueKind.Object)
        {
            return ImmutableArray<VendorFixAvailability>.Empty;
        }

        var content = rawContent.Value;
        var builder = ImmutableArray.CreateBuilder<VendorFixAvailability>();

        // OSV format: affected[].ranges[].events[{fixed: "version"}]
        if (TryGetProperty(content, "affected", out var affected) && affected.ValueKind == JsonValueKind.Array)
        {
            foreach (var aff in affected.EnumerateArray())
            {
                var package = TryGetStringProperty(aff, "package", "name") ?? TryGetStringProperty(aff, "purl");
                var ecosystem = TryGetStringProperty(aff, "package", "ecosystem");

                if (TryGetProperty(aff, "ranges", out var ranges) && ranges.ValueKind == JsonValueKind.Array)
                {
                    foreach (var range in ranges.EnumerateArray())
                    {
                        if (TryGetProperty(range, "events", out var events) && events.ValueKind == JsonValueKind.Array)
                        {
                            foreach (var evt in events.EnumerateArray())
                            {
                                if (TryGetProperty(evt, "fixed", out var fixedVersion))
                                {
                                    builder.Add(new VendorFixAvailability(
                                        Status: FixStatus.Available,
                                        FixedVersion: fixedVersion.GetString(),
                                        AdvisoryUrl: null,
                                        FixReleasedAt: null,
                                        Package: package,
                                        Ecosystem: ecosystem,
                                        Provenance: provenance));
                                }
                            }
                        }
                    }
                }

                // Also check versions[] for fixed versions
                if (TryGetProperty(aff, "versions", out var versions) && versions.ValueKind == JsonValueKind.Array)
                {
                    // Fixed versions may be indicated by absence from the versions array.
                    // This is less reliable, so we only use it if no range data exists.
                }
            }
        }

        // NVD format: configurations with fix status
        if (TryGetProperty(content, "configurations", out var configs) && configs.ValueKind == JsonValueKind.Array)
        {
            // NVD configurations don't directly indicate fixes, but CPE matches can imply them.
            // This would require more complex parsing - defer to vendor-specific connectors.
        }

        return builder.ToImmutable();
    }

    private static bool TryGetProperty(JsonElement element, string propertyName, out JsonElement value)
    {
        value = default;
        if (element.ValueKind != JsonValueKind.Object)
        {
            return false;
        }

        return element.TryGetProperty(propertyName, out value);
    }

    private static string? TryGetStringProperty(JsonElement element, params string[] path)
    {
        var current = element;
        foreach (var segment in path)
        {
            if (!TryGetProperty(current, segment, out current))
            {
                return null;
            }
        }

        return current.ValueKind == JsonValueKind.String ? current.GetString() : null;
    }

    private static DateTimeOffset? TryGetDateProperty(JsonElement element, string propertyName)
    {
        if (!TryGetProperty(element, propertyName, out var value))
        {
            return null;
        }

        return TryParseDate(value);
    }

    private static DateTimeOffset? TryParseDate(JsonElement element)
    {
        if (element.ValueKind == JsonValueKind.String)
        {
            var str = element.GetString();
            if (DateTimeOffset.TryParse(str, out var date))
            {
                return date;
            }
        }

        return null;
    }
}

/// <summary>
/// Input for severity extraction from observation data.
/// </summary>
public sealed record SeverityInput(
    string System,
    double Score,
    string? Vector,
    string? Severity);

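As a rough usage sketch, the extractor can be fed a parsed OSV-style fragment. The JSON below is illustrative of the `database_specific.kev` and `affected[].ranges[].events[].fixed` paths probed above, not a complete OSV record, and all identifier values are placeholders:

```csharp
// Sketch: run an illustrative OSV-style fragment through the extractor.
using var doc = JsonDocument.Parse("""
{
  "database_specific": { "kev": { "in_kev": true, "date_added": "2024-05-01T00:00:00Z" } },
  "affected": [
    {
      "package": { "name": "example-lib", "ecosystem": "npm" },
      "ranges": [ { "type": "SEMVER", "events": [ { "introduced": "0" }, { "fixed": "1.2.3" } ] } ]
    }
  ]
}
""");

var signal = VendorRiskSignalExtractor.Extract(
    tenantId: "tenant-a",
    advisoryId: "CVE-2024-1234",
    observationId: "obs-1",
    vendor: "osv",
    source: "osv.dev",
    observationHash: "sha256:example",
    fetchedAt: DateTimeOffset.UtcNow,
    ingestJobId: null,
    upstreamId: null,
    severities: new[] { new SeverityInput("cvss_v31", 9.8, null, null) },
    rawContent: doc.RootElement,
    now: DateTimeOffset.UtcNow);

// signal.IsKnownExploited is true, and one FixAvailability entry carries FixedVersion "1.2.3".
```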
@@ -0,0 +1,109 @@
using System;
using System.Collections.Immutable;
using System.Linq;

namespace StellaOps.Concelier.Core.Tenancy;

/// <summary>
/// Response model for the /capabilities/tenant endpoint.
/// Per AUTH-TEN-47-001 and CONCELIER-TEN-48-001: echoes tenantId, scopes, and mergeAllowed=false when LNM is enabled.
/// </summary>
public sealed record TenantCapabilitiesResponse(
    string TenantId,
    string TenantUrn,
    ImmutableArray<string> Scopes,
    bool MergeAllowed,
    bool OfflineAllowed,
    TenantCapabilitiesMode Mode,
    DateTimeOffset GeneratedAt)
{
    /// <summary>
    /// Creates a Link-Not-Merge capabilities response.
    /// </summary>
    public static TenantCapabilitiesResponse ForLinkNotMerge(
        TenantScope scope,
        DateTimeOffset now)
    {
        return new TenantCapabilitiesResponse(
            TenantId: scope.TenantId,
            TenantUrn: scope.TenantUrn,
            Scopes: scope.Scopes,
            MergeAllowed: false, // Always false in LNM mode
            OfflineAllowed: scope.Capabilities.OfflineAllowed,
            Mode: TenantCapabilitiesMode.LinkNotMerge,
            GeneratedAt: now);
    }
}

/// <summary>
/// Operating mode for tenant capabilities.
/// </summary>
public enum TenantCapabilitiesMode
{
    /// <summary>Link-Not-Merge mode - no advisory merging.</summary>
    LinkNotMerge,

    /// <summary>Legacy merge mode (deprecated).</summary>
    LegacyMerge
}

/// <summary>
/// Interface for tenant capabilities provider.
/// </summary>
public interface ITenantCapabilitiesProvider
{
    /// <summary>
    /// Gets the current capabilities for the tenant scope.
    /// </summary>
    TenantCapabilitiesResponse GetCapabilities(TenantScope scope);

    /// <summary>
    /// Validates that the tenant scope is allowed to perform the requested operation.
    /// </summary>
    /// <param name="scope">Tenant scope to validate.</param>
    /// <param name="requiredScopes">Required scopes for the operation.</param>
    /// <exception cref="TenantScopeException">Thrown if validation fails.</exception>
    void ValidateScope(TenantScope scope, params string[] requiredScopes);
}

/// <summary>
/// Default implementation of tenant capabilities provider for Link-Not-Merge mode.
/// </summary>
public sealed class LinkNotMergeTenantCapabilitiesProvider : ITenantCapabilitiesProvider
{
    private readonly TimeProvider _timeProvider;

    public LinkNotMergeTenantCapabilitiesProvider(TimeProvider timeProvider)
    {
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
    }

    public TenantCapabilitiesResponse GetCapabilities(TenantScope scope)
    {
        ArgumentNullException.ThrowIfNull(scope);
        scope.Validate();

        // In Link-Not-Merge mode, merge is never allowed.
        // This enforces the contract even if the token claims mergeAllowed=true.
        return TenantCapabilitiesResponse.ForLinkNotMerge(scope, _timeProvider.GetUtcNow());
    }

    public void ValidateScope(TenantScope scope, params string[] requiredScopes)
    {
        ArgumentNullException.ThrowIfNull(scope);
        scope.Validate();

        if (requiredScopes.Length == 0)
        {
            return;
        }

        var hasRequired = requiredScopes.Any(required =>
            scope.Scopes.Any(s => s.Equals(required, StringComparison.OrdinalIgnoreCase)));

        if (!hasRequired)
        {
            throw new TenantScopeException(
                "auth/insufficient-scope",
                $"Required scope missing. Need one of: {string.Join(", ", requiredScopes)}");
        }
    }
}

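A minimal sketch of the provider in use, showing that the response reports `MergeAllowed = false` regardless of token claims. The tenant, issuer, and scope values are placeholders assumed for illustration:

```csharp
// Sketch: LNM capabilities are enforced by the provider, not by the token.
var scope = new TenantScope(
    TenantId: "acme",
    Issuer: "https://authority.example.internal",
    Scopes: ["concelier.linkset.read"],
    Capabilities: TenantCapabilities.Default,
    Attribution: null,
    IssuedAt: DateTimeOffset.UtcNow,
    ExpiresAt: DateTimeOffset.UtcNow.AddHours(1));

var provider = new LinkNotMergeTenantCapabilitiesProvider(TimeProvider.System);
var caps = provider.GetCapabilities(scope);

// caps.TenantUrn == "urn:tenant:acme", caps.MergeAllowed == false, caps.Mode == TenantCapabilitiesMode.LinkNotMerge
provider.ValidateScope(scope, "concelier.linkset.read"); // passes; a missing scope throws auth/insufficient-scope
```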
@@ -0,0 +1,123 @@
using System;
using System.Collections.Immutable;
using System.Linq;

namespace StellaOps.Concelier.Core.Tenancy;

/// <summary>
/// Tenant scope data per AUTH-TEN-47-001 contract.
/// Per CONCELIER-TEN-48-001, enforces tenant scoping through normalization/linking.
/// </summary>
public sealed record TenantScope(
    string TenantId,
    string Issuer,
    ImmutableArray<string> Scopes,
    TenantCapabilities Capabilities,
    TenantAttribution? Attribution,
    DateTimeOffset IssuedAt,
    DateTimeOffset ExpiresAt)
{
    /// <summary>
    /// Validates that the tenant scope is well-formed.
    /// </summary>
    public void Validate()
    {
        if (string.IsNullOrWhiteSpace(TenantId))
        {
            throw new TenantScopeException("auth/tenant-scope-missing", "TenantId is required");
        }

        if (string.IsNullOrWhiteSpace(Issuer))
        {
            throw new TenantScopeException("auth/tenant-scope-missing", "Issuer is required");
        }

        if (Scopes.IsDefaultOrEmpty)
        {
            throw new TenantScopeException("auth/tenant-scope-missing", "Scopes are required");
        }

        if (!HasRequiredScope())
        {
            throw new TenantScopeException("auth/tenant-scope-missing", "Required concelier scope missing");
        }

        if (ExpiresAt <= DateTimeOffset.UtcNow)
        {
            throw new TenantScopeException("auth/token-expired", "Token has expired");
        }
    }

    /// <summary>
    /// Checks if the scope has at least one required Concelier scope.
    /// </summary>
    public bool HasRequiredScope()
    {
        return Scopes.Any(s =>
            s.StartsWith("concelier.", StringComparison.OrdinalIgnoreCase));
    }

    /// <summary>
    /// Checks if the scope allows read access.
    /// </summary>
    public bool CanRead =>
        Scopes.Any(s => s.Equals("concelier.read", StringComparison.OrdinalIgnoreCase) ||
            s.Equals("concelier.linkset.read", StringComparison.OrdinalIgnoreCase));

    /// <summary>
    /// Checks if the scope allows write access.
    /// </summary>
    public bool CanWrite =>
        Scopes.Any(s => s.Equals("concelier.linkset.write", StringComparison.OrdinalIgnoreCase));

    /// <summary>
    /// Checks if the scope allows tenant admin access.
    /// </summary>
    public bool CanAdminTenant =>
        Scopes.Any(s => s.Equals("concelier.tenant.admin", StringComparison.OrdinalIgnoreCase));

    /// <summary>
    /// Gets the canonical tenant URN format.
    /// </summary>
    public string TenantUrn => TenantId.StartsWith("urn:tenant:", StringComparison.Ordinal)
        ? TenantId
        : $"urn:tenant:{TenantId}";
}

/// <summary>
/// Tenant capabilities per AUTH-TEN-47-001 contract.
/// </summary>
public sealed record TenantCapabilities(
    bool MergeAllowed = false,
    bool OfflineAllowed = true)
{
    /// <summary>
    /// Default capabilities for Link-Not-Merge mode.
    /// </summary>
    public static TenantCapabilities Default { get; } = new(
        MergeAllowed: false,
        OfflineAllowed: true);
}

/// <summary>
/// Tenant attribution for audit logging.
/// </summary>
public sealed record TenantAttribution(
    string? Actor,
    string? TraceId);

/// <summary>
/// Exception thrown when tenant scope validation fails.
/// </summary>
public sealed class TenantScopeException : Exception
{
    public TenantScopeException(string errorCode, string message)
        : base(message)
    {
        ErrorCode = errorCode;
    }

    /// <summary>
    /// Error code for API responses (e.g., auth/tenant-scope-missing).
    /// </summary>
    public string ErrorCode { get; }
}

@@ -0,0 +1,105 @@
using System;

namespace StellaOps.Concelier.Core.Tenancy;

/// <summary>
/// Normalizes tenant identifiers for consistent storage and lookup.
/// Per CONCELIER-TEN-48-001: enforces tenant scoping through normalization.
/// </summary>
public static class TenantScopeNormalizer
{
    private const string TenantUrnPrefix = "urn:tenant:";

    /// <summary>
    /// Normalizes a tenant identifier to canonical URN format.
    /// </summary>
    /// <param name="tenantId">Raw tenant identifier.</param>
    /// <returns>Normalized tenant URN.</returns>
    public static string NormalizeToUrn(string tenantId)
    {
        if (string.IsNullOrWhiteSpace(tenantId))
        {
            throw new ArgumentException("Tenant ID cannot be empty", nameof(tenantId));
        }

        var trimmed = tenantId.Trim();

        // Already in URN format
        if (trimmed.StartsWith(TenantUrnPrefix, StringComparison.Ordinal))
        {
            return trimmed.ToLowerInvariant();
        }

        // Convert to URN format
        return $"{TenantUrnPrefix}{trimmed.ToLowerInvariant()}";
    }

    /// <summary>
    /// Extracts the raw tenant identifier from a URN.
    /// </summary>
    /// <param name="tenantUrn">Tenant URN.</param>
    /// <returns>Raw tenant identifier.</returns>
    public static string ExtractFromUrn(string tenantUrn)
    {
        if (string.IsNullOrWhiteSpace(tenantUrn))
        {
            throw new ArgumentException("Tenant URN cannot be empty", nameof(tenantUrn));
        }

        var trimmed = tenantUrn.Trim();

        if (trimmed.StartsWith(TenantUrnPrefix, StringComparison.OrdinalIgnoreCase))
        {
            return trimmed[TenantUrnPrefix.Length..].ToLowerInvariant();
        }

        return trimmed.ToLowerInvariant();
    }

    /// <summary>
    /// Normalizes a tenant identifier for storage (lowercase, no URN prefix).
    /// </summary>
    /// <param name="tenantId">Raw tenant identifier or URN.</param>
    /// <returns>Normalized tenant ID for storage.</returns>
    public static string NormalizeForStorage(string tenantId)
    {
        return ExtractFromUrn(tenantId);
    }

    /// <summary>
    /// Validates that two tenant identifiers refer to the same tenant.
    /// </summary>
    /// <param name="tenantId1">First tenant identifier.</param>
    /// <param name="tenantId2">Second tenant identifier.</param>
    /// <returns>True if both refer to the same tenant.</returns>
    public static bool AreEqual(string? tenantId1, string? tenantId2)
    {
        if (string.IsNullOrWhiteSpace(tenantId1) || string.IsNullOrWhiteSpace(tenantId2))
        {
            return false;
        }

        var normalized1 = NormalizeForStorage(tenantId1);
        var normalized2 = NormalizeForStorage(tenantId2);

        return string.Equals(normalized1, normalized2, StringComparison.Ordinal);
    }

    /// <summary>
    /// Validates that the provided tenant ID matches the scope's tenant.
    /// </summary>
    /// <param name="requestTenantId">Tenant ID from request.</param>
    /// <param name="scope">Authenticated tenant scope.</param>
    /// <exception cref="TenantScopeException">Thrown if tenant IDs don't match.</exception>
    public static void ValidateTenantMatch(string requestTenantId, TenantScope scope)
    {
        ArgumentNullException.ThrowIfNull(scope);

        if (!AreEqual(requestTenantId, scope.TenantId))
        {
            throw new TenantScopeException(
                "auth/tenant-mismatch",
                "Request tenant ID does not match authenticated tenant scope");
        }
    }
}

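A tiny sketch of the round-trip behavior implied by the methods above (values chosen purely for illustration):

```csharp
// Sketch: normalization is case-insensitive and idempotent in both directions.
var urn = TenantScopeNormalizer.NormalizeToUrn("Acme");                     // "urn:tenant:acme"
var raw = TenantScopeNormalizer.ExtractFromUrn(urn);                        // "acme"
var same = TenantScopeNormalizer.AreEqual("urn:tenant:ACME", "acme");       // true
```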
@@ -31,6 +31,9 @@ This module owns the persistent shape of Concelier's MongoDB database. Upgrades
| `20251117_advisory_linksets_tenant_lower` | Lowercases `advisory_linksets.tenantId` to align writes with lookup filters. |
| `20251116_link_not_merge_collections` | Ensures `advisory_observations` and `advisory_linksets` collections exist with JSON schema validators and baseline indexes for LNM. |
| `20251127_lnm_sharding_and_ttl` | Adds hashed shard key indexes on `tenantId` for horizontal scaling and optional TTL indexes on `ingestedAt`/`createdAt` for storage retention. Creates the `advisory_linkset_events` collection for the linkset event outbox (LNM-21-101-DEV). |
| `20251127_lnm_legacy_backfill` | Backfills `advisory_observations` from `advisory_raw` documents and creates/updates `advisory_linksets` by grouping observations. Seeds `backfill_marker` tombstones on migrated documents for rollback tracking (LNM-21-102-DEV). |
| `20251128_policy_delta_checkpoints` | Creates the `policy_delta_checkpoints` collection with tenant/consumer indexes for deterministic policy delta tracking. Supports cursor-based pagination and change-stream resume tokens for policy consumers (CONCELIER-POLICY-20-003). |
| `20251128_policy_lookup_indexes` | Adds secondary indexes for policy lookup patterns: an alias multikey index on observations and confidence/severity indexes on linksets. Supports efficient policy joins without cached verdicts (CONCELIER-POLICY-23-001). |

## Operator Runbook

@@ -44,6 +47,11 @@ This module owns the persistent shape of Concelier's MongoDB database. Upgrades
- To re-run a migration in a lab, delete the corresponding document from `schema_migrations` and restart the service. **Do not** do this in production unless the migration body is known to be idempotent and safe.
- When changing retention settings (`RawDocumentRetention`), deploy the new configuration and restart Concelier. The migration runner will adjust indexes on the next boot.
- For the event-log collections (`advisory_statements`, `advisory_conflicts`), rollback is simply `db.advisory_statements.drop()` / `db.advisory_conflicts.drop()` followed by a restart if you must revert to the pre-event-log schema (only in labs). Production rollbacks should instead gate merge features that rely on these collections.
- For `20251127_lnm_legacy_backfill` rollback, use the provided Offline Kit script:
  ```bash
  mongo concelier ops/devops/scripts/rollback-lnm-backfill.js
  ```
  This script removes backfilled observations and linksets by querying the `backfill_marker` field (`lnm_21_102_dev`), then clears the tombstone markers from `advisory_raw`. After rollback, delete `20251127_lnm_legacy_backfill` from `schema_migrations` and restart.
- If migrations fail, restart with `Logging__LogLevel__StellaOps.Concelier.Storage.Mongo.Migrations=Debug` to surface diagnostic output. Remediate underlying index/collection drift before retrying.

## Validating an Upgrade

@@ -0,0 +1,81 @@
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo.PolicyDelta;

namespace StellaOps.Concelier.Storage.Mongo.Migrations;

/// <summary>
/// Creates the policy_delta_checkpoints collection with indexes for deterministic policy delta tracking.
/// </summary>
internal sealed class EnsurePolicyDeltaCheckpointsCollectionMigration : IMongoMigration
{
    public string Id => "20251128_policy_delta_checkpoints";

    public string Description =>
        "Creates policy_delta_checkpoints collection with tenant/consumer indexes for deterministic policy deltas (CONCELIER-POLICY-20-003).";

    public async Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        var collectionName = MongoStorageDefaults.Collections.PolicyDeltaCheckpoints;

        // Ensure collection exists
        var collectionNames = await database
            .ListCollectionNames(cancellationToken: cancellationToken)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        var exists = collectionNames.Contains(collectionName);
        if (!exists)
        {
            await database.CreateCollectionAsync(collectionName, cancellationToken: cancellationToken)
                .ConfigureAwait(false);
        }

        var collection = database.GetCollection<PolicyDeltaCheckpointDocument>(collectionName);

        // Index: tenantId for listing checkpoints by tenant
        var tenantIndex = new CreateIndexModel<PolicyDeltaCheckpointDocument>(
            Builders<PolicyDeltaCheckpointDocument>.IndexKeys.Ascending(d => d.TenantId),
            new CreateIndexOptions
            {
                Name = "ix_tenantId",
                Background = true
            });

        // Index: consumerId for querying checkpoints by consumer
        var consumerIndex = new CreateIndexModel<PolicyDeltaCheckpointDocument>(
            Builders<PolicyDeltaCheckpointDocument>.IndexKeys.Ascending(d => d.ConsumerId),
            new CreateIndexOptions
            {
                Name = "ix_consumerId",
                Background = true
            });

        // Compound index: (tenantId, consumerId) for efficient lookups
        var compoundIndex = new CreateIndexModel<PolicyDeltaCheckpointDocument>(
            Builders<PolicyDeltaCheckpointDocument>.IndexKeys
                .Ascending(d => d.TenantId)
                .Ascending(d => d.ConsumerId),
            new CreateIndexOptions
            {
                Name = "ix_tenantId_consumerId",
                Background = true
            });

        // Index: updatedAt for maintenance queries (stale checkpoint detection)
        var updatedAtIndex = new CreateIndexModel<PolicyDeltaCheckpointDocument>(
            Builders<PolicyDeltaCheckpointDocument>.IndexKeys.Ascending(d => d.UpdatedAt),
            new CreateIndexOptions
            {
                Name = "ix_updatedAt",
                Background = true
            });

        await collection.Indexes.CreateManyAsync(
            [tenantIndex, consumerIndex, compoundIndex, updatedAtIndex],
            cancellationToken)
            .ConfigureAwait(false);
    }
}

@@ -0,0 +1,131 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;

namespace StellaOps.Concelier.Storage.Mongo.Migrations;

/// <summary>
/// Adds secondary indexes for policy lookup patterns: alias lookups, confidence filtering, and severity-based queries.
/// Supports efficient policy joins without cached verdicts per CONCELIER-POLICY-23-001.
/// </summary>
/// <remarks>
/// Query patterns supported:
/// <list type="bullet">
/// <item>Find observations by alias (CVE-ID, GHSA-ID): db.advisory_observations.find({"linkset.aliases": "cve-2024-1234"})</item>
/// <item>Find linksets by confidence range: db.advisory_linksets.find({"confidence": {$gte: 0.7}})</item>
/// <item>Find linksets by provider severity: db.advisory_linksets.find({"normalized.severities.system": "cvss_v31", "normalized.severities.score": {$gte: 7.0}})</item>
/// <item>Find linksets by tenant and advisory with confidence: db.advisory_linksets.find({"tenantId": "...", "advisoryId": "...", "confidence": {$gte: 0.5}})</item>
/// </list>
/// </remarks>
internal sealed class EnsurePolicyLookupIndexesMigration : IMongoMigration
{
    public string Id => "20251128_policy_lookup_indexes";

    public string Description => "Add secondary indexes for alias, confidence, and severity-based policy lookups (CONCELIER-POLICY-23-001)";

    public async Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(database);

        await EnsureObservationPolicyIndexesAsync(database, cancellationToken).ConfigureAwait(false);
        await EnsureLinksetPolicyIndexesAsync(database, cancellationToken).ConfigureAwait(false);
    }

    private static async Task EnsureObservationPolicyIndexesAsync(IMongoDatabase database, CancellationToken ct)
    {
        var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryObservations);

        var indexes = new List<CreateIndexModel<BsonDocument>>
        {
            // Multikey index on linkset.aliases for alias-based lookups (CVE-ID, GHSA-ID, etc.)
            // Query pattern: db.advisory_observations.find({"linkset.aliases": "cve-2024-1234"})
            new(new BsonDocument("linkset.aliases", 1),
                new CreateIndexOptions
                {
                    Name = "obs_linkset_aliases",
                    Background = true,
                    Sparse = true
                }),

            // Compound index for tenant + alias lookups
            // Query pattern: db.advisory_observations.find({"tenant": "...", "linkset.aliases": "cve-2024-1234"})
            new(new BsonDocument { { "tenant", 1 }, { "linkset.aliases", 1 } },
                new CreateIndexOptions
                {
                    Name = "obs_tenant_aliases",
                    Background = true
                })
        };

        await collection.Indexes.CreateManyAsync(indexes, cancellationToken: ct).ConfigureAwait(false);
    }

    private static async Task EnsureLinksetPolicyIndexesAsync(IMongoDatabase database, CancellationToken ct)
    {
        var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryLinksets);

        var indexes = new List<CreateIndexModel<BsonDocument>>
        {
            // Index on confidence for confidence-based filtering
            // Query pattern: db.advisory_linksets.find({"confidence": {$gte: 0.7}})
            new(new BsonDocument("confidence", -1),
                new CreateIndexOptions
                {
                    Name = "linkset_confidence",
                    Background = true,
                    Sparse = true
                }),

            // Compound index for tenant + confidence lookups
            // Query pattern: db.advisory_linksets.find({"tenantId": "...", "confidence": {$gte: 0.7}})
            new(new BsonDocument { { "tenantId", 1 }, { "confidence", -1 } },
                new CreateIndexOptions
                {
                    Name = "linkset_tenant_confidence",
                    Background = true
                }),

            // Index on normalized.severities.system for severity system filtering
            // Query pattern: db.advisory_linksets.find({"normalized.severities.system": "cvss_v31"})
            new(new BsonDocument("normalized.severities.system", 1),
                new CreateIndexOptions
                {
                    Name = "linkset_severity_system",
                    Background = true,
                    Sparse = true
                }),

            // Compound index for severity system + score for range queries
            // Query pattern: db.advisory_linksets.find({"normalized.severities.system": "cvss_v31", "normalized.severities.score": {$gte: 7.0}})
            new(new BsonDocument { { "normalized.severities.system", 1 }, { "normalized.severities.score", -1 } },
                new CreateIndexOptions
                {
                    Name = "linkset_severity_system_score",
                    Background = true,
                    Sparse = true
                }),

            // Compound index for tenant + advisory + confidence (policy delta queries)
            // Query pattern: db.advisory_linksets.find({"tenantId": "...", "advisoryId": "...", "confidence": {$gte: 0.5}})
            new(new BsonDocument { { "tenantId", 1 }, { "advisoryId", 1 }, { "confidence", -1 } },
                new CreateIndexOptions
                {
                    Name = "linkset_tenant_advisory_confidence",
                    Background = true
                }),

            // Index for createdAt-based pagination (policy delta cursors)
            // Query pattern: db.advisory_linksets.find({"tenantId": "...", "createdAt": {$gt: ISODate("...")}}).sort({"createdAt": 1})
            new(new BsonDocument { { "tenantId", 1 }, { "createdAt", 1 } },
                new CreateIndexOptions
                {
                    Name = "linkset_tenant_createdAt",
                    Background = true
                })
        };

        await collection.Indexes.CreateManyAsync(indexes, cancellationToken: ct).ConfigureAwait(false);
    }
}

@@ -1,13 +1,13 @@
namespace StellaOps.Concelier.Storage.Mongo;

public static class MongoStorageDefaults
{
    public const string DefaultDatabaseName = "concelier";

    public static class Collections
    {
        public const string Source = "source";
        public const string SourceState = "source_state";
        public const string Document = "document";
        public const string Dto = "dto";
        public const string Advisory = "advisory";
@@ -15,10 +15,10 @@ public static class MongoStorageDefaults
        public const string Alias = "alias";
        public const string Affected = "affected";
        public const string Reference = "reference";
        public const string KevFlag = "kev_flag";
        public const string RuFlags = "ru_flags";
        public const string JpFlags = "jp_flags";
        public const string PsirtFlags = "psirt_flags";
        public const string MergeEvent = "merge_event";
        public const string ExportState = "export_state";
        public const string Locks = "locks";
@@ -33,5 +33,6 @@ public static class MongoStorageDefaults
        public const string OrchestratorRegistry = "orchestrator_registry";
        public const string OrchestratorCommands = "orchestrator_commands";
        public const string OrchestratorHeartbeats = "orchestrator_heartbeats";
        public const string PolicyDeltaCheckpoints = "policy_delta_checkpoints";
    }
}

@@ -0,0 +1,135 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Linksets;

namespace StellaOps.Concelier.Storage.Mongo.PolicyDelta;

/// <summary>
/// MongoDB implementation of <see cref="IPolicyDeltaCheckpointStore"/>.
/// </summary>
internal sealed class MongoPolicyDeltaCheckpointStore : IPolicyDeltaCheckpointStore
{
    private readonly IMongoCollection<PolicyDeltaCheckpointDocument> _collection;
    private readonly TimeProvider _timeProvider;

    public MongoPolicyDeltaCheckpointStore(IMongoDatabase database, TimeProvider timeProvider)
    {
        ArgumentNullException.ThrowIfNull(database);
        ArgumentNullException.ThrowIfNull(timeProvider);

        _collection = database.GetCollection<PolicyDeltaCheckpointDocument>(
            MongoStorageDefaults.Collections.PolicyDeltaCheckpoints);
        _timeProvider = timeProvider;
    }

    public async Task<PolicyDeltaCheckpoint> GetOrCreateAsync(
        string tenantId,
        string consumerId,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(consumerId);

        var checkpointId = $"{consumerId}:{tenantId}";
        var existing = await _collection
            .Find(d => d.Id == checkpointId)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        if (existing is not null)
        {
            return existing.ToRecord();
        }

        var now = _timeProvider.GetUtcNow();
        var checkpoint = PolicyDeltaCheckpoint.CreateNew(tenantId, consumerId, now);
        var document = PolicyDeltaCheckpointDocument.FromRecord(checkpoint);

        try
        {
            await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
            return checkpoint;
        }
        catch (MongoWriteException ex) when (ex.WriteError?.Category == ServerErrorCategory.DuplicateKey)
        {
            // Race condition: another process created the checkpoint concurrently.
            existing = await _collection
                .Find(d => d.Id == checkpointId)
                .FirstOrDefaultAsync(cancellationToken)
                .ConfigureAwait(false);

            return existing?.ToRecord() ?? checkpoint;
        }
    }

    public async Task<PolicyDeltaCheckpoint?> GetAsync(
        string checkpointId,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(checkpointId);

        var document = await _collection
            .Find(d => d.Id == checkpointId)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        return document?.ToRecord();
    }

    public async Task<PolicyDeltaCheckpoint> UpdateAsync(
        PolicyDeltaCheckpoint checkpoint,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(checkpoint);

        var document = PolicyDeltaCheckpointDocument.FromRecord(checkpoint);
        var options = new ReplaceOptions { IsUpsert = true };

        await _collection
            .ReplaceOneAsync(
                d => d.Id == checkpoint.CheckpointId,
                document,
                options,
                cancellationToken)
            .ConfigureAwait(false);

        return checkpoint;
    }

    public async Task<IReadOnlyList<PolicyDeltaCheckpoint>> ListByTenantAsync(
        string tenantId,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);

        var documents = await _collection
            .Find(d => d.TenantId == tenantId)
            .SortBy(d => d.ConsumerId)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        var results = new List<PolicyDeltaCheckpoint>(documents.Count);
        foreach (var doc in documents)
        {
            results.Add(doc.ToRecord());
        }

        return results;
    }

    public async Task<bool> DeleteAsync(
        string checkpointId,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(checkpointId);

        var result = await _collection
            .DeleteOneAsync(d => d.Id == checkpointId, cancellationToken)
            .ConfigureAwait(false);

        return result.DeletedCount > 0;
    }
}

@@ -0,0 +1,78 @@
|
||||
using System;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Bson.Serialization.Attributes;
|
||||
using StellaOps.Concelier.Core.Linksets;
|
||||
|
||||
namespace StellaOps.Concelier.Storage.Mongo.PolicyDelta;
|
||||
|
||||
/// <summary>
|
||||
/// MongoDB document for storing policy delta checkpoints.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
internal sealed class PolicyDeltaCheckpointDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier: {consumerId}:{tenantId}
|
||||
/// </summary>
|
||||
[BsonId]
|
||||
public string Id { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("tenantId")]
|
||||
public string TenantId { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("consumerId")]
|
||||
public string ConsumerId { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("lastCreatedAt")]
|
||||
[BsonIgnoreIfNull]
|
||||
public DateTime? LastCreatedAt { get; set; }
|
||||
|
||||
[BsonElement("lastAdvisoryId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? LastAdvisoryId { get; set; }
|
||||
|
||||
[BsonElement("resumeToken")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? ResumeToken { get; set; }
|
||||
|
||||
[BsonElement("sequenceNumber")]
|
||||
public long SequenceNumber { get; set; }
|
||||
|
||||
[BsonElement("updatedAt")]
|
||||
public DateTime UpdatedAt { get; set; }
|
||||
|
||||
[BsonElement("processedCount")]
|
||||
public long ProcessedCount { get; set; }
|
||||
|
||||
[BsonElement("lastBatchHash")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? LastBatchHash { get; set; }
|
||||
|
||||
public PolicyDeltaCheckpoint ToRecord() =>
|
||||
new(
|
||||
CheckpointId: Id,
|
||||
TenantId: TenantId,
|
||||
ConsumerId: ConsumerId,
|
||||
LastCreatedAt: LastCreatedAt.HasValue ? new DateTimeOffset(LastCreatedAt.Value, TimeSpan.Zero) : null,
|
||||
LastAdvisoryId: LastAdvisoryId,
|
||||
ResumeToken: ResumeToken,
|
||||
SequenceNumber: SequenceNumber,
|
||||
UpdatedAt: new DateTimeOffset(UpdatedAt, TimeSpan.Zero),
|
||||
ProcessedCount: ProcessedCount,
|
||||
LastBatchHash: LastBatchHash);
|
||||
|
||||
public static PolicyDeltaCheckpointDocument FromRecord(PolicyDeltaCheckpoint record) =>
|
||||
new()
|
||||
{
|
||||
Id = record.CheckpointId,
|
||||
TenantId = record.TenantId,
|
||||
ConsumerId = record.ConsumerId,
|
||||
LastCreatedAt = record.LastCreatedAt?.UtcDateTime,
|
||||
LastAdvisoryId = record.LastAdvisoryId,
|
||||
ResumeToken = record.ResumeToken,
|
||||
SequenceNumber = record.SequenceNumber,
|
||||
UpdatedAt = record.UpdatedAt.UtcDateTime,
|
||||
ProcessedCount = record.ProcessedCount,
|
||||
LastBatchHash = record.LastBatchHash
|
||||
};
|
||||
}
|
||||
@@ -24,6 +24,8 @@ using StellaOps.Concelier.Storage.Mongo.Observations;
|
||||
using StellaOps.Concelier.Core.Observations;
|
||||
using StellaOps.Concelier.Storage.Mongo.Linksets;
|
||||
using StellaOps.Concelier.Storage.Mongo.Orchestrator;
|
||||
using StellaOps.Concelier.Storage.Mongo.PolicyDelta;
|
||||
using StellaOps.Concelier.Core.Linksets;
|
||||
|
||||
namespace StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
@@ -190,8 +192,12 @@ public static class ServiceCollectionExtensions
|
||||
services.AddSingleton<IMongoMigration, EnsureOrchestratorCollectionsMigration>();
|
||||
services.AddSingleton<IMongoMigration, EnsureLinkNotMergeCollectionsMigration>();
|
||||
services.AddSingleton<IMongoMigration, EnsureLinkNotMergeShardingAndTtlMigration>();
|
||||
services.AddSingleton<IMongoMigration, EnsureLegacyAdvisoriesBackfillMigration>();
|
||||
services.AddSingleton<IMongoMigration, EnsurePolicyDeltaCheckpointsCollectionMigration>();
|
||||
services.AddSingleton<IMongoMigration, EnsurePolicyLookupIndexesMigration>();
|
||||
|
||||
services.AddSingleton<IOrchestratorRegistryStore, MongoOrchestratorRegistryStore>();
|
||||
services.AddSingleton<IPolicyDeltaCheckpointStore, MongoPolicyDeltaCheckpointStore>();
|
||||
|
||||
services.AddSingleton<IHostedService, AdvisoryObservationTransportWorker>();
|
||||
|
||||
|
||||
@@ -0,0 +1,220 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using StellaOps.PolicyDsl;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Compilation;
|
||||
|
||||
/// <summary>
|
||||
/// Extended compile output metadata for policy analysis, coverage tracking, and editor support.
|
||||
/// </summary>
|
||||
public sealed record PolicyCompileMetadata(
|
||||
PolicySymbolTable SymbolTable,
|
||||
PolicyRuleIndex RuleIndex,
|
||||
PolicyDocumentation Documentation,
|
||||
PolicyRuleCoverageMetadata CoverageMetadata,
|
||||
PolicyDeterministicHashes Hashes);
|
||||
|
||||
/// <summary>
|
||||
/// Deterministic hashes for policy identity and change detection.
|
||||
/// </summary>
|
||||
public sealed record PolicyDeterministicHashes(
|
||||
/// <summary>SHA256 of canonical IR JSON representation.</summary>
|
||||
string ContentHash,
|
||||
/// <summary>SHA256 of rule structure only (names, priorities, conditions).</summary>
|
||||
string StructureHash,
|
||||
/// <summary>SHA256 of rule names and priorities (for ordering verification).</summary>
|
||||
string OrderingHash,
|
||||
/// <summary>Combined hash for complete identity verification.</summary>
|
||||
string IdentityHash);
|
||||
|
||||
/// <summary>
|
||||
/// Symbol table containing all identifiers, functions, and their usages.
|
||||
/// </summary>
|
||||
public sealed record PolicySymbolTable(
|
||||
ImmutableArray<PolicySymbol> Symbols,
|
||||
ImmutableArray<PolicyFunctionSignature> BuiltInFunctions,
|
||||
ImmutableArray<PolicyVariableDefinition> Variables,
|
||||
ImmutableDictionary<string, ImmutableArray<PolicySymbolReference>> ReferencesByName);
|
||||
|
||||
/// <summary>
|
||||
/// A symbol in the policy DSL (identifier, function, variable, etc.).
|
||||
/// </summary>
|
||||
public sealed record PolicySymbol(
|
||||
string Name,
|
||||
PolicySymbolKind Kind,
|
||||
string? Type,
|
||||
PolicySymbolScope Scope,
|
||||
ImmutableArray<PolicySymbolReference> References);
|
||||
|
||||
/// <summary>
|
||||
/// Symbol kinds in the policy DSL.
|
||||
/// </summary>
|
||||
public enum PolicySymbolKind
|
||||
{
|
||||
Variable,
|
||||
Function,
|
||||
Profile,
|
||||
ProfileMap,
|
||||
ProfileEnv,
|
||||
ProfileScalar,
|
||||
Rule,
|
||||
Metadata,
|
||||
Setting,
|
||||
Parameter,
|
||||
BuiltIn
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Symbol scope information.
|
||||
/// </summary>
|
||||
public sealed record PolicySymbolScope(
|
||||
string? RuleName,
|
||||
string? ProfileName,
|
||||
bool IsGlobal);
|
||||
|
||||
/// <summary>
|
||||
/// Reference to a symbol usage in the policy.
|
||||
/// </summary>
|
||||
public sealed record PolicySymbolReference(
|
||||
string SymbolName,
|
||||
string Context,
|
||||
int? LineNumber,
|
||||
int? ColumnNumber,
|
||||
PolicySymbolUsage Usage);
|
||||
|
||||
/// <summary>
|
||||
/// How a symbol is used.
|
||||
/// </summary>
|
||||
public enum PolicySymbolUsage
|
||||
{
|
||||
Definition,
|
||||
Read,
|
||||
Write,
|
||||
Invocation,
|
||||
MemberAccess
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Built-in function signature for autocomplete.
|
||||
/// </summary>
|
||||
public sealed record PolicyFunctionSignature(
|
||||
string Name,
|
||||
string Description,
|
||||
ImmutableArray<PolicyParameterInfo> Parameters,
|
||||
string ReturnType,
|
||||
ImmutableArray<string> Examples);
|
||||
|
||||
/// <summary>
|
||||
/// Parameter information for function signatures.
|
||||
/// </summary>
|
||||
public sealed record PolicyParameterInfo(
|
||||
string Name,
|
||||
string Type,
|
||||
bool IsOptional,
|
||||
string? DefaultValue,
|
||||
string Description);
|
||||
|
||||
/// <summary>
|
||||
/// Variable definition extracted from policy.
|
||||
/// </summary>
|
||||
public sealed record PolicyVariableDefinition(
|
||||
string Name,
|
||||
string? InferredType,
|
||||
string? InitialValue,
|
||||
string DefinedInRule,
|
||||
bool IsAssignment);
|
||||
|
||||
/// <summary>
|
||||
/// Rule index for fast lookup and editor autocomplete.
|
||||
/// </summary>
|
||||
public sealed record PolicyRuleIndex(
|
||||
ImmutableArray<PolicyRuleEntry> Rules,
|
||||
ImmutableDictionary<string, PolicyRuleEntry> ByName,
|
||||
ImmutableDictionary<int, ImmutableArray<PolicyRuleEntry>> ByPriority,
|
||||
ImmutableArray<string> ActionTypes,
|
||||
ImmutableArray<string> UsedIdentifiers);
|
||||
|
||||
/// <summary>
|
||||
/// Index entry for a single rule.
|
||||
/// </summary>
|
||||
public sealed record PolicyRuleEntry(
|
||||
string Name,
|
||||
int Priority,
|
||||
int Index,
|
||||
string ConditionSummary,
|
||||
ImmutableArray<string> ThenActionTypes,
|
||||
ImmutableArray<string> ElseActionTypes,
|
||||
string Justification,
|
||||
ImmutableArray<string> ReferencedIdentifiers,
|
||||
ImmutableArray<string> ReferencedFunctions);
|
||||
|
||||
/// <summary>
|
||||
/// Extracted documentation from policy source.
|
||||
/// </summary>
|
||||
public sealed record PolicyDocumentation(
|
||||
string? PolicyDescription,
|
||||
ImmutableArray<string> Tags,
|
||||
string? Author,
|
||||
ImmutableDictionary<string, string> CustomMetadata,
|
||||
ImmutableArray<PolicyRuleDocumentation> RuleDocumentation,
|
||||
ImmutableArray<PolicyProfileDocumentation> ProfileDocumentation);
|
||||
|
||||
/// <summary>
|
||||
/// Documentation for a single rule.
|
||||
/// </summary>
|
||||
public sealed record PolicyRuleDocumentation(
|
||||
string RuleName,
|
||||
int Priority,
|
||||
string Justification,
|
||||
string ConditionDescription,
|
||||
ImmutableArray<string> ActionDescriptions);
|
||||
|
||||
/// <summary>
|
||||
/// Documentation for a profile.
|
||||
/// </summary>
|
||||
public sealed record PolicyProfileDocumentation(
|
||||
string ProfileName,
|
||||
ImmutableArray<string> MapNames,
|
||||
ImmutableArray<string> EnvNames,
|
||||
ImmutableArray<string> ScalarNames);
|
||||
|
||||
/// <summary>
|
||||
/// Rule coverage metadata for tracking test coverage.
|
||||
/// </summary>
|
||||
public sealed record PolicyRuleCoverageMetadata(
|
||||
ImmutableArray<PolicyRuleCoverageEntry> Rules,
|
||||
int TotalRules,
|
||||
int TotalConditions,
|
||||
int TotalActions,
|
||||
ImmutableDictionary<string, int> ActionTypeCounts,
|
||||
ImmutableArray<PolicyCoveragePath> CoveragePaths);
|
||||
|
||||
/// <summary>
|
||||
/// Coverage entry for a single rule.
|
||||
/// </summary>
|
||||
public sealed record PolicyRuleCoverageEntry(
|
||||
string RuleName,
|
||||
int Priority,
|
||||
string ConditionHash,
|
||||
int ThenActionCount,
|
||||
int ElseActionCount,
|
||||
bool HasElseBranch,
|
||||
ImmutableArray<string> CoveragePoints);
|
||||
|
||||
/// <summary>
|
||||
/// A coverage path through the policy (for test generation).
|
||||
/// </summary>
|
||||
public sealed record PolicyCoveragePath(
|
||||
string PathId,
|
||||
ImmutableArray<string> RuleSequence,
|
||||
ImmutableArray<PolicyBranchDecision> Decisions,
|
||||
string PathHash);
|
||||
|
||||
/// <summary>
|
||||
/// A branch decision point.
|
||||
/// </summary>
|
||||
public sealed record PolicyBranchDecision(
|
||||
string RuleName,
|
||||
bool TookThenBranch,
|
||||
string ConditionHash);
|
||||
@@ -0,0 +1,988 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.PolicyDsl;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Compilation;
|
||||
|
||||
/// <summary>
|
||||
/// Extracts comprehensive metadata from compiled policy IR documents.
|
||||
/// Generates symbol tables, rule indices, documentation, coverage metadata, and deterministic hashes.
|
||||
/// </summary>
|
||||
internal sealed class PolicyMetadataExtractor
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Extracts all metadata from a compiled policy document.
|
||||
/// </summary>
|
||||
public PolicyCompileMetadata Extract(PolicyIrDocument document, ImmutableArray<byte> canonicalRepresentation)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(document);
|
||||
|
||||
var symbolTable = ExtractSymbolTable(document);
|
||||
var ruleIndex = BuildRuleIndex(document);
|
||||
var documentation = ExtractDocumentation(document);
|
||||
var coverageMetadata = BuildCoverageMetadata(document);
|
||||
var hashes = ComputeHashes(document, canonicalRepresentation);
|
||||
|
||||
return new PolicyCompileMetadata(
|
||||
symbolTable,
|
||||
ruleIndex,
|
||||
documentation,
|
||||
coverageMetadata,
|
||||
hashes);
|
||||
}
|
||||
|
||||
#region Symbol Table Extraction
|
||||
|
||||
private PolicySymbolTable ExtractSymbolTable(PolicyIrDocument document)
|
||||
{
|
||||
var symbols = new List<PolicySymbol>();
|
||||
var variables = new List<PolicyVariableDefinition>();
|
||||
var referencesByName = new Dictionary<string, List<PolicySymbolReference>>();
|
||||
|
||||
// Extract profile symbols
|
||||
if (!document.Profiles.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var profile in document.Profiles)
|
||||
{
|
||||
symbols.Add(new PolicySymbol(
|
||||
profile.Name,
|
||||
PolicySymbolKind.Profile,
|
||||
"profile",
|
||||
new PolicySymbolScope(null, profile.Name, true),
|
||||
ImmutableArray<PolicySymbolReference>.Empty));
|
||||
|
||||
if (!profile.Maps.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var map in profile.Maps)
|
||||
{
|
||||
symbols.Add(new PolicySymbol(
|
||||
map.Name,
|
||||
PolicySymbolKind.ProfileMap,
|
||||
"map",
|
||||
new PolicySymbolScope(null, profile.Name, false),
|
||||
ImmutableArray<PolicySymbolReference>.Empty));
|
||||
}
|
||||
}
|
||||
|
||||
if (!profile.Environments.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var env in profile.Environments)
|
||||
{
|
||||
symbols.Add(new PolicySymbol(
|
||||
env.Name,
|
||||
PolicySymbolKind.ProfileEnv,
|
||||
"env",
|
||||
new PolicySymbolScope(null, profile.Name, false),
|
||||
ImmutableArray<PolicySymbolReference>.Empty));
|
||||
|
||||
// Extract identifiers from environment conditions
|
||||
if (!env.Entries.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var entry in env.Entries)
|
||||
{
|
||||
ExtractExpressionReferences(entry.Condition, null, profile.Name, referencesByName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!profile.Scalars.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var scalar in profile.Scalars)
|
||||
{
|
||||
symbols.Add(new PolicySymbol(
|
||||
scalar.Name,
|
||||
PolicySymbolKind.ProfileScalar,
|
||||
InferLiteralType(scalar.Value),
|
||||
new PolicySymbolScope(null, profile.Name, false),
|
||||
ImmutableArray<PolicySymbolReference>.Empty));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract rule symbols and variable definitions
|
||||
if (!document.Rules.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var rule in document.Rules)
|
||||
{
|
||||
symbols.Add(new PolicySymbol(
|
||||
rule.Name,
|
||||
PolicySymbolKind.Rule,
|
||||
"rule",
|
||||
new PolicySymbolScope(rule.Name, null, true),
|
||||
ImmutableArray<PolicySymbolReference>.Empty));
|
||||
|
||||
// Extract identifiers from rule condition
|
||||
ExtractExpressionReferences(rule.When, rule.Name, null, referencesByName);
|
||||
|
||||
// Extract from then actions
|
||||
if (!rule.ThenActions.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var action in rule.ThenActions)
|
||||
{
|
||||
ExtractActionReferences(action, rule.Name, referencesByName, variables);
|
||||
}
|
||||
}
|
||||
|
||||
// Extract from else actions
|
||||
if (!rule.ElseActions.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var action in rule.ElseActions)
|
||||
{
|
||||
ExtractActionReferences(action, rule.Name, referencesByName, variables);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract metadata symbols
|
||||
foreach (var (key, _) in document.Metadata)
|
||||
{
|
||||
symbols.Add(new PolicySymbol(
|
||||
key,
|
||||
PolicySymbolKind.Metadata,
|
||||
"metadata",
|
||||
new PolicySymbolScope(null, null, true),
|
||||
ImmutableArray<PolicySymbolReference>.Empty));
|
||||
}
|
||||
|
||||
// Extract settings symbols
|
||||
foreach (var (key, _) in document.Settings)
|
||||
{
|
||||
symbols.Add(new PolicySymbol(
|
||||
key,
|
||||
PolicySymbolKind.Setting,
|
||||
"setting",
|
||||
new PolicySymbolScope(null, null, true),
|
||||
ImmutableArray<PolicySymbolReference>.Empty));
|
||||
}
|
||||
|
||||
return new PolicySymbolTable(
|
||||
symbols.ToImmutableArray(),
|
||||
GetBuiltInFunctions(),
|
||||
variables.ToImmutableArray(),
|
||||
referencesByName.ToImmutableDictionary(
|
||||
kvp => kvp.Key,
|
||||
kvp => kvp.Value.ToImmutableArray()));
|
||||
}
|
||||
|
||||
private void ExtractExpressionReferences(
|
||||
PolicyExpression? expression,
|
||||
string? ruleName,
|
||||
string? profileName,
|
||||
Dictionary<string, List<PolicySymbolReference>> referencesByName)
|
||||
{
|
||||
if (expression is null) return;
|
||||
|
||||
switch (expression)
|
||||
{
|
||||
case PolicyIdentifierExpression identifier:
|
||||
AddReference(referencesByName, identifier.Name, ruleName, profileName, PolicySymbolUsage.Read);
|
||||
break;
|
||||
|
||||
case PolicyMemberAccessExpression member:
|
||||
ExtractExpressionReferences(member.Target, ruleName, profileName, referencesByName);
|
||||
// Member name is not a standalone identifier
|
||||
break;
|
||||
|
||||
case PolicyInvocationExpression invocation:
|
||||
ExtractExpressionReferences(invocation.Target, ruleName, profileName, referencesByName);
|
||||
if (!invocation.Arguments.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var arg in invocation.Arguments)
|
||||
{
|
||||
ExtractExpressionReferences(arg, ruleName, profileName, referencesByName);
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case PolicyIndexerExpression indexer:
|
||||
ExtractExpressionReferences(indexer.Target, ruleName, profileName, referencesByName);
|
||||
ExtractExpressionReferences(indexer.Index, ruleName, profileName, referencesByName);
|
||||
break;
|
||||
|
||||
case PolicyUnaryExpression unary:
|
||||
ExtractExpressionReferences(unary.Operand, ruleName, profileName, referencesByName);
|
||||
break;
|
||||
|
||||
case PolicyBinaryExpression binary:
|
||||
ExtractExpressionReferences(binary.Left, ruleName, profileName, referencesByName);
|
||||
ExtractExpressionReferences(binary.Right, ruleName, profileName, referencesByName);
|
||||
break;
|
||||
|
||||
case PolicyListExpression list when !list.Items.IsDefaultOrEmpty:
|
||||
foreach (var item in list.Items)
|
||||
{
|
||||
ExtractExpressionReferences(item, ruleName, profileName, referencesByName);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
private void ExtractActionReferences(
|
||||
PolicyIrAction action,
|
||||
string ruleName,
|
||||
Dictionary<string, List<PolicySymbolReference>> referencesByName,
|
||||
List<PolicyVariableDefinition> variables)
|
||||
{
|
||||
switch (action)
|
||||
{
|
||||
case PolicyIrAssignmentAction assignment:
|
||||
if (!assignment.Target.IsDefaultOrEmpty)
|
||||
{
|
||||
var varName = string.Join(".", assignment.Target);
|
||||
AddReference(referencesByName, varName, ruleName, null, PolicySymbolUsage.Write);
|
||||
variables.Add(new PolicyVariableDefinition(
|
||||
varName,
|
||||
InferExpressionType(assignment.Value),
|
||||
SummarizeExpression(assignment.Value),
|
||||
ruleName,
|
||||
true));
|
||||
}
|
||||
ExtractExpressionReferences(assignment.Value, ruleName, null, referencesByName);
|
||||
break;
|
||||
|
||||
case PolicyIrAnnotateAction annotate:
|
||||
if (!annotate.Target.IsDefaultOrEmpty)
|
||||
{
|
||||
var targetName = string.Join(".", annotate.Target);
|
||||
AddReference(referencesByName, targetName, ruleName, null, PolicySymbolUsage.Write);
|
||||
}
|
||||
ExtractExpressionReferences(annotate.Value, ruleName, null, referencesByName);
|
||||
break;
|
||||
|
||||
case PolicyIrIgnoreAction ignore:
|
||||
ExtractExpressionReferences(ignore.Until, ruleName, null, referencesByName);
|
||||
break;
|
||||
|
||||
case PolicyIrEscalateAction escalate:
|
||||
ExtractExpressionReferences(escalate.To, ruleName, null, referencesByName);
|
||||
ExtractExpressionReferences(escalate.When, ruleName, null, referencesByName);
|
||||
break;
|
||||
|
||||
case PolicyIrRequireVexAction require:
|
||||
foreach (var condition in require.Conditions.Values)
|
||||
{
|
||||
ExtractExpressionReferences(condition, ruleName, null, referencesByName);
|
||||
}
|
||||
break;
|
||||
|
||||
case PolicyIrWarnAction warn:
|
||||
ExtractExpressionReferences(warn.Message, ruleName, null, referencesByName);
|
||||
break;
|
||||
|
||||
case PolicyIrDeferAction defer:
|
||||
ExtractExpressionReferences(defer.Until, ruleName, null, referencesByName);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
private static void AddReference(
|
||||
Dictionary<string, List<PolicySymbolReference>> referencesByName,
|
||||
string symbolName,
|
||||
string? ruleName,
|
||||
string? profileName,
|
||||
PolicySymbolUsage usage)
|
||||
{
|
||||
if (!referencesByName.TryGetValue(symbolName, out var refs))
|
||||
{
|
||||
refs = [];
|
||||
referencesByName[symbolName] = refs;
|
||||
}
|
||||
|
||||
refs.Add(new PolicySymbolReference(
|
||||
symbolName,
|
||||
ruleName ?? profileName ?? "global",
|
||||
null,
|
||||
null,
|
||||
usage));
|
||||
}
|
||||
|
||||
private static string? InferLiteralType(PolicyIrLiteral literal) => literal switch
|
||||
{
|
||||
PolicyIrStringLiteral => "string",
|
||||
PolicyIrNumberLiteral => "number",
|
||||
PolicyIrBooleanLiteral => "boolean",
|
||||
PolicyIrListLiteral => "list",
|
||||
_ => null
|
||||
};
|
||||
|
||||
private static string? InferExpressionType(PolicyExpression? expression) => expression switch
|
||||
{
|
||||
PolicyLiteralExpression lit => lit.Value switch
|
||||
{
|
||||
string => "string",
|
||||
decimal or double or float or int or long => "number",
|
||||
bool => "boolean",
|
||||
null => "null",
|
||||
_ => "unknown"
|
||||
},
|
||||
PolicyListExpression => "list",
|
||||
PolicyBinaryExpression bin => bin.Operator switch
|
||||
{
|
||||
PolicyBinaryOperator.And or PolicyBinaryOperator.Or or PolicyBinaryOperator.Equal or
|
||||
PolicyBinaryOperator.NotEqual or PolicyBinaryOperator.LessThan or PolicyBinaryOperator.LessThanOrEqual or
|
||||
PolicyBinaryOperator.GreaterThan or PolicyBinaryOperator.GreaterThanOrEqual or
|
||||
PolicyBinaryOperator.In or PolicyBinaryOperator.NotIn => "boolean",
|
||||
_ => "unknown"
|
||||
},
|
||||
PolicyUnaryExpression { Operator: PolicyUnaryOperator.Not } => "boolean",
|
||||
_ => null
|
||||
};
|
||||
|
||||
private static ImmutableArray<PolicyFunctionSignature> GetBuiltInFunctions()
|
||||
{
|
||||
return
|
||||
[
|
||||
new PolicyFunctionSignature(
|
||||
"contains",
|
||||
"Checks if a string contains a substring or a list contains an element",
|
||||
[
|
||||
new PolicyParameterInfo("haystack", "string|list", false, null, "The string or list to search in"),
|
||||
new PolicyParameterInfo("needle", "any", false, null, "The value to search for")
|
||||
],
|
||||
"boolean",
|
||||
["contains(advisory.id, \"CVE\")", "contains(tags, \"critical\")"]),
|
||||
|
||||
new PolicyFunctionSignature(
|
||||
"startsWith",
|
||||
"Checks if a string starts with a prefix",
|
||||
[
|
||||
new PolicyParameterInfo("value", "string", false, null, "The string to check"),
|
||||
new PolicyParameterInfo("prefix", "string", false, null, "The prefix to match")
|
||||
],
|
||||
"boolean",
|
||||
["startsWith(component.purl, \"pkg:npm\")"]),
|
||||
|
||||
new PolicyFunctionSignature(
|
||||
"endsWith",
|
||||
"Checks if a string ends with a suffix",
|
||||
[
|
||||
new PolicyParameterInfo("value", "string", false, null, "The string to check"),
|
||||
new PolicyParameterInfo("suffix", "string", false, null, "The suffix to match")
|
||||
],
|
||||
"boolean",
|
||||
["endsWith(component.name, \"-dev\")"]),
|
||||
|
||||
new PolicyFunctionSignature(
|
||||
"matches",
|
||||
"Checks if a string matches a regex pattern",
|
||||
[
|
||||
new PolicyParameterInfo("value", "string", false, null, "The string to check"),
|
||||
new PolicyParameterInfo("pattern", "string", false, null, "The regex pattern")
|
||||
],
|
||||
"boolean",
|
||||
["matches(advisory.id, \"^CVE-202[3-9]\")"]),
|
||||
|
||||
new PolicyFunctionSignature(
|
||||
"length",
|
||||
"Returns the length of a string or list",
|
||||
[
|
||||
new PolicyParameterInfo("value", "string|list", false, null, "The value to measure")
|
||||
],
|
||||
"number",
|
||||
["length(component.name)", "length(tags)"]),
|
||||
|
||||
new PolicyFunctionSignature(
|
||||
"lower",
|
||||
"Converts a string to lowercase",
|
||||
[
|
||||
new PolicyParameterInfo("value", "string", false, null, "The string to convert")
|
||||
],
|
||||
"string",
|
||||
["lower(component.ecosystem)"]),
|
||||
|
||||
new PolicyFunctionSignature(
|
||||
"upper",
|
||||
"Converts a string to uppercase",
|
||||
[
|
||||
new PolicyParameterInfo("value", "string", false, null, "The string to convert")
|
||||
],
|
||||
"string",
|
||||
["upper(severity)"]),
|
||||
|
||||
new PolicyFunctionSignature(
|
||||
"now",
|
||||
"Returns the current evaluation timestamp (deterministic within a run)",
|
||||
[],
|
||||
"datetime",
|
||||
["now()"]),
|
||||
|
||||
new PolicyFunctionSignature(
|
||||
"days",
|
||||
"Creates a duration in days",
|
||||
[
|
||||
new PolicyParameterInfo("count", "number", false, null, "Number of days")
|
||||
],
|
||||
"duration",
|
||||
["days(30)", "days(7)"]),
|
||||
|
||||
new PolicyFunctionSignature(
|
||||
"semver",
|
||||
"Parses a semantic version string",
|
||||
[
|
||||
new PolicyParameterInfo("version", "string", false, null, "The version string to parse")
|
||||
],
|
||||
"semver",
|
||||
["semver(component.version)"]),
|
||||
|
||||
new PolicyFunctionSignature(
|
||||
"semverCompare",
|
||||
"Compares two semantic versions",
|
||||
[
|
||||
new PolicyParameterInfo("left", "string|semver", false, null, "First version"),
|
||||
new PolicyParameterInfo("right", "string|semver", false, null, "Second version")
|
||||
],
|
||||
"number",
|
||||
["semverCompare(component.version, \"1.0.0\")"])
|
||||
];
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Rule Index Building
|
||||
|
||||
private PolicyRuleIndex BuildRuleIndex(PolicyIrDocument document)
|
||||
{
|
||||
var rules = new List<PolicyRuleEntry>();
|
||||
var byName = new Dictionary<string, PolicyRuleEntry>(StringComparer.Ordinal);
|
||||
var byPriority = new Dictionary<int, List<PolicyRuleEntry>>();
|
||||
var allActionTypes = new HashSet<string>();
|
||||
var allIdentifiers = new HashSet<string>();
|
||||
|
||||
if (!document.Rules.IsDefaultOrEmpty)
|
||||
{
|
||||
for (var i = 0; i < document.Rules.Length; i++)
|
||||
{
|
||||
var rule = document.Rules[i];
|
||||
var thenActionTypes = GetActionTypes(rule.ThenActions, allActionTypes);
|
||||
var elseActionTypes = GetActionTypes(rule.ElseActions, allActionTypes);
|
||||
var (identifiers, functions) = ExtractRuleReferences(rule);
|
||||
|
||||
foreach (var id in identifiers)
|
||||
{
|
||||
allIdentifiers.Add(id);
|
||||
}
|
||||
|
||||
var entry = new PolicyRuleEntry(
|
||||
rule.Name,
|
||||
rule.Priority,
|
||||
i,
|
||||
SummarizeExpression(rule.When) ?? "true",
|
||||
thenActionTypes,
|
||||
elseActionTypes,
|
||||
rule.Because,
|
||||
identifiers,
|
||||
functions);
|
||||
|
||||
rules.Add(entry);
|
||||
byName[rule.Name] = entry;
|
||||
|
||||
if (!byPriority.TryGetValue(rule.Priority, out var priorityList))
|
||||
{
|
||||
priorityList = [];
|
||||
byPriority[rule.Priority] = priorityList;
|
||||
}
|
||||
priorityList.Add(entry);
|
||||
}
|
||||
}
|
||||
|
||||
return new PolicyRuleIndex(
|
||||
rules.ToImmutableArray(),
|
||||
byName.ToImmutableDictionary(),
|
||||
byPriority.ToImmutableDictionary(kvp => kvp.Key, kvp => kvp.Value.ToImmutableArray()),
|
||||
allActionTypes.Order().ToImmutableArray(),
|
||||
allIdentifiers.Order().ToImmutableArray());
|
||||
}
|
||||
|
||||
private static ImmutableArray<string> GetActionTypes(
|
||||
ImmutableArray<PolicyIrAction> actions,
|
||||
HashSet<string> allActionTypes)
|
||||
{
|
||||
if (actions.IsDefaultOrEmpty) return [];
|
||||
|
||||
var types = new List<string>();
|
||||
foreach (var action in actions)
|
||||
{
|
||||
var typeName = action switch
|
||||
{
|
||||
PolicyIrAssignmentAction => "assign",
|
||||
PolicyIrAnnotateAction => "annotate",
|
||||
PolicyIrIgnoreAction => "ignore",
|
||||
PolicyIrEscalateAction => "escalate",
|
||||
PolicyIrRequireVexAction => "requireVex",
|
||||
PolicyIrWarnAction => "warn",
|
||||
PolicyIrDeferAction => "defer",
|
||||
_ => "unknown"
|
||||
};
|
||||
types.Add(typeName);
|
||||
allActionTypes.Add(typeName);
|
||||
}
|
||||
return types.ToImmutableArray();
|
||||
}
|
||||
|
||||
private static (ImmutableArray<string> Identifiers, ImmutableArray<string> Functions) ExtractRuleReferences(PolicyIrRule rule)
|
||||
{
|
||||
var identifiers = new HashSet<string>();
|
||||
var functions = new HashSet<string>();
|
||||
|
||||
CollectExpressionReferences(rule.When, identifiers, functions);
|
||||
|
||||
if (!rule.ThenActions.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var action in rule.ThenActions)
|
||||
{
|
||||
CollectActionReferences(action, identifiers, functions);
|
||||
}
|
||||
}
|
||||
|
||||
if (!rule.ElseActions.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var action in rule.ElseActions)
|
||||
{
|
||||
CollectActionReferences(action, identifiers, functions);
|
||||
}
|
||||
}
|
||||
|
||||
return (identifiers.Order().ToImmutableArray(), functions.Order().ToImmutableArray());
|
||||
}
|
||||
|
||||
private static void CollectExpressionReferences(
|
||||
PolicyExpression? expression,
|
||||
HashSet<string> identifiers,
|
||||
HashSet<string> functions)
|
||||
{
|
||||
if (expression is null) return;
|
||||
|
||||
switch (expression)
|
||||
{
|
||||
case PolicyIdentifierExpression id:
|
||||
identifiers.Add(id.Name);
|
||||
break;
|
||||
case PolicyMemberAccessExpression member:
|
||||
CollectExpressionReferences(member.Target, identifiers, functions);
|
||||
break;
|
||||
case PolicyInvocationExpression invocation:
|
||||
if (invocation.Target is PolicyIdentifierExpression funcId)
|
||||
{
|
||||
functions.Add(funcId.Name);
|
||||
}
|
||||
else
|
||||
{
|
||||
CollectExpressionReferences(invocation.Target, identifiers, functions);
|
||||
}
|
||||
if (!invocation.Arguments.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var arg in invocation.Arguments)
|
||||
{
|
||||
CollectExpressionReferences(arg, identifiers, functions);
|
||||
}
|
||||
}
|
||||
break;
|
||||
case PolicyIndexerExpression indexer:
|
||||
CollectExpressionReferences(indexer.Target, identifiers, functions);
|
||||
CollectExpressionReferences(indexer.Index, identifiers, functions);
|
||||
break;
|
||||
case PolicyUnaryExpression unary:
|
||||
CollectExpressionReferences(unary.Operand, identifiers, functions);
|
||||
break;
|
||||
case PolicyBinaryExpression binary:
|
||||
CollectExpressionReferences(binary.Left, identifiers, functions);
|
||||
CollectExpressionReferences(binary.Right, identifiers, functions);
|
||||
break;
|
||||
case PolicyListExpression list when !list.Items.IsDefaultOrEmpty:
|
||||
foreach (var item in list.Items)
|
||||
{
|
||||
CollectExpressionReferences(item, identifiers, functions);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
private static void CollectActionReferences(
|
||||
PolicyIrAction action,
|
||||
HashSet<string> identifiers,
|
||||
HashSet<string> functions)
|
||||
{
|
||||
switch (action)
|
||||
{
|
||||
case PolicyIrAssignmentAction assign:
|
||||
CollectExpressionReferences(assign.Value, identifiers, functions);
|
||||
break;
|
||||
case PolicyIrAnnotateAction annotate:
|
||||
CollectExpressionReferences(annotate.Value, identifiers, functions);
|
||||
break;
|
||||
case PolicyIrIgnoreAction ignore:
|
||||
CollectExpressionReferences(ignore.Until, identifiers, functions);
|
||||
break;
|
||||
case PolicyIrEscalateAction escalate:
|
||||
CollectExpressionReferences(escalate.To, identifiers, functions);
|
||||
CollectExpressionReferences(escalate.When, identifiers, functions);
|
||||
break;
|
||||
case PolicyIrRequireVexAction require:
|
||||
foreach (var condition in require.Conditions.Values)
|
||||
{
|
||||
CollectExpressionReferences(condition, identifiers, functions);
|
||||
}
|
||||
break;
|
||||
case PolicyIrWarnAction warn:
|
||||
CollectExpressionReferences(warn.Message, identifiers, functions);
|
||||
break;
|
||||
case PolicyIrDeferAction defer:
|
||||
CollectExpressionReferences(defer.Until, identifiers, functions);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Documentation Extraction
|
||||
|
||||
private PolicyDocumentation ExtractDocumentation(PolicyIrDocument document)
|
||||
{
|
||||
string? description = null;
|
||||
var tags = ImmutableArray<string>.Empty;
|
||||
string? author = null;
|
||||
var customMetadata = new Dictionary<string, string>();
|
||||
|
||||
// Extract from metadata
|
||||
if (document.Metadata.TryGetValue("description", out var descLit) && descLit is PolicyIrStringLiteral descStr)
|
||||
{
|
||||
description = descStr.Value;
|
||||
}
|
||||
|
||||
if (document.Metadata.TryGetValue("author", out var authorLit) && authorLit is PolicyIrStringLiteral authorStr)
|
||||
{
|
||||
author = authorStr.Value;
|
||||
}
|
||||
|
||||
if (document.Metadata.TryGetValue("tags", out var tagsLit) && tagsLit is PolicyIrListLiteral tagsList)
|
||||
{
|
||||
tags = tagsList.Items
|
||||
.OfType<PolicyIrStringLiteral>()
|
||||
.Select(s => s.Value)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
foreach (var (key, value) in document.Metadata)
|
||||
{
|
||||
if (key is not ("description" or "author" or "tags") && value is PolicyIrStringLiteral strVal)
|
||||
{
|
||||
customMetadata[key] = strVal.Value;
|
||||
}
|
||||
}
|
||||
|
||||
// Extract rule documentation
|
||||
var ruleDocs = new List<PolicyRuleDocumentation>();
|
||||
if (!document.Rules.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var rule in document.Rules)
|
||||
{
|
||||
var actionDescs = new List<string>();
|
||||
if (!rule.ThenActions.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var action in rule.ThenActions)
|
||||
{
|
||||
actionDescs.Add($"then: {DescribeAction(action)}");
|
||||
}
|
||||
}
|
||||
if (!rule.ElseActions.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var action in rule.ElseActions)
|
||||
{
|
||||
actionDescs.Add($"else: {DescribeAction(action)}");
|
||||
}
|
||||
}
|
||||
|
||||
ruleDocs.Add(new PolicyRuleDocumentation(
|
||||
rule.Name,
|
||||
rule.Priority,
|
||||
rule.Because,
|
||||
SummarizeExpression(rule.When) ?? "true",
|
||||
actionDescs.ToImmutableArray()));
|
||||
}
|
||||
}
|
||||
|
||||
// Extract profile documentation
|
||||
var profileDocs = new List<PolicyProfileDocumentation>();
|
||||
if (!document.Profiles.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var profile in document.Profiles)
|
||||
{
|
||||
profileDocs.Add(new PolicyProfileDocumentation(
|
||||
profile.Name,
|
||||
profile.Maps.IsDefaultOrEmpty
|
||||
? []
|
||||
: profile.Maps.Select(m => m.Name).ToImmutableArray(),
|
||||
profile.Environments.IsDefaultOrEmpty
|
||||
? []
|
||||
: profile.Environments.Select(e => e.Name).ToImmutableArray(),
|
||||
profile.Scalars.IsDefaultOrEmpty
|
||||
? []
|
||||
: profile.Scalars.Select(s => s.Name).ToImmutableArray()));
|
||||
}
|
||||
}
|
||||
|
||||
return new PolicyDocumentation(
|
||||
description,
|
||||
tags,
|
||||
author,
|
||||
customMetadata.ToImmutableDictionary(),
|
||||
ruleDocs.ToImmutableArray(),
|
||||
profileDocs.ToImmutableArray());
|
||||
}
|
||||
|
||||
private static string DescribeAction(PolicyIrAction action) => action switch
|
||||
{
|
||||
PolicyIrAssignmentAction a => $"assign {string.Join(".", a.Target)} = {SummarizeExpression(a.Value)}",
|
||||
PolicyIrAnnotateAction a => $"annotate {string.Join(".", a.Target)} = {SummarizeExpression(a.Value)}",
|
||||
PolicyIrIgnoreAction a => $"ignore{(a.Until is not null ? $" until {SummarizeExpression(a.Until)}" : "")}{(a.Because is not null ? $" because \"{a.Because}\"" : "")}",
|
||||
PolicyIrEscalateAction a => $"escalate{(a.To is not null ? $" to {SummarizeExpression(a.To)}" : "")}{(a.When is not null ? $" when {SummarizeExpression(a.When)}" : "")}",
|
||||
PolicyIrRequireVexAction a => $"requireVex({string.Join(", ", a.Conditions.Keys)})",
|
||||
PolicyIrWarnAction a => $"warn {SummarizeExpression(a.Message)}",
|
||||
PolicyIrDeferAction a => $"defer{(a.Until is not null ? $" until {SummarizeExpression(a.Until)}" : "")}",
|
||||
_ => "unknown"
|
||||
};
|
||||
|
||||
#endregion
|
||||
|
||||
#region Coverage Metadata Building
|
||||
|
||||
private PolicyRuleCoverageMetadata BuildCoverageMetadata(PolicyIrDocument document)
|
||||
{
|
||||
var rules = new List<PolicyRuleCoverageEntry>();
|
||||
var actionTypeCounts = new Dictionary<string, int>();
|
||||
var totalConditions = 0;
|
||||
var totalActions = 0;
|
||||
|
||||
if (!document.Rules.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var rule in document.Rules)
|
||||
{
|
||||
totalConditions++;
|
||||
var thenCount = rule.ThenActions.IsDefaultOrEmpty ? 0 : rule.ThenActions.Length;
|
||||
var elseCount = rule.ElseActions.IsDefaultOrEmpty ? 0 : rule.ElseActions.Length;
|
||||
totalActions += thenCount + elseCount;
|
||||
|
||||
// Count action types
|
||||
CountActionTypes(rule.ThenActions, actionTypeCounts);
|
||||
CountActionTypes(rule.ElseActions, actionTypeCounts);
|
||||
|
||||
// Generate coverage points
|
||||
var coveragePoints = new List<string>
|
||||
{
|
||||
$"{rule.Name}:condition"
|
||||
};
|
||||
|
||||
if (thenCount > 0)
|
||||
{
|
||||
coveragePoints.Add($"{rule.Name}:then");
|
||||
for (var i = 0; i < thenCount; i++)
|
||||
{
|
||||
coveragePoints.Add($"{rule.Name}:then[{i}]");
|
||||
}
|
||||
}
|
||||
|
||||
if (elseCount > 0)
|
||||
{
|
||||
coveragePoints.Add($"{rule.Name}:else");
|
||||
for (var i = 0; i < elseCount; i++)
|
||||
{
|
||||
coveragePoints.Add($"{rule.Name}:else[{i}]");
|
||||
}
|
||||
}
|
||||
|
||||
rules.Add(new PolicyRuleCoverageEntry(
|
||||
rule.Name,
|
||||
rule.Priority,
|
||||
ComputeExpressionHash(rule.When),
|
||||
thenCount,
|
||||
elseCount,
|
||||
elseCount > 0,
|
||||
coveragePoints.ToImmutableArray()));
|
||||
}
|
||||
}
|
||||
|
||||
// Generate coverage paths (simplified - exhaustive paths for small policies)
|
||||
var coveragePaths = GenerateCoveragePaths(document.Rules);
|
||||
|
||||
return new PolicyRuleCoverageMetadata(
|
||||
rules.ToImmutableArray(),
|
||||
rules.Count,
|
||||
totalConditions,
|
||||
totalActions,
|
||||
actionTypeCounts.ToImmutableDictionary(),
|
||||
coveragePaths);
|
||||
}
|
||||
|
||||
private static void CountActionTypes(ImmutableArray<PolicyIrAction> actions, Dictionary<string, int> counts)
|
||||
{
|
||||
if (actions.IsDefaultOrEmpty) return;
|
||||
|
||||
foreach (var action in actions)
|
||||
{
|
||||
var typeName = action switch
|
||||
{
|
||||
PolicyIrAssignmentAction => "assign",
|
||||
PolicyIrAnnotateAction => "annotate",
|
||||
PolicyIrIgnoreAction => "ignore",
|
||||
PolicyIrEscalateAction => "escalate",
|
||||
PolicyIrRequireVexAction => "requireVex",
|
||||
PolicyIrWarnAction => "warn",
|
||||
PolicyIrDeferAction => "defer",
|
||||
_ => "unknown"
|
||||
};
|
||||
|
||||
counts.TryGetValue(typeName, out var count);
|
||||
counts[typeName] = count + 1;
|
||||
}
|
||||
}
|
||||
|
||||
private static ImmutableArray<PolicyCoveragePath> GenerateCoveragePaths(ImmutableArray<PolicyIrRule> rules)
|
||||
{
|
||||
if (rules.IsDefaultOrEmpty) return [];
|
||||
|
||||
var paths = new List<PolicyCoveragePath>();
|
||||
|
||||
// For small policies, generate all 2^n paths
|
||||
// For larger policies, generate key paths only
|
||||
var ruleCount = rules.Length;
|
||||
var maxPaths = ruleCount <= 10 ? (1 << ruleCount) : 100;
|
||||
|
||||
for (var pathIndex = 0; pathIndex < maxPaths && pathIndex < (1 << ruleCount); pathIndex++)
|
||||
{
|
||||
var sequence = new List<string>();
|
||||
var decisions = new List<PolicyBranchDecision>();
|
||||
var pathHashBuilder = new StringBuilder();
|
||||
|
||||
for (var ruleIndex = 0; ruleIndex < ruleCount; ruleIndex++)
|
||||
{
|
||||
var rule = rules[ruleIndex];
|
||||
var tookThen = (pathIndex & (1 << ruleIndex)) != 0;
|
||||
|
||||
sequence.Add(rule.Name);
|
||||
decisions.Add(new PolicyBranchDecision(
|
||||
rule.Name,
|
||||
tookThen,
|
||||
ComputeExpressionHash(rule.When)));
|
||||
|
||||
pathHashBuilder.Append(rule.Name);
|
||||
pathHashBuilder.Append(tookThen ? ":T" : ":F");
|
||||
pathHashBuilder.Append('|');
|
||||
}
|
||||
|
||||
var pathId = $"path_{pathIndex:D4}";
|
||||
var pathHash = ComputeStringHash(pathHashBuilder.ToString());
|
||||
|
||||
paths.Add(new PolicyCoveragePath(
|
||||
pathId,
|
||||
sequence.ToImmutableArray(),
|
||||
decisions.ToImmutableArray(),
|
||||
pathHash));
|
||||
}
|
||||
|
||||
return paths.ToImmutableArray();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Hash Computation
|
||||
|
||||
private PolicyDeterministicHashes ComputeHashes(PolicyIrDocument document, ImmutableArray<byte> canonicalRepresentation)
|
||||
{
|
||||
// Content hash from canonical representation
|
||||
var contentHash = ComputeHash(canonicalRepresentation.AsSpan());
|
||||
|
||||
// Structure hash (rules only)
|
||||
var structureBuilder = new StringBuilder();
|
||||
if (!document.Rules.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var rule in document.Rules)
|
||||
{
|
||||
structureBuilder.Append(rule.Name);
|
||||
structureBuilder.Append(':');
|
||||
structureBuilder.Append(rule.Priority);
|
||||
structureBuilder.Append(':');
|
||||
structureBuilder.Append(ComputeExpressionHash(rule.When));
|
||||
structureBuilder.Append('|');
|
||||
}
|
||||
}
|
||||
var structureHash = ComputeStringHash(structureBuilder.ToString());
|
||||
|
||||
// Ordering hash (names and priorities only)
|
||||
var orderingBuilder = new StringBuilder();
|
||||
if (!document.Rules.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var rule in document.Rules)
|
||||
{
|
||||
orderingBuilder.Append(rule.Name);
|
||||
orderingBuilder.Append(':');
|
||||
orderingBuilder.Append(rule.Priority);
|
||||
orderingBuilder.Append('|');
|
||||
}
|
||||
}
|
||||
var orderingHash = ComputeStringHash(orderingBuilder.ToString());
|
||||
|
||||
// Identity hash (combination)
|
||||
var identityBuilder = new StringBuilder();
|
||||
identityBuilder.Append(document.Name);
|
||||
identityBuilder.Append(':');
|
||||
identityBuilder.Append(document.Syntax);
|
||||
identityBuilder.Append(':');
|
||||
identityBuilder.Append(contentHash);
|
||||
var identityHash = ComputeStringHash(identityBuilder.ToString());
|
||||
|
||||
return new PolicyDeterministicHashes(contentHash, structureHash, orderingHash, identityHash);
|
||||
}
|
||||
|
||||
private static string ComputeExpressionHash(PolicyExpression? expression)
|
||||
{
|
||||
if (expression is null) return "null";
|
||||
var summary = SummarizeExpression(expression) ?? "empty";
|
||||
return ComputeStringHash(summary);
|
||||
}
|
||||
|
||||
private static string ComputeStringHash(string value)
|
||||
{
|
||||
var bytes = Encoding.UTF8.GetBytes(value);
|
||||
return ComputeHash(bytes);
|
||||
}
|
||||
|
||||
private static string ComputeHash(ReadOnlySpan<byte> bytes)
|
||||
{
|
||||
Span<byte> hash = stackalloc byte[32];
|
||||
SHA256.HashData(bytes, hash);
|
||||
return Convert.ToHexStringLower(hash);
|
||||
}
|
||||
|
||||
private static string? SummarizeExpression(PolicyExpression? expression, int maxLength = 100)
|
||||
{
|
||||
if (expression is null) return null;
|
||||
|
||||
var summary = expression switch
|
||||
{
|
||||
PolicyLiteralExpression lit => lit.Value?.ToString() ?? "null",
|
||||
PolicyIdentifierExpression id => id.Name,
|
||||
PolicyMemberAccessExpression member => $"{SummarizeExpression(member.Target)}.{member.Member}",
|
||||
PolicyInvocationExpression inv => $"{SummarizeExpression(inv.Target)}({string.Join(", ", inv.Arguments.IsDefaultOrEmpty ? [] : inv.Arguments.Select(a => SummarizeExpression(a)))})",
|
||||
PolicyIndexerExpression idx => $"{SummarizeExpression(idx.Target)}[{SummarizeExpression(idx.Index)}]",
|
||||
PolicyUnaryExpression unary => $"{unary.Operator} {SummarizeExpression(unary.Operand)}",
|
||||
PolicyBinaryExpression binary => $"{SummarizeExpression(binary.Left)} {binary.Operator} {SummarizeExpression(binary.Right)}",
|
||||
PolicyListExpression list => $"[{string.Join(", ", list.Items.IsDefaultOrEmpty ? [] : list.Items.Take(3).Select(i => SummarizeExpression(i)))}{(list.Items.Length > 3 ? ", ..." : "")}]",
|
||||
_ => expression.GetType().Name
|
||||
};
|
||||
|
||||
return summary.Length > maxLength ? summary[..(maxLength - 3)] + "..." : summary;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,154 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Policy.Engine.Caching;
|
||||
using StellaOps.Policy.Engine.EffectiveDecisionMap;
|
||||
using StellaOps.Policy.Engine.Events;
|
||||
using StellaOps.Policy.Engine.ExceptionCache;
|
||||
using StellaOps.Policy.Engine.Options;
|
||||
using StellaOps.Policy.Engine.Services;
|
||||
using StellaOps.Policy.Engine.WhatIfSimulation;
|
||||
using StellaOps.Policy.Engine.Workers;
|
||||
using StackExchange.Redis;
|
||||
|
||||
namespace StellaOps.Policy.Engine.DependencyInjection;
|
||||
|
||||
/// <summary>
|
||||
/// Extension methods for registering Policy Engine services.
|
||||
/// </summary>
|
||||
public static class PolicyEngineServiceCollectionExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Adds the core Policy Engine services to the service collection.
|
||||
/// Includes TimeProvider, cache, and core evaluation services.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddPolicyEngineCore(this IServiceCollection services)
|
||||
{
|
||||
// Time provider
|
||||
services.TryAddSingleton(TimeProvider.System);
|
||||
|
||||
// Core compilation and evaluation services
|
||||
services.TryAddSingleton<PolicyCompilationService>();
|
||||
|
||||
// Cache
|
||||
services.TryAddSingleton<IPolicyEvaluationCache, InMemoryPolicyEvaluationCache>();
|
||||
|
||||
// Runtime evaluation
|
||||
services.TryAddSingleton<PolicyRuntimeEvaluationService>();
|
||||
|
||||
// Bundle service
|
||||
services.TryAddSingleton<PolicyBundleService>();
|
||||
|
||||
// Decision service
|
||||
services.TryAddSingleton<PolicyDecisionService>();
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds the Policy Engine event pipeline services.
|
||||
/// Includes event processor and job scheduler.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddPolicyEngineEventPipeline(this IServiceCollection services)
|
||||
{
|
||||
// Event processor (implements both IPolicyEffectiveEventPublisher and IReEvaluationJobScheduler)
|
||||
services.TryAddSingleton<PolicyEventProcessor>();
|
||||
services.TryAddSingleton<IPolicyEffectiveEventPublisher>(sp =>
|
||||
sp.GetRequiredService<PolicyEventProcessor>());
|
||||
services.TryAddSingleton<IReEvaluationJobScheduler>(sp =>
|
||||
sp.GetRequiredService<PolicyEventProcessor>());
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds the Policy Engine evaluation worker services.
|
||||
/// Includes background host for continuous job processing.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddPolicyEngineWorker(this IServiceCollection services)
|
||||
{
|
||||
// Worker service
|
||||
services.TryAddSingleton<PolicyEvaluationWorkerService>();
|
||||
|
||||
// Background host
|
||||
services.AddHostedService<PolicyEvaluationWorkerHost>();
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds the Policy Engine explainer services.
|
||||
/// Requires IExplainTraceRepository and IPolicyPackRepository to be registered.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddPolicyEngineExplainer(this IServiceCollection services)
|
||||
{
|
||||
services.TryAddSingleton<PolicyExplainerService>();
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds the effective decision map services for Graph overlays.
|
||||
/// Requires Redis connection to be registered.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddEffectiveDecisionMap(this IServiceCollection services)
|
||||
{
|
||||
services.TryAddSingleton<IEffectiveDecisionMap, RedisEffectiveDecisionMap>();
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds the exception effective cache for fast exception lookups during policy evaluation.
|
||||
/// Requires Redis connection and IExceptionRepository to be registered.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddExceptionEffectiveCache(this IServiceCollection services)
|
||||
{
|
||||
services.TryAddSingleton<IExceptionEffectiveCache, RedisExceptionEffectiveCache>();
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds the What-If simulation service for Graph APIs.
|
||||
/// Supports hypothetical SBOM diffs and draft policies without persisting results.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddWhatIfSimulation(this IServiceCollection services)
|
||||
{
|
||||
services.TryAddSingleton<WhatIfSimulationService>();
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds Redis connection for effective decision map and evaluation cache.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddPolicyEngineRedis(
|
||||
this IServiceCollection services,
|
||||
string connectionString)
|
||||
{
|
||||
services.TryAddSingleton<IConnectionMultiplexer>(sp =>
|
||||
ConnectionMultiplexer.Connect(connectionString));
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds all Policy Engine services with default configuration.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddPolicyEngine(this IServiceCollection services)
|
||||
{
|
||||
services.AddPolicyEngineCore();
|
||||
services.AddPolicyEngineEventPipeline();
|
||||
services.AddPolicyEngineWorker();
|
||||
services.AddPolicyEngineExplainer();
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds all Policy Engine services with configuration binding.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddPolicyEngine(
|
||||
this IServiceCollection services,
|
||||
Action<PolicyEngineOptions> configure)
|
||||
{
|
||||
services.Configure(configure);
|
||||
return services.AddPolicyEngine();
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Immutable;
|
||||
using StellaOps.PolicyDsl;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Domain;
|
||||
|
||||
@@ -113,6 +114,7 @@ internal sealed record PolicyBundleRecord(
|
||||
int Size,
|
||||
DateTimeOffset CreatedAt,
|
||||
ImmutableArray<byte> Payload,
|
||||
PolicyIrDocument? CompiledDocument = null,
|
||||
PolicyAocMetadata? AocMetadata = null);
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -0,0 +1,221 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Engine.EffectiveDecisionMap;
|
||||
|
||||
/// <summary>
|
||||
/// Represents an effective policy decision for an asset/snapshot.
|
||||
/// Stored in Redis for Graph overlay lookups.
|
||||
/// </summary>
|
||||
public sealed record EffectiveDecisionEntry
|
||||
{
|
||||
/// <summary>
|
||||
/// Tenant identifier.
|
||||
/// </summary>
|
||||
[JsonPropertyName("tenant_id")]
|
||||
public required string TenantId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Asset identifier (PURL or SBOM ID).
|
||||
/// </summary>
|
||||
[JsonPropertyName("asset_id")]
|
||||
public required string AssetId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Snapshot identifier (SBOM version or evaluation run).
|
||||
/// </summary>
|
||||
[JsonPropertyName("snapshot_id")]
|
||||
public required string SnapshotId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Policy pack ID that produced this decision.
|
||||
/// </summary>
|
||||
[JsonPropertyName("pack_id")]
|
||||
public required string PackId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Policy pack version.
|
||||
/// </summary>
|
||||
[JsonPropertyName("pack_version")]
|
||||
public required int PackVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
    /// Final decision status (allow, warn, deny, blocked).
    /// </summary>
    [JsonPropertyName("status")]
    public required string Status { get; init; }

    /// <summary>
    /// Severity level if applicable.
    /// </summary>
    [JsonPropertyName("severity")]
    public string? Severity { get; init; }

    /// <summary>
    /// Rule name that determined the decision.
    /// </summary>
    [JsonPropertyName("rule_name")]
    public string? RuleName { get; init; }

    /// <summary>
    /// Priority of the applied rule.
    /// </summary>
    [JsonPropertyName("priority")]
    public int? Priority { get; init; }

    /// <summary>
    /// Exception ID if an exception was applied.
    /// </summary>
    [JsonPropertyName("exception_id")]
    public string? ExceptionId { get; init; }

    /// <summary>
    /// Count of advisories affecting this asset.
    /// </summary>
    [JsonPropertyName("advisory_count")]
    public int AdvisoryCount { get; init; }

    /// <summary>
    /// Count of critical/high severity findings.
    /// </summary>
    [JsonPropertyName("high_severity_count")]
    public int HighSeverityCount { get; init; }

    /// <summary>
    /// Aggregated annotations from the decision.
    /// </summary>
    [JsonPropertyName("annotations")]
    public ImmutableDictionary<string, string> Annotations { get; init; } = ImmutableDictionary<string, string>.Empty;

    /// <summary>
    /// Version counter for cache coherency.
    /// </summary>
    [JsonPropertyName("version")]
    public required long Version { get; init; }

    /// <summary>
    /// When this entry was evaluated.
    /// </summary>
    [JsonPropertyName("evaluated_at")]
    public required DateTimeOffset EvaluatedAt { get; init; }

    /// <summary>
    /// When this entry expires.
    /// </summary>
    [JsonPropertyName("expires_at")]
    public required DateTimeOffset ExpiresAt { get; init; }

    /// <summary>
    /// Correlation ID for tracing.
    /// </summary>
    [JsonPropertyName("correlation_id")]
    public string? CorrelationId { get; init; }
}

/// <summary>
/// Result of an effective decision map query.
/// </summary>
public sealed record EffectiveDecisionQueryResult
{
    /// <summary>
    /// Found entries mapped by asset ID.
    /// </summary>
    public required IReadOnlyDictionary<string, EffectiveDecisionEntry> Entries { get; init; }

    /// <summary>
    /// Asset IDs that were not found.
    /// </summary>
    public required IReadOnlyList<string> NotFound { get; init; }

    /// <summary>
    /// Current version of the decision map.
    /// </summary>
    public long MapVersion { get; init; }

    /// <summary>
    /// Whether the result came from cache.
    /// </summary>
    public bool FromCache { get; init; }
}

/// <summary>
/// Summary statistics for a snapshot's effective decisions.
/// </summary>
public sealed record EffectiveDecisionSummary
{
    /// <summary>
    /// Snapshot ID.
    /// </summary>
    public required string SnapshotId { get; init; }

    /// <summary>
    /// Total assets evaluated.
    /// </summary>
    public int TotalAssets { get; init; }

    /// <summary>
    /// Count by status.
    /// </summary>
    public required IReadOnlyDictionary<string, int> StatusCounts { get; init; }

    /// <summary>
    /// Count by severity.
    /// </summary>
    public required IReadOnlyDictionary<string, int> SeverityCounts { get; init; }

    /// <summary>
    /// Assets with exceptions applied.
    /// </summary>
    public int ExceptionCount { get; init; }

    /// <summary>
    /// Map version at time of summary.
    /// </summary>
    public long MapVersion { get; init; }

    /// <summary>
    /// When this summary was computed.
    /// </summary>
    public DateTimeOffset ComputedAt { get; init; }
}

/// <summary>
/// Filter options for querying effective decisions.
/// </summary>
public sealed record EffectiveDecisionFilter
{
    /// <summary>
    /// Filter by status values.
    /// </summary>
    public IReadOnlyList<string>? Statuses { get; init; }

    /// <summary>
    /// Filter by severity values.
    /// </summary>
    public IReadOnlyList<string>? Severities { get; init; }

    /// <summary>
    /// Include only assets with exceptions.
    /// </summary>
    public bool? HasException { get; init; }

    /// <summary>
    /// Filter by minimum advisory count.
    /// </summary>
    public int? MinAdvisoryCount { get; init; }

    /// <summary>
    /// Filter by minimum high severity count.
    /// </summary>
    public int? MinHighSeverityCount { get; init; }

    /// <summary>
    /// Maximum results to return.
    /// </summary>
    public int Limit { get; init; } = 1000;

    /// <summary>
    /// Offset for pagination.
    /// </summary>
    public int Offset { get; init; } = 0;
}
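For orientation while reading the diff: a minimal sketch (not part of the commit) of constructing an entry and a filter. The asset ID, rule name, and TTL values are invented, and the set of required members shown may be incomplete because the first half of EffectiveDecisionEntry is defined earlier in this file.

// Illustrative only; identifiers and values below are assumptions.
var entry = new EffectiveDecisionEntry
{
    AssetId = "asset-0001",               // hypothetical asset identifier
    Status = "warn",
    Severity = "high",
    RuleName = "severity-gate",           // hypothetical rule name
    AdvisoryCount = 3,
    HighSeverityCount = 1,
    Version = 1,
    EvaluatedAt = DateTimeOffset.UtcNow,
    ExpiresAt = DateTimeOffset.UtcNow.AddMinutes(60),
};

var filter = new EffectiveDecisionFilter
{
    Statuses = new[] { "warn", "blocked" },
    MinHighSeverityCount = 1,
    Limit = 100,
};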
@@ -0,0 +1,144 @@
namespace StellaOps.Policy.Engine.EffectiveDecisionMap;

/// <summary>
/// Interface for effective decision map storage.
/// Maintains policy decisions per asset/snapshot for Graph overlays.
/// </summary>
public interface IEffectiveDecisionMap
{
    /// <summary>
    /// Sets an effective decision entry.
    /// </summary>
    Task SetAsync(
        string tenantId,
        string snapshotId,
        EffectiveDecisionEntry entry,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Sets multiple effective decision entries.
    /// </summary>
    Task SetBatchAsync(
        string tenantId,
        string snapshotId,
        IEnumerable<EffectiveDecisionEntry> entries,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets an effective decision entry.
    /// </summary>
    Task<EffectiveDecisionEntry?> GetAsync(
        string tenantId,
        string snapshotId,
        string assetId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets multiple effective decision entries.
    /// </summary>
    Task<EffectiveDecisionQueryResult> GetBatchAsync(
        string tenantId,
        string snapshotId,
        IReadOnlyList<string> assetIds,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets all effective decisions for a snapshot.
    /// </summary>
    Task<IReadOnlyList<EffectiveDecisionEntry>> GetAllForSnapshotAsync(
        string tenantId,
        string snapshotId,
        EffectiveDecisionFilter? filter = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets a summary of effective decisions for a snapshot.
    /// </summary>
    Task<EffectiveDecisionSummary> GetSummaryAsync(
        string tenantId,
        string snapshotId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Invalidates a specific entry.
    /// </summary>
    Task InvalidateAsync(
        string tenantId,
        string snapshotId,
        string assetId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Invalidates all entries for a snapshot.
    /// </summary>
    Task InvalidateSnapshotAsync(
        string tenantId,
        string snapshotId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Invalidates all entries for a tenant.
    /// </summary>
    Task InvalidateTenantAsync(
        string tenantId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the current map version for a snapshot.
    /// </summary>
    Task<long> GetVersionAsync(
        string tenantId,
        string snapshotId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Increments and returns the new map version for a snapshot.
    /// </summary>
    Task<long> IncrementVersionAsync(
        string tenantId,
        string snapshotId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets statistics about the effective decision map.
    /// </summary>
    Task<EffectiveDecisionMapStats> GetStatsAsync(
        string? tenantId = null,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Statistics about the effective decision map.
/// </summary>
public sealed record EffectiveDecisionMapStats
{
    /// <summary>
    /// Total entries across all tenants/snapshots.
    /// </summary>
    public long TotalEntries { get; init; }

    /// <summary>
    /// Total snapshots tracked.
    /// </summary>
    public long TotalSnapshots { get; init; }

    /// <summary>
    /// Memory used in bytes (if available).
    /// </summary>
    public long? MemoryUsedBytes { get; init; }

    /// <summary>
    /// Entries expiring in the next hour.
    /// </summary>
    public long ExpiringWithinHour { get; init; }

    /// <summary>
    /// Last eviction timestamp.
    /// </summary>
    public DateTimeOffset? LastEvictionAt { get; init; }

    /// <summary>
    /// Count of entries evicted in last eviction run.
    /// </summary>
    public long LastEvictionCount { get; init; }
}
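A usage sketch for the interface above (not part of the commit): write a batch of decisions for a snapshot, then read back the aggregated summary. The tenant and snapshot identifiers are placeholders, and decisionMap can be any IEffectiveDecisionMap implementation, such as the Redis-backed one added below.

// Sketch only; tenant/snapshot identifiers are placeholder values.
static async Task PublishSnapshotAsync(
    IEffectiveDecisionMap decisionMap,
    IReadOnlyList<EffectiveDecisionEntry> entries,
    CancellationToken ct)
{
    const string tenantId = "tenant-a";
    const string snapshotId = "snap-2025-10";

    // A batch write also bumps the snapshot's map version (see SetBatchAsync below).
    await decisionMap.SetBatchAsync(tenantId, snapshotId, entries, ct);

    var summary = await decisionMap.GetSummaryAsync(tenantId, snapshotId, ct);
    Console.WriteLine($"{summary.TotalAssets} assets at map version {summary.MapVersion}");
}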
@@ -0,0 +1,501 @@
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Policy.Engine.Options;
|
||||
using StellaOps.Policy.Engine.Telemetry;
|
||||
using StackExchange.Redis;
|
||||
|
||||
namespace StellaOps.Policy.Engine.EffectiveDecisionMap;
|
||||
|
||||
/// <summary>
|
||||
/// Redis-backed effective decision map with versioning and TTL-based eviction.
|
||||
/// Key structure:
|
||||
/// - Entry: stellaops:edm:{tenant}:{snapshot}:e:{asset} -> JSON entry
|
||||
/// - Version: stellaops:edm:{tenant}:{snapshot}:v -> integer version
|
||||
/// - Index: stellaops:edm:{tenant}:{snapshot}:idx -> sorted set of assets by evaluated_at
|
||||
/// </summary>
|
||||
internal sealed class RedisEffectiveDecisionMap : IEffectiveDecisionMap
|
||||
{
|
||||
private readonly IConnectionMultiplexer _redis;
|
||||
private readonly ILogger<RedisEffectiveDecisionMap> _logger;
|
||||
private readonly EffectiveDecisionMapOptions _options;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
private const string KeyPrefix = "stellaops:edm";
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false,
|
||||
};
|
||||
|
||||
public RedisEffectiveDecisionMap(
|
||||
IConnectionMultiplexer redis,
|
||||
ILogger<RedisEffectiveDecisionMap> logger,
|
||||
IOptions<PolicyEngineOptions> options,
|
||||
TimeProvider timeProvider)
|
||||
{
|
||||
_redis = redis ?? throw new ArgumentNullException(nameof(redis));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_options = options?.Value.EffectiveDecisionMap ?? new EffectiveDecisionMapOptions();
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
}
|
||||
|
||||
public async Task SetAsync(
|
||||
string tenantId,
|
||||
string snapshotId,
|
||||
EffectiveDecisionEntry entry,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(entry);
|
||||
|
||||
var db = _redis.GetDatabase();
|
||||
var entryKey = GetEntryKey(tenantId, snapshotId, entry.AssetId);
|
||||
var indexKey = GetIndexKey(tenantId, snapshotId);
|
||||
|
||||
var json = JsonSerializer.Serialize(entry, JsonOptions);
|
||||
var ttl = entry.ExpiresAt - _timeProvider.GetUtcNow();
|
||||
if (ttl <= TimeSpan.Zero)
|
||||
{
|
||||
ttl = TimeSpan.FromMinutes(_options.DefaultTtlMinutes);
|
||||
}
|
||||
|
||||
var tasks = new List<Task>
|
||||
{
|
||||
db.StringSetAsync(entryKey, json, ttl),
|
||||
db.SortedSetAddAsync(indexKey, entry.AssetId, entry.EvaluatedAt.ToUnixTimeMilliseconds()),
|
||||
db.KeyExpireAsync(indexKey, ttl + TimeSpan.FromMinutes(5)), // Index lives slightly longer
|
||||
};
|
||||
|
||||
await Task.WhenAll(tasks).ConfigureAwait(false);
|
||||
|
||||
PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(1,
|
||||
new KeyValuePair<string, object?>("operation", "set"),
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId));
|
||||
}
|
||||
|
||||
public async Task SetBatchAsync(
|
||||
string tenantId,
|
||||
string snapshotId,
|
||||
IEnumerable<EffectiveDecisionEntry> entries,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
var batch = db.CreateBatch();
|
||||
var indexKey = GetIndexKey(tenantId, snapshotId);
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
var count = 0;
|
||||
|
||||
var sortedSetEntries = new List<SortedSetEntry>();
|
||||
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
var entryKey = GetEntryKey(tenantId, snapshotId, entry.AssetId);
|
||||
var json = JsonSerializer.Serialize(entry, JsonOptions);
|
||||
var ttl = entry.ExpiresAt - now;
|
||||
if (ttl <= TimeSpan.Zero)
|
||||
{
|
||||
ttl = TimeSpan.FromMinutes(_options.DefaultTtlMinutes);
|
||||
}
|
||||
|
||||
_ = batch.StringSetAsync(entryKey, json, ttl);
|
||||
sortedSetEntries.Add(new SortedSetEntry(entry.AssetId, entry.EvaluatedAt.ToUnixTimeMilliseconds()));
|
||||
count++;
|
||||
}
|
||||
|
||||
if (sortedSetEntries.Count > 0)
|
||||
{
|
||||
_ = batch.SortedSetAddAsync(indexKey, sortedSetEntries.ToArray());
|
||||
_ = batch.KeyExpireAsync(indexKey, TimeSpan.FromMinutes(_options.DefaultTtlMinutes + 5));
|
||||
}
|
||||
|
||||
batch.Execute();
|
||||
await Task.CompletedTask; // Batch operations are synchronous
|
||||
|
||||
// Increment version after batch write
|
||||
await IncrementVersionAsync(tenantId, snapshotId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(count,
|
||||
new KeyValuePair<string, object?>("operation", "set_batch"),
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId));
|
||||
|
||||
_logger.LogDebug("Set {Count} effective decisions for snapshot {SnapshotId}", count, snapshotId);
|
||||
}
|
||||
|
||||
public async Task<EffectiveDecisionEntry?> GetAsync(
|
||||
string tenantId,
|
||||
string snapshotId,
|
||||
string assetId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
var entryKey = GetEntryKey(tenantId, snapshotId, assetId);
|
||||
|
||||
var json = await db.StringGetAsync(entryKey).ConfigureAwait(false);
|
||||
|
||||
PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(1,
|
||||
new KeyValuePair<string, object?>("operation", "get"),
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("cache_hit", json.HasValue));
|
||||
|
||||
if (!json.HasValue)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return JsonSerializer.Deserialize<EffectiveDecisionEntry>((string)json!, JsonOptions);
|
||||
}
|
||||
|
||||
public async Task<EffectiveDecisionQueryResult> GetBatchAsync(
|
||||
string tenantId,
|
||||
string snapshotId,
|
||||
IReadOnlyList<string> assetIds,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
var keys = assetIds.Select(id => (RedisKey)GetEntryKey(tenantId, snapshotId, id)).ToArray();
|
||||
|
||||
var values = await db.StringGetAsync(keys).ConfigureAwait(false);
|
||||
|
||||
var entries = new Dictionary<string, EffectiveDecisionEntry>();
|
||||
var notFound = new List<string>();
|
||||
|
||||
for (int i = 0; i < assetIds.Count; i++)
|
||||
{
|
||||
if (values[i].HasValue)
|
||||
{
|
||||
var entry = JsonSerializer.Deserialize<EffectiveDecisionEntry>((string)values[i]!, JsonOptions);
|
||||
if (entry != null)
|
||||
{
|
||||
entries[assetIds[i]] = entry;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
notFound.Add(assetIds[i]);
|
||||
}
|
||||
}
|
||||
|
||||
var version = await GetVersionAsync(tenantId, snapshotId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(assetIds.Count,
|
||||
new KeyValuePair<string, object?>("operation", "get_batch"),
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId));
|
||||
|
||||
return new EffectiveDecisionQueryResult
|
||||
{
|
||||
Entries = entries,
|
||||
NotFound = notFound,
|
||||
MapVersion = version,
|
||||
FromCache = true,
|
||||
};
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<EffectiveDecisionEntry>> GetAllForSnapshotAsync(
|
||||
string tenantId,
|
||||
string snapshotId,
|
||||
EffectiveDecisionFilter? filter = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
var indexKey = GetIndexKey(tenantId, snapshotId);
|
||||
|
||||
// Get all asset IDs from the index
|
||||
var assetIds = await db.SortedSetRangeByRankAsync(indexKey, 0, -1, Order.Descending)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
if (assetIds.Length == 0)
|
||||
{
|
||||
return Array.Empty<EffectiveDecisionEntry>();
|
||||
}
|
||||
|
||||
// Get all entries
|
||||
var keys = assetIds.Select(id => (RedisKey)GetEntryKey(tenantId, snapshotId, id!)).ToArray();
|
||||
var values = await db.StringGetAsync(keys).ConfigureAwait(false);
|
||||
|
||||
var entries = new List<EffectiveDecisionEntry>();
|
||||
|
||||
foreach (var value in values)
|
||||
{
|
||||
if (!value.HasValue) continue;
|
||||
|
||||
var entry = JsonSerializer.Deserialize<EffectiveDecisionEntry>((string)value!, JsonOptions);
|
||||
if (entry is null) continue;
|
||||
|
||||
// Apply filters
|
||||
if (filter != null)
|
||||
{
|
||||
if (filter.Statuses?.Count > 0 &&
|
||||
!filter.Statuses.Contains(entry.Status, StringComparer.OrdinalIgnoreCase))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (filter.Severities?.Count > 0 &&
|
||||
(entry.Severity is null || !filter.Severities.Contains(entry.Severity, StringComparer.OrdinalIgnoreCase)))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (filter.HasException == true && entry.ExceptionId is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (filter.HasException == false && entry.ExceptionId is not null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (filter.MinAdvisoryCount.HasValue && entry.AdvisoryCount < filter.MinAdvisoryCount)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (filter.MinHighSeverityCount.HasValue && entry.HighSeverityCount < filter.MinHighSeverityCount)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
entries.Add(entry);
|
||||
|
||||
// Apply limit
|
||||
if (filter?.Limit > 0 && entries.Count >= filter.Limit + (filter?.Offset ?? 0))
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Apply offset
|
||||
if (filter?.Offset > 0)
|
||||
{
|
||||
entries = entries.Skip(filter.Offset).ToList();
|
||||
}
|
||||
|
||||
// Apply final limit
|
||||
if (filter?.Limit > 0)
|
||||
{
|
||||
entries = entries.Take(filter.Limit).ToList();
|
||||
}
|
||||
|
||||
PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(1,
|
||||
new KeyValuePair<string, object?>("operation", "get_all"),
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId));
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
public async Task<EffectiveDecisionSummary> GetSummaryAsync(
|
||||
string tenantId,
|
||||
string snapshotId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var entries = await GetAllForSnapshotAsync(tenantId, snapshotId, null, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
var statusCounts = entries
|
||||
.GroupBy(e => e.Status, StringComparer.OrdinalIgnoreCase)
|
||||
.ToDictionary(g => g.Key, g => g.Count(), StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
var severityCounts = entries
|
||||
.Where(e => e.Severity is not null)
|
||||
.GroupBy(e => e.Severity!, StringComparer.OrdinalIgnoreCase)
|
||||
.ToDictionary(g => g.Key, g => g.Count(), StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
var version = await GetVersionAsync(tenantId, snapshotId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
return new EffectiveDecisionSummary
|
||||
{
|
||||
SnapshotId = snapshotId,
|
||||
TotalAssets = entries.Count,
|
||||
StatusCounts = statusCounts,
|
||||
SeverityCounts = severityCounts,
|
||||
ExceptionCount = entries.Count(e => e.ExceptionId is not null),
|
||||
MapVersion = version,
|
||||
ComputedAt = _timeProvider.GetUtcNow(),
|
||||
};
|
||||
}
|
||||
|
||||
public async Task InvalidateAsync(
|
||||
string tenantId,
|
||||
string snapshotId,
|
||||
string assetId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
var entryKey = GetEntryKey(tenantId, snapshotId, assetId);
|
||||
var indexKey = GetIndexKey(tenantId, snapshotId);
|
||||
|
||||
await Task.WhenAll(
|
||||
db.KeyDeleteAsync(entryKey),
|
||||
db.SortedSetRemoveAsync(indexKey, assetId)
|
||||
).ConfigureAwait(false);
|
||||
|
||||
await IncrementVersionAsync(tenantId, snapshotId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(1,
|
||||
new KeyValuePair<string, object?>("operation", "invalidate"),
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId));
|
||||
}
|
||||
|
||||
public async Task InvalidateSnapshotAsync(
|
||||
string tenantId,
|
||||
string snapshotId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
var indexKey = GetIndexKey(tenantId, snapshotId);
|
||||
|
||||
// Get all asset IDs from the index
|
||||
var assetIds = await db.SortedSetRangeByRankAsync(indexKey).ConfigureAwait(false);
|
||||
|
||||
if (assetIds.Length > 0)
|
||||
{
|
||||
var keys = assetIds
|
||||
.Select(id => (RedisKey)GetEntryKey(tenantId, snapshotId, id!))
|
||||
.Append(indexKey)
|
||||
.Append(GetVersionKey(tenantId, snapshotId))
|
||||
.ToArray();
|
||||
|
||||
await db.KeyDeleteAsync(keys).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(assetIds.Length,
|
||||
new KeyValuePair<string, object?>("operation", "invalidate_snapshot"),
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId));
|
||||
|
||||
_logger.LogInformation("Invalidated {Count} entries for snapshot {SnapshotId}", assetIds.Length, snapshotId);
|
||||
}
|
||||
|
||||
public async Task InvalidateTenantAsync(
|
||||
string tenantId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var server = _redis.GetServer(_redis.GetEndPoints().First());
|
||||
var pattern = $"{KeyPrefix}:{tenantId}:*";
|
||||
var keys = server.Keys(pattern: pattern).ToArray();
|
||||
|
||||
if (keys.Length > 0)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
await db.KeyDeleteAsync(keys).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
PolicyEngineTelemetry.EffectiveDecisionMapOperations.Add(keys.Length,
|
||||
new KeyValuePair<string, object?>("operation", "invalidate_tenant"),
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId));
|
||||
|
||||
_logger.LogInformation("Invalidated {Count} keys for tenant {TenantId}", keys.Length, tenantId);
|
||||
}
|
||||
|
||||
public async Task<long> GetVersionAsync(
|
||||
string tenantId,
|
||||
string snapshotId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
var versionKey = GetVersionKey(tenantId, snapshotId);
|
||||
|
||||
var version = await db.StringGetAsync(versionKey).ConfigureAwait(false);
|
||||
return version.HasValue ? (long)version : 0;
|
||||
}
|
||||
|
||||
public async Task<long> IncrementVersionAsync(
|
||||
string tenantId,
|
||||
string snapshotId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
var versionKey = GetVersionKey(tenantId, snapshotId);
|
||||
|
||||
var newVersion = await db.StringIncrementAsync(versionKey).ConfigureAwait(false);
|
||||
|
||||
// Set TTL on version key if not already set
|
||||
await db.KeyExpireAsync(versionKey, TimeSpan.FromMinutes(_options.DefaultTtlMinutes + 10), ExpireWhen.HasNoExpiry)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return newVersion;
|
||||
}
|
||||
|
||||
public async Task<EffectiveDecisionMapStats> GetStatsAsync(
|
||||
string? tenantId = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var server = _redis.GetServer(_redis.GetEndPoints().First());
|
||||
var pattern = tenantId != null
|
||||
? $"{KeyPrefix}:{tenantId}:*:e:*"
|
||||
: $"{KeyPrefix}:*:e:*";
|
||||
|
||||
var entryCount = server.Keys(pattern: pattern).Count();
|
||||
|
||||
var snapshotPattern = tenantId != null
|
||||
? $"{KeyPrefix}:{tenantId}:*:idx"
|
||||
: $"{KeyPrefix}:*:idx";
|
||||
|
||||
var snapshotCount = server.Keys(pattern: snapshotPattern).Count();
|
||||
|
||||
long? memoryUsed = null;
|
||||
try
|
||||
{
|
||||
var info = server.Info("memory");
|
||||
var memorySection = info.FirstOrDefault(s => s.Key == "Memory");
|
||||
if (memorySection is not null)
|
||||
{
|
||||
var usedMemory = memorySection.FirstOrDefault(p => p.Key == "used_memory");
|
||||
if (usedMemory.Key is not null && long.TryParse(usedMemory.Value, out var bytes))
|
||||
{
|
||||
memoryUsed = bytes;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore - memory info not available
|
||||
}
|
||||
|
||||
return new EffectiveDecisionMapStats
|
||||
{
|
||||
TotalEntries = entryCount,
|
||||
TotalSnapshots = snapshotCount,
|
||||
MemoryUsedBytes = memoryUsed,
|
||||
ExpiringWithinHour = 0, // Would require scanning TTLs
|
||||
LastEvictionAt = null,
|
||||
LastEvictionCount = 0,
|
||||
};
|
||||
}
|
||||
|
||||
private static string GetEntryKey(string tenantId, string snapshotId, string assetId) =>
|
||||
$"{KeyPrefix}:{tenantId}:{snapshotId}:e:{assetId}";
|
||||
|
||||
private static string GetIndexKey(string tenantId, string snapshotId) =>
|
||||
$"{KeyPrefix}:{tenantId}:{snapshotId}:idx";
|
||||
|
||||
private static string GetVersionKey(string tenantId, string snapshotId) =>
|
||||
$"{KeyPrefix}:{tenantId}:{snapshotId}:v";
|
||||
}
|
||||
|
||||
/// <summary>
/// Configuration options for effective decision map.
/// </summary>
public sealed class EffectiveDecisionMapOptions
{
    /// <summary>
    /// Default TTL for entries in minutes.
    /// </summary>
    public int DefaultTtlMinutes { get; set; } = 60;

    /// <summary>
    /// Maximum entries per snapshot.
    /// </summary>
    public int MaxEntriesPerSnapshot { get; set; } = 100000;

    /// <summary>
    /// Whether to enable automatic eviction of expired entries.
    /// </summary>
    public bool EnableAutoEviction { get; set; } = true;

    /// <summary>
    /// Eviction check interval in minutes.
    /// </summary>
    public int EvictionIntervalMinutes { get; set; } = 5;
}
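A registration sketch, not taken from this commit: the Redis-backed map reads these options through IOptions<PolicyEngineOptions>.Value.EffectiveDecisionMap, so host wiring could look roughly like the following. The "PolicyEngine" configuration section name is an assumption, and IConnectionMultiplexer plus TimeProvider are expected to be registered elsewhere.

using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Policy.Engine.EffectiveDecisionMap;
using StellaOps.Policy.Engine.Options;

internal static class EffectiveDecisionMapRegistration
{
    // Assumed section name; adjust to the host's actual configuration layout.
    public static IServiceCollection AddEffectiveDecisionMap(
        this IServiceCollection services, IConfiguration configuration)
    {
        services.Configure<PolicyEngineOptions>(configuration.GetSection("PolicyEngine"));
        services.AddSingleton<IEffectiveDecisionMap, RedisEffectiveDecisionMap>();
        return services;
    }
}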
@@ -0,0 +1,184 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Events;
|
||||
|
||||
/// <summary>
|
||||
/// Type of policy effective event.
|
||||
/// </summary>
|
||||
[JsonConverter(typeof(JsonStringEnumConverter<PolicyEffectiveEventType>))]
|
||||
public enum PolicyEffectiveEventType
|
||||
{
|
||||
/// <summary>Policy decision changed for a subject.</summary>
|
||||
[JsonPropertyName("policy.effective.updated")]
|
||||
EffectiveUpdated,
|
||||
|
||||
/// <summary>Policy decision added for new subject.</summary>
|
||||
[JsonPropertyName("policy.effective.added")]
|
||||
EffectiveAdded,
|
||||
|
||||
/// <summary>Policy decision removed (subject no longer affected).</summary>
|
||||
[JsonPropertyName("policy.effective.removed")]
|
||||
EffectiveRemoved,
|
||||
|
||||
/// <summary>Batch re-evaluation completed.</summary>
|
||||
[JsonPropertyName("policy.effective.batch_completed")]
|
||||
BatchCompleted
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Base class for policy effective events.
|
||||
/// </summary>
|
||||
public abstract record PolicyEffectiveEvent(
|
||||
[property: JsonPropertyName("event_id")] string EventId,
|
||||
[property: JsonPropertyName("event_type")] PolicyEffectiveEventType EventType,
|
||||
[property: JsonPropertyName("tenant_id")] string TenantId,
|
||||
[property: JsonPropertyName("timestamp")] DateTimeOffset Timestamp,
|
||||
[property: JsonPropertyName("correlation_id")] string? CorrelationId);
|
||||
|
||||
/// <summary>
|
||||
/// Event emitted when a policy decision is updated for a subject.
|
||||
/// </summary>
|
||||
public sealed record PolicyEffectiveUpdatedEvent(
|
||||
string EventId,
|
||||
string TenantId,
|
||||
DateTimeOffset Timestamp,
|
||||
string? CorrelationId,
|
||||
[property: JsonPropertyName("pack_id")] string PackId,
|
||||
[property: JsonPropertyName("pack_version")] int PackVersion,
|
||||
[property: JsonPropertyName("subject_purl")] string SubjectPurl,
|
||||
[property: JsonPropertyName("advisory_id")] string AdvisoryId,
|
||||
[property: JsonPropertyName("trigger_type")] string TriggerType,
|
||||
[property: JsonPropertyName("diff")] PolicyDecisionDiff Diff)
|
||||
: PolicyEffectiveEvent(EventId, PolicyEffectiveEventType.EffectiveUpdated, TenantId, Timestamp, CorrelationId);
|
||||
|
||||
/// <summary>
|
||||
/// Diff metadata for policy decision changes.
|
||||
/// </summary>
|
||||
public sealed record PolicyDecisionDiff(
|
||||
[property: JsonPropertyName("old_status")] string? OldStatus,
|
||||
[property: JsonPropertyName("new_status")] string NewStatus,
|
||||
[property: JsonPropertyName("old_severity")] string? OldSeverity,
|
||||
[property: JsonPropertyName("new_severity")] string? NewSeverity,
|
||||
[property: JsonPropertyName("old_rule")] string? OldRule,
|
||||
[property: JsonPropertyName("new_rule")] string? NewRule,
|
||||
[property: JsonPropertyName("old_priority")] int? OldPriority,
|
||||
[property: JsonPropertyName("new_priority")] int? NewPriority,
|
||||
[property: JsonPropertyName("status_changed")] bool StatusChanged,
|
||||
[property: JsonPropertyName("severity_changed")] bool SeverityChanged,
|
||||
[property: JsonPropertyName("rule_changed")] bool RuleChanged,
|
||||
[property: JsonPropertyName("annotations_added")] ImmutableArray<string> AnnotationsAdded,
|
||||
[property: JsonPropertyName("annotations_removed")] ImmutableArray<string> AnnotationsRemoved)
|
||||
{
|
||||
/// <summary>
|
||||
/// Creates a diff between two policy decisions.
|
||||
/// </summary>
|
||||
public static PolicyDecisionDiff Create(
|
||||
string? oldStatus, string newStatus,
|
||||
string? oldSeverity, string? newSeverity,
|
||||
string? oldRule, string? newRule,
|
||||
int? oldPriority, int? newPriority,
|
||||
ImmutableDictionary<string, string>? oldAnnotations,
|
||||
ImmutableDictionary<string, string>? newAnnotations)
|
||||
{
|
||||
var oldKeys = oldAnnotations?.Keys ?? Enumerable.Empty<string>();
|
||||
var newKeys = newAnnotations?.Keys ?? Enumerable.Empty<string>();
|
||||
|
||||
var annotationsAdded = newKeys
|
||||
.Where(k => oldAnnotations?.ContainsKey(k) != true)
|
||||
.OrderBy(k => k)
|
||||
.ToImmutableArray();
|
||||
|
||||
var annotationsRemoved = oldKeys
|
||||
.Where(k => newAnnotations?.ContainsKey(k) != true)
|
||||
.OrderBy(k => k)
|
||||
.ToImmutableArray();
|
||||
|
||||
return new PolicyDecisionDiff(
|
||||
OldStatus: oldStatus,
|
||||
NewStatus: newStatus,
|
||||
OldSeverity: oldSeverity,
|
||||
NewSeverity: newSeverity,
|
||||
OldRule: oldRule,
|
||||
NewRule: newRule,
|
||||
OldPriority: oldPriority,
|
||||
NewPriority: newPriority,
|
||||
StatusChanged: !string.Equals(oldStatus, newStatus, StringComparison.Ordinal),
|
||||
SeverityChanged: !string.Equals(oldSeverity, newSeverity, StringComparison.Ordinal),
|
||||
RuleChanged: !string.Equals(oldRule, newRule, StringComparison.Ordinal),
|
||||
AnnotationsAdded: annotationsAdded,
|
||||
AnnotationsRemoved: annotationsRemoved);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Event emitted when batch re-evaluation completes.
|
||||
/// </summary>
|
||||
public sealed record PolicyBatchCompletedEvent(
|
||||
string EventId,
|
||||
string TenantId,
|
||||
DateTimeOffset Timestamp,
|
||||
string? CorrelationId,
|
||||
[property: JsonPropertyName("batch_id")] string BatchId,
|
||||
[property: JsonPropertyName("trigger_type")] string TriggerType,
|
||||
[property: JsonPropertyName("subjects_evaluated")] int SubjectsEvaluated,
|
||||
[property: JsonPropertyName("decisions_changed")] int DecisionsChanged,
|
||||
[property: JsonPropertyName("duration_ms")] long DurationMs,
|
||||
[property: JsonPropertyName("summary")] PolicyBatchSummary Summary)
|
||||
: PolicyEffectiveEvent(EventId, PolicyEffectiveEventType.BatchCompleted, TenantId, Timestamp, CorrelationId);
|
||||
|
||||
/// <summary>
|
||||
/// Summary of changes in a batch re-evaluation.
|
||||
/// </summary>
|
||||
public sealed record PolicyBatchSummary(
|
||||
[property: JsonPropertyName("status_upgrades")] int StatusUpgrades,
|
||||
[property: JsonPropertyName("status_downgrades")] int StatusDowngrades,
|
||||
[property: JsonPropertyName("new_blocks")] int NewBlocks,
|
||||
[property: JsonPropertyName("blocks_removed")] int BlocksRemoved,
|
||||
[property: JsonPropertyName("affected_advisories")] ImmutableArray<string> AffectedAdvisories,
|
||||
[property: JsonPropertyName("affected_purls")] ImmutableArray<string> AffectedPurls);
|
||||
|
||||
/// <summary>
|
||||
/// Request to schedule a re-evaluation job.
|
||||
/// </summary>
|
||||
public sealed record ReEvaluationJobRequest(
|
||||
string JobId,
|
||||
string TenantId,
|
||||
string PackId,
|
||||
int PackVersion,
|
||||
string TriggerType,
|
||||
string? CorrelationId,
|
||||
DateTimeOffset CreatedAt,
|
||||
PolicyChangePriority Priority,
|
||||
ImmutableArray<string> AdvisoryIds,
|
||||
ImmutableArray<string> SubjectPurls,
|
||||
ImmutableArray<string> SbomIds,
|
||||
ImmutableDictionary<string, string> Metadata)
|
||||
{
|
||||
/// <summary>
|
||||
/// Creates a deterministic job ID.
|
||||
/// </summary>
|
||||
public static string CreateJobId(
|
||||
string tenantId,
|
||||
string packId,
|
||||
int packVersion,
|
||||
string triggerType,
|
||||
DateTimeOffset createdAt)
|
||||
{
|
||||
var seed = $"{tenantId}|{packId}|{packVersion}|{triggerType}|{createdAt:O}";
|
||||
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(seed));
|
||||
return $"rej-{Convert.ToHexStringLower(bytes)[..16]}";
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Policy change priority from IncrementalOrchestrator namespace.
|
||||
/// </summary>
|
||||
public enum PolicyChangePriority
|
||||
{
|
||||
Normal = 0,
|
||||
High = 1,
|
||||
Emergency = 2
|
||||
}
|
||||
@@ -0,0 +1,454 @@
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Immutable;
|
||||
using System.Diagnostics;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Policy.Engine.IncrementalOrchestrator;
|
||||
using StellaOps.Policy.Engine.Services;
|
||||
using StellaOps.Policy.Engine.Telemetry;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Events;
|
||||
|
||||
/// <summary>
|
||||
/// Interface for publishing policy effective events.
|
||||
/// </summary>
|
||||
public interface IPolicyEffectiveEventPublisher
|
||||
{
|
||||
/// <summary>
|
||||
/// Publishes a policy effective updated event.
|
||||
/// </summary>
|
||||
Task PublishEffectiveUpdatedAsync(PolicyEffectiveUpdatedEvent evt, CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Publishes a batch completed event.
|
||||
/// </summary>
|
||||
Task PublishBatchCompletedAsync(PolicyBatchCompletedEvent evt, CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Registers a handler for effective events.
|
||||
/// </summary>
|
||||
void RegisterHandler(Func<PolicyEffectiveEvent, Task> handler);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for scheduling re-evaluation jobs.
|
||||
/// </summary>
|
||||
public interface IReEvaluationJobScheduler
|
||||
{
|
||||
/// <summary>
|
||||
/// Schedules a re-evaluation job.
|
||||
/// </summary>
|
||||
Task<string> ScheduleAsync(ReEvaluationJobRequest request, CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Gets pending job count.
|
||||
/// </summary>
|
||||
int GetPendingJobCount();
|
||||
|
||||
/// <summary>
|
||||
/// Gets job by ID.
|
||||
/// </summary>
|
||||
ReEvaluationJobRequest? GetJob(string jobId);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Processes policy change events, schedules re-evaluations, and emits effective events.
|
||||
/// </summary>
|
||||
public sealed class PolicyEventProcessor : IPolicyEffectiveEventPublisher, IReEvaluationJobScheduler
|
||||
{
|
||||
private readonly ILogger<PolicyEventProcessor> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly ConcurrentQueue<ReEvaluationJobRequest> _jobQueue;
|
||||
private readonly ConcurrentDictionary<string, ReEvaluationJobRequest> _jobIndex;
|
||||
private readonly ConcurrentQueue<PolicyEffectiveEvent> _eventStream;
|
||||
private readonly List<Func<PolicyEffectiveEvent, Task>> _eventHandlers;
|
||||
private readonly object _handlersLock = new();
|
||||
|
||||
private const int MaxQueueSize = 10000;
|
||||
private const int MaxEventStreamSize = 50000;
|
||||
|
||||
public PolicyEventProcessor(
|
||||
ILogger<PolicyEventProcessor> logger,
|
||||
TimeProvider timeProvider)
|
||||
{
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
_jobQueue = new ConcurrentQueue<ReEvaluationJobRequest>();
|
||||
_jobIndex = new ConcurrentDictionary<string, ReEvaluationJobRequest>(StringComparer.OrdinalIgnoreCase);
|
||||
_eventStream = new ConcurrentQueue<PolicyEffectiveEvent>();
|
||||
_eventHandlers = new List<Func<PolicyEffectiveEvent, Task>>();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Processes a policy change event and schedules re-evaluation if needed.
|
||||
/// </summary>
|
||||
public async Task<string?> ProcessChangeEventAsync(
|
||||
PolicyChangeEvent changeEvent,
|
||||
string packId,
|
||||
int packVersion,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(changeEvent);
|
||||
|
||||
using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity("policy_event.process", ActivityKind.Internal);
|
||||
activity?.SetTag("event.id", changeEvent.EventId);
|
||||
activity?.SetTag("event.type", changeEvent.ChangeType.ToString());
|
||||
activity?.SetTag("tenant.id", changeEvent.TenantId);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Processing policy change event {EventId}: {ChangeType} for tenant {TenantId}",
|
||||
changeEvent.EventId, changeEvent.ChangeType, changeEvent.TenantId);
|
||||
|
||||
// Skip if event targets no subjects
|
||||
if (changeEvent.AffectedPurls.IsDefaultOrEmpty &&
|
||||
changeEvent.AffectedSbomIds.IsDefaultOrEmpty &&
|
||||
changeEvent.AffectedProductKeys.IsDefaultOrEmpty)
|
||||
{
|
||||
_logger.LogDebug("Skipping event {EventId}: no affected subjects", changeEvent.EventId);
|
||||
return null;
|
||||
}
|
||||
|
||||
// Create re-evaluation job request
|
||||
var jobId = ReEvaluationJobRequest.CreateJobId(
|
||||
changeEvent.TenantId,
|
||||
packId,
|
||||
packVersion,
|
||||
changeEvent.ChangeType.ToString(),
|
||||
_timeProvider.GetUtcNow());
|
||||
|
||||
var jobRequest = new ReEvaluationJobRequest(
|
||||
JobId: jobId,
|
||||
TenantId: changeEvent.TenantId,
|
||||
PackId: packId,
|
||||
PackVersion: packVersion,
|
||||
TriggerType: changeEvent.ChangeType.ToString(),
|
||||
CorrelationId: changeEvent.CorrelationId,
|
||||
CreatedAt: _timeProvider.GetUtcNow(),
|
||||
Priority: MapPriority(changeEvent.Priority),
|
||||
AdvisoryIds: changeEvent.AdvisoryId is not null
|
||||
? ImmutableArray.Create(changeEvent.AdvisoryId)
|
||||
: ImmutableArray<string>.Empty,
|
||||
SubjectPurls: changeEvent.AffectedPurls,
|
||||
SbomIds: changeEvent.AffectedSbomIds,
|
||||
Metadata: changeEvent.Metadata);
|
||||
|
||||
// Schedule the job
|
||||
var scheduledId = await ScheduleAsync(jobRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
activity?.SetTag("job.id", scheduledId);
|
||||
PolicyEngineTelemetry.PolicyEventsProcessed.Add(1);
|
||||
|
||||
return scheduledId;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Processes results from a re-evaluation and emits effective events.
|
||||
/// </summary>
|
||||
public async Task ProcessReEvaluationResultsAsync(
|
||||
string jobId,
|
||||
string tenantId,
|
||||
string packId,
|
||||
int packVersion,
|
||||
string triggerType,
|
||||
string? correlationId,
|
||||
IReadOnlyList<PolicyDecisionChange> changes,
|
||||
long durationMs,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity("policy_event.emit_results", ActivityKind.Internal);
|
||||
activity?.SetTag("job.id", jobId);
|
||||
activity?.SetTag("changes.count", changes.Count);
|
||||
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
var changedCount = 0;
|
||||
|
||||
// Emit individual effective events for each changed decision
|
||||
foreach (var change in changes)
|
||||
{
|
||||
if (!change.HasChanged)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
changedCount++;
|
||||
|
||||
var diff = PolicyDecisionDiff.Create(
|
||||
change.OldStatus, change.NewStatus,
|
||||
change.OldSeverity, change.NewSeverity,
|
||||
change.OldRule, change.NewRule,
|
||||
change.OldPriority, change.NewPriority,
|
||||
change.OldAnnotations, change.NewAnnotations);
|
||||
|
||||
var evt = new PolicyEffectiveUpdatedEvent(
|
||||
EventId: GenerateEventId(),
|
||||
TenantId: tenantId,
|
||||
Timestamp: now,
|
||||
CorrelationId: correlationId,
|
||||
PackId: packId,
|
||||
PackVersion: packVersion,
|
||||
SubjectPurl: change.SubjectPurl,
|
||||
AdvisoryId: change.AdvisoryId,
|
||||
TriggerType: triggerType,
|
||||
Diff: diff);
|
||||
|
||||
await PublishEffectiveUpdatedAsync(evt, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
// Emit batch completed event
|
||||
var summary = ComputeBatchSummary(changes);
|
||||
var batchEvent = new PolicyBatchCompletedEvent(
|
||||
EventId: GenerateEventId(),
|
||||
TenantId: tenantId,
|
||||
Timestamp: now,
|
||||
CorrelationId: correlationId,
|
||||
BatchId: jobId,
|
||||
TriggerType: triggerType,
|
||||
SubjectsEvaluated: changes.Count,
|
||||
DecisionsChanged: changedCount,
|
||||
DurationMs: durationMs,
|
||||
Summary: summary);
|
||||
|
||||
await PublishBatchCompletedAsync(batchEvent, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
activity?.SetTag("decisions.changed", changedCount);
|
||||
_logger.LogInformation(
|
||||
"Re-evaluation {JobId} completed: {Evaluated} subjects, {Changed} decisions changed in {Duration}ms",
|
||||
jobId, changes.Count, changedCount, durationMs);
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public async Task PublishEffectiveUpdatedAsync(
|
||||
PolicyEffectiveUpdatedEvent evt,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
await PublishEventAsync(evt).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public async Task PublishBatchCompletedAsync(
|
||||
PolicyBatchCompletedEvent evt,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
await PublishEventAsync(evt).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public void RegisterHandler(Func<PolicyEffectiveEvent, Task> handler)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(handler);
|
||||
|
||||
lock (_handlersLock)
|
||||
{
|
||||
_eventHandlers.Add(handler);
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public Task<string> ScheduleAsync(ReEvaluationJobRequest request, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
// Check for duplicate
|
||||
if (_jobIndex.ContainsKey(request.JobId))
|
||||
{
|
||||
_logger.LogDebug("Duplicate job {JobId} ignored", request.JobId);
|
||||
return Task.FromResult(request.JobId);
|
||||
}
|
||||
|
||||
// Enforce queue limit
|
||||
if (_jobQueue.Count >= MaxQueueSize)
|
||||
{
|
||||
_logger.LogWarning("Job queue full, rejecting job {JobId}", request.JobId);
|
||||
throw new InvalidOperationException("Re-evaluation job queue is full");
|
||||
}
|
||||
|
||||
_jobIndex[request.JobId] = request;
|
||||
_jobQueue.Enqueue(request);
|
||||
|
||||
PolicyEngineTelemetry.ReEvaluationJobsScheduled.Add(1);
|
||||
_logger.LogDebug(
|
||||
"Scheduled re-evaluation job {JobId}: {TriggerType} for {TenantId}/{PackId}@{Version}",
|
||||
request.JobId, request.TriggerType, request.TenantId, request.PackId, request.PackVersion);
|
||||
|
||||
return Task.FromResult(request.JobId);
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public int GetPendingJobCount() => _jobQueue.Count;
|
||||
|
||||
/// <inheritdoc/>
|
||||
public ReEvaluationJobRequest? GetJob(string jobId)
|
||||
{
|
||||
_jobIndex.TryGetValue(jobId, out var job);
|
||||
return job;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Dequeues the next job for processing.
|
||||
/// </summary>
|
||||
public ReEvaluationJobRequest? DequeueJob()
|
||||
{
|
||||
if (_jobQueue.TryDequeue(out var job))
|
||||
{
|
||||
_jobIndex.TryRemove(job.JobId, out _);
|
||||
return job;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets recent effective events.
|
||||
/// </summary>
|
||||
public IReadOnlyList<PolicyEffectiveEvent> GetRecentEvents(int limit = 100)
|
||||
{
|
||||
return _eventStream
|
||||
.ToArray()
|
||||
.OrderByDescending(e => e.Timestamp)
|
||||
.Take(limit)
|
||||
.ToList()
|
||||
.AsReadOnly();
|
||||
}
|
||||
|
||||
private async Task PublishEventAsync(PolicyEffectiveEvent evt)
|
||||
{
|
||||
// Add to stream
|
||||
_eventStream.Enqueue(evt);
|
||||
|
||||
// Trim if too large
|
||||
while (_eventStream.Count > MaxEventStreamSize)
|
||||
{
|
||||
_eventStream.TryDequeue(out _);
|
||||
}
|
||||
|
||||
// Invoke handlers
|
||||
List<Func<PolicyEffectiveEvent, Task>> handlers;
|
||||
lock (_handlersLock)
|
||||
{
|
||||
handlers = _eventHandlers.ToList();
|
||||
}
|
||||
|
||||
foreach (var handler in handlers)
|
||||
{
|
||||
try
|
||||
{
|
||||
await handler(evt).ConfigureAwait(false);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Error invoking event handler for {EventType}", evt.EventType);
|
||||
}
|
||||
}
|
||||
|
||||
PolicyEngineTelemetry.PolicyEffectiveEventsPublished.Add(1);
|
||||
}
|
||||
|
||||
private static PolicyBatchSummary ComputeBatchSummary(IReadOnlyList<PolicyDecisionChange> changes)
|
||||
{
|
||||
var statusUpgrades = 0;
|
||||
var statusDowngrades = 0;
|
||||
var newBlocks = 0;
|
||||
var blocksRemoved = 0;
|
||||
var advisories = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
var purls = new HashSet<string>(StringComparer.Ordinal);
|
||||
|
||||
foreach (var change in changes)
|
||||
{
|
||||
advisories.Add(change.AdvisoryId);
|
||||
purls.Add(change.SubjectPurl);
|
||||
|
||||
if (!change.HasChanged)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var severityChange = CompareSeverity(change.OldStatus, change.NewStatus);
|
||||
if (severityChange > 0)
|
||||
{
|
||||
statusUpgrades++;
|
||||
}
|
||||
else if (severityChange < 0)
|
||||
{
|
||||
statusDowngrades++;
|
||||
}
|
||||
|
||||
if (IsBlockStatus(change.NewStatus) && !IsBlockStatus(change.OldStatus))
|
||||
{
|
||||
newBlocks++;
|
||||
}
|
||||
else if (IsBlockStatus(change.OldStatus) && !IsBlockStatus(change.NewStatus))
|
||||
{
|
||||
blocksRemoved++;
|
||||
}
|
||||
}
|
||||
|
||||
return new PolicyBatchSummary(
|
||||
StatusUpgrades: statusUpgrades,
|
||||
StatusDowngrades: statusDowngrades,
|
||||
NewBlocks: newBlocks,
|
||||
BlocksRemoved: blocksRemoved,
|
||||
AffectedAdvisories: advisories.OrderBy(a => a).ToImmutableArray(),
|
||||
AffectedPurls: purls.OrderBy(p => p).Take(100).ToImmutableArray());
|
||||
}
|
||||
|
||||
private static int CompareSeverity(string? oldStatus, string? newStatus)
|
||||
{
|
||||
var oldSeverity = GetStatusSeverityLevel(oldStatus);
|
||||
var newSeverity = GetStatusSeverityLevel(newStatus);
|
||||
return newSeverity.CompareTo(oldSeverity);
|
||||
}
|
||||
|
||||
private static int GetStatusSeverityLevel(string? status) => status?.ToLowerInvariant() switch
|
||||
{
|
||||
"blocked" => 4,
|
||||
"deny" => 4,
|
||||
"warn" => 3,
|
||||
"affected" => 2,
|
||||
"allow" => 1,
|
||||
"ignored" => 0,
|
||||
_ => 1
|
||||
};
|
||||
|
||||
private static bool IsBlockStatus(string? status) =>
|
||||
string.Equals(status, "blocked", StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(status, "deny", StringComparison.OrdinalIgnoreCase);
|
||||
|
||||
private static Events.PolicyChangePriority MapPriority(IncrementalOrchestrator.PolicyChangePriority priority) =>
|
||||
priority switch
|
||||
{
|
||||
IncrementalOrchestrator.PolicyChangePriority.Emergency => Events.PolicyChangePriority.Emergency,
|
||||
IncrementalOrchestrator.PolicyChangePriority.High => Events.PolicyChangePriority.High,
|
||||
_ => Events.PolicyChangePriority.Normal
|
||||
};
|
||||
|
||||
private static string GenerateEventId()
|
||||
{
|
||||
var guid = Guid.NewGuid().ToByteArray();
|
||||
return $"pee-{Convert.ToHexStringLower(guid)[..16]}";
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Represents a change in policy decision for a subject.
/// </summary>
public sealed record PolicyDecisionChange(
    string SubjectPurl,
    string AdvisoryId,
    string? OldStatus,
    string NewStatus,
    string? OldSeverity,
    string? NewSeverity,
    string? OldRule,
    string? NewRule,
    int? OldPriority,
    int? NewPriority,
    ImmutableDictionary<string, string>? OldAnnotations,
    ImmutableDictionary<string, string>? NewAnnotations)
{
    /// <summary>
    /// Whether the decision has changed.
    /// </summary>
    public bool HasChanged =>
        !string.Equals(OldStatus, NewStatus, StringComparison.Ordinal) ||
        !string.Equals(OldSeverity, NewSeverity, StringComparison.Ordinal) ||
        !string.Equals(OldRule, NewRule, StringComparison.Ordinal);
}
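A worked sketch (all values invented) showing how a change record feeds PolicyDecisionDiff.Create, defined earlier in this commit:

// Sketch only; the subject, advisory, and old/new values are illustrative.
var change = new PolicyDecisionChange(
    SubjectPurl: "pkg:npm/example@1.2.3",   // illustrative purl
    AdvisoryId: "ADV-0001",                 // illustrative advisory ID
    OldStatus: "allow",
    NewStatus: "blocked",
    OldSeverity: "low",
    NewSeverity: "critical",
    OldRule: "baseline",
    NewRule: "kev-block",
    OldPriority: 10,
    NewPriority: 100,
    OldAnnotations: null,
    NewAnnotations: null);

if (change.HasChanged)
{
    var diff = PolicyDecisionDiff.Create(
        change.OldStatus, change.NewStatus,
        change.OldSeverity, change.NewSeverity,
        change.OldRule, change.NewRule,
        change.OldPriority, change.NewPriority,
        change.OldAnnotations, change.NewAnnotations);
    // Here diff.StatusChanged and diff.RuleChanged are both true.
}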
@@ -0,0 +1,225 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;

namespace StellaOps.Policy.Engine.ExceptionCache;

/// <summary>
/// Cached exception entry for fast lookup during policy evaluation.
/// </summary>
public sealed record ExceptionCacheEntry
{
    /// <summary>
    /// Exception identifier.
    /// </summary>
    [JsonPropertyName("exception_id")]
    public required string ExceptionId { get; init; }

    /// <summary>
    /// Asset identifier this exception applies to.
    /// </summary>
    [JsonPropertyName("asset_id")]
    public required string AssetId { get; init; }

    /// <summary>
    /// Advisory ID covered (null if applies to all advisories for asset).
    /// </summary>
    [JsonPropertyName("advisory_id")]
    public string? AdvisoryId { get; init; }

    /// <summary>
    /// CVE ID covered (null if applies to all CVEs for asset).
    /// </summary>
    [JsonPropertyName("cve_id")]
    public string? CveId { get; init; }

    /// <summary>
    /// Decision override applied by this exception.
    /// </summary>
    [JsonPropertyName("decision_override")]
    public required string DecisionOverride { get; init; }

    /// <summary>
    /// Exception type: waiver, override, temporary, permanent.
    /// </summary>
    [JsonPropertyName("exception_type")]
    public required string ExceptionType { get; init; }

    /// <summary>
    /// Priority for conflict resolution (higher = more precedence).
    /// </summary>
    [JsonPropertyName("priority")]
    public int Priority { get; init; }

    /// <summary>
    /// When the exception becomes effective.
    /// </summary>
    [JsonPropertyName("effective_from")]
    public DateTimeOffset EffectiveFrom { get; init; }

    /// <summary>
    /// When the exception expires (null = no expiration).
    /// </summary>
    [JsonPropertyName("expires_at")]
    public DateTimeOffset? ExpiresAt { get; init; }

    /// <summary>
    /// When this cache entry was created.
    /// </summary>
    [JsonPropertyName("cached_at")]
    public DateTimeOffset CachedAt { get; init; }

    /// <summary>
    /// Original exception name for display.
    /// </summary>
    [JsonPropertyName("exception_name")]
    public string? ExceptionName { get; init; }
}

/// <summary>
/// Result of querying exceptions for an asset.
/// </summary>
public sealed record ExceptionCacheQueryResult
{
    /// <summary>
    /// Applicable exceptions for the asset, ordered by priority (highest first).
    /// </summary>
    public required ImmutableArray<ExceptionCacheEntry> Entries { get; init; }

    /// <summary>
    /// Whether the result came from cache.
    /// </summary>
    public bool FromCache { get; init; }

    /// <summary>
    /// Cache version at time of query.
    /// </summary>
    public long CacheVersion { get; init; }

    /// <summary>
    /// Time taken to query in milliseconds.
    /// </summary>
    public long QueryDurationMs { get; init; }
}

/// <summary>
/// Summary of cached exceptions for a tenant.
/// </summary>
public sealed record ExceptionCacheSummary
{
    /// <summary>
    /// Tenant identifier.
    /// </summary>
    public required string TenantId { get; init; }

    /// <summary>
    /// Total cached exception entries.
    /// </summary>
    public int TotalEntries { get; init; }

    /// <summary>
    /// Unique exceptions in cache.
    /// </summary>
    public int UniqueExceptions { get; init; }

    /// <summary>
    /// Unique assets with exceptions.
    /// </summary>
    public int UniqueAssets { get; init; }

    /// <summary>
    /// Counts by exception type.
    /// </summary>
    public required IReadOnlyDictionary<string, int> ByType { get; init; }

    /// <summary>
    /// Counts by decision override.
    /// </summary>
    public required IReadOnlyDictionary<string, int> ByDecision { get; init; }

    /// <summary>
    /// Entries expiring within the next hour.
    /// </summary>
    public int ExpiringWithinHour { get; init; }

    /// <summary>
    /// Cache version.
    /// </summary>
    public long CacheVersion { get; init; }

    /// <summary>
    /// When summary was computed.
    /// </summary>
    public DateTimeOffset ComputedAt { get; init; }
}

/// <summary>
/// Options for exception cache operations.
/// </summary>
public sealed record ExceptionCacheOptions
{
    /// <summary>
    /// Default TTL for cache entries in minutes.
    /// </summary>
    public int DefaultTtlMinutes { get; set; } = 60;

    /// <summary>
    /// Whether to enable automatic cache warming.
    /// </summary>
    public bool EnableAutoWarm { get; set; } = true;

    /// <summary>
    /// Warm interval in minutes.
    /// </summary>
    public int WarmIntervalMinutes { get; set; } = 15;

    /// <summary>
    /// Maximum entries per tenant.
    /// </summary>
    public int MaxEntriesPerTenant { get; set; } = 50000;

    /// <summary>
    /// Whether to invalidate cache on exception events.
    /// </summary>
    public bool InvalidateOnEvents { get; set; } = true;
}

/// <summary>
/// Statistics for the exception cache.
/// </summary>
public sealed record ExceptionCacheStats
{
    /// <summary>
    /// Total entries in cache.
    /// </summary>
    public int TotalEntries { get; init; }

    /// <summary>
    /// Total tenants with cached data.
    /// </summary>
    public int TotalTenants { get; init; }

    /// <summary>
    /// Memory used by cache in bytes (if available).
    /// </summary>
    public long? MemoryUsedBytes { get; init; }

    /// <summary>
    /// Cache hit count since last reset.
    /// </summary>
    public long HitCount { get; init; }

    /// <summary>
    /// Cache miss count since last reset.
    /// </summary>
    public long MissCount { get; init; }

    /// <summary>
    /// Last warm operation timestamp.
    /// </summary>
    public DateTimeOffset? LastWarmAt { get; init; }

    /// <summary>
    /// Last invalidation timestamp.
    /// </summary>
    public DateTimeOffset? LastInvalidationAt { get; init; }
}
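A consumption sketch, not part of the commit: because Entries is documented as ordered by priority (highest first), the first entry's override wins; falling back to the computed status when no exception applies is an assumption about how callers would use the result.

// Sketch only: prefer the highest-priority cached exception, else keep the computed status.
static string ResolveStatus(ExceptionCacheQueryResult result, string computedStatus)
{
    return result.Entries.Length > 0
        ? result.Entries[0].DecisionOverride
        : computedStatus;
}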
@@ -0,0 +1,156 @@
using System.Collections.Immutable;

namespace StellaOps.Policy.Engine.ExceptionCache;

/// <summary>
/// Interface for caching effective exception decisions per asset.
/// Supports warm/invalidation logic reacting to exception events.
/// </summary>
internal interface IExceptionEffectiveCache
{
    /// <summary>
    /// Gets applicable exceptions for an asset at a given time.
    /// </summary>
    Task<ExceptionCacheQueryResult> GetForAssetAsync(
        string tenantId,
        string assetId,
        string? advisoryId,
        DateTimeOffset asOf,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets applicable exceptions for multiple assets.
    /// </summary>
    Task<IReadOnlyDictionary<string, ExceptionCacheQueryResult>> GetBatchAsync(
        string tenantId,
        IReadOnlyList<string> assetIds,
        DateTimeOffset asOf,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Sets a cache entry.
    /// </summary>
    Task SetAsync(
        string tenantId,
        ExceptionCacheEntry entry,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Sets multiple cache entries in batch.
    /// </summary>
    Task SetBatchAsync(
        string tenantId,
        IEnumerable<ExceptionCacheEntry> entries,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Invalidates cache entries for an exception.
    /// Called when an exception is modified/revoked/expired.
    /// </summary>
    Task InvalidateExceptionAsync(
        string tenantId,
        string exceptionId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Invalidates cache entries for an asset.
    /// Called when asset exceptions need re-evaluation.
    /// </summary>
    Task InvalidateAssetAsync(
        string tenantId,
        string assetId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Invalidates all cache entries for a tenant.
    /// </summary>
    Task InvalidateTenantAsync(
        string tenantId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Warms the cache for a tenant by loading active exceptions from the repository.
    /// </summary>
    Task WarmAsync(
        string tenantId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets cache summary for a tenant.
    /// </summary>
    Task<ExceptionCacheSummary> GetSummaryAsync(
        string tenantId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets cache statistics.
    /// </summary>
    Task<ExceptionCacheStats> GetStatsAsync(
        string? tenantId = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the current cache version for a tenant.
    /// </summary>
    Task<long> GetVersionAsync(
        string tenantId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Processes an exception event and updates cache accordingly.
    /// </summary>
    Task HandleExceptionEventAsync(
        ExceptionEvent exceptionEvent,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Event representing a change to an exception.
/// </summary>
public sealed record ExceptionEvent
{
    /// <summary>
    /// Event type: activated, expired, revoked, updated, created, deleted.
    /// </summary>
    public required string EventType { get; init; }

    /// <summary>
    /// Tenant identifier.
    /// </summary>
    public required string TenantId { get; init; }

    /// <summary>
    /// Exception identifier.
    /// </summary>
    public required string ExceptionId { get; init; }

    /// <summary>
    /// Exception name.
    /// </summary>
    public string? ExceptionName { get; init; }

    /// <summary>
    /// Exception type.
    /// </summary>
    public string? ExceptionType { get; init; }

    /// <summary>
    /// Affected asset IDs (if known).
    /// </summary>
    public ImmutableArray<string> AffectedAssetIds { get; init; } = ImmutableArray<string>.Empty;

    /// <summary>
    /// Affected advisory IDs (if known).
    /// </summary>
    public ImmutableArray<string> AffectedAdvisoryIds { get; init; } = ImmutableArray<string>.Empty;

    /// <summary>
    /// When the event occurred.
    /// </summary>
    public DateTimeOffset OccurredAt { get; init; }

    /// <summary>
    /// Correlation ID for tracing.
    /// </summary>
    public string? CorrelationId { get; init; }
}
@@ -0,0 +1,725 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Diagnostics;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Policy.Engine.Options;
|
||||
using StellaOps.Policy.Engine.Storage.Mongo.Repositories;
|
||||
using StellaOps.Policy.Engine.Telemetry;
|
||||
using StackExchange.Redis;
|
||||
|
||||
namespace StellaOps.Policy.Engine.ExceptionCache;
|
||||
|
||||
/// <summary>
|
||||
/// Redis-backed exception effective cache with warm/invalidation support.
|
||||
/// Key structure:
|
||||
/// - Entry by asset: stellaops:exc:{tenant}:a:{asset}:{advisory|all} -> JSON array of entries
|
||||
/// - Entry by exception: stellaops:exc:{tenant}:e:{exceptionId} -> JSON entry
|
||||
/// - Index by exception: stellaops:exc:{tenant}:idx:e:{exceptionId} -> set of asset keys
|
||||
/// - Version: stellaops:exc:{tenant}:v -> integer version
|
||||
/// - Stats: stellaops:exc:{tenant}:stats -> JSON stats
|
||||
/// </summary>
|
||||
internal sealed class RedisExceptionEffectiveCache : IExceptionEffectiveCache
|
||||
{
|
||||
private readonly IConnectionMultiplexer _redis;
|
||||
private readonly IExceptionRepository _repository;
|
||||
private readonly ILogger<RedisExceptionEffectiveCache> _logger;
|
||||
private readonly ExceptionCacheOptions _options;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
private const string KeyPrefix = "stellaops:exc";
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false,
|
||||
};
|
||||
|
||||
public RedisExceptionEffectiveCache(
|
||||
IConnectionMultiplexer redis,
|
||||
IExceptionRepository repository,
|
||||
ILogger<RedisExceptionEffectiveCache> logger,
|
||||
IOptions<PolicyEngineOptions> options,
|
||||
TimeProvider timeProvider)
|
||||
{
|
||||
_redis = redis ?? throw new ArgumentNullException(nameof(redis));
|
||||
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_options = options?.Value.ExceptionCache ?? new ExceptionCacheOptions();
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
}
|
||||
|
||||
public async Task<ExceptionCacheQueryResult> GetForAssetAsync(
|
||||
string tenantId,
|
||||
string assetId,
|
||||
string? advisoryId,
|
||||
DateTimeOffset asOf,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sw = Stopwatch.StartNew();
|
||||
var db = _redis.GetDatabase();
|
||||
|
||||
// Try specific advisory key first, then fall back to "all"
|
||||
var entries = new List<ExceptionCacheEntry>();
|
||||
var fromCache = false;
|
||||
|
||||
if (advisoryId is not null)
|
||||
{
|
||||
var specificKey = GetAssetKey(tenantId, assetId, advisoryId);
|
||||
var specificJson = await db.StringGetAsync(specificKey).ConfigureAwait(false);
|
||||
if (specificJson.HasValue)
|
||||
{
|
||||
var specificEntries = JsonSerializer.Deserialize<List<ExceptionCacheEntry>>((string)specificJson!, JsonOptions);
|
||||
if (specificEntries is not null)
|
||||
{
|
||||
entries.AddRange(specificEntries);
|
||||
fromCache = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Also get "all" entries (exceptions without specific advisory)
|
||||
var allKey = GetAssetKey(tenantId, assetId, null);
|
||||
var allJson = await db.StringGetAsync(allKey).ConfigureAwait(false);
|
||||
if (allJson.HasValue)
|
||||
{
|
||||
var allEntries = JsonSerializer.Deserialize<List<ExceptionCacheEntry>>((string)allJson!, JsonOptions);
|
||||
if (allEntries is not null)
|
||||
{
|
||||
entries.AddRange(allEntries);
|
||||
fromCache = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter by time and sort by priority
|
||||
var validEntries = entries
|
||||
.Where(e => e.EffectiveFrom <= asOf && (e.ExpiresAt is null || e.ExpiresAt > asOf))
|
||||
.OrderByDescending(e => e.Priority)
|
||||
.ToImmutableArray();
|
||||
|
||||
var version = await GetVersionAsync(tenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
sw.Stop();
|
||||
|
||||
PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, fromCache ? "hit" : "miss");
|
||||
|
||||
return new ExceptionCacheQueryResult
|
||||
{
|
||||
Entries = validEntries,
|
||||
FromCache = fromCache,
|
||||
CacheVersion = version,
|
||||
QueryDurationMs = sw.ElapsedMilliseconds,
|
||||
};
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyDictionary<string, ExceptionCacheQueryResult>> GetBatchAsync(
|
||||
string tenantId,
|
||||
IReadOnlyList<string> assetIds,
|
||||
DateTimeOffset asOf,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var results = new Dictionary<string, ExceptionCacheQueryResult>(StringComparer.OrdinalIgnoreCase);
|
||||
var db = _redis.GetDatabase();
|
||||
|
||||
// Get all "all" keys for assets
|
||||
var keys = assetIds.Select(id => (RedisKey)GetAssetKey(tenantId, id, null)).ToArray();
|
||||
var values = await db.StringGetAsync(keys).ConfigureAwait(false);
|
||||
|
||||
var version = await GetVersionAsync(tenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
for (int i = 0; i < assetIds.Count; i++)
|
||||
{
|
||||
var entries = ImmutableArray<ExceptionCacheEntry>.Empty;
|
||||
var fromCache = false;
|
||||
|
||||
if (values[i].HasValue)
|
||||
{
|
||||
var cachedEntries = JsonSerializer.Deserialize<List<ExceptionCacheEntry>>((string)values[i]!, JsonOptions);
|
||||
if (cachedEntries is not null)
|
||||
{
|
||||
entries = cachedEntries
|
||||
.Where(e => e.EffectiveFrom <= asOf && (e.ExpiresAt is null || e.ExpiresAt > asOf))
|
||||
.OrderByDescending(e => e.Priority)
|
||||
.ToImmutableArray();
|
||||
fromCache = true;
|
||||
}
|
||||
}
|
||||
|
||||
results[assetIds[i]] = new ExceptionCacheQueryResult
|
||||
{
|
||||
Entries = entries,
|
||||
FromCache = fromCache,
|
||||
CacheVersion = version,
|
||||
QueryDurationMs = 0,
|
||||
};
|
||||
}
|
||||
|
||||
PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, "batch_get");
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
public async Task SetAsync(
|
||||
string tenantId,
|
||||
ExceptionCacheEntry entry,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(entry);
|
||||
|
||||
var db = _redis.GetDatabase();
|
||||
var assetKey = GetAssetKey(tenantId, entry.AssetId, entry.AdvisoryId);
|
||||
var exceptionIndexKey = GetExceptionIndexKey(tenantId, entry.ExceptionId);
|
||||
|
||||
// Get existing entries for this asset
|
||||
var existingJson = await db.StringGetAsync(assetKey).ConfigureAwait(false);
|
||||
var entries = existingJson.HasValue
|
||||
? JsonSerializer.Deserialize<List<ExceptionCacheEntry>>((string)existingJson!, JsonOptions) ?? new List<ExceptionCacheEntry>()
|
||||
: new List<ExceptionCacheEntry>();
|
||||
|
||||
// Remove existing entry for same exception if any
|
||||
entries.RemoveAll(e => e.ExceptionId == entry.ExceptionId);
|
||||
|
||||
// Add new entry
|
||||
entries.Add(entry);
|
||||
|
||||
var ttl = ComputeTtl(entry);
|
||||
var json = JsonSerializer.Serialize(entries, JsonOptions);
|
||||
|
||||
var tasks = new List<Task>
|
||||
{
|
||||
db.StringSetAsync(assetKey, json, ttl),
|
||||
db.SetAddAsync(exceptionIndexKey, assetKey),
|
||||
db.KeyExpireAsync(exceptionIndexKey, ttl + TimeSpan.FromMinutes(5)),
|
||||
};
|
||||
|
||||
await Task.WhenAll(tasks).ConfigureAwait(false);
|
||||
|
||||
PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, "set");
|
||||
}
|
||||
|
||||
public async Task SetBatchAsync(
|
||||
string tenantId,
|
||||
IEnumerable<ExceptionCacheEntry> entries,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
var batch = db.CreateBatch();
|
||||
var count = 0;
|
||||
|
||||
// Group entries by asset+advisory
|
||||
var groupedEntries = entries
|
||||
.GroupBy(e => GetAssetKey(tenantId, e.AssetId, e.AdvisoryId))
|
||||
.ToDictionary(g => g.Key, g => g.ToList());
|
||||
|
||||
foreach (var (assetKey, assetEntries) in groupedEntries)
|
||||
{
|
||||
var ttl = assetEntries.Max(e => ComputeTtl(e));
|
||||
var json = JsonSerializer.Serialize(assetEntries, JsonOptions);
|
||||
|
||||
_ = batch.StringSetAsync(assetKey, json, ttl);
|
||||
|
||||
// Update exception indexes
|
||||
foreach (var entry in assetEntries)
|
||||
{
|
||||
var exceptionIndexKey = GetExceptionIndexKey(tenantId, entry.ExceptionId);
|
||||
_ = batch.SetAddAsync(exceptionIndexKey, assetKey);
|
||||
_ = batch.KeyExpireAsync(exceptionIndexKey, ttl + TimeSpan.FromMinutes(5));
|
||||
}
|
||||
|
||||
count += assetEntries.Count;
|
||||
}
|
||||
|
||||
batch.Execute();
|
||||
|
||||
// Increment version
|
||||
await IncrementVersionAsync(tenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, "set_batch");
|
||||
|
||||
_logger.LogDebug("Set {Count} exception cache entries for tenant {TenantId}", count, tenantId);
|
||||
}
|
||||
|
||||
public async Task InvalidateExceptionAsync(
|
||||
string tenantId,
|
||||
string exceptionId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
var exceptionIndexKey = GetExceptionIndexKey(tenantId, exceptionId);
|
||||
|
||||
// Get all asset keys affected by this exception
|
||||
var assetKeys = await db.SetMembersAsync(exceptionIndexKey).ConfigureAwait(false);
|
||||
|
||||
if (assetKeys.Length > 0)
|
||||
{
|
||||
// For each asset key, remove entries for this exception
|
||||
foreach (var assetKey in assetKeys)
|
||||
{
|
||||
var json = await db.StringGetAsync((string)assetKey!).ConfigureAwait(false);
|
||||
if (json.HasValue)
|
||||
{
|
||||
var entries = JsonSerializer.Deserialize<List<ExceptionCacheEntry>>((string)json!, JsonOptions);
|
||||
if (entries is not null)
|
||||
{
|
||||
entries.RemoveAll(e => e.ExceptionId == exceptionId);
|
||||
if (entries.Count > 0)
|
||||
{
|
||||
await db.StringSetAsync((string)assetKey!, JsonSerializer.Serialize(entries, JsonOptions))
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
await db.KeyDeleteAsync((string)assetKey!).ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Delete the exception index
|
||||
await db.KeyDeleteAsync(exceptionIndexKey).ConfigureAwait(false);
|
||||
|
||||
// Increment version
|
||||
await IncrementVersionAsync(tenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, "invalidate_exception");
|
||||
|
||||
_logger.LogInformation(
|
||||
"Invalidated exception {ExceptionId} affecting {Count} assets for tenant {TenantId}",
|
||||
exceptionId, assetKeys.Length, tenantId);
|
||||
}
|
||||
|
||||
public async Task InvalidateAssetAsync(
|
||||
string tenantId,
|
||||
string assetId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
var server = _redis.GetServer(_redis.GetEndPoints().First());
|
||||
|
||||
// Find all keys for this asset (all advisory variants)
|
||||
var pattern = $"{KeyPrefix}:{tenantId}:a:{assetId}:*";
|
||||
var keys = server.Keys(pattern: pattern).ToArray();
|
||||
|
||||
if (keys.Length > 0)
|
||||
{
|
||||
await db.KeyDeleteAsync(keys).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
// Increment version
|
||||
await IncrementVersionAsync(tenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, "invalidate_asset");
|
||||
|
||||
_logger.LogDebug("Invalidated {Count} cache keys for asset {AssetId}", keys.Length, assetId);
|
||||
}
|
||||
|
||||
public async Task InvalidateTenantAsync(
|
||||
string tenantId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var server = _redis.GetServer(_redis.GetEndPoints().First());
|
||||
var pattern = $"{KeyPrefix}:{tenantId}:*";
|
||||
var keys = server.Keys(pattern: pattern).ToArray();
|
||||
|
||||
if (keys.Length > 0)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
await db.KeyDeleteAsync(keys).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, "invalidate_tenant");
|
||||
|
||||
_logger.LogInformation("Invalidated {Count} cache keys for tenant {TenantId}", keys.Length, tenantId);
|
||||
}
|
||||
|
||||
public async Task WarmAsync(
|
||||
string tenantId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity(
|
||||
"exception.cache.warm", ActivityKind.Internal);
|
||||
activity?.SetTag("tenant_id", tenantId);
|
||||
|
||||
var sw = Stopwatch.StartNew();
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
|
||||
_logger.LogInformation("Starting cache warm for tenant {TenantId}", tenantId);
|
||||
|
||||
try
|
||||
{
|
||||
// Get all active exceptions from repository
|
||||
var exceptions = await _repository.ListExceptionsAsync(
|
||||
tenantId,
|
||||
new ExceptionQueryOptions
|
||||
{
|
||||
Statuses = ImmutableArray.Create("active"),
|
||||
IncludeExpired = false,
|
||||
Limit = _options.MaxEntriesPerTenant,
|
||||
},
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (exceptions.Length == 0)
|
||||
{
|
||||
_logger.LogDebug("No active exceptions to warm for tenant {TenantId}", tenantId);
|
||||
return;
|
||||
}
|
||||
|
||||
// Get bindings for all exceptions
|
||||
var entries = new List<ExceptionCacheEntry>();
|
||||
|
||||
foreach (var exception in exceptions)
|
||||
{
|
||||
var bindings = await _repository.GetBindingsForExceptionAsync(
|
||||
tenantId, exception.Id, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
foreach (var binding in bindings.Where(b => b.Status == "active"))
|
||||
{
|
||||
entries.Add(new ExceptionCacheEntry
|
||||
{
|
||||
ExceptionId = exception.Id,
|
||||
AssetId = binding.AssetId,
|
||||
AdvisoryId = binding.AdvisoryId,
|
||||
CveId = binding.CveId,
|
||||
DecisionOverride = binding.DecisionOverride,
|
||||
ExceptionType = exception.ExceptionType,
|
||||
Priority = exception.Priority,
|
||||
EffectiveFrom = binding.EffectiveFrom,
|
||||
ExpiresAt = binding.ExpiresAt ?? exception.ExpiresAt,
|
||||
CachedAt = now,
|
||||
ExceptionName = exception.Name,
|
||||
});
|
||||
}
|
||||
|
||||
// Also add entries for scope-based exceptions without explicit bindings
|
||||
if (exception.Scope.ApplyToAll || exception.Scope.AssetIds.Count > 0)
|
||||
{
|
||||
foreach (var assetId in exception.Scope.AssetIds)
|
||||
{
|
||||
foreach (var advisoryId in exception.Scope.AdvisoryIds.DefaultIfEmpty(null!))
|
||||
{
|
||||
entries.Add(new ExceptionCacheEntry
|
||||
{
|
||||
ExceptionId = exception.Id,
|
||||
AssetId = assetId,
|
||||
AdvisoryId = advisoryId,
|
||||
CveId = null,
|
||||
DecisionOverride = "allow",
|
||||
ExceptionType = exception.ExceptionType,
|
||||
Priority = exception.Priority,
|
||||
EffectiveFrom = exception.EffectiveFrom ?? exception.CreatedAt,
|
||||
ExpiresAt = exception.ExpiresAt,
|
||||
CachedAt = now,
|
||||
ExceptionName = exception.Name,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (entries.Count > 0)
|
||||
{
|
||||
await SetBatchAsync(tenantId, entries, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
sw.Stop();
|
||||
|
||||
// Update warm stats
|
||||
await UpdateWarmStatsAsync(tenantId, now, entries.Count).ConfigureAwait(false);
|
||||
|
||||
PolicyEngineTelemetry.RecordExceptionCacheOperation(tenantId, "warm");
|
||||
|
||||
_logger.LogInformation(
|
||||
"Warmed cache with {Count} entries from {ExceptionCount} exceptions for tenant {TenantId} in {Duration}ms",
|
||||
entries.Count, exceptions.Length, tenantId, sw.ElapsedMilliseconds);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to warm cache for tenant {TenantId}", tenantId);
|
||||
PolicyEngineTelemetry.RecordError("exception_cache_warm", tenantId);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
public async Task<ExceptionCacheSummary> GetSummaryAsync(
|
||||
string tenantId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var server = _redis.GetServer(_redis.GetEndPoints().First());
|
||||
var db = _redis.GetDatabase();
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
|
||||
// Count asset keys
|
||||
var assetPattern = $"{KeyPrefix}:{tenantId}:a:*";
|
||||
var assetKeys = server.Keys(pattern: assetPattern).ToArray();
|
||||
|
||||
// Count exception index keys
|
||||
var exceptionPattern = $"{KeyPrefix}:{tenantId}:idx:e:*";
|
||||
var exceptionKeys = server.Keys(pattern: exceptionPattern).ToArray();
|
||||
|
||||
// Aggregate stats
|
||||
var byType = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);
|
||||
var byDecision = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);
|
||||
var totalEntries = 0;
|
||||
var expiringWithinHour = 0;
|
||||
var uniqueAssets = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
foreach (var key in assetKeys.Take(1000)) // Limit scan for performance
|
||||
{
|
||||
var json = await db.StringGetAsync(key).ConfigureAwait(false);
|
||||
if (!json.HasValue) continue;
|
||||
|
||||
var entries = JsonSerializer.Deserialize<List<ExceptionCacheEntry>>((string)json!, JsonOptions);
|
||||
if (entries is null) continue;
|
||||
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
totalEntries++;
|
||||
uniqueAssets.Add(entry.AssetId);
|
||||
|
||||
byType.TryGetValue(entry.ExceptionType, out var typeCount);
|
||||
byType[entry.ExceptionType] = typeCount + 1;
|
||||
|
||||
byDecision.TryGetValue(entry.DecisionOverride, out var decisionCount);
|
||||
byDecision[entry.DecisionOverride] = decisionCount + 1;
|
||||
|
||||
if (entry.ExpiresAt.HasValue && entry.ExpiresAt.Value - now <= TimeSpan.FromHours(1))
|
||||
{
|
||||
expiringWithinHour++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var version = await GetVersionAsync(tenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
return new ExceptionCacheSummary
|
||||
{
|
||||
TenantId = tenantId,
|
||||
TotalEntries = totalEntries,
|
||||
UniqueExceptions = exceptionKeys.Length,
|
||||
UniqueAssets = uniqueAssets.Count,
|
||||
ByType = byType,
|
||||
ByDecision = byDecision,
|
||||
ExpiringWithinHour = expiringWithinHour,
|
||||
CacheVersion = version,
|
||||
ComputedAt = now,
|
||||
};
|
||||
}
|
||||
|
||||
public async Task<ExceptionCacheStats> GetStatsAsync(
|
||||
string? tenantId = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var server = _redis.GetServer(_redis.GetEndPoints().First());
|
||||
|
||||
var pattern = tenantId != null
|
||||
? $"{KeyPrefix}:{tenantId}:a:*"
|
||||
: $"{KeyPrefix}:*:a:*";
|
||||
|
||||
var entryCount = server.Keys(pattern: pattern).Count();
|
||||
|
||||
var tenantPattern = tenantId != null
|
||||
? $"{KeyPrefix}:{tenantId}:v"
|
||||
: $"{KeyPrefix}:*:v";
|
||||
|
||||
var tenantCount = server.Keys(pattern: tenantPattern).Count();
|
||||
|
||||
long? memoryUsed = null;
|
||||
try
|
||||
{
|
||||
var info = server.Info("memory");
|
||||
var memorySection = info.FirstOrDefault(s => s.Key == "Memory");
|
||||
if (memorySection is not null)
|
||||
{
|
||||
var usedMemory = memorySection.FirstOrDefault(p => p.Key == "used_memory");
|
||||
if (usedMemory.Key is not null && long.TryParse(usedMemory.Value, out var bytes))
|
||||
{
|
||||
memoryUsed = bytes;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore - memory info not available
|
||||
}
|
||||
|
||||
return new ExceptionCacheStats
|
||||
{
|
||||
TotalEntries = entryCount,
|
||||
TotalTenants = tenantCount,
|
||||
MemoryUsedBytes = memoryUsed,
|
||||
HitCount = 0, // Would need to track separately
|
||||
MissCount = 0,
|
||||
LastWarmAt = null,
|
||||
LastInvalidationAt = null,
|
||||
};
|
||||
}
|
||||
|
||||
public async Task<long> GetVersionAsync(
|
||||
string tenantId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
var versionKey = GetVersionKey(tenantId);
|
||||
|
||||
var version = await db.StringGetAsync(versionKey).ConfigureAwait(false);
|
||||
return version.HasValue ? (long)version : 0;
|
||||
}
|
||||
|
||||
public async Task HandleExceptionEventAsync(
|
||||
ExceptionEvent exceptionEvent,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(exceptionEvent);
|
||||
|
||||
using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity(
|
||||
"exception.cache.handle_event", ActivityKind.Internal);
|
||||
activity?.SetTag("tenant_id", exceptionEvent.TenantId);
|
||||
activity?.SetTag("event_type", exceptionEvent.EventType);
|
||||
activity?.SetTag("exception_id", exceptionEvent.ExceptionId);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Handling exception event {EventType} for exception {ExceptionId} tenant {TenantId}",
|
||||
exceptionEvent.EventType, exceptionEvent.ExceptionId, exceptionEvent.TenantId);
|
||||
|
||||
switch (exceptionEvent.EventType.ToLowerInvariant())
|
||||
{
|
||||
case "activated":
|
||||
// Warm the cache with the new exception
|
||||
await WarmExceptionAsync(exceptionEvent.TenantId, exceptionEvent.ExceptionId, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
break;
|
||||
|
||||
case "expired":
|
||||
case "revoked":
|
||||
case "deleted":
|
||||
// Invalidate cache entries for this exception
|
||||
await InvalidateExceptionAsync(exceptionEvent.TenantId, exceptionEvent.ExceptionId, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
break;
|
||||
|
||||
case "updated":
|
||||
// Invalidate and re-warm
|
||||
await InvalidateExceptionAsync(exceptionEvent.TenantId, exceptionEvent.ExceptionId, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
await WarmExceptionAsync(exceptionEvent.TenantId, exceptionEvent.ExceptionId, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
break;
|
||||
|
||||
case "created":
|
||||
// Only warm if already active
|
||||
var exception = await _repository.GetExceptionAsync(
|
||||
exceptionEvent.TenantId, exceptionEvent.ExceptionId, cancellationToken).ConfigureAwait(false);
|
||||
if (exception?.Status == "active")
|
||||
{
|
||||
await WarmExceptionAsync(exceptionEvent.TenantId, exceptionEvent.ExceptionId, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
_logger.LogWarning("Unknown exception event type: {EventType}", exceptionEvent.EventType);
|
||||
break;
|
||||
}
|
||||
|
||||
PolicyEngineTelemetry.RecordExceptionCacheOperation(exceptionEvent.TenantId, $"event_{exceptionEvent.EventType}");
|
||||
}
|
||||
|
||||
private async Task WarmExceptionAsync(string tenantId, string exceptionId, CancellationToken cancellationToken)
|
||||
{
|
||||
var exception = await _repository.GetExceptionAsync(tenantId, exceptionId, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
if (exception is null || exception.Status != "active")
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
var entries = new List<ExceptionCacheEntry>();
|
||||
|
||||
var bindings = await _repository.GetBindingsForExceptionAsync(tenantId, exceptionId, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
foreach (var binding in bindings.Where(b => b.Status == "active"))
|
||||
{
|
||||
entries.Add(new ExceptionCacheEntry
|
||||
{
|
||||
ExceptionId = exception.Id,
|
||||
AssetId = binding.AssetId,
|
||||
AdvisoryId = binding.AdvisoryId,
|
||||
CveId = binding.CveId,
|
||||
DecisionOverride = binding.DecisionOverride,
|
||||
ExceptionType = exception.ExceptionType,
|
||||
Priority = exception.Priority,
|
||||
EffectiveFrom = binding.EffectiveFrom,
|
||||
ExpiresAt = binding.ExpiresAt ?? exception.ExpiresAt,
|
||||
CachedAt = now,
|
||||
ExceptionName = exception.Name,
|
||||
});
|
||||
}
|
||||
|
||||
if (entries.Count > 0)
|
||||
{
|
||||
await SetBatchAsync(tenantId, entries, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
_logger.LogDebug(
|
||||
"Warmed cache with {Count} entries for exception {ExceptionId}",
|
||||
entries.Count, exceptionId);
|
||||
}
|
||||
|
||||
private async Task<long> IncrementVersionAsync(string tenantId, CancellationToken cancellationToken)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
var versionKey = GetVersionKey(tenantId);
|
||||
|
||||
var newVersion = await db.StringIncrementAsync(versionKey).ConfigureAwait(false);
|
||||
|
||||
// Set TTL on version key if not already set
|
||||
await db.KeyExpireAsync(versionKey, TimeSpan.FromMinutes(_options.DefaultTtlMinutes + 10), ExpireWhen.HasNoExpiry)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return newVersion;
|
||||
}
|
||||
|
||||
private async Task UpdateWarmStatsAsync(string tenantId, DateTimeOffset warmAt, int count)
|
||||
{
|
||||
var db = _redis.GetDatabase();
|
||||
var statsKey = GetStatsKey(tenantId);
|
||||
|
||||
var stats = new Dictionary<string, string>
|
||||
{
|
||||
["lastWarmAt"] = warmAt.ToString("O"),
|
||||
["lastWarmCount"] = count.ToString(),
|
||||
};
|
||||
|
||||
await db.HashSetAsync(statsKey, stats.Select(kv => new HashEntry(kv.Key, kv.Value)).ToArray())
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
|
||||
private TimeSpan ComputeTtl(ExceptionCacheEntry entry)
|
||||
{
|
||||
if (entry.ExpiresAt.HasValue)
|
||||
{
|
||||
var ttl = entry.ExpiresAt.Value - _timeProvider.GetUtcNow();
|
||||
if (ttl > TimeSpan.Zero)
|
||||
{
|
||||
return ttl;
|
||||
}
|
||||
}
|
||||
|
||||
return TimeSpan.FromMinutes(_options.DefaultTtlMinutes);
|
||||
}
|
||||
|
||||
private static string GetAssetKey(string tenantId, string assetId, string? advisoryId) =>
|
||||
$"{KeyPrefix}:{tenantId}:a:{assetId}:{advisoryId ?? "all"}";
|
||||
|
||||
private static string GetExceptionIndexKey(string tenantId, string exceptionId) =>
|
||||
$"{KeyPrefix}:{tenantId}:idx:e:{exceptionId}";
|
||||
|
||||
private static string GetVersionKey(string tenantId) =>
|
||||
$"{KeyPrefix}:{tenantId}:v";
|
||||
|
||||
private static string GetStatsKey(string tenantId) =>
|
||||
$"{KeyPrefix}:{tenantId}:stats";
|
||||
}
|
||||
@@ -1,6 +1,8 @@
|
||||
using System.Collections.ObjectModel;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
using StellaOps.Policy.Engine.Caching;
|
||||
using StellaOps.Policy.Engine.EffectiveDecisionMap;
|
||||
using StellaOps.Policy.Engine.ExceptionCache;
|
||||
using StellaOps.Policy.Engine.ReachabilityFacts;
|
||||
using StellaOps.Policy.Engine.Telemetry;
|
||||
|
||||
@@ -33,6 +35,10 @@ public sealed class PolicyEngineOptions
|
||||
|
||||
public PolicyEvaluationCacheOptions EvaluationCache { get; } = new();
|
||||
|
||||
public EffectiveDecisionMapOptions EffectiveDecisionMap { get; } = new();
|
||||
|
||||
public ExceptionCacheOptions ExceptionCache { get; } = new();
|
||||
|
||||
public void Validate()
|
||||
{
|
||||
Authority.Validate();
|
||||
|
||||
@@ -79,6 +79,7 @@ internal sealed class PolicyBundleService
|
||||
Size: payload.Length,
|
||||
CreatedAt: createdAt,
|
||||
Payload: payload.ToImmutableArray(),
|
||||
CompiledDocument: compileResult.Document,
|
||||
AocMetadata: aocMetadata);
|
||||
|
||||
await _repository.StoreBundleAsync(packId, version, record, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
using System;
|
||||
using System.Collections.Immutable;
|
||||
using System.Diagnostics;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Policy;
|
||||
using StellaOps.Policy.Engine.Compilation;
|
||||
using StellaOps.Policy.Engine.Options;
|
||||
using StellaOps.Policy.Engine.Telemetry;
|
||||
using StellaOps.PolicyDsl;
|
||||
using DslCompiler = StellaOps.PolicyDsl.PolicyCompiler;
|
||||
using DslCompilationResult = StellaOps.PolicyDsl.PolicyCompilationResult;
|
||||
@@ -27,19 +30,25 @@ internal sealed class PolicyCompilationService
|
||||
{
|
||||
private readonly DslCompiler compiler;
|
||||
private readonly PolicyComplexityAnalyzer complexityAnalyzer;
|
||||
private readonly PolicyMetadataExtractor metadataExtractor;
|
||||
private readonly IOptionsMonitor<PolicyEngineOptions> optionsMonitor;
|
||||
private readonly TimeProvider timeProvider;
|
||||
private readonly ILogger<PolicyCompilationService> _logger;
|
||||
|
||||
public PolicyCompilationService(
|
||||
DslCompiler compiler,
|
||||
PolicyComplexityAnalyzer complexityAnalyzer,
|
||||
PolicyMetadataExtractor metadataExtractor,
|
||||
IOptionsMonitor<PolicyEngineOptions> optionsMonitor,
|
||||
TimeProvider timeProvider)
|
||||
TimeProvider timeProvider,
|
||||
ILogger<PolicyCompilationService>? logger = null)
|
||||
{
|
||||
this.compiler = compiler ?? throw new ArgumentNullException(nameof(compiler));
|
||||
this.complexityAnalyzer = complexityAnalyzer ?? throw new ArgumentNullException(nameof(complexityAnalyzer));
|
||||
this.metadataExtractor = metadataExtractor ?? throw new ArgumentNullException(nameof(metadataExtractor));
|
||||
this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor));
|
||||
this.timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<PolicyCompilationService>.Instance;
|
||||
}
|
||||
|
||||
public PolicyCompilationResultDto Compile(PolicyCompileRequest request)
|
||||
@@ -56,6 +65,9 @@ internal sealed class PolicyCompilationService
|
||||
|
||||
if (!string.Equals(request.Dsl.Syntax, "stella-dsl@1", StringComparison.Ordinal))
|
||||
{
|
||||
PolicyEngineTelemetry.RecordCompilation("unsupported_syntax", 0);
|
||||
PolicyEngineTelemetry.RecordError("compilation");
|
||||
_logger.LogWarning("Compilation rejected: unsupported syntax {Syntax}", request.Dsl.Syntax ?? "null");
|
||||
return PolicyCompilationResultDto.FromFailure(
|
||||
ImmutableArray.Create(PolicyIssue.Error(
|
||||
DiagnosticCodes.UnsupportedSyntaxVersion,
|
||||
@@ -65,13 +77,23 @@ internal sealed class PolicyCompilationService
|
||||
durationMilliseconds: 0);
|
||||
}
|
||||
|
||||
using var activity = PolicyEngineTelemetry.StartCompileActivity(policyId: null, version: request.Dsl.Syntax);
|
||||
|
||||
var start = timeProvider.GetTimestamp();
|
||||
var result = compiler.Compile(request.Dsl.Source);
|
||||
var elapsed = timeProvider.GetElapsedTime(start, timeProvider.GetTimestamp());
|
||||
var durationMilliseconds = (long)Math.Ceiling(elapsed.TotalMilliseconds);
|
||||
var durationSeconds = elapsed.TotalSeconds;
|
||||
|
||||
if (!result.Success || result.Document is null)
|
||||
{
|
||||
PolicyEngineTelemetry.RecordCompilation("failure", durationSeconds);
|
||||
PolicyEngineTelemetry.RecordError("compilation");
|
||||
activity?.SetStatus(ActivityStatusCode.Error, "Compilation failed");
|
||||
_logger.LogWarning(
|
||||
"Policy compilation failed in {DurationMs}ms with {DiagnosticCount} diagnostics",
|
||||
durationMilliseconds,
|
||||
result.Diagnostics.IsDefault ? 0 : result.Diagnostics.Length);
|
||||
return PolicyCompilationResultDto.FromFailure(result.Diagnostics, null, durationMilliseconds);
|
||||
}
|
||||
|
||||
@@ -79,6 +101,9 @@ internal sealed class PolicyCompilationService
|
||||
var diagnostics = result.Diagnostics.IsDefault ? ImmutableArray<PolicyIssue>.Empty : result.Diagnostics;
|
||||
var limits = optionsMonitor.CurrentValue?.Compilation ?? new PolicyEngineCompilationOptions();
|
||||
|
||||
activity?.SetTag("policy.rule_count", result.Document.Rules.Length);
|
||||
activity?.SetTag("policy.complexity_score", complexity.Score);
|
||||
|
||||
if (limits.EnforceComplexity && complexity.Score > limits.MaxComplexityScore)
|
||||
{
|
||||
var diagnostic = PolicyIssue.Error(
|
||||
@@ -86,6 +111,12 @@ internal sealed class PolicyCompilationService
|
||||
$"Policy complexity score {complexity.Score:F2} exceeds configured maximum {limits.MaxComplexityScore:F2}. Reduce rule count or expression depth.",
|
||||
"$.rules");
|
||||
diagnostics = AppendDiagnostic(diagnostics, diagnostic);
|
||||
PolicyEngineTelemetry.RecordCompilation("complexity_exceeded", durationSeconds);
|
||||
PolicyEngineTelemetry.RecordError("compilation");
|
||||
activity?.SetStatus(ActivityStatusCode.Error, "Complexity exceeded");
|
||||
_logger.LogWarning(
|
||||
"Policy compilation rejected: complexity {Score:F2} exceeds limit {MaxScore:F2}",
|
||||
complexity.Score, limits.MaxComplexityScore);
|
||||
return PolicyCompilationResultDto.FromFailure(diagnostics, complexity, durationMilliseconds);
|
||||
}
|
||||
|
||||
@@ -96,10 +127,27 @@ internal sealed class PolicyCompilationService
|
||||
$"Policy compilation time {durationMilliseconds} ms exceeded limit {limits.MaxDurationMilliseconds} ms.",
|
||||
"$.dsl");
|
||||
diagnostics = AppendDiagnostic(diagnostics, diagnostic);
|
||||
PolicyEngineTelemetry.RecordCompilation("duration_exceeded", durationSeconds);
|
||||
PolicyEngineTelemetry.RecordError("compilation");
|
||||
activity?.SetStatus(ActivityStatusCode.Error, "Duration exceeded");
|
||||
_logger.LogWarning(
|
||||
"Policy compilation rejected: duration {DurationMs}ms exceeds limit {MaxDurationMs}ms",
|
||||
durationMilliseconds, limits.MaxDurationMilliseconds);
|
||||
return PolicyCompilationResultDto.FromFailure(diagnostics, complexity, durationMilliseconds);
|
||||
}
|
||||
|
||||
return PolicyCompilationResultDto.FromSuccess(result, complexity, durationMilliseconds);
|
||||
// Extract extended metadata (symbol table, rule index, documentation, coverage, hashes)
|
||||
var metadata = metadataExtractor.Extract(result.Document, result.CanonicalRepresentation);
|
||||
|
||||
PolicyEngineTelemetry.RecordCompilation("success", durationSeconds);
|
||||
activity?.SetStatus(ActivityStatusCode.Ok);
|
||||
activity?.SetTag("policy.symbol_count", metadata.SymbolTable.Symbols.Length);
|
||||
activity?.SetTag("policy.coverage_paths", metadata.CoverageMetadata.CoveragePaths.Length);
|
||||
_logger.LogDebug(
|
||||
"Policy compiled successfully in {DurationMs}ms: {RuleCount} rules, complexity {Score:F2}, {SymbolCount} symbols",
|
||||
durationMilliseconds, result.Document.Rules.Length, complexity.Score, metadata.SymbolTable.Symbols.Length);
|
||||
|
||||
return PolicyCompilationResultDto.FromSuccess(result, complexity, metadata, durationMilliseconds);
|
||||
}
|
||||
|
||||
private static ImmutableArray<PolicyIssue> AppendDiagnostic(ImmutableArray<PolicyIssue> diagnostics, PolicyIssue diagnostic)
|
||||
@@ -119,17 +167,20 @@ internal sealed record PolicyCompilationResultDto(
|
||||
ImmutableArray<byte> CanonicalRepresentation,
|
||||
ImmutableArray<PolicyIssue> Diagnostics,
|
||||
PolicyComplexityReport? Complexity,
|
||||
long DurationMilliseconds)
|
||||
long DurationMilliseconds,
|
||||
IrDocument? Document = null,
|
||||
PolicyCompileMetadata? Metadata = null)
|
||||
{
|
||||
public static PolicyCompilationResultDto FromFailure(
|
||||
ImmutableArray<PolicyIssue> diagnostics,
|
||||
PolicyComplexityReport? complexity,
|
||||
long durationMilliseconds) =>
|
||||
new(false, null, null, ImmutableArray<byte>.Empty, diagnostics, complexity, durationMilliseconds);
|
||||
new(false, null, null, ImmutableArray<byte>.Empty, diagnostics, complexity, durationMilliseconds, null, null);
|
||||
|
||||
public static PolicyCompilationResultDto FromSuccess(
|
||||
DslCompilationResult compilationResult,
|
||||
PolicyComplexityReport complexity,
|
||||
PolicyCompileMetadata metadata,
|
||||
long durationMilliseconds)
|
||||
{
|
||||
if (compilationResult.Document is null)
|
||||
@@ -145,7 +196,9 @@ internal sealed record PolicyCompilationResultDto(
|
||||
compilationResult.CanonicalRepresentation,
|
||||
compilationResult.Diagnostics,
|
||||
complexity,
|
||||
durationMilliseconds);
|
||||
durationMilliseconds,
|
||||
compilationResult.Document,
|
||||
metadata);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,497 @@
|
||||
using System.Collections.Immutable;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Policy.Engine.Domain;
|
||||
using StellaOps.Policy.Engine.Telemetry;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Query options for retrieving explain traces.
|
||||
/// </summary>
|
||||
public sealed record ExplainQueryOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Filter by policy ID.
|
||||
/// </summary>
|
||||
public string? PolicyId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Filter by policy version.
|
||||
/// </summary>
|
||||
public int? PolicyVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Filter by run ID.
|
||||
/// </summary>
|
||||
public string? RunId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Filter by component PURL.
|
||||
/// </summary>
|
||||
public string? ComponentPurl { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Filter by vulnerability ID.
|
||||
/// </summary>
|
||||
public string? VulnerabilityId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Filter by final outcome.
|
||||
/// </summary>
|
||||
public string? FinalOutcome { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Filter by evaluation time range start.
|
||||
/// </summary>
|
||||
public DateTimeOffset? FromTime { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Filter by evaluation time range end.
|
||||
/// </summary>
|
||||
public DateTimeOffset? ToTime { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Maximum number of results to return.
|
||||
/// </summary>
|
||||
public int Limit { get; init; } = 100;
|
||||
|
||||
/// <summary>
|
||||
/// Number of results to skip for pagination.
|
||||
/// </summary>
|
||||
public int Skip { get; init; } = 0;
|
||||
|
||||
/// <summary>
|
||||
/// Include rule steps in results (can be large).
|
||||
/// </summary>
|
||||
public bool IncludeRuleSteps { get; init; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Include VEX evidence in results.
|
||||
/// </summary>
|
||||
public bool IncludeVexEvidence { get; init; } = true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Stored explain trace with AOC chain reference.
|
||||
/// </summary>
|
||||
public sealed record StoredExplainTrace
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier.
|
||||
/// </summary>
|
||||
public required string Id { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The explain trace data.
|
||||
/// </summary>
|
||||
public required ExplainTrace Trace { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the AOC chain for this decision.
|
||||
/// </summary>
|
||||
public ExplainAocChain? AocChain { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When this trace was stored.
|
||||
/// </summary>
|
||||
public required DateTimeOffset StoredAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// AOC chain linking a decision to its attestation chain.
|
||||
/// </summary>
|
||||
public sealed record ExplainAocChain
|
||||
{
|
||||
/// <summary>
|
||||
/// Compilation ID that produced the policy bundle.
|
||||
/// </summary>
|
||||
public required string CompilationId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Compiler version used.
|
||||
/// </summary>
|
||||
public required string CompilerVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Source digest of the policy document.
|
||||
/// </summary>
|
||||
public required string SourceDigest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Artifact digest of the compiled bundle.
|
||||
/// </summary>
|
||||
public required string ArtifactDigest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the signed attestation.
|
||||
/// </summary>
|
||||
public ExplainAttestationRef? AttestationRef { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Provenance information.
|
||||
/// </summary>
|
||||
public ExplainProvenance? Provenance { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Attestation reference for AOC chain.
|
||||
/// </summary>
|
||||
public sealed record ExplainAttestationRef(
|
||||
string AttestationId,
|
||||
string EnvelopeDigest,
|
||||
string? Uri,
|
||||
string? SigningKeyId);
|
||||
|
||||
/// <summary>
|
||||
/// Provenance for AOC chain.
|
||||
/// </summary>
|
||||
public sealed record ExplainProvenance(
|
||||
string SourceType,
|
||||
string? SourceUrl,
|
||||
string? Submitter,
|
||||
string? CommitSha,
|
||||
string? Branch);
|
||||
|
||||
/// <summary>
|
||||
/// Repository interface for explain trace persistence.
|
||||
/// </summary>
|
||||
public interface IExplainTraceRepository
|
||||
{
|
||||
/// <summary>
|
||||
/// Stores an explain trace.
|
||||
/// </summary>
|
||||
Task<StoredExplainTrace> StoreAsync(
|
||||
string tenantId,
|
||||
ExplainTrace trace,
|
||||
ExplainAocChain? aocChain,
|
||||
TimeSpan? retention,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Retrieves an explain trace by ID.
|
||||
/// </summary>
|
||||
Task<StoredExplainTrace?> GetByIdAsync(
|
||||
string tenantId,
|
||||
string id,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Retrieves an explain trace by run ID and subject hash.
|
||||
/// </summary>
|
||||
Task<StoredExplainTrace?> GetByRunAndSubjectAsync(
|
||||
string tenantId,
|
||||
string runId,
|
||||
string subjectHash,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Queries explain traces with filtering and pagination.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<StoredExplainTrace>> QueryAsync(
|
||||
string tenantId,
|
||||
ExplainQueryOptions options,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Gets all explain traces for a policy run.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<StoredExplainTrace>> GetByRunIdAsync(
|
||||
string tenantId,
|
||||
string runId,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Deletes explain traces older than the specified retention period.
|
||||
/// </summary>
|
||||
Task<int> PruneExpiredAsync(
|
||||
string tenantId,
|
||||
CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Service for persisting and retrieving policy explain traces with AOC chain linkage.
|
||||
/// </summary>
|
||||
internal sealed class PolicyExplainerService
|
||||
{
|
||||
private readonly IExplainTraceRepository _repository;
|
||||
private readonly IPolicyPackRepository _policyRepository;
|
||||
private readonly ILogger<PolicyExplainerService> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly TimeSpan _defaultRetention;
|
||||
|
||||
public PolicyExplainerService(
|
||||
IExplainTraceRepository repository,
|
||||
IPolicyPackRepository policyRepository,
|
||||
ILogger<PolicyExplainerService> logger,
|
||||
TimeProvider timeProvider,
|
||||
TimeSpan? defaultRetention = null)
|
||||
{
|
||||
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
|
||||
_policyRepository = policyRepository ?? throw new ArgumentNullException(nameof(policyRepository));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
_defaultRetention = defaultRetention ?? TimeSpan.FromDays(30);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Stores an explain trace and links it to the AOC chain from the policy bundle.
|
||||
/// </summary>
|
||||
public async Task<StoredExplainTrace> StoreExplainTraceAsync(
|
||||
string tenantId,
|
||||
ExplainTrace trace,
|
||||
TimeSpan? retention = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(tenantId);
|
||||
ArgumentNullException.ThrowIfNull(trace);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Storing explain trace for run {RunId}, policy {PolicyId}:{Version}, tenant {TenantId}",
|
||||
trace.RunId, trace.PolicyId, trace.PolicyVersion, tenantId);
|
||||
|
||||
// Try to get AOC chain from the policy bundle
|
||||
ExplainAocChain? aocChain = null;
|
||||
if (trace.PolicyVersion.HasValue)
|
||||
{
|
||||
var revision = await _policyRepository.GetRevisionAsync(
|
||||
trace.PolicyId,
|
||||
trace.PolicyVersion.Value,
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (revision?.Bundle?.AocMetadata is not null)
|
||||
{
|
||||
var aoc = revision.Bundle.AocMetadata;
|
||||
aocChain = new ExplainAocChain
|
||||
{
|
||||
CompilationId = aoc.CompilationId,
|
||||
CompilerVersion = aoc.CompilerVersion,
|
||||
SourceDigest = aoc.SourceDigest,
|
||||
ArtifactDigest = aoc.ArtifactDigest,
|
||||
AttestationRef = aoc.AttestationRef is not null
|
||||
? new ExplainAttestationRef(
|
||||
aoc.AttestationRef.AttestationId,
|
||||
aoc.AttestationRef.EnvelopeDigest,
|
||||
aoc.AttestationRef.Uri,
|
||||
aoc.AttestationRef.SigningKeyId)
|
||||
: null,
|
||||
Provenance = aoc.Provenance is not null
|
||||
? new ExplainProvenance(
|
||||
aoc.Provenance.SourceType,
|
||||
aoc.Provenance.SourceUrl,
|
||||
aoc.Provenance.Submitter,
|
||||
aoc.Provenance.CommitSha,
|
||||
aoc.Provenance.Branch)
|
||||
: null
|
||||
};
|
||||
|
||||
_logger.LogDebug(
|
||||
"Linked explain trace to AOC chain: compilation {CompilationId}, attestation {AttestationId}",
|
||||
aocChain.CompilationId,
|
||||
aocChain.AttestationRef?.AttestationId ?? "(none)");
|
||||
}
|
||||
}
|
||||
|
||||
var stored = await _repository.StoreAsync(
|
||||
tenantId,
|
||||
trace,
|
||||
aocChain,
|
||||
retention ?? _defaultRetention,
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
|
||||
PolicyEngineTelemetry.ExplainTracesStored.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId),
|
||||
new KeyValuePair<string, object?>("policy_id", trace.PolicyId));
|
||||
|
||||
return stored;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Retrieves an explain trace by its ID.
|
||||
/// </summary>
|
||||
public Task<StoredExplainTrace?> GetExplainTraceAsync(
|
||||
string tenantId,
|
||||
string traceId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(tenantId);
|
||||
ArgumentNullException.ThrowIfNull(traceId);
|
||||
|
||||
return _repository.GetByIdAsync(tenantId, traceId, cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Retrieves an explain trace for a specific decision.
|
||||
/// </summary>
|
||||
public Task<StoredExplainTrace?> GetExplainTraceForDecisionAsync(
|
||||
string tenantId,
|
||||
string runId,
|
||||
string subjectHash,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(tenantId);
|
||||
ArgumentNullException.ThrowIfNull(runId);
|
||||
ArgumentNullException.ThrowIfNull(subjectHash);
|
||||
|
||||
return _repository.GetByRunAndSubjectAsync(tenantId, runId, subjectHash, cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets all explain traces for a policy run.
|
||||
/// </summary>
|
||||
public Task<IReadOnlyList<StoredExplainTrace>> GetExplainTracesForRunAsync(
|
||||
string tenantId,
|
||||
string runId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(tenantId);
|
||||
ArgumentNullException.ThrowIfNull(runId);
|
||||
|
||||
return _repository.GetByRunIdAsync(tenantId, runId, cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Queries explain traces with filtering and pagination.
|
||||
/// </summary>
|
||||
public Task<IReadOnlyList<StoredExplainTrace>> QueryExplainTracesAsync(
|
||||
string tenantId,
|
||||
ExplainQueryOptions options,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(tenantId);
|
||||
options ??= new ExplainQueryOptions();
|
||||
|
||||
return _repository.QueryAsync(tenantId, options, cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the AOC chain for a stored explain trace.
|
||||
/// </summary>
|
||||
public async Task<ExplainAocChain?> GetAocChainForTraceAsync(
|
||||
string tenantId,
|
||||
string traceId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var trace = await GetExplainTraceAsync(tenantId, traceId, cancellationToken).ConfigureAwait(false);
|
||||
return trace?.AocChain;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Validates that an explain trace's AOC chain is intact.
|
||||
/// </summary>
|
||||
public async Task<AocChainValidationResult> ValidateAocChainAsync(
|
||||
string tenantId,
|
||||
string traceId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var trace = await GetExplainTraceAsync(tenantId, traceId, cancellationToken).ConfigureAwait(false);
|
||||
if (trace is null)
|
||||
{
|
||||
return new AocChainValidationResult(
|
||||
IsValid: false,
|
||||
ValidationMessage: "Explain trace not found",
|
||||
PolicyFound: false,
|
||||
BundleIntact: false,
|
||||
AttestationAvailable: false);
|
||||
}
|
||||
|
||||
if (trace.AocChain is null)
|
||||
{
|
||||
return new AocChainValidationResult(
|
||||
IsValid: false,
|
||||
ValidationMessage: "No AOC chain linked to this trace",
|
||||
PolicyFound: true,
|
||||
BundleIntact: false,
|
||||
AttestationAvailable: false);
|
||||
}
|
||||
|
||||
// Verify the policy revision still exists
|
||||
if (!trace.Trace.PolicyVersion.HasValue)
|
||||
{
|
||||
return new AocChainValidationResult(
|
||||
IsValid: false,
|
||||
ValidationMessage: "Trace has no policy version",
|
||||
PolicyFound: false,
|
||||
BundleIntact: false,
|
||||
AttestationAvailable: false);
|
||||
}
|
||||
|
||||
var revision = await _policyRepository.GetRevisionAsync(
|
||||
trace.Trace.PolicyId,
|
||||
trace.Trace.PolicyVersion.Value,
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (revision is null)
|
||||
{
|
||||
return new AocChainValidationResult(
|
||||
IsValid: false,
|
||||
ValidationMessage: $"Policy revision {trace.Trace.PolicyId}:{trace.Trace.PolicyVersion} no longer exists",
|
||||
PolicyFound: false,
|
||||
BundleIntact: false,
|
||||
AttestationAvailable: false);
|
||||
}
|
||||
|
||||
// Verify bundle digest matches
|
||||
var bundleIntact = revision.Bundle?.Digest == trace.AocChain.ArtifactDigest;
|
||||
if (!bundleIntact)
|
||||
{
|
||||
return new AocChainValidationResult(
|
||||
IsValid: false,
|
||||
ValidationMessage: "Bundle digest mismatch - policy bundle has been modified",
|
||||
PolicyFound: true,
|
||||
BundleIntact: false,
|
||||
AttestationAvailable: trace.AocChain.AttestationRef is not null);
|
||||
}
|
||||
|
||||
// Verify AOC metadata matches
|
||||
var aocMatches = revision.Bundle?.AocMetadata?.CompilationId == trace.AocChain.CompilationId &&
|
||||
revision.Bundle?.AocMetadata?.SourceDigest == trace.AocChain.SourceDigest;
|
||||
|
||||
if (!aocMatches)
|
||||
{
|
||||
return new AocChainValidationResult(
|
||||
IsValid: false,
|
||||
ValidationMessage: "AOC metadata mismatch - compilation chain has been modified",
|
||||
PolicyFound: true,
|
||||
BundleIntact: true,
|
||||
AttestationAvailable: trace.AocChain.AttestationRef is not null);
|
||||
}
|
||||
|
||||
return new AocChainValidationResult(
|
||||
IsValid: true,
|
||||
ValidationMessage: "AOC chain is intact and verifiable",
|
||||
PolicyFound: true,
|
||||
BundleIntact: true,
|
||||
AttestationAvailable: trace.AocChain.AttestationRef is not null);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Prunes expired explain traces for a tenant.
|
||||
/// </summary>
|
||||
public async Task<int> PruneExpiredTracesAsync(
|
||||
string tenantId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(tenantId);
|
||||
|
||||
var pruned = await _repository.PruneExpiredAsync(tenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (pruned > 0)
|
||||
{
|
||||
_logger.LogInformation(
|
||||
"Pruned {Count} expired explain traces for tenant {TenantId}",
|
||||
pruned, tenantId);
|
||||
}
|
||||
|
||||
return pruned;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of AOC chain validation.
|
||||
/// </summary>
|
||||
public sealed record AocChainValidationResult(
|
||||
bool IsValid,
|
||||
string ValidationMessage,
|
||||
bool PolicyFound,
|
||||
bool BundleIntact,
|
||||
bool AttestationAvailable);
|
||||
@@ -1,4 +1,5 @@
using System.Collections.Immutable;
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -6,6 +7,7 @@ using Microsoft.Extensions.Logging;
using StellaOps.Policy.Engine.Caching;
using StellaOps.Policy.Engine.Domain;
using StellaOps.Policy.Engine.Evaluation;
using StellaOps.Policy.Engine.Telemetry;
using StellaOps.PolicyDsl;

namespace StellaOps.Policy.Engine.Services;
@@ -88,6 +90,12 @@ internal sealed class PolicyRuntimeEvaluationService
    {
        ArgumentNullException.ThrowIfNull(request);

        using var activity = PolicyEngineTelemetry.StartEvaluateActivity(
            request.TenantId, request.PackId, runId: null);
        activity?.SetTag("policy.version", request.Version);
        activity?.SetTag("subject.purl", request.SubjectPurl);
        activity?.SetTag("advisory.id", request.AdvisoryId);

        var startTimestamp = _timeProvider.GetTimestamp();
        var evaluationTimestamp = request.EvaluationTimestamp ?? _timeProvider.GetUtcNow();

@@ -97,6 +105,9 @@ internal sealed class PolicyRuntimeEvaluationService

        if (bundle is null)
        {
            PolicyEngineTelemetry.RecordError("evaluation", request.TenantId);
            PolicyEngineTelemetry.RecordEvaluationFailure(request.TenantId, request.PackId, "bundle_not_found");
            activity?.SetStatus(ActivityStatusCode.Error, "Bundle not found");
            throw new InvalidOperationException(
                $"Policy bundle not found for pack '{request.PackId}' version {request.Version}.");
        }
@@ -113,6 +124,12 @@ internal sealed class PolicyRuntimeEvaluationService
        if (cacheResult.CacheHit && cacheResult.Entry is not null)
        {
            var duration = GetElapsedMilliseconds(startTimestamp);
            var durationSeconds = duration / 1000.0;
            PolicyEngineTelemetry.RecordEvaluationLatency(durationSeconds, request.TenantId, request.PackId);
            PolicyEngineTelemetry.RecordEvaluation(request.TenantId, request.PackId, "cached");
            activity?.SetTag("cache.hit", true);
            activity?.SetTag("cache.source", cacheResult.Source.ToString());
            activity?.SetStatus(ActivityStatusCode.Ok);
            _logger.LogDebug(
                "Cache hit for evaluation {PackId}@{Version} subject {Subject} from {Source}",
                request.PackId, request.Version, request.SubjectPurl, cacheResult.Source);
@@ -122,12 +139,17 @@ internal sealed class PolicyRuntimeEvaluationService
            }
        }

        activity?.SetTag("cache.hit", false);

        // Cache miss - perform evaluation
        var document = DeserializeCompiledPolicy(bundle.Payload);
        var document = bundle.CompiledDocument;
        if (document is null)
        {
            PolicyEngineTelemetry.RecordError("evaluation", request.TenantId);
            PolicyEngineTelemetry.RecordEvaluationFailure(request.TenantId, request.PackId, "document_not_found");
            activity?.SetStatus(ActivityStatusCode.Error, "Document not found");
            throw new InvalidOperationException(
                $"Failed to deserialize compiled policy for pack '{request.PackId}' version {request.Version}.");
                $"Compiled policy document not found for pack '{request.PackId}' version {request.Version}.");
        }

        var context = new PolicyEvaluationContext(
@@ -162,6 +184,21 @@ internal sealed class PolicyRuntimeEvaluationService
        await _cache.SetAsync(cacheKey, cacheEntry, cancellationToken).ConfigureAwait(false);

        var evalDuration = GetElapsedMilliseconds(startTimestamp);
        var evalDurationSeconds = evalDuration / 1000.0;

        // Record metrics
        PolicyEngineTelemetry.RecordEvaluationLatency(evalDurationSeconds, request.TenantId, request.PackId);
        PolicyEngineTelemetry.RecordEvaluation(request.TenantId, request.PackId, "full");
        if (!string.IsNullOrEmpty(result.RuleName))
        {
            PolicyEngineTelemetry.RecordRuleFired(request.PackId, result.RuleName);
        }

        activity?.SetTag("evaluation.status", result.Status);
        activity?.SetTag("evaluation.rule", result.RuleName ?? "none");
        activity?.SetTag("evaluation.duration_ms", evalDuration);
        activity?.SetStatus(ActivityStatusCode.Ok);

        _logger.LogDebug(
            "Evaluated {PackId}@{Version} subject {Subject} in {Duration}ms - {Status}",
            request.PackId, request.Version, request.SubjectPurl, evalDuration, result.Status);
@@ -195,7 +232,13 @@ internal sealed class PolicyRuntimeEvaluationService
            return Array.Empty<RuntimeEvaluationResponse>();
        }

        using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity("policy.evaluate_batch", ActivityKind.Internal);
        activity?.SetTag("batch.size", requests.Count);

        var batchStartTimestamp = _timeProvider.GetTimestamp();
        var results = new List<RuntimeEvaluationResponse>(requests.Count);
        var cacheHits = 0;
        var cacheMisses = 0;

        // Group by pack/version for bundle loading efficiency
        var groups = requests.GroupBy(r => (r.PackId, r.Version));
@@ -210,6 +253,7 @@ internal sealed class PolicyRuntimeEvaluationService
            {
                foreach (var request in group)
                {
                    PolicyEngineTelemetry.RecordEvaluationFailure(request.TenantId, packId, "bundle_not_found");
                    _logger.LogWarning(
                        "Policy bundle not found for pack '{PackId}' version {Version}, skipping evaluation",
                        packId, version);
@@ -217,11 +261,12 @@ internal sealed class PolicyRuntimeEvaluationService
                continue;
            }

            var document = DeserializeCompiledPolicy(bundle.Payload);
            var document = bundle.CompiledDocument;
            if (document is null)
            {
                PolicyEngineTelemetry.RecordEvaluationFailure("default", packId, "document_not_found");
                _logger.LogWarning(
                    "Failed to deserialize policy bundle for pack '{PackId}' version {Version}",
                    "Compiled policy document not found for pack '{PackId}' version {Version}",
                    packId, version);
                continue;
            }
@@ -249,6 +294,8 @@ internal sealed class PolicyRuntimeEvaluationService
                {
                    var response = CreateResponseFromCache(request, bundle.Digest, entry, CacheSource.InMemory, 0);
                    results.Add(response);
                    cacheHits++;
                    PolicyEngineTelemetry.RecordEvaluation(request.TenantId, packId, "cached");
                }
                else
                {
@@ -294,6 +341,15 @@ internal sealed class PolicyRuntimeEvaluationService
                        expiresAt);

                    entriesToCache[key] = cacheEntry;
                    cacheMisses++;

                    // Record metrics for each evaluation
                    PolicyEngineTelemetry.RecordEvaluationLatency(duration / 1000.0, request.TenantId, packId);
                    PolicyEngineTelemetry.RecordEvaluation(request.TenantId, packId, "full");
                    if (!string.IsNullOrEmpty(result.RuleName))
                    {
                        PolicyEngineTelemetry.RecordRuleFired(packId, result.RuleName);
                    }

                    results.Add(new RuntimeEvaluationResponse(
                        request.PackId,
@@ -319,6 +375,17 @@ internal sealed class PolicyRuntimeEvaluationService
            }
        }

        // Record batch-level metrics
        var batchDuration = GetElapsedMilliseconds(batchStartTimestamp);
        activity?.SetTag("batch.cache_hits", cacheHits);
        activity?.SetTag("batch.cache_misses", cacheMisses);
        activity?.SetTag("batch.duration_ms", batchDuration);
        activity?.SetStatus(ActivityStatusCode.Ok);

        _logger.LogDebug(
            "Batch evaluation completed: {Total} subjects, {CacheHits} cache hits, {CacheMisses} evaluated in {Duration}ms",
            requests.Count, cacheHits, cacheMisses, batchDuration);

        return results;
    }

@@ -398,24 +465,6 @@ internal sealed class PolicyRuntimeEvaluationService
        return Convert.ToHexString(hash);
    }

    private static PolicyIrDocument? DeserializeCompiledPolicy(ImmutableArray<byte> payload)
    {
        if (payload.IsDefaultOrEmpty)
        {
            return null;
        }

        try
        {
            var json = Encoding.UTF8.GetString(payload.AsSpan());
            return JsonSerializer.Deserialize<PolicyIrDocument>(json);
        }
        catch
        {
            return null;
        }
    }

    private long GetElapsedMilliseconds(long startTimestamp)
    {
        var elapsed = _timeProvider.GetElapsedTime(startTimestamp);

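Reviewer sketch (not part of the commit): the diff replaces the per-evaluation DeserializeCompiledPolicy call with a CompiledDocument property on the loaded bundle, so the JSON parse happens once per bundle rather than once per evaluation. The class below is a hypothetical illustration of that shape; the real bundle type is not shown in this diff, and the same usings as the removed helper (System.Collections.Immutable, System.Text, System.Text.Json) are assumed.

internal sealed class LoadedPolicyBundle
{
    private readonly Lazy<PolicyIrDocument?> _compiledDocument;

    public LoadedPolicyBundle(ImmutableArray<byte> payload)
    {
        // Parse lazily and only once; repeated evaluations reuse the cached document.
        _compiledDocument = new Lazy<PolicyIrDocument?>(() =>
        {
            if (payload.IsDefaultOrEmpty)
            {
                return null;
            }

            try
            {
                var json = Encoding.UTF8.GetString(payload.AsSpan());
                return JsonSerializer.Deserialize<PolicyIrDocument>(json);
            }
            catch (JsonException)
            {
                return null;
            }
        });
    }

    public PolicyIrDocument? CompiledDocument => _compiledDocument.Value;
}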
@@ -61,7 +61,8 @@ public sealed record RiskSimulationResult(
    [property: JsonPropertyName("distribution")] RiskDistribution? Distribution,
    [property: JsonPropertyName("top_movers")] IReadOnlyList<TopMover>? TopMovers,
    [property: JsonPropertyName("aggregate_metrics")] AggregateRiskMetrics AggregateMetrics,
    [property: JsonPropertyName("execution_time_ms")] double ExecutionTimeMs);
    [property: JsonPropertyName("execution_time_ms")] double ExecutionTimeMs,
    [property: JsonPropertyName("analytics")] SimulationAnalytics? Analytics = null);

/// <summary>
/// Computed risk score for a finding.

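Reviewer note: because the new analytics parameter is appended with a null default, existing positional constructions of RiskSimulationResult keep compiling and the serialized payload only gains an "analytics" field when it is populated. A hedged sketch, where both variables are placeholders:

// Illustrative only: existing call sites are unaffected; new ones can opt in.
var withoutAnalytics = existingResult with { Analytics = null };              // unchanged wire shape
var withAnalytics = existingResult with { Analytics = computedAnalytics };    // adds "analytics": { ... }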
@@ -0,0 +1,236 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;

namespace StellaOps.Policy.Engine.Simulation;

/// <summary>
/// Extended simulation analytics including rule firing counts, heatmaps, traces, and delta summaries.
/// </summary>
public sealed record SimulationAnalytics(
    [property: JsonPropertyName("rule_firing_counts")] RuleFiringCounts RuleFiringCounts,
    [property: JsonPropertyName("heatmap")] SimulationHeatmap Heatmap,
    [property: JsonPropertyName("sampled_traces")] SampledExplainTraces SampledTraces,
    [property: JsonPropertyName("delta_summary")] SimulationDeltaSummary? DeltaSummary);

/// <summary>
/// Rule firing counts aggregated across simulation runs.
/// </summary>
public sealed record RuleFiringCounts(
    [property: JsonPropertyName("total_evaluations")] int TotalEvaluations,
    [property: JsonPropertyName("total_rules_fired")] int TotalRulesFired,
    [property: JsonPropertyName("rules_by_name")] ImmutableDictionary<string, RuleFireCount> RulesByName,
    [property: JsonPropertyName("rules_by_priority")] ImmutableDictionary<int, int> RulesByPriority,
    [property: JsonPropertyName("rules_by_outcome")] ImmutableDictionary<string, int> RulesByOutcome,
    [property: JsonPropertyName("rules_by_category")] ImmutableDictionary<string, int> RulesByCategory,
    [property: JsonPropertyName("top_rules")] ImmutableArray<RuleFireCount> TopRules,
    [property: JsonPropertyName("vex_override_counts")] VexOverrideCounts VexOverrides);

/// <summary>
/// Fire count for a single rule.
/// </summary>
public sealed record RuleFireCount(
    [property: JsonPropertyName("rule_name")] string RuleName,
    [property: JsonPropertyName("priority")] int Priority,
    [property: JsonPropertyName("category")] string? Category,
    [property: JsonPropertyName("fire_count")] int FireCount,
    [property: JsonPropertyName("fire_percentage")] double FirePercentage,
    [property: JsonPropertyName("outcomes")] ImmutableDictionary<string, int> OutcomeBreakdown,
    [property: JsonPropertyName("avg_evaluation_us")] double AverageEvaluationMicroseconds);

/// <summary>
/// VEX override aggregation.
/// </summary>
public sealed record VexOverrideCounts(
    [property: JsonPropertyName("total_overrides")] int TotalOverrides,
    [property: JsonPropertyName("by_vendor")] ImmutableDictionary<string, int> ByVendor,
    [property: JsonPropertyName("by_status")] ImmutableDictionary<string, int> ByStatus,
    [property: JsonPropertyName("by_justification")] ImmutableDictionary<string, int> ByJustification);

/// <summary>
/// Heatmap aggregates for visualization.
/// </summary>
public sealed record SimulationHeatmap(
    [property: JsonPropertyName("rule_severity_matrix")] ImmutableArray<HeatmapCell> RuleSeverityMatrix,
    [property: JsonPropertyName("rule_outcome_matrix")] ImmutableArray<HeatmapCell> RuleOutcomeMatrix,
    [property: JsonPropertyName("finding_rule_coverage")] FindingRuleCoverage FindingRuleCoverage,
    [property: JsonPropertyName("temporal_distribution")] ImmutableArray<TemporalBucket> TemporalDistribution);

/// <summary>
/// A cell in the heatmap matrix.
/// </summary>
public sealed record HeatmapCell(
    [property: JsonPropertyName("x")] string X,
    [property: JsonPropertyName("y")] string Y,
    [property: JsonPropertyName("value")] int Value,
    [property: JsonPropertyName("normalized")] double Normalized);

/// <summary>
/// Coverage of findings by rules.
/// </summary>
public sealed record FindingRuleCoverage(
    [property: JsonPropertyName("total_findings")] int TotalFindings,
    [property: JsonPropertyName("findings_matched")] int FindingsMatched,
    [property: JsonPropertyName("findings_unmatched")] int FindingsUnmatched,
    [property: JsonPropertyName("coverage_percentage")] double CoveragePercentage,
    [property: JsonPropertyName("rules_never_fired")] ImmutableArray<string> RulesNeverFired,
    [property: JsonPropertyName("findings_by_match_count")] ImmutableDictionary<int, int> FindingsByMatchCount);

/// <summary>
/// Temporal distribution bucket.
/// </summary>
public sealed record TemporalBucket(
    [property: JsonPropertyName("bucket_start_ms")] long BucketStartMs,
    [property: JsonPropertyName("bucket_end_ms")] long BucketEndMs,
    [property: JsonPropertyName("evaluation_count")] int EvaluationCount,
    [property: JsonPropertyName("rules_fired")] int RulesFired);

/// <summary>
/// Sampled explain traces with deterministic ordering.
/// </summary>
public sealed record SampledExplainTraces(
    [property: JsonPropertyName("sample_rate")] double SampleRate,
    [property: JsonPropertyName("total_traces")] int TotalTraces,
    [property: JsonPropertyName("sampled_count")] int SampledCount,
    [property: JsonPropertyName("ordering")] TraceOrdering Ordering,
    [property: JsonPropertyName("traces")] ImmutableArray<SampledTrace> Traces,
    [property: JsonPropertyName("determinism_hash")] string DeterminismHash);

/// <summary>
/// Deterministic ordering specification.
/// </summary>
public sealed record TraceOrdering(
    [property: JsonPropertyName("primary_key")] string PrimaryKey,
    [property: JsonPropertyName("secondary_key")] string? SecondaryKey,
    [property: JsonPropertyName("direction")] string Direction);

/// <summary>
/// A sampled trace with key metadata.
/// </summary>
public sealed record SampledTrace(
    [property: JsonPropertyName("trace_id")] string TraceId,
    [property: JsonPropertyName("finding_id")] string FindingId,
    [property: JsonPropertyName("component_purl")] string? ComponentPurl,
    [property: JsonPropertyName("advisory_id")] string? AdvisoryId,
    [property: JsonPropertyName("final_outcome")] string FinalOutcome,
    [property: JsonPropertyName("assigned_severity")] string? AssignedSeverity,
    [property: JsonPropertyName("rules_evaluated")] int RulesEvaluated,
    [property: JsonPropertyName("rules_fired")] int RulesFired,
    [property: JsonPropertyName("vex_applied")] bool VexApplied,
    [property: JsonPropertyName("evaluation_ms")] double EvaluationMs,
    [property: JsonPropertyName("rule_sequence")] ImmutableArray<string> RuleSequence,
    [property: JsonPropertyName("sample_reason")] string SampleReason);

/// <summary>
/// Delta summary comparing simulation results.
/// </summary>
public sealed record SimulationDeltaSummary(
    [property: JsonPropertyName("comparison_type")] SimulationComparisonType ComparisonType,
    [property: JsonPropertyName("base_policy_ref")] string BasePolicyRef,
    [property: JsonPropertyName("candidate_policy_ref")] string? CandidatePolicyRef,
    [property: JsonPropertyName("total_findings")] int TotalFindings,
    [property: JsonPropertyName("outcome_changes")] OutcomeChangeSummary OutcomeChanges,
    [property: JsonPropertyName("severity_changes")] SeverityChangeSummary SeverityChanges,
    [property: JsonPropertyName("rule_changes")] RuleChangeSummary RuleChanges,
    [property: JsonPropertyName("high_impact_findings")] ImmutableArray<HighImpactFinding> HighImpactFindings,
    [property: JsonPropertyName("determinism_hash")] string DeterminismHash);

/// <summary>
/// Type of simulation comparison.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<SimulationComparisonType>))]
public enum SimulationComparisonType
{
    /// <summary>Single policy snapshot.</summary>
    [JsonPropertyName("snapshot")]
    Snapshot,

    /// <summary>Comparing two policy versions.</summary>
    [JsonPropertyName("version_compare")]
    VersionCompare,

    /// <summary>What-if analysis with hypothetical changes.</summary>
    [JsonPropertyName("whatif")]
    WhatIf,

    /// <summary>Batch comparison across multiple inputs.</summary>
    [JsonPropertyName("batch")]
    Batch
}

/// <summary>
/// Summary of outcome changes.
/// </summary>
public sealed record OutcomeChangeSummary(
    [property: JsonPropertyName("unchanged")] int Unchanged,
    [property: JsonPropertyName("improved")] int Improved,
    [property: JsonPropertyName("regressed")] int Regressed,
    [property: JsonPropertyName("transitions")] ImmutableArray<OutcomeTransition> Transitions);

/// <summary>
/// A specific outcome transition.
/// </summary>
public sealed record OutcomeTransition(
    [property: JsonPropertyName("from_outcome")] string FromOutcome,
    [property: JsonPropertyName("to_outcome")] string ToOutcome,
    [property: JsonPropertyName("count")] int Count,
    [property: JsonPropertyName("percentage")] double Percentage,
    [property: JsonPropertyName("is_improvement")] bool IsImprovement);

/// <summary>
/// Summary of severity changes.
/// </summary>
public sealed record SeverityChangeSummary(
    [property: JsonPropertyName("unchanged")] int Unchanged,
    [property: JsonPropertyName("escalated")] int Escalated,
    [property: JsonPropertyName("deescalated")] int Deescalated,
    [property: JsonPropertyName("transitions")] ImmutableArray<SeverityTransition> Transitions);

/// <summary>
/// A specific severity transition.
/// </summary>
public sealed record SeverityTransition(
    [property: JsonPropertyName("from_severity")] string FromSeverity,
    [property: JsonPropertyName("to_severity")] string ToSeverity,
    [property: JsonPropertyName("count")] int Count,
    [property: JsonPropertyName("percentage")] double Percentage);

/// <summary>
/// Summary of rule behavior changes.
/// </summary>
public sealed record RuleChangeSummary(
    [property: JsonPropertyName("rules_added")] ImmutableArray<string> RulesAdded,
    [property: JsonPropertyName("rules_removed")] ImmutableArray<string> RulesRemoved,
    [property: JsonPropertyName("rules_modified")] ImmutableArray<RuleModification> RulesModified,
    [property: JsonPropertyName("fire_rate_changes")] ImmutableArray<RuleFireRateChange> FireRateChanges);

/// <summary>
/// A rule modification between versions.
/// </summary>
public sealed record RuleModification(
    [property: JsonPropertyName("rule_name")] string RuleName,
    [property: JsonPropertyName("modification_type")] string ModificationType,
    [property: JsonPropertyName("description")] string Description);

/// <summary>
/// Change in rule fire rate.
/// </summary>
public sealed record RuleFireRateChange(
    [property: JsonPropertyName("rule_name")] string RuleName,
    [property: JsonPropertyName("base_fire_rate")] double BaseFireRate,
    [property: JsonPropertyName("candidate_fire_rate")] double CandidateFireRate,
    [property: JsonPropertyName("change_percentage")] double ChangePercentage,
    [property: JsonPropertyName("is_significant")] bool IsSignificant);

/// <summary>
/// A finding with high impact from policy changes.
/// </summary>
public sealed record HighImpactFinding(
    [property: JsonPropertyName("finding_id")] string FindingId,
    [property: JsonPropertyName("component_purl")] string? ComponentPurl,
    [property: JsonPropertyName("advisory_id")] string? AdvisoryId,
    [property: JsonPropertyName("base_outcome")] string BaseOutcome,
    [property: JsonPropertyName("candidate_outcome")] string? CandidateOutcome,
    [property: JsonPropertyName("base_severity")] string? BaseSeverity,
    [property: JsonPropertyName("candidate_severity")] string? CandidateSeverity,
    [property: JsonPropertyName("impact_score")] double ImpactScore,
    [property: JsonPropertyName("impact_reason")] string ImpactReason);
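Reviewer sketch (not part of the commit): every record above carries explicit JsonPropertyName attributes, so the snake_case wire shape does not depend on a global naming policy. A minimal round-trip, with invented sample values; assumes System.Text.Json and System.Collections.Immutable are in scope.

// Illustrative only: serializing a small analytics fragment.
var counts = new VexOverrideCounts(
    TotalOverrides: 2,
    ByVendor: ImmutableDictionary<string, int>.Empty.Add("vendor-a", 2),
    ByStatus: ImmutableDictionary<string, int>.Empty.Add("not_affected", 2),
    ByJustification: ImmutableDictionary<string, int>.Empty.Add("vulnerable_code_not_present", 2));

var json = JsonSerializer.Serialize(counts);
// json contains snake_case keys such as "total_overrides" and "by_vendor".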
@@ -0,0 +1,811 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using StellaOps.Policy.Engine.Telemetry;

namespace StellaOps.Policy.Engine.Simulation;

/// <summary>
/// Service for computing simulation analytics including rule firing counts, heatmaps,
/// sampled traces, and delta summaries.
/// </summary>
public sealed class SimulationAnalyticsService
{
    private static readonly ImmutableArray<string> OutcomeSeverityOrder = ImmutableArray.Create(
        "allow", "info", "warn", "review", "block", "deny", "critical");

    private static readonly ImmutableArray<string> SeverityOrder = ImmutableArray.Create(
        "informational", "low", "medium", "high", "critical");

    /// <summary>
    /// Computes full simulation analytics from rule hit traces.
    /// </summary>
    public SimulationAnalytics ComputeAnalytics(
        string policyRef,
        IReadOnlyList<RuleHitTrace> traces,
        IReadOnlyList<SimulationFinding> findings,
        SimulationAnalyticsOptions? options = null)
    {
        options ??= SimulationAnalyticsOptions.Default;

        var firingCounts = ComputeRuleFiringCounts(traces, findings.Count);
        var heatmap = ComputeHeatmap(traces, findings, options);
        var sampledTraces = ComputeSampledTraces(traces, findings, options);

        return new SimulationAnalytics(
            firingCounts,
            heatmap,
            sampledTraces,
            DeltaSummary: null);
    }
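Reviewer sketch (not part of the commit): a typical call into ComputeAnalytics; as shown above, policyRef is accepted but not referenced in this method body. The trace and finding collections come from a simulation run elsewhere in the engine, so the variables below are placeholders.

// Illustrative only.
var analyticsService = new SimulationAnalyticsService();
SimulationAnalytics analytics = analyticsService.ComputeAnalytics(
    policyRef: "policy-pack@42",
    traces: ruleHitTraces,
    findings: simulationFindings,
    options: SimulationAnalyticsOptions.Batch);

Console.WriteLine(
    $"{analytics.RuleFiringCounts.TotalRulesFired} rule firings across " +
    $"{analytics.RuleFiringCounts.TotalEvaluations} evaluations");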
|
||||
/// <summary>
|
||||
/// Computes delta summary comparing base and candidate simulation results.
|
||||
/// </summary>
|
||||
public SimulationDeltaSummary ComputeDeltaSummary(
|
||||
string basePolicyRef,
|
||||
string candidatePolicyRef,
|
||||
IReadOnlyList<SimulationFindingResult> baseResults,
|
||||
IReadOnlyList<SimulationFindingResult> candidateResults,
|
||||
SimulationComparisonType comparisonType = SimulationComparisonType.VersionCompare)
|
||||
{
|
||||
var baseByFinding = baseResults.ToDictionary(r => r.FindingId);
|
||||
var candidateByFinding = candidateResults.ToDictionary(r => r.FindingId);
|
||||
|
||||
var outcomeChanges = ComputeOutcomeChanges(baseByFinding, candidateByFinding);
|
||||
var severityChanges = ComputeSeverityChanges(baseByFinding, candidateByFinding);
|
||||
var ruleChanges = ComputeRuleChanges(baseResults, candidateResults);
|
||||
var highImpact = ComputeHighImpactFindings(baseByFinding, candidateByFinding);
|
||||
|
||||
var hashInput = $"{basePolicyRef}:{candidatePolicyRef}:{baseResults.Count}:{candidateResults.Count}";
|
||||
var determinismHash = ComputeHash(hashInput);
|
||||
|
||||
return new SimulationDeltaSummary(
|
||||
comparisonType,
|
||||
basePolicyRef,
|
||||
candidatePolicyRef,
|
||||
TotalFindings: baseResults.Count,
|
||||
outcomeChanges,
|
||||
severityChanges,
|
||||
ruleChanges,
|
||||
highImpact,
|
||||
determinismHash);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Computes rule firing counts from traces.
|
||||
/// </summary>
|
||||
public RuleFiringCounts ComputeRuleFiringCounts(
|
||||
IReadOnlyList<RuleHitTrace> traces,
|
||||
int totalEvaluations)
|
||||
{
|
||||
var ruleStats = new Dictionary<string, RuleStats>();
|
||||
var byPriority = new Dictionary<int, int>();
|
||||
var byOutcome = new Dictionary<string, int>();
|
||||
var byCategory = new Dictionary<string, int>();
|
||||
var vexByVendor = new Dictionary<string, int>();
|
||||
var vexByStatus = new Dictionary<string, int>();
|
||||
var vexByJustification = new Dictionary<string, int>();
|
||||
var totalFired = 0;
|
||||
var totalVexOverrides = 0;
|
||||
|
||||
foreach (var trace in traces)
|
||||
{
|
||||
if (!trace.ExpressionResult)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
totalFired++;
|
||||
|
||||
// Rule stats
|
||||
if (!ruleStats.TryGetValue(trace.RuleName, out var stats))
|
||||
{
|
||||
stats = new RuleStats(trace.RuleName, trace.RulePriority, trace.RuleCategory);
|
||||
ruleStats[trace.RuleName] = stats;
|
||||
}
|
||||
|
||||
stats.FireCount++;
|
||||
stats.TotalEvaluationUs += trace.EvaluationMicroseconds;
|
||||
stats.IncrementOutcome(trace.Outcome);
|
||||
|
||||
// Priority aggregation
|
||||
byPriority.TryGetValue(trace.RulePriority, out var priorityCount);
|
||||
byPriority[trace.RulePriority] = priorityCount + 1;
|
||||
|
||||
// Outcome aggregation
|
||||
byOutcome.TryGetValue(trace.Outcome, out var outcomeCount);
|
||||
byOutcome[trace.Outcome] = outcomeCount + 1;
|
||||
|
||||
// Category aggregation
|
||||
if (!string.IsNullOrWhiteSpace(trace.RuleCategory))
|
||||
{
|
||||
byCategory.TryGetValue(trace.RuleCategory, out var categoryCount);
|
||||
byCategory[trace.RuleCategory] = categoryCount + 1;
|
||||
}
|
||||
|
||||
// VEX overrides
|
||||
if (trace.IsVexOverride)
|
||||
{
|
||||
totalVexOverrides++;
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(trace.VexVendor))
|
||||
{
|
||||
vexByVendor.TryGetValue(trace.VexVendor, out var vendorCount);
|
||||
vexByVendor[trace.VexVendor] = vendorCount + 1;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(trace.VexStatus))
|
||||
{
|
||||
vexByStatus.TryGetValue(trace.VexStatus, out var statusCount);
|
||||
vexByStatus[trace.VexStatus] = statusCount + 1;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(trace.VexJustification))
|
||||
{
|
||||
vexByJustification.TryGetValue(trace.VexJustification, out var justCount);
|
||||
vexByJustification[trace.VexJustification] = justCount + 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Build rule fire counts
|
||||
var ruleFireCounts = ruleStats.Values
|
||||
.Select(s => new RuleFireCount(
|
||||
s.RuleName,
|
||||
s.Priority,
|
||||
s.Category,
|
||||
s.FireCount,
|
||||
totalEvaluations > 0 ? (double)s.FireCount / totalEvaluations * 100 : 0,
|
||||
s.OutcomeCounts.ToImmutableDictionary(),
|
||||
s.FireCount > 0 ? (double)s.TotalEvaluationUs / s.FireCount : 0))
|
||||
.ToImmutableDictionary(r => r.RuleName);
|
||||
|
||||
var topRules = ruleFireCounts.Values
|
||||
.OrderByDescending(r => r.FireCount)
|
||||
.Take(10)
|
||||
.ToImmutableArray();
|
||||
|
||||
var vexOverrides = new VexOverrideCounts(
|
||||
totalVexOverrides,
|
||||
vexByVendor.ToImmutableDictionary(),
|
||||
vexByStatus.ToImmutableDictionary(),
|
||||
vexByJustification.ToImmutableDictionary());
|
||||
|
||||
return new RuleFiringCounts(
|
||||
totalEvaluations,
|
||||
totalFired,
|
||||
ruleFireCounts,
|
||||
byPriority.ToImmutableDictionary(),
|
||||
byOutcome.ToImmutableDictionary(),
|
||||
byCategory.ToImmutableDictionary(),
|
||||
topRules,
|
||||
vexOverrides);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Computes heatmap aggregates for visualization.
|
||||
/// </summary>
|
||||
public SimulationHeatmap ComputeHeatmap(
|
||||
IReadOnlyList<RuleHitTrace> traces,
|
||||
IReadOnlyList<SimulationFinding> findings,
|
||||
SimulationAnalyticsOptions options)
|
||||
{
|
||||
var ruleSeverityMatrix = ComputeRuleSeverityMatrix(traces);
|
||||
var ruleOutcomeMatrix = ComputeRuleOutcomeMatrix(traces);
|
||||
var findingCoverage = ComputeFindingRuleCoverage(traces, findings);
|
||||
var temporalDist = ComputeTemporalDistribution(traces, options.TemporalBucketMs);
|
||||
|
||||
return new SimulationHeatmap(
|
||||
ruleSeverityMatrix,
|
||||
ruleOutcomeMatrix,
|
||||
findingCoverage,
|
||||
temporalDist);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Computes sampled explain traces with deterministic ordering.
|
||||
/// </summary>
|
||||
public SampledExplainTraces ComputeSampledTraces(
|
||||
IReadOnlyList<RuleHitTrace> traces,
|
||||
IReadOnlyList<SimulationFinding> findings,
|
||||
SimulationAnalyticsOptions options)
|
||||
{
|
||||
// Group traces by finding
|
||||
var tracesByFinding = traces
|
||||
.GroupBy(t => t.ComponentPurl ?? t.AdvisoryId ?? "unknown")
|
||||
.ToDictionary(g => g.Key, g => g.ToList());
|
||||
|
||||
var findingsById = findings.ToDictionary(f => f.FindingId);
|
||||
|
||||
// Deterministic ordering by finding_id, then rule_priority
|
||||
var ordering = new TraceOrdering("finding_id", "rule_priority", "ascending");
|
||||
|
||||
// Sample traces deterministically
|
||||
var sampledList = new List<SampledTrace>();
|
||||
var totalTraceCount = 0;
|
||||
|
||||
foreach (var finding in findings.OrderBy(f => f.FindingId, StringComparer.Ordinal))
|
||||
{
|
||||
var key = finding.ComponentPurl ?? finding.AdvisoryId ?? finding.FindingId;
|
||||
if (!tracesByFinding.TryGetValue(key, out var findingTraces))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
totalTraceCount += findingTraces.Count;
|
||||
|
||||
// Deterministic sampling based on finding_id hash
|
||||
var sampleHash = ComputeHash(finding.FindingId);
|
||||
var sampleValue = Math.Abs(sampleHash.GetHashCode()) % 100;
|
||||
var shouldSample = sampleValue < (int)(options.TraceSampleRate * 100);
|
||||
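// Reviewer note (sketch, not part of the commit): string.GetHashCode() is randomized per
// process on modern .NET, so `Math.Abs(sampleHash.GetHashCode()) % 100` can pick a different
// sample set on each run even for identical inputs, which works against the stated goal of
// deterministic sampling. If cross-run determinism is required, deriving the bucket from the
// hex digest itself is stable, for example:
//
//     var stableBucket = Convert.ToInt32(sampleHash[..2], 16) % 100; // first byte of the SHA-256 hex
//     var shouldSampleStable = stableBucket < (int)(options.TraceSampleRate * 100);
//
// The names above are illustrative only.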
|
||||
if (!shouldSample && sampledList.Count >= options.MaxSampledTraces)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Always sample high-impact findings
|
||||
var hasFiredRule = findingTraces.Any(t => t.ExpressionResult);
|
||||
var isHighSeverity = findingTraces.Any(t =>
|
||||
t.AssignedSeverity?.Equals("critical", StringComparison.OrdinalIgnoreCase) == true ||
|
||||
t.AssignedSeverity?.Equals("high", StringComparison.OrdinalIgnoreCase) == true);
|
||||
var hasVexOverride = findingTraces.Any(t => t.IsVexOverride);
|
||||
|
||||
var sampleReason = DetermineSampleReason(shouldSample, isHighSeverity, hasVexOverride);
|
||||
|
||||
if (!shouldSample && !isHighSeverity && !hasVexOverride)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var orderedTraces = findingTraces.OrderBy(t => t.RulePriority).ToList();
|
||||
var finalTrace = orderedTraces.LastOrDefault(t => t.ExpressionResult) ?? orderedTraces.LastOrDefault();
|
||||
|
||||
if (finalTrace == null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var ruleSequence = orderedTraces
|
||||
.Where(t => t.ExpressionResult)
|
||||
.Select(t => t.RuleName)
|
||||
.ToImmutableArray();
|
||||
|
||||
sampledList.Add(new SampledTrace(
|
||||
TraceId: $"{finding.FindingId}:{finalTrace.SpanId}",
|
||||
FindingId: finding.FindingId,
|
||||
ComponentPurl: finding.ComponentPurl,
|
||||
AdvisoryId: finding.AdvisoryId,
|
||||
FinalOutcome: finalTrace.Outcome,
|
||||
AssignedSeverity: finalTrace.AssignedSeverity,
|
||||
RulesEvaluated: findingTraces.Count,
|
||||
RulesFired: findingTraces.Count(t => t.ExpressionResult),
|
||||
VexApplied: hasVexOverride,
|
||||
EvaluationMs: findingTraces.Sum(t => t.EvaluationMicroseconds) / 1000.0,
|
||||
RuleSequence: ruleSequence,
|
||||
SampleReason: sampleReason));
|
||||
|
||||
if (sampledList.Count >= options.MaxSampledTraces)
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Compute determinism hash from ordered sample
|
||||
var hashBuilder = new StringBuilder();
|
||||
foreach (var sample in sampledList.OrderBy(s => s.FindingId, StringComparer.Ordinal))
|
||||
{
|
||||
hashBuilder.Append(sample.FindingId);
|
||||
hashBuilder.Append(':');
|
||||
hashBuilder.Append(sample.FinalOutcome);
|
||||
hashBuilder.Append(';');
|
||||
}
|
||||
|
||||
var determinismHash = ComputeHash(hashBuilder.ToString());
|
||||
|
||||
return new SampledExplainTraces(
|
||||
options.TraceSampleRate,
|
||||
totalTraceCount,
|
||||
sampledList.Count,
|
||||
ordering,
|
||||
sampledList.ToImmutableArray(),
|
||||
determinismHash);
|
||||
}
|
||||
|
||||
private ImmutableArray<HeatmapCell> ComputeRuleSeverityMatrix(IReadOnlyList<RuleHitTrace> traces)
|
||||
{
|
||||
var matrix = new Dictionary<(string rule, string severity), int>();
|
||||
|
||||
foreach (var trace in traces.Where(t => t.ExpressionResult && !string.IsNullOrWhiteSpace(t.AssignedSeverity)))
|
||||
{
|
||||
var key = (trace.RuleName, trace.AssignedSeverity!);
|
||||
matrix.TryGetValue(key, out var count);
|
||||
matrix[key] = count + 1;
|
||||
}
|
||||
|
||||
var maxValue = matrix.Values.DefaultIfEmpty(1).Max();
|
||||
|
||||
return matrix
|
||||
.Select(kvp => new HeatmapCell(
|
||||
kvp.Key.rule,
|
||||
kvp.Key.severity,
|
||||
kvp.Value,
|
||||
maxValue > 0 ? (double)kvp.Value / maxValue : 0))
|
||||
.OrderBy(c => c.X, StringComparer.Ordinal)
|
||||
.ThenBy(c => SeverityOrder.IndexOf(c.Y.ToLowerInvariant()))
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
private ImmutableArray<HeatmapCell> ComputeRuleOutcomeMatrix(IReadOnlyList<RuleHitTrace> traces)
|
||||
{
|
||||
var matrix = new Dictionary<(string rule, string outcome), int>();
|
||||
|
||||
foreach (var trace in traces.Where(t => t.ExpressionResult))
|
||||
{
|
||||
var key = (trace.RuleName, trace.Outcome);
|
||||
matrix.TryGetValue(key, out var count);
|
||||
matrix[key] = count + 1;
|
||||
}
|
||||
|
||||
var maxValue = matrix.Values.DefaultIfEmpty(1).Max();
|
||||
|
||||
return matrix
|
||||
.Select(kvp => new HeatmapCell(
|
||||
kvp.Key.rule,
|
||||
kvp.Key.outcome,
|
||||
kvp.Value,
|
||||
maxValue > 0 ? (double)kvp.Value / maxValue : 0))
|
||||
.OrderBy(c => c.X, StringComparer.Ordinal)
|
||||
.ThenBy(c => OutcomeSeverityOrder.IndexOf(c.Y.ToLowerInvariant()))
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
private FindingRuleCoverage ComputeFindingRuleCoverage(
|
||||
IReadOnlyList<RuleHitTrace> traces,
|
||||
IReadOnlyList<SimulationFinding> findings)
|
||||
{
|
||||
var rulesThatFired = traces
|
||||
.Where(t => t.ExpressionResult)
|
||||
.Select(t => t.RuleName)
|
||||
.ToHashSet();
|
||||
|
||||
var allRules = traces
|
||||
.Select(t => t.RuleName)
|
||||
.Distinct()
|
||||
.ToHashSet();
|
||||
|
||||
var rulesNeverFired = allRules.Except(rulesThatFired).ToImmutableArray();
|
||||
|
||||
// Group by finding to count matches per finding
|
||||
var findingMatchCounts = traces
|
||||
.Where(t => t.ExpressionResult)
|
||||
.GroupBy(t => t.ComponentPurl ?? t.AdvisoryId ?? "unknown")
|
||||
.ToDictionary(g => g.Key, g => g.Select(t => t.RuleName).Distinct().Count());
|
||||
|
||||
var matchCountDistribution = findingMatchCounts.Values
|
||||
.GroupBy(c => c)
|
||||
.ToDictionary(g => g.Key, g => g.Count())
|
||||
.ToImmutableDictionary();
|
||||
|
||||
var findingsMatched = findingMatchCounts.Count;
|
||||
var findingsUnmatched = findings.Count - findingsMatched;
|
||||
|
||||
return new FindingRuleCoverage(
|
||||
findings.Count,
|
||||
findingsMatched,
|
||||
findingsUnmatched,
|
||||
findings.Count > 0 ? (double)findingsMatched / findings.Count * 100 : 0,
|
||||
rulesNeverFired,
|
||||
matchCountDistribution);
|
||||
}
|
||||
|
||||
private ImmutableArray<TemporalBucket> ComputeTemporalDistribution(
|
||||
IReadOnlyList<RuleHitTrace> traces,
|
||||
long bucketMs)
|
||||
{
|
||||
if (traces.Count == 0)
|
||||
{
|
||||
return ImmutableArray<TemporalBucket>.Empty;
|
||||
}
|
||||
|
||||
var minTime = traces.Min(t => t.EvaluationTimestamp);
|
||||
var maxTime = traces.Max(t => t.EvaluationTimestamp);
|
||||
var totalMs = (long)(maxTime - minTime).TotalMilliseconds;
|
||||
|
||||
if (totalMs <= 0)
|
||||
{
|
||||
return ImmutableArray.Create(new TemporalBucket(0, bucketMs, traces.Count, traces.Count(t => t.ExpressionResult)));
|
||||
}
|
||||
|
||||
var buckets = new Dictionary<long, (int evalCount, int fireCount)>();
|
||||
|
||||
foreach (var trace in traces)
|
||||
{
|
||||
var offsetMs = (long)(trace.EvaluationTimestamp - minTime).TotalMilliseconds;
|
||||
var bucketStart = (offsetMs / bucketMs) * bucketMs;
|
||||
|
||||
buckets.TryGetValue(bucketStart, out var counts);
|
||||
buckets[bucketStart] = (counts.evalCount + 1, counts.fireCount + (trace.ExpressionResult ? 1 : 0));
|
||||
}
|
||||
|
||||
return buckets
|
||||
.OrderBy(kvp => kvp.Key)
|
||||
.Select(kvp => new TemporalBucket(kvp.Key, kvp.Key + bucketMs, kvp.Value.evalCount, kvp.Value.fireCount))
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
private OutcomeChangeSummary ComputeOutcomeChanges(
|
||||
Dictionary<string, SimulationFindingResult> baseResults,
|
||||
Dictionary<string, SimulationFindingResult> candidateResults)
|
||||
{
|
||||
var unchanged = 0;
|
||||
var improved = 0;
|
||||
var regressed = 0;
|
||||
var transitionCounts = new Dictionary<(string from, string to), int>();
|
||||
|
||||
foreach (var (findingId, baseResult) in baseResults)
|
||||
{
|
||||
if (!candidateResults.TryGetValue(findingId, out var candidateResult))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (baseResult.Outcome == candidateResult.Outcome)
|
||||
{
|
||||
unchanged++;
|
||||
}
|
||||
else
|
||||
{
|
||||
var key = (baseResult.Outcome, candidateResult.Outcome);
|
||||
transitionCounts.TryGetValue(key, out var count);
|
||||
transitionCounts[key] = count + 1;
|
||||
|
||||
var isImprovement = IsOutcomeImprovement(baseResult.Outcome, candidateResult.Outcome);
|
||||
if (isImprovement)
|
||||
{
|
||||
improved++;
|
||||
}
|
||||
else
|
||||
{
|
||||
regressed++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var total = baseResults.Count;
|
||||
var transitions = transitionCounts
|
||||
.Select(kvp => new OutcomeTransition(
|
||||
kvp.Key.from,
|
||||
kvp.Key.to,
|
||||
kvp.Value,
|
||||
total > 0 ? (double)kvp.Value / total * 100 : 0,
|
||||
IsOutcomeImprovement(kvp.Key.from, kvp.Key.to)))
|
||||
.OrderByDescending(t => t.Count)
|
||||
.ToImmutableArray();
|
||||
|
||||
return new OutcomeChangeSummary(unchanged, improved, regressed, transitions);
|
||||
}
|
||||
|
||||
private SeverityChangeSummary ComputeSeverityChanges(
|
||||
Dictionary<string, SimulationFindingResult> baseResults,
|
||||
Dictionary<string, SimulationFindingResult> candidateResults)
|
||||
{
|
||||
var unchanged = 0;
|
||||
var escalated = 0;
|
||||
var deescalated = 0;
|
||||
var transitionCounts = new Dictionary<(string from, string to), int>();
|
||||
|
||||
foreach (var (findingId, baseResult) in baseResults)
|
||||
{
|
||||
if (!candidateResults.TryGetValue(findingId, out var candidateResult))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var baseSeverity = baseResult.Severity ?? "unknown";
|
||||
var candidateSeverity = candidateResult.Severity ?? "unknown";
|
||||
|
||||
if (baseSeverity == candidateSeverity)
|
||||
{
|
||||
unchanged++;
|
||||
}
|
||||
else
|
||||
{
|
||||
var key = (baseSeverity, candidateSeverity);
|
||||
transitionCounts.TryGetValue(key, out var count);
|
||||
transitionCounts[key] = count + 1;
|
||||
|
||||
var baseIdx = SeverityOrder.IndexOf(baseSeverity.ToLowerInvariant());
|
||||
var candidateIdx = SeverityOrder.IndexOf(candidateSeverity.ToLowerInvariant());
|
||||
|
||||
if (candidateIdx > baseIdx)
|
||||
{
|
||||
escalated++;
|
||||
}
|
||||
else
|
||||
{
|
||||
deescalated++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var total = baseResults.Count;
|
||||
var transitions = transitionCounts
|
||||
.Select(kvp => new SeverityTransition(
|
||||
kvp.Key.from,
|
||||
kvp.Key.to,
|
||||
kvp.Value,
|
||||
total > 0 ? (double)kvp.Value / total * 100 : 0))
|
||||
.OrderByDescending(t => t.Count)
|
||||
.ToImmutableArray();
|
||||
|
||||
return new SeverityChangeSummary(unchanged, escalated, deescalated, transitions);
|
||||
}
|
||||
|
||||
private RuleChangeSummary ComputeRuleChanges(
|
||||
IReadOnlyList<SimulationFindingResult> baseResults,
|
||||
IReadOnlyList<SimulationFindingResult> candidateResults)
|
||||
{
|
||||
var baseRules = baseResults
|
||||
.SelectMany(r => r.FiredRules ?? Array.Empty<string>())
|
||||
.Distinct()
|
||||
.ToHashSet();
|
||||
|
||||
var candidateRules = candidateResults
|
||||
.SelectMany(r => r.FiredRules ?? Array.Empty<string>())
|
||||
.Distinct()
|
||||
.ToHashSet();
|
||||
|
||||
var rulesAdded = candidateRules.Except(baseRules).ToImmutableArray();
|
||||
var rulesRemoved = baseRules.Except(candidateRules).ToImmutableArray();
|
||||
|
||||
// Compute fire rate changes for common rules
|
||||
var baseFireRates = ComputeFireRates(baseResults);
|
||||
var candidateFireRates = ComputeFireRates(candidateResults);
|
||||
|
||||
var fireRateChanges = baseRules.Intersect(candidateRules)
|
||||
.Select(rule =>
|
||||
{
|
||||
var baseRate = baseFireRates.GetValueOrDefault(rule, 0);
|
||||
var candidateRate = candidateFireRates.GetValueOrDefault(rule, 0);
|
||||
var change = candidateRate - baseRate;
|
||||
return new RuleFireRateChange(
|
||||
rule,
|
||||
baseRate,
|
||||
candidateRate,
|
||||
change,
|
||||
Math.Abs(change) > 5.0); // >5% change is significant
|
||||
})
|
||||
.Where(c => Math.Abs(c.ChangePercentage) > 1.0) // Only show changes > 1%
|
||||
.OrderByDescending(c => Math.Abs(c.ChangePercentage))
|
||||
.Take(20)
|
||||
.ToImmutableArray();
|
||||
|
||||
return new RuleChangeSummary(
|
||||
rulesAdded,
|
||||
rulesRemoved,
|
||||
ImmutableArray<RuleModification>.Empty, // Would require policy diff analysis
|
||||
fireRateChanges);
|
||||
}
|
||||
|
||||
private Dictionary<string, double> ComputeFireRates(IReadOnlyList<SimulationFindingResult> results)
|
||||
{
|
||||
var ruleCounts = new Dictionary<string, int>();
|
||||
|
||||
foreach (var result in results)
|
||||
{
|
||||
foreach (var rule in result.FiredRules ?? Array.Empty<string>())
|
||||
{
|
||||
ruleCounts.TryGetValue(rule, out var count);
|
||||
ruleCounts[rule] = count + 1;
|
||||
}
|
||||
}
|
||||
|
||||
var total = results.Count;
|
||||
return ruleCounts.ToDictionary(
|
||||
kvp => kvp.Key,
|
||||
kvp => total > 0 ? (double)kvp.Value / total * 100 : 0);
|
||||
}
|
||||
|
||||
private ImmutableArray<HighImpactFinding> ComputeHighImpactFindings(
|
||||
Dictionary<string, SimulationFindingResult> baseResults,
|
||||
Dictionary<string, SimulationFindingResult> candidateResults)
|
||||
{
|
||||
var highImpact = new List<HighImpactFinding>();
|
||||
|
||||
foreach (var (findingId, baseResult) in baseResults)
|
||||
{
|
||||
if (!candidateResults.TryGetValue(findingId, out var candidateResult))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var impactScore = ComputeImpactScore(baseResult, candidateResult);
|
||||
if (impactScore < 0.3) // Threshold for high impact
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var impactReason = DetermineImpactReason(baseResult, candidateResult);
|
||||
|
||||
highImpact.Add(new HighImpactFinding(
|
||||
findingId,
|
||||
baseResult.ComponentPurl,
|
||||
baseResult.AdvisoryId,
|
||||
baseResult.Outcome,
|
||||
candidateResult.Outcome,
|
||||
baseResult.Severity,
|
||||
candidateResult.Severity,
|
||||
impactScore,
|
||||
impactReason));
|
||||
}
|
||||
|
||||
return highImpact
|
||||
.OrderByDescending(f => f.ImpactScore)
|
||||
.Take(50)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
private double ComputeImpactScore(SimulationFindingResult baseResult, SimulationFindingResult candidateResult)
|
||||
{
|
||||
var score = 0.0;
|
||||
|
||||
// Outcome change weight
|
||||
if (baseResult.Outcome != candidateResult.Outcome)
|
||||
{
|
||||
var baseIdx = OutcomeSeverityOrder.IndexOf(baseResult.Outcome.ToLowerInvariant());
|
||||
var candidateIdx = OutcomeSeverityOrder.IndexOf(candidateResult.Outcome.ToLowerInvariant());
|
||||
score += Math.Abs(candidateIdx - baseIdx) * 0.2;
|
||||
}
|
||||
|
||||
// Severity change weight
|
||||
var baseSeverity = baseResult.Severity ?? "unknown";
|
||||
var candidateSeverity = candidateResult.Severity ?? "unknown";
|
||||
if (baseSeverity != candidateSeverity)
|
||||
{
|
||||
var baseIdx = SeverityOrder.IndexOf(baseSeverity.ToLowerInvariant());
|
||||
var candidateIdx = SeverityOrder.IndexOf(candidateSeverity.ToLowerInvariant());
|
||||
score += Math.Abs(candidateIdx - baseIdx) * 0.15;
|
||||
}
|
||||
|
||||
return Math.Min(1.0, score);
|
||||
}
|
||||
|
||||
private string DetermineImpactReason(SimulationFindingResult baseResult, SimulationFindingResult candidateResult)
|
||||
{
|
||||
var reasons = new List<string>();
|
||||
|
||||
if (baseResult.Outcome != candidateResult.Outcome)
|
||||
{
|
||||
reasons.Add($"Outcome changed from '{baseResult.Outcome}' to '{candidateResult.Outcome}'");
|
||||
}
|
||||
|
||||
if (baseResult.Severity != candidateResult.Severity)
|
||||
{
|
||||
reasons.Add($"Severity changed from '{baseResult.Severity}' to '{candidateResult.Severity}'");
|
||||
}
|
||||
|
||||
return string.Join("; ", reasons);
|
||||
}
|
||||
|
||||
private bool IsOutcomeImprovement(string from, string to)
|
||||
{
|
||||
var fromIdx = OutcomeSeverityOrder.IndexOf(from.ToLowerInvariant());
|
||||
var toIdx = OutcomeSeverityOrder.IndexOf(to.ToLowerInvariant());
|
||||
|
||||
// Lower index = less severe = improvement
|
||||
return toIdx < fromIdx;
|
||||
}
|
||||
|
||||
private static string DetermineSampleReason(bool randomSample, bool highSeverity, bool vexOverride)
|
||||
{
|
||||
if (vexOverride)
|
||||
{
|
||||
return "vex_override";
|
||||
}
|
||||
|
||||
if (highSeverity)
|
||||
{
|
||||
return "high_severity";
|
||||
}
|
||||
|
||||
return randomSample ? "random_sample" : "coverage";
|
||||
}
|
||||
|
||||
private static string ComputeHash(string input)
|
||||
{
|
||||
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
|
||||
return Convert.ToHexString(bytes)[..16].ToLowerInvariant();
|
||||
}
|
||||
|
||||
private sealed class RuleStats
|
||||
{
|
||||
public string RuleName { get; }
|
||||
public int Priority { get; }
|
||||
public string? Category { get; }
|
||||
public int FireCount { get; set; }
|
||||
public long TotalEvaluationUs { get; set; }
|
||||
public Dictionary<string, int> OutcomeCounts { get; } = new();
|
||||
|
||||
public RuleStats(string ruleName, int priority, string? category)
|
||||
{
|
||||
RuleName = ruleName;
|
||||
Priority = priority;
|
||||
Category = category;
|
||||
}
|
||||
|
||||
public void IncrementOutcome(string outcome)
|
||||
{
|
||||
OutcomeCounts.TryGetValue(outcome, out var count);
|
||||
OutcomeCounts[outcome] = count + 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Options for simulation analytics computation.
|
||||
/// </summary>
|
||||
public sealed record SimulationAnalyticsOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Sample rate for traces (0.0 to 1.0).
|
||||
/// </summary>
|
||||
public double TraceSampleRate { get; init; } = 0.1;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum number of sampled traces to include.
|
||||
/// </summary>
|
||||
public int MaxSampledTraces { get; init; } = 100;
|
||||
|
||||
/// <summary>
|
||||
/// Temporal bucket size in milliseconds.
|
||||
/// </summary>
|
||||
public long TemporalBucketMs { get; init; } = 100;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum number of top rules to include.
|
||||
/// </summary>
|
||||
public int MaxTopRules { get; init; } = 10;
|
||||
|
||||
/// <summary>
|
||||
/// Significance threshold for fire rate changes (percentage).
|
||||
/// </summary>
|
||||
public double FireRateSignificanceThreshold { get; init; } = 5.0;
|
||||
|
||||
/// <summary>
|
||||
/// Default options.
|
||||
/// </summary>
|
||||
public static SimulationAnalyticsOptions Default { get; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// Options for quick simulations (lower sampling, faster).
|
||||
/// </summary>
|
||||
public static SimulationAnalyticsOptions Quick { get; } = new()
|
||||
{
|
||||
TraceSampleRate = 0.01,
|
||||
MaxSampledTraces = 20,
|
||||
TemporalBucketMs = 500
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Options for batch simulations (balanced).
|
||||
/// </summary>
|
||||
public static SimulationAnalyticsOptions Batch { get; } = new()
|
||||
{
|
||||
TraceSampleRate = 0.05,
|
||||
MaxSampledTraces = 50,
|
||||
TemporalBucketMs = 200
|
||||
};
|
||||
}
|
||||
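Reviewer sketch (not part of the commit): the presets trade sampling depth for speed (Quick samples 1% of traces capped at 20, Default samples 10% capped at 100), and because the options type is a record, individual knobs can be overridden with a `with` expression.

// Illustrative only: choosing a preset or customizing one.
var interactive = SimulationAnalyticsOptions.Quick;
var nightly = SimulationAnalyticsOptions.Default with
{
    MaxSampledTraces = 250,
    TemporalBucketMs = 50
};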
|
||||
/// <summary>
|
||||
/// Result of a single finding simulation (for delta comparison).
|
||||
/// </summary>
|
||||
public sealed record SimulationFindingResult(
|
||||
string FindingId,
|
||||
string? ComponentPurl,
|
||||
string? AdvisoryId,
|
||||
string Outcome,
|
||||
string? Severity,
|
||||
IReadOnlyList<string>? FiredRules);
|
||||
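Reviewer sketch (not part of the commit): comparing the same finding evaluated under two policy versions with ComputeDeltaSummary. The result lists would normally come from two simulation runs; the identifiers and PURLs below are placeholders.

// Illustrative only.
var baseRun = new List<SimulationFindingResult>
{
    new("finding-1", "pkg:npm/lodash@4.17.20", "CVE-2021-23337", "block", "high", new[] { "deny-critical" }),
};
var candidateRun = new List<SimulationFindingResult>
{
    new("finding-1", "pkg:npm/lodash@4.17.20", "CVE-2021-23337", "warn", "medium", new[] { "warn-known-fixed" }),
};

var delta = new SimulationAnalyticsService().ComputeDeltaSummary(
    basePolicyRef: "policy@7",
    candidatePolicyRef: "policy@8",
    baseResults: baseRun,
    candidateResults: candidateRun);

// For this pair delta.OutcomeChanges.Improved == 1, because "warn" ranks below "block"
// in the outcome severity order used by IsOutcomeImprovement.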
@@ -10,6 +10,8 @@
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="MongoDB.Driver" Version="3.5.0" />
    <PackageReference Include="StackExchange.Redis" Version="2.8.37" />
    <PackageReference Include="OpenTelemetry.Exporter.Console" Version="1.12.0" />
    <PackageReference Include="OpenTelemetry.Exporter.OpenTelemetryProtocol" Version="1.12.0" />
    <PackageReference Include="OpenTelemetry.Extensions.Hosting" Version="1.12.0" />

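Reviewer sketch (not part of the commit): the two new package references presumably back the Mongo document types added below and a Redis-based client added alongside them. One conventional way they would be registered at startup; the connection strings and the `services` collection are placeholders, and the actual wiring is not shown in this diff.

// Illustrative only; assumes MongoDB.Driver, StackExchange.Redis and
// Microsoft.Extensions.DependencyInjection namespaces are in scope.
services.AddSingleton<IMongoClient>(_ => new MongoClient("mongodb://mongo:27017"));
services.AddSingleton<IConnectionMultiplexer>(_ => ConnectionMultiplexer.Connect("redis:6379"));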
@@ -0,0 +1,325 @@
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;

namespace StellaOps.Policy.Engine.Storage.Mongo.Documents;

/// <summary>
/// MongoDB document representing an effective finding after policy evaluation.
/// Collection: effective_finding_{policyId}
/// Tenant-scoped with unique constraint on (tenantId, componentPurl, advisoryId).
/// </summary>
[BsonIgnoreExtraElements]
public sealed class EffectiveFindingDocument
{
    /// <summary>
    /// Unique identifier: sha256:{hash of tenantId|policyId|componentPurl|advisoryId}
    /// </summary>
    [BsonId]
    [BsonElement("_id")]
    public string Id { get; set; } = string.Empty;

/// <summary>
|
||||
/// Tenant identifier (normalized to lowercase).
|
||||
/// </summary>
|
||||
[BsonElement("tenantId")]
|
||||
public string TenantId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Policy identifier.
|
||||
/// </summary>
|
||||
[BsonElement("policyId")]
|
||||
public string PolicyId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Policy version at time of evaluation.
|
||||
/// </summary>
|
||||
[BsonElement("policyVersion")]
|
||||
public int PolicyVersion { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Component PURL from the SBOM.
|
||||
/// </summary>
|
||||
[BsonElement("componentPurl")]
|
||||
public string ComponentPurl { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Component name.
|
||||
/// </summary>
|
||||
[BsonElement("componentName")]
|
||||
public string ComponentName { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Component version.
|
||||
/// </summary>
|
||||
[BsonElement("componentVersion")]
|
||||
public string ComponentVersion { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Package ecosystem (npm, maven, pypi, etc.).
|
||||
/// </summary>
|
||||
[BsonElement("ecosystem")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Ecosystem { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Advisory identifier (CVE, GHSA, etc.).
|
||||
/// </summary>
|
||||
[BsonElement("advisoryId")]
|
||||
public string AdvisoryId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Advisory source.
|
||||
/// </summary>
|
||||
[BsonElement("advisorySource")]
|
||||
public string AdvisorySource { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Vulnerability ID (may differ from advisory ID).
|
||||
/// </summary>
|
||||
[BsonElement("vulnerabilityId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? VulnerabilityId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Policy evaluation status (affected, blocked, suppressed, etc.).
|
||||
/// </summary>
|
||||
[BsonElement("status")]
|
||||
public string Status { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Normalized severity (Critical, High, Medium, Low, None).
|
||||
/// </summary>
|
||||
[BsonElement("severity")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Severity { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// CVSS score (if available).
|
||||
/// </summary>
|
||||
[BsonElement("cvssScore")]
|
||||
[BsonIgnoreIfNull]
|
||||
public double? CvssScore { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Rule name that matched.
|
||||
/// </summary>
|
||||
[BsonElement("ruleName")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? RuleName { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Rule priority.
|
||||
/// </summary>
|
||||
[BsonElement("rulePriority")]
|
||||
[BsonIgnoreIfNull]
|
||||
public int? RulePriority { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// VEX status overlay (if VEX was applied).
|
||||
/// </summary>
|
||||
[BsonElement("vexStatus")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? VexStatus { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// VEX justification (if VEX was applied).
|
||||
/// </summary>
|
||||
[BsonElement("vexJustification")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? VexJustification { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// VEX provider/vendor.
|
||||
/// </summary>
|
||||
[BsonElement("vexVendor")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? VexVendor { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether a VEX override was applied.
|
||||
/// </summary>
|
||||
[BsonElement("isVexOverride")]
|
||||
public bool IsVexOverride { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// SBOM ID where component was found.
|
||||
/// </summary>
|
||||
[BsonElement("sbomId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? SbomId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Product key associated with the SBOM.
|
||||
/// </summary>
|
||||
[BsonElement("productKey")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? ProductKey { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Policy evaluation annotations.
|
||||
/// </summary>
|
||||
[BsonElement("annotations")]
|
||||
public Dictionary<string, string> Annotations { get; set; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// Current history version (incremented on each update).
|
||||
/// </summary>
|
||||
[BsonElement("historyVersion")]
|
||||
public long HistoryVersion { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the policy run that produced this finding.
|
||||
/// </summary>
|
||||
[BsonElement("policyRunId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? PolicyRunId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Trace ID for distributed tracing.
|
||||
/// </summary>
|
||||
[BsonElement("traceId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? TraceId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Span ID for distributed tracing.
|
||||
/// </summary>
|
||||
[BsonElement("spanId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? SpanId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When this finding was first created.
|
||||
/// </summary>
|
||||
[BsonElement("createdAt")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When this finding was last updated.
|
||||
/// </summary>
|
||||
[BsonElement("updatedAt")]
|
||||
public DateTimeOffset UpdatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Content hash for deduplication and change detection.
|
||||
/// </summary>
|
||||
[BsonElement("contentHash")]
|
||||
public string ContentHash { get; set; } = string.Empty;
|
||||
}
|
||||
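Reviewer sketch (not part of the commit): a helper matching the _id convention documented on the class ("sha256:{hash of tenantId|policyId|componentPurl|advisoryId}"). The exact delimiter handling and casing used by the real implementation are not shown in this diff; assumes System.Security.Cryptography and System.Text are in scope.

// Illustrative only.
static string CreateFindingId(string tenantId, string policyId, string componentPurl, string advisoryId)
{
    var composite = $"{tenantId}|{policyId}|{componentPurl}|{advisoryId}";
    var hash = SHA256.HashData(Encoding.UTF8.GetBytes(composite));
    return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}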
|
||||
/// <summary>
|
||||
/// MongoDB document for effective finding history (append-only).
|
||||
/// Collection: effective_finding_history_{policyId}
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class EffectiveFindingHistoryDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier: {findingId}:v{version}
|
||||
/// </summary>
|
||||
[BsonId]
|
||||
[BsonElement("_id")]
|
||||
public string Id { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Tenant identifier.
|
||||
/// </summary>
|
||||
[BsonElement("tenantId")]
|
||||
public string TenantId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the effective finding.
|
||||
/// </summary>
|
||||
[BsonElement("findingId")]
|
||||
public string FindingId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Policy identifier.
|
||||
/// </summary>
|
||||
[BsonElement("policyId")]
|
||||
public string PolicyId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// History version number (monotonically increasing).
|
||||
/// </summary>
|
||||
[BsonElement("version")]
|
||||
public long Version { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Type of change (Created, StatusChanged, SeverityChanged, VexApplied, etc.).
|
||||
/// </summary>
|
||||
[BsonElement("changeType")]
|
||||
public string ChangeType { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Previous status (for status changes).
|
||||
/// </summary>
|
||||
[BsonElement("previousStatus")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? PreviousStatus { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// New status.
|
||||
/// </summary>
|
||||
[BsonElement("newStatus")]
|
||||
public string NewStatus { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Previous severity (for severity changes).
|
||||
/// </summary>
|
||||
[BsonElement("previousSeverity")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? PreviousSeverity { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// New severity.
|
||||
/// </summary>
|
||||
[BsonElement("newSeverity")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? NewSeverity { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Previous content hash.
|
||||
/// </summary>
|
||||
[BsonElement("previousContentHash")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? PreviousContentHash { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// New content hash.
|
||||
/// </summary>
|
||||
[BsonElement("newContentHash")]
|
||||
public string NewContentHash { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Policy run that triggered this change.
|
||||
/// </summary>
|
||||
[BsonElement("policyRunId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? PolicyRunId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Trace ID for distributed tracing.
|
||||
/// </summary>
|
||||
[BsonElement("traceId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? TraceId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When this change occurred.
|
||||
/// </summary>
|
||||
[BsonElement("occurredAt")]
|
||||
public DateTimeOffset OccurredAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// TTL expiration timestamp for automatic cleanup.
|
||||
/// </summary>
|
||||
[BsonElement("expiresAt")]
|
||||
[BsonIgnoreIfNull]
|
||||
public DateTimeOffset? ExpiresAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Creates the composite ID for a history entry.
|
||||
/// </summary>
|
||||
public static string CreateId(string findingId, long version) => $"{findingId}:v{version}";
|
||||
}
|
||||
@@ -0,0 +1,157 @@
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Bson.Serialization.Attributes;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Storage.Mongo.Documents;
|
||||
|
||||
/// <summary>
|
||||
/// MongoDB document for policy audit log entries.
|
||||
/// Collection: policy_audit
|
||||
/// Tracks all policy-related actions for compliance and debugging.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyAuditDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique audit entry identifier.
|
||||
/// </summary>
|
||||
[BsonId]
|
||||
[BsonElement("_id")]
|
||||
public ObjectId Id { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Tenant identifier.
|
||||
/// </summary>
|
||||
[BsonElement("tenantId")]
|
||||
public string TenantId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Action type (PolicyCreated, PolicyUpdated, RevisionApproved, RunStarted, etc.).
|
||||
/// </summary>
|
||||
[BsonElement("action")]
|
||||
public string Action { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Resource type (Policy, Revision, Bundle, Run, Finding).
|
||||
/// </summary>
|
||||
[BsonElement("resourceType")]
|
||||
public string ResourceType { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Resource identifier.
|
||||
/// </summary>
|
||||
[BsonElement("resourceId")]
|
||||
public string ResourceId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Actor identifier (user ID or service account).
|
||||
/// </summary>
|
||||
[BsonElement("actorId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? ActorId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Actor type (User, ServiceAccount, System).
|
||||
/// </summary>
|
||||
[BsonElement("actorType")]
|
||||
public string ActorType { get; set; } = "System";
|
||||
|
||||
/// <summary>
|
||||
/// Previous state snapshot (for update actions).
|
||||
/// </summary>
|
||||
[BsonElement("previousState")]
|
||||
[BsonIgnoreIfNull]
|
||||
public BsonDocument? PreviousState { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// New state snapshot (for create/update actions).
|
||||
/// </summary>
|
||||
[BsonElement("newState")]
|
||||
[BsonIgnoreIfNull]
|
||||
public BsonDocument? NewState { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Additional context/metadata.
|
||||
/// </summary>
|
||||
[BsonElement("metadata")]
|
||||
public Dictionary<string, string> Metadata { get; set; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// Correlation ID for distributed tracing.
|
||||
/// </summary>
|
||||
[BsonElement("correlationId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? CorrelationId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Trace ID for OpenTelemetry.
|
||||
/// </summary>
|
||||
[BsonElement("traceId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? TraceId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Client IP address.
|
||||
/// </summary>
|
||||
[BsonElement("clientIp")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? ClientIp { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// User agent string.
|
||||
/// </summary>
|
||||
[BsonElement("userAgent")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? UserAgent { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When the action occurred.
|
||||
/// </summary>
|
||||
[BsonElement("occurredAt")]
|
||||
public DateTimeOffset OccurredAt { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Audit action types for policy operations.
|
||||
/// </summary>
|
||||
public static class PolicyAuditActions
|
||||
{
|
||||
public const string PolicyCreated = "PolicyCreated";
|
||||
public const string PolicyUpdated = "PolicyUpdated";
|
||||
public const string PolicyDeleted = "PolicyDeleted";
|
||||
public const string RevisionCreated = "RevisionCreated";
|
||||
public const string RevisionApproved = "RevisionApproved";
|
||||
public const string RevisionActivated = "RevisionActivated";
|
||||
public const string RevisionArchived = "RevisionArchived";
|
||||
public const string BundleCompiled = "BundleCompiled";
|
||||
public const string RunStarted = "RunStarted";
|
||||
public const string RunCompleted = "RunCompleted";
|
||||
public const string RunFailed = "RunFailed";
|
||||
public const string RunCancelled = "RunCancelled";
|
||||
public const string FindingCreated = "FindingCreated";
|
||||
public const string FindingUpdated = "FindingUpdated";
|
||||
public const string SimulationStarted = "SimulationStarted";
|
||||
public const string SimulationCompleted = "SimulationCompleted";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Resource types for policy audit entries.
|
||||
/// </summary>
|
||||
public static class PolicyAuditResourceTypes
|
||||
{
|
||||
public const string Policy = "Policy";
|
||||
public const string Revision = "Revision";
|
||||
public const string Bundle = "Bundle";
|
||||
public const string Run = "Run";
|
||||
public const string Finding = "Finding";
|
||||
public const string Simulation = "Simulation";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Actor types for policy audit entries.
|
||||
/// </summary>
|
||||
public static class PolicyAuditActorTypes
|
||||
{
|
||||
public const string User = "User";
|
||||
public const string ServiceAccount = "ServiceAccount";
|
||||
public const string System = "System";
|
||||
}
|
||||
@@ -0,0 +1,343 @@
|
||||
using System.Collections.Immutable;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Bson.Serialization.Attributes;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Storage.Mongo.Documents;
|
||||
|
||||
/// <summary>
|
||||
/// MongoDB document representing a policy pack.
|
||||
/// Collection: policies
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier (packId).
|
||||
/// </summary>
|
||||
[BsonId]
|
||||
[BsonElement("_id")]
|
||||
public string Id { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Tenant identifier (normalized to lowercase).
|
||||
/// </summary>
|
||||
[BsonElement("tenantId")]
|
||||
public string TenantId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Display name for the policy pack.
|
||||
/// </summary>
|
||||
[BsonElement("displayName")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? DisplayName { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Description of the policy pack.
|
||||
/// </summary>
|
||||
[BsonElement("description")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Description { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Current active revision version (null if none active).
|
||||
/// </summary>
|
||||
[BsonElement("activeVersion")]
|
||||
[BsonIgnoreIfNull]
|
||||
public int? ActiveVersion { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Latest revision version.
|
||||
/// </summary>
|
||||
[BsonElement("latestVersion")]
|
||||
public int LatestVersion { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Tags for categorization and filtering.
|
||||
/// </summary>
|
||||
[BsonElement("tags")]
|
||||
public List<string> Tags { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Creation timestamp.
|
||||
/// </summary>
|
||||
[BsonElement("createdAt")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Last update timestamp.
|
||||
/// </summary>
|
||||
[BsonElement("updatedAt")]
|
||||
public DateTimeOffset UpdatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// User who created the policy pack.
|
||||
/// </summary>
|
||||
[BsonElement("createdBy")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? CreatedBy { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// MongoDB document representing a policy revision.
|
||||
/// Collection: policy_revisions
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyRevisionDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier: {packId}:{version}
|
||||
/// </summary>
|
||||
[BsonId]
|
||||
[BsonElement("_id")]
|
||||
public string Id { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Tenant identifier.
|
||||
/// </summary>
|
||||
[BsonElement("tenantId")]
|
||||
public string TenantId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Reference to policy pack.
|
||||
/// </summary>
|
||||
[BsonElement("packId")]
|
||||
public string PackId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Revision version number.
|
||||
/// </summary>
|
||||
[BsonElement("version")]
|
||||
public int Version { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Revision status (Draft, Approved, Active, Archived).
|
||||
/// </summary>
|
||||
[BsonElement("status")]
|
||||
public string Status { get; set; } = "Draft";
|
||||
|
||||
/// <summary>
|
||||
/// Whether two-person approval is required.
|
||||
/// </summary>
|
||||
[BsonElement("requiresTwoPersonApproval")]
|
||||
public bool RequiresTwoPersonApproval { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Approval records.
|
||||
/// </summary>
|
||||
[BsonElement("approvals")]
|
||||
public List<PolicyApprovalRecord> Approvals { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the compiled bundle.
|
||||
/// </summary>
|
||||
[BsonElement("bundleId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? BundleId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// SHA256 digest of the bundle.
|
||||
/// </summary>
|
||||
[BsonElement("bundleDigest")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? BundleDigest { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Creation timestamp.
|
||||
/// </summary>
|
||||
[BsonElement("createdAt")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Activation timestamp (when status became Active).
|
||||
/// </summary>
|
||||
[BsonElement("activatedAt")]
|
||||
[BsonIgnoreIfNull]
|
||||
public DateTimeOffset? ActivatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Creates the composite ID for a revision.
|
||||
/// </summary>
|
||||
public static string CreateId(string packId, int version) => $"{packId}:{version}";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Embedded approval record for policy revisions.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyApprovalRecord
|
||||
{
|
||||
/// <summary>
|
||||
/// User who approved.
|
||||
/// </summary>
|
||||
[BsonElement("actorId")]
|
||||
public string ActorId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Approval timestamp.
|
||||
/// </summary>
|
||||
[BsonElement("approvedAt")]
|
||||
public DateTimeOffset ApprovedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional comment.
|
||||
/// </summary>
|
||||
[BsonElement("comment")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Comment { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// MongoDB document for compiled policy bundles.
|
||||
/// Collection: policy_bundles
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyBundleDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier (SHA256 digest).
|
||||
/// </summary>
|
||||
[BsonId]
|
||||
[BsonElement("_id")]
|
||||
public string Id { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Tenant identifier.
|
||||
/// </summary>
|
||||
[BsonElement("tenantId")]
|
||||
public string TenantId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Reference to policy pack.
|
||||
/// </summary>
|
||||
[BsonElement("packId")]
|
||||
public string PackId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Revision version.
|
||||
/// </summary>
|
||||
[BsonElement("version")]
|
||||
public int Version { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Cryptographic signature.
|
||||
/// </summary>
|
||||
[BsonElement("signature")]
|
||||
public string Signature { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Bundle size in bytes.
|
||||
/// </summary>
|
||||
[BsonElement("sizeBytes")]
|
||||
public int SizeBytes { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Compiled bundle payload (binary).
|
||||
/// </summary>
|
||||
[BsonElement("payload")]
|
||||
public byte[] Payload { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// AOC metadata for compliance tracking.
|
||||
/// </summary>
|
||||
[BsonElement("aocMetadata")]
|
||||
[BsonIgnoreIfNull]
|
||||
public PolicyAocMetadataDocument? AocMetadata { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Creation timestamp.
|
||||
/// </summary>
|
||||
[BsonElement("createdAt")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Embedded AOC metadata document.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyAocMetadataDocument
|
||||
{
|
||||
[BsonElement("compilationId")]
|
||||
public string CompilationId { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("compilerVersion")]
|
||||
public string CompilerVersion { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("compiledAt")]
|
||||
public DateTimeOffset CompiledAt { get; set; }
|
||||
|
||||
[BsonElement("sourceDigest")]
|
||||
public string SourceDigest { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("artifactDigest")]
|
||||
public string ArtifactDigest { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("complexityScore")]
|
||||
public double ComplexityScore { get; set; }
|
||||
|
||||
[BsonElement("ruleCount")]
|
||||
public int RuleCount { get; set; }
|
||||
|
||||
[BsonElement("durationMilliseconds")]
|
||||
public long DurationMilliseconds { get; set; }
|
||||
|
||||
[BsonElement("provenance")]
|
||||
[BsonIgnoreIfNull]
|
||||
public PolicyProvenanceDocument? Provenance { get; set; }
|
||||
|
||||
[BsonElement("attestationRef")]
|
||||
[BsonIgnoreIfNull]
|
||||
public PolicyAttestationRefDocument? AttestationRef { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Embedded provenance document.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyProvenanceDocument
|
||||
{
|
||||
[BsonElement("sourceType")]
|
||||
public string SourceType { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("sourceUrl")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? SourceUrl { get; set; }
|
||||
|
||||
[BsonElement("submitter")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Submitter { get; set; }
|
||||
|
||||
[BsonElement("commitSha")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? CommitSha { get; set; }
|
||||
|
||||
[BsonElement("branch")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Branch { get; set; }
|
||||
|
||||
[BsonElement("ingestedAt")]
|
||||
public DateTimeOffset IngestedAt { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Embedded attestation reference document.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyAttestationRefDocument
|
||||
{
|
||||
[BsonElement("attestationId")]
|
||||
public string AttestationId { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("envelopeDigest")]
|
||||
public string EnvelopeDigest { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("uri")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Uri { get; set; }
|
||||
|
||||
[BsonElement("signingKeyId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? SigningKeyId { get; set; }
|
||||
|
||||
[BsonElement("createdAt")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
}
|
||||
@@ -0,0 +1,482 @@
|
||||
using System.Collections.Immutable;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Bson.Serialization.Attributes;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Storage.Mongo.Documents;
|
||||
|
||||
/// <summary>
|
||||
/// MongoDB document representing a policy exception.
|
||||
/// Collection: exceptions
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyExceptionDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier.
|
||||
/// </summary>
|
||||
[BsonId]
|
||||
[BsonElement("_id")]
|
||||
public string Id { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Tenant identifier (normalized to lowercase).
|
||||
/// </summary>
|
||||
[BsonElement("tenantId")]
|
||||
public string TenantId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Human-readable name for the exception.
|
||||
/// </summary>
|
||||
[BsonElement("name")]
|
||||
public string Name { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Description and justification for the exception.
|
||||
/// </summary>
|
||||
[BsonElement("description")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Description { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Exception type: waiver, override, temporary, permanent.
|
||||
/// </summary>
|
||||
[BsonElement("exceptionType")]
|
||||
public string ExceptionType { get; set; } = "waiver";
|
||||
|
||||
/// <summary>
|
||||
/// Exception status: draft, pending_review, approved, active, expired, revoked.
|
||||
/// </summary>
|
||||
[BsonElement("status")]
|
||||
public string Status { get; set; } = "draft";
|
||||
|
||||
/// <summary>
|
||||
/// Scope of the exception (e.g., advisory IDs, PURL patterns, CVE IDs).
|
||||
/// </summary>
|
||||
[BsonElement("scope")]
|
||||
public ExceptionScopeDocument Scope { get; set; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// Risk assessment and mitigation details.
|
||||
/// </summary>
|
||||
[BsonElement("riskAssessment")]
|
||||
[BsonIgnoreIfNull]
|
||||
public ExceptionRiskAssessmentDocument? RiskAssessment { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Compensating controls in place while exception is active.
|
||||
/// </summary>
|
||||
[BsonElement("compensatingControls")]
|
||||
public List<string> CompensatingControls { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Tags for categorization and filtering.
|
||||
/// </summary>
|
||||
[BsonElement("tags")]
|
||||
public List<string> Tags { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Priority for conflict resolution (higher = more precedence).
|
||||
/// </summary>
|
||||
[BsonElement("priority")]
|
||||
public int Priority { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When the exception becomes active (null = immediately upon approval).
|
||||
/// </summary>
|
||||
[BsonElement("effectiveFrom")]
|
||||
[BsonIgnoreIfNull]
|
||||
public DateTimeOffset? EffectiveFrom { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When the exception expires (null = no expiration).
|
||||
/// </summary>
|
||||
[BsonElement("expiresAt")]
|
||||
[BsonIgnoreIfNull]
|
||||
public DateTimeOffset? ExpiresAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// User who created the exception.
|
||||
/// </summary>
|
||||
[BsonElement("createdBy")]
|
||||
public string CreatedBy { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Creation timestamp.
|
||||
/// </summary>
|
||||
[BsonElement("createdAt")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Last update timestamp.
|
||||
/// </summary>
|
||||
[BsonElement("updatedAt")]
|
||||
public DateTimeOffset UpdatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When the exception was activated.
|
||||
/// </summary>
|
||||
[BsonElement("activatedAt")]
|
||||
[BsonIgnoreIfNull]
|
||||
public DateTimeOffset? ActivatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When the exception was revoked.
|
||||
/// </summary>
|
||||
[BsonElement("revokedAt")]
|
||||
[BsonIgnoreIfNull]
|
||||
public DateTimeOffset? RevokedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// User who revoked the exception.
|
||||
/// </summary>
|
||||
[BsonElement("revokedBy")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? RevokedBy { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Reason for revocation.
|
||||
/// </summary>
|
||||
[BsonElement("revocationReason")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? RevocationReason { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the active review (if pending_review status).
|
||||
/// </summary>
|
||||
[BsonElement("activeReviewId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? ActiveReviewId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Correlation ID for tracing.
|
||||
/// </summary>
|
||||
[BsonElement("correlationId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? CorrelationId { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Embedded document for exception scope definition.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class ExceptionScopeDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Advisory IDs covered by this exception.
|
||||
/// </summary>
|
||||
[BsonElement("advisoryIds")]
|
||||
public List<string> AdvisoryIds { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// CVE IDs covered by this exception.
|
||||
/// </summary>
|
||||
[BsonElement("cveIds")]
|
||||
public List<string> CveIds { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// PURL patterns (supports wildcards) covered by this exception.
|
||||
/// </summary>
|
||||
[BsonElement("purlPatterns")]
|
||||
public List<string> PurlPatterns { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Specific asset IDs covered.
|
||||
/// </summary>
|
||||
[BsonElement("assetIds")]
|
||||
public List<string> AssetIds { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Repository IDs covered (scope limiter).
|
||||
/// </summary>
|
||||
[BsonElement("repositoryIds")]
|
||||
public List<string> RepositoryIds { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Snapshot IDs covered (scope limiter).
|
||||
/// </summary>
|
||||
[BsonElement("snapshotIds")]
|
||||
public List<string> SnapshotIds { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Severity levels to apply exception to.
|
||||
/// </summary>
|
||||
[BsonElement("severities")]
|
||||
public List<string> Severities { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Whether this exception applies to all assets (tenant-wide).
|
||||
/// </summary>
|
||||
[BsonElement("applyToAll")]
|
||||
public bool ApplyToAll { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Embedded document for risk assessment.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class ExceptionRiskAssessmentDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Original risk level being excepted.
|
||||
/// </summary>
|
||||
[BsonElement("originalRiskLevel")]
|
||||
public string OriginalRiskLevel { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Residual risk level after compensating controls.
|
||||
/// </summary>
|
||||
[BsonElement("residualRiskLevel")]
|
||||
public string ResidualRiskLevel { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Business justification for accepting the risk.
|
||||
/// </summary>
|
||||
[BsonElement("businessJustification")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? BusinessJustification { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Impact assessment if vulnerability is exploited.
|
||||
/// </summary>
|
||||
[BsonElement("impactAssessment")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? ImpactAssessment { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Exploitability assessment.
|
||||
/// </summary>
|
||||
[BsonElement("exploitability")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Exploitability { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// MongoDB document representing an exception review.
|
||||
/// Collection: exception_reviews
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class ExceptionReviewDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier.
|
||||
/// </summary>
|
||||
[BsonId]
|
||||
[BsonElement("_id")]
|
||||
public string Id { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Tenant identifier.
|
||||
/// </summary>
|
||||
[BsonElement("tenantId")]
|
||||
public string TenantId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the exception being reviewed.
|
||||
/// </summary>
|
||||
[BsonElement("exceptionId")]
|
||||
public string ExceptionId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Review status: pending, approved, rejected.
|
||||
/// </summary>
|
||||
[BsonElement("status")]
|
||||
public string Status { get; set; } = "pending";
|
||||
|
||||
/// <summary>
|
||||
/// Type of review: initial, renewal, modification.
|
||||
/// </summary>
|
||||
[BsonElement("reviewType")]
|
||||
public string ReviewType { get; set; } = "initial";
|
||||
|
||||
/// <summary>
|
||||
/// Whether multiple approvers are required.
|
||||
/// </summary>
|
||||
[BsonElement("requiresMultipleApprovers")]
|
||||
public bool RequiresMultipleApprovers { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Minimum number of approvals required.
|
||||
/// </summary>
|
||||
[BsonElement("requiredApprovals")]
|
||||
public int RequiredApprovals { get; set; } = 1;
|
||||
|
||||
/// <summary>
|
||||
/// Designated reviewers (user or group IDs).
|
||||
/// </summary>
|
||||
[BsonElement("designatedReviewers")]
|
||||
public List<string> DesignatedReviewers { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Individual approval/rejection decisions.
|
||||
/// </summary>
|
||||
[BsonElement("decisions")]
|
||||
public List<ReviewDecisionDocument> Decisions { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// User who requested the review.
|
||||
/// </summary>
|
||||
[BsonElement("requestedBy")]
|
||||
public string RequestedBy { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// When the review was requested.
|
||||
/// </summary>
|
||||
[BsonElement("requestedAt")]
|
||||
public DateTimeOffset RequestedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When the review was completed.
|
||||
/// </summary>
|
||||
[BsonElement("completedAt")]
|
||||
[BsonIgnoreIfNull]
|
||||
public DateTimeOffset? CompletedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Review deadline.
|
||||
/// </summary>
|
||||
[BsonElement("deadline")]
|
||||
[BsonIgnoreIfNull]
|
||||
public DateTimeOffset? Deadline { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Notes or comments on the review.
|
||||
/// </summary>
|
||||
[BsonElement("notes")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Notes { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Creates the composite ID for a review.
|
||||
/// </summary>
|
||||
public static string CreateId(string exceptionId, string reviewType, DateTimeOffset timestamp)
|
||||
=> $"{exceptionId}:{reviewType}:{timestamp:yyyyMMddHHmmss}";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Embedded document for an individual reviewer's decision.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class ReviewDecisionDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Reviewer ID (user or service account).
|
||||
/// </summary>
|
||||
[BsonElement("reviewerId")]
|
||||
public string ReviewerId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Decision: approved, rejected, abstained.
|
||||
/// </summary>
|
||||
[BsonElement("decision")]
|
||||
public string Decision { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Timestamp of the decision.
|
||||
/// </summary>
|
||||
[BsonElement("decidedAt")]
|
||||
public DateTimeOffset DecidedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Comment explaining the decision.
|
||||
/// </summary>
|
||||
[BsonElement("comment")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Comment { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Conditions attached to approval.
|
||||
/// </summary>
|
||||
[BsonElement("conditions")]
|
||||
public List<string> Conditions { get; set; } = [];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// MongoDB document representing an exception binding to specific assets.
|
||||
/// Collection: exception_bindings
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class ExceptionBindingDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier: {exceptionId}:{assetId}:{advisoryId}
|
||||
/// </summary>
|
||||
[BsonId]
|
||||
[BsonElement("_id")]
|
||||
public string Id { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Tenant identifier.
|
||||
/// </summary>
|
||||
[BsonElement("tenantId")]
|
||||
public string TenantId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the exception.
|
||||
/// </summary>
|
||||
[BsonElement("exceptionId")]
|
||||
public string ExceptionId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Asset ID (PURL or other identifier) this binding applies to.
|
||||
/// </summary>
|
||||
[BsonElement("assetId")]
|
||||
public string AssetId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Advisory ID this binding covers.
|
||||
/// </summary>
|
||||
[BsonElement("advisoryId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? AdvisoryId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// CVE ID this binding covers.
|
||||
/// </summary>
|
||||
[BsonElement("cveId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? CveId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Snapshot ID where binding was created.
|
||||
/// </summary>
|
||||
[BsonElement("snapshotId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? SnapshotId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Binding status: active, expired, revoked.
|
||||
/// </summary>
|
||||
[BsonElement("status")]
|
||||
public string Status { get; set; } = "active";
|
||||
|
||||
/// <summary>
|
||||
/// Policy decision override applied by this binding.
|
||||
/// </summary>
|
||||
[BsonElement("decisionOverride")]
|
||||
public string DecisionOverride { get; set; } = "allow";
|
||||
|
||||
/// <summary>
|
||||
/// When the binding becomes effective.
|
||||
/// </summary>
|
||||
[BsonElement("effectiveFrom")]
|
||||
public DateTimeOffset EffectiveFrom { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When the binding expires.
|
||||
/// </summary>
|
||||
[BsonElement("expiresAt")]
|
||||
[BsonIgnoreIfNull]
|
||||
public DateTimeOffset? ExpiresAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When the binding was created.
|
||||
/// </summary>
|
||||
[BsonElement("createdAt")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Creates the composite ID for a binding.
|
||||
/// </summary>
|
||||
public static string CreateId(string exceptionId, string assetId, string? advisoryId)
|
||||
=> $"{exceptionId}:{assetId}:{advisoryId ?? "all"}";
|
||||
}
|
||||
@@ -0,0 +1,383 @@
|
||||
using System.Collections.Immutable;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Bson.Serialization.Attributes;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Storage.Mongo.Documents;
|
||||
|
||||
/// <summary>
|
||||
/// MongoDB document for storing policy explain traces.
|
||||
/// Collection: policy_explains
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyExplainDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier (combination of runId and subjectHash).
|
||||
/// </summary>
|
||||
[BsonId]
|
||||
[BsonElement("_id")]
|
||||
public string Id { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Tenant identifier.
|
||||
/// </summary>
|
||||
[BsonElement("tenantId")]
|
||||
public string TenantId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Policy run identifier.
|
||||
/// </summary>
|
||||
[BsonElement("runId")]
|
||||
public string RunId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Policy pack identifier.
|
||||
/// </summary>
|
||||
[BsonElement("policyId")]
|
||||
public string PolicyId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Policy version at time of evaluation.
|
||||
/// </summary>
|
||||
[BsonElement("policyVersion")]
|
||||
[BsonIgnoreIfNull]
|
||||
public int? PolicyVersion { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Hash of the evaluation subject (component + advisory).
|
||||
/// </summary>
|
||||
[BsonElement("subjectHash")]
|
||||
public string SubjectHash { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Hash of the policy bundle used.
|
||||
/// </summary>
|
||||
[BsonElement("bundleDigest")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? BundleDigest { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Evaluation timestamp (deterministic).
|
||||
/// </summary>
|
||||
[BsonElement("evaluatedAt")]
|
||||
public DateTimeOffset EvaluatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Evaluation duration in milliseconds.
|
||||
/// </summary>
|
||||
[BsonElement("durationMs")]
|
||||
public long DurationMs { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Final outcome of the evaluation.
|
||||
/// </summary>
|
||||
[BsonElement("finalOutcome")]
|
||||
public string FinalOutcome { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Input context information.
|
||||
/// </summary>
|
||||
[BsonElement("inputContext")]
|
||||
public ExplainInputContextDocument InputContext { get; set; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// Rule evaluation steps.
|
||||
/// </summary>
|
||||
[BsonElement("ruleSteps")]
|
||||
public List<ExplainRuleStepDocument> RuleSteps { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// VEX evidence applied.
|
||||
/// </summary>
|
||||
[BsonElement("vexEvidence")]
|
||||
public List<ExplainVexEvidenceDocument> VexEvidence { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Statistics summary.
|
||||
/// </summary>
|
||||
[BsonElement("statistics")]
|
||||
public ExplainStatisticsDocument Statistics { get; set; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// Determinism hash for reproducibility verification.
|
||||
/// </summary>
|
||||
[BsonElement("determinismHash")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? DeterminismHash { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Reference to AOC chain for this evaluation.
|
||||
/// </summary>
|
||||
[BsonElement("aocChain")]
|
||||
[BsonIgnoreIfNull]
|
||||
public ExplainAocChainDocument? AocChain { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Additional metadata.
|
||||
/// </summary>
|
||||
[BsonElement("metadata")]
|
||||
public Dictionary<string, string> Metadata { get; set; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// Creation timestamp.
|
||||
/// </summary>
|
||||
[BsonElement("createdAt")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// TTL expiration timestamp for automatic cleanup.
|
||||
/// </summary>
|
||||
[BsonElement("expiresAt")]
|
||||
[BsonIgnoreIfNull]
|
||||
public DateTimeOffset? ExpiresAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Creates the composite ID for an explain trace.
|
||||
/// </summary>
|
||||
public static string CreateId(string runId, string subjectHash) => $"{runId}:{subjectHash}";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Input context embedded document.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class ExplainInputContextDocument
|
||||
{
|
||||
[BsonElement("componentPurl")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? ComponentPurl { get; set; }
|
||||
|
||||
[BsonElement("componentName")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? ComponentName { get; set; }
|
||||
|
||||
[BsonElement("componentVersion")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? ComponentVersion { get; set; }
|
||||
|
||||
[BsonElement("advisoryId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? AdvisoryId { get; set; }
|
||||
|
||||
[BsonElement("vulnerabilityId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? VulnerabilityId { get; set; }
|
||||
|
||||
[BsonElement("inputSeverity")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? InputSeverity { get; set; }
|
||||
|
||||
[BsonElement("inputCvssScore")]
|
||||
[BsonIgnoreIfNull]
|
||||
public decimal? InputCvssScore { get; set; }
|
||||
|
||||
[BsonElement("environment")]
|
||||
public Dictionary<string, string> Environment { get; set; } = new();
|
||||
|
||||
[BsonElement("sbomTags")]
|
||||
public List<string> SbomTags { get; set; } = [];
|
||||
|
||||
[BsonElement("reachabilityState")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? ReachabilityState { get; set; }
|
||||
|
||||
[BsonElement("reachabilityConfidence")]
|
||||
[BsonIgnoreIfNull]
|
||||
public double? ReachabilityConfidence { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Rule step embedded document.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class ExplainRuleStepDocument
|
||||
{
|
||||
[BsonElement("stepNumber")]
|
||||
public int StepNumber { get; set; }
|
||||
|
||||
[BsonElement("ruleName")]
|
||||
public string RuleName { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("rulePriority")]
|
||||
public int RulePriority { get; set; }
|
||||
|
||||
[BsonElement("ruleCategory")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? RuleCategory { get; set; }
|
||||
|
||||
[BsonElement("expression")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Expression { get; set; }
|
||||
|
||||
[BsonElement("matched")]
|
||||
public bool Matched { get; set; }
|
||||
|
||||
[BsonElement("outcome")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Outcome { get; set; }
|
||||
|
||||
[BsonElement("assignedSeverity")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? AssignedSeverity { get; set; }
|
||||
|
||||
[BsonElement("isFinalMatch")]
|
||||
public bool IsFinalMatch { get; set; }
|
||||
|
||||
[BsonElement("explanation")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Explanation { get; set; }
|
||||
|
||||
[BsonElement("evaluationMicroseconds")]
|
||||
public long EvaluationMicroseconds { get; set; }
|
||||
|
||||
[BsonElement("intermediateValues")]
|
||||
public Dictionary<string, string> IntermediateValues { get; set; } = new();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// VEX evidence embedded document.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class ExplainVexEvidenceDocument
|
||||
{
|
||||
[BsonElement("vendor")]
|
||||
public string Vendor { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("status")]
|
||||
public string Status { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("justification")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Justification { get; set; }
|
||||
|
||||
[BsonElement("confidence")]
|
||||
[BsonIgnoreIfNull]
|
||||
public double? Confidence { get; set; }
|
||||
|
||||
[BsonElement("wasApplied")]
|
||||
public bool WasApplied { get; set; }
|
||||
|
||||
[BsonElement("explanation")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Explanation { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Statistics embedded document.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class ExplainStatisticsDocument
|
||||
{
|
||||
[BsonElement("totalRulesEvaluated")]
|
||||
public int TotalRulesEvaluated { get; set; }
|
||||
|
||||
[BsonElement("totalRulesFired")]
|
||||
public int TotalRulesFired { get; set; }
|
||||
|
||||
[BsonElement("totalVexOverrides")]
|
||||
public int TotalVexOverrides { get; set; }
|
||||
|
||||
[BsonElement("totalEvaluationMs")]
|
||||
public long TotalEvaluationMs { get; set; }
|
||||
|
||||
[BsonElement("averageRuleEvaluationMicroseconds")]
|
||||
public double AverageRuleEvaluationMicroseconds { get; set; }
|
||||
|
||||
[BsonElement("rulesFiredByCategory")]
|
||||
public Dictionary<string, int> RulesFiredByCategory { get; set; } = new();
|
||||
|
||||
[BsonElement("rulesFiredByOutcome")]
|
||||
public Dictionary<string, int> RulesFiredByOutcome { get; set; } = new();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// AOC chain reference for linking decisions to attestations.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class ExplainAocChainDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Compilation ID that produced the policy bundle.
|
||||
/// </summary>
|
||||
[BsonElement("compilationId")]
|
||||
public string CompilationId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Compiler version used.
|
||||
/// </summary>
|
||||
[BsonElement("compilerVersion")]
|
||||
public string CompilerVersion { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Source digest of the policy document.
|
||||
/// </summary>
|
||||
[BsonElement("sourceDigest")]
|
||||
public string SourceDigest { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Artifact digest of the compiled bundle.
|
||||
/// </summary>
|
||||
[BsonElement("artifactDigest")]
|
||||
public string ArtifactDigest { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the signed attestation.
|
||||
/// </summary>
|
||||
[BsonElement("attestationRef")]
|
||||
[BsonIgnoreIfNull]
|
||||
public ExplainAttestationRefDocument? AttestationRef { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Provenance information.
|
||||
/// </summary>
|
||||
[BsonElement("provenance")]
|
||||
[BsonIgnoreIfNull]
|
||||
public ExplainProvenanceDocument? Provenance { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Attestation reference embedded document.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class ExplainAttestationRefDocument
|
||||
{
|
||||
[BsonElement("attestationId")]
|
||||
public string AttestationId { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("envelopeDigest")]
|
||||
public string EnvelopeDigest { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("uri")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Uri { get; set; }
|
||||
|
||||
[BsonElement("signingKeyId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? SigningKeyId { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Provenance embedded document.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class ExplainProvenanceDocument
|
||||
{
|
||||
[BsonElement("sourceType")]
|
||||
public string SourceType { get; set; } = string.Empty;
|
||||
|
||||
[BsonElement("sourceUrl")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? SourceUrl { get; set; }
|
||||
|
||||
[BsonElement("submitter")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Submitter { get; set; }
|
||||
|
||||
[BsonElement("commitSha")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? CommitSha { get; set; }
|
||||
|
||||
[BsonElement("branch")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Branch { get; set; }
|
||||
}
|
||||
@@ -0,0 +1,319 @@
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Bson.Serialization.Attributes;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Storage.Mongo.Documents;
|
||||
|
||||
/// <summary>
|
||||
/// MongoDB document representing a policy evaluation run.
|
||||
/// Collection: policy_runs
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyRunDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique run identifier.
|
||||
/// </summary>
|
||||
[BsonId]
|
||||
[BsonElement("_id")]
|
||||
public string Id { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Tenant identifier.
|
||||
/// </summary>
|
||||
[BsonElement("tenantId")]
|
||||
public string TenantId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Policy pack identifier.
|
||||
/// </summary>
|
||||
[BsonElement("policyId")]
|
||||
public string PolicyId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Policy version evaluated.
|
||||
/// </summary>
|
||||
[BsonElement("policyVersion")]
|
||||
public int PolicyVersion { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Run mode (full, incremental, simulation, batch).
|
||||
/// </summary>
|
||||
[BsonElement("mode")]
|
||||
public string Mode { get; set; } = "full";
|
||||
|
||||
/// <summary>
|
||||
/// Run status (pending, running, completed, failed, cancelled).
|
||||
/// </summary>
|
||||
[BsonElement("status")]
|
||||
public string Status { get; set; } = "pending";
|
||||
|
||||
/// <summary>
|
||||
/// Trigger type (scheduled, manual, event, api).
|
||||
/// </summary>
|
||||
[BsonElement("triggerType")]
|
||||
public string TriggerType { get; set; } = "manual";
|
||||
|
||||
/// <summary>
|
||||
/// Correlation ID for distributed tracing.
|
||||
/// </summary>
|
||||
[BsonElement("correlationId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? CorrelationId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Trace ID for OpenTelemetry.
|
||||
/// </summary>
|
||||
[BsonElement("traceId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? TraceId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Parent span ID if part of larger operation.
|
||||
/// </summary>
|
||||
[BsonElement("parentSpanId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? ParentSpanId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// User or service that initiated the run.
|
||||
/// </summary>
|
||||
[BsonElement("initiatedBy")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? InitiatedBy { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Deterministic evaluation timestamp used for this run.
|
||||
/// </summary>
|
||||
[BsonElement("evaluationTimestamp")]
|
||||
public DateTimeOffset EvaluationTimestamp { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When the run started.
|
||||
/// </summary>
|
||||
[BsonElement("startedAt")]
|
||||
public DateTimeOffset StartedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When the run completed (null if still running).
|
||||
/// </summary>
|
||||
[BsonElement("completedAt")]
|
||||
[BsonIgnoreIfNull]
|
||||
public DateTimeOffset? CompletedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Run metrics and statistics.
|
||||
/// </summary>
|
||||
[BsonElement("metrics")]
|
||||
public PolicyRunMetricsDocument Metrics { get; set; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// Input parameters for the run.
|
||||
/// </summary>
|
||||
[BsonElement("input")]
|
||||
[BsonIgnoreIfNull]
|
||||
public PolicyRunInputDocument? Input { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Run outcome summary.
|
||||
/// </summary>
|
||||
[BsonElement("outcome")]
|
||||
[BsonIgnoreIfNull]
|
||||
public PolicyRunOutcomeDocument? Outcome { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Error information if run failed.
|
||||
/// </summary>
|
||||
[BsonElement("error")]
|
||||
[BsonIgnoreIfNull]
|
||||
public PolicyRunErrorDocument? Error { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Determinism hash for reproducibility verification.
|
||||
/// </summary>
|
||||
[BsonElement("determinismHash")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? DeterminismHash { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// TTL expiration timestamp for automatic cleanup.
|
||||
/// </summary>
|
||||
[BsonElement("expiresAt")]
|
||||
[BsonIgnoreIfNull]
|
||||
public DateTimeOffset? ExpiresAt { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Embedded metrics document for policy runs.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyRunMetricsDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Total components evaluated.
|
||||
/// </summary>
|
||||
[BsonElement("totalComponents")]
|
||||
public int TotalComponents { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Total advisories evaluated.
|
||||
/// </summary>
|
||||
[BsonElement("totalAdvisories")]
|
||||
public int TotalAdvisories { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Total findings generated.
|
||||
/// </summary>
|
||||
[BsonElement("totalFindings")]
|
||||
public int TotalFindings { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Rules evaluated count.
|
||||
/// </summary>
|
||||
[BsonElement("rulesEvaluated")]
|
||||
public int RulesEvaluated { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Rules that matched/fired.
|
||||
/// </summary>
|
||||
[BsonElement("rulesFired")]
|
||||
public int RulesFired { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// VEX overrides applied.
|
||||
/// </summary>
|
||||
[BsonElement("vexOverridesApplied")]
|
||||
public int VexOverridesApplied { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Findings created (new).
|
||||
/// </summary>
|
||||
[BsonElement("findingsCreated")]
|
||||
public int FindingsCreated { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Findings updated (changed).
|
||||
/// </summary>
|
||||
[BsonElement("findingsUpdated")]
|
||||
public int FindingsUpdated { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Findings unchanged.
|
||||
/// </summary>
|
||||
[BsonElement("findingsUnchanged")]
|
||||
public int FindingsUnchanged { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Duration in milliseconds.
|
||||
/// </summary>
|
||||
[BsonElement("durationMs")]
|
||||
public long DurationMs { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Memory used in bytes.
|
||||
/// </summary>
|
||||
[BsonElement("memoryUsedBytes")]
|
||||
public long MemoryUsedBytes { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Embedded input parameters document.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyRunInputDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// SBOM IDs included in evaluation.
|
||||
/// </summary>
|
||||
[BsonElement("sbomIds")]
|
||||
public List<string> SbomIds { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Product keys included in evaluation.
|
||||
/// </summary>
|
||||
[BsonElement("productKeys")]
|
||||
public List<string> ProductKeys { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Advisory IDs to evaluate (empty = all).
|
||||
/// </summary>
|
||||
[BsonElement("advisoryIds")]
|
||||
public List<string> AdvisoryIds { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Filter criteria applied.
|
||||
/// </summary>
|
||||
[BsonElement("filters")]
|
||||
[BsonIgnoreIfNull]
|
||||
public Dictionary<string, string>? Filters { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Embedded outcome summary document.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyRunOutcomeDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Overall outcome (pass, fail, warn).
|
||||
/// </summary>
|
||||
[BsonElement("result")]
|
||||
public string Result { get; set; } = "pass";
|
||||
|
||||
/// <summary>
|
||||
/// Findings by severity.
|
||||
/// </summary>
|
||||
[BsonElement("bySeverity")]
|
||||
public Dictionary<string, int> BySeverity { get; set; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// Findings by status.
|
||||
/// </summary>
|
||||
[BsonElement("byStatus")]
|
||||
public Dictionary<string, int> ByStatus { get; set; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// Blocking findings count.
|
||||
/// </summary>
|
||||
[BsonElement("blockingCount")]
|
||||
public int BlockingCount { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Summary message.
|
||||
/// </summary>
|
||||
[BsonElement("message")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? Message { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Embedded error document.
|
||||
/// </summary>
|
||||
[BsonIgnoreExtraElements]
|
||||
public sealed class PolicyRunErrorDocument
|
||||
{
|
||||
/// <summary>
|
||||
/// Error code.
|
||||
/// </summary>
|
||||
[BsonElement("code")]
|
||||
public string Code { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Error message.
|
||||
/// </summary>
|
||||
[BsonElement("message")]
|
||||
public string Message { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Stack trace (if available).
|
||||
/// </summary>
|
||||
[BsonElement("stackTrace")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? StackTrace { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Inner error details.
|
||||
/// </summary>
|
||||
[BsonElement("innerError")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? InnerError { get; set; }
|
||||
}
|
||||
@@ -0,0 +1,59 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.Policy.Engine.Storage.Mongo.Options;

namespace StellaOps.Policy.Engine.Storage.Mongo.Internal;

/// <summary>
/// MongoDB context for Policy Engine storage operations.
/// Provides configured access to the database with appropriate read/write concerns.
/// </summary>
internal sealed class PolicyEngineMongoContext
{
    public PolicyEngineMongoContext(IOptions<PolicyEngineMongoOptions> options, ILogger<PolicyEngineMongoContext> logger)
    {
        ArgumentNullException.ThrowIfNull(logger);
        var value = options?.Value ?? throw new ArgumentNullException(nameof(options));

        if (string.IsNullOrWhiteSpace(value.ConnectionString))
        {
            throw new InvalidOperationException("Policy Engine Mongo connection string is not configured.");
        }

        if (string.IsNullOrWhiteSpace(value.Database))
        {
            throw new InvalidOperationException("Policy Engine Mongo database name is not configured.");
        }

        Client = new MongoClient(value.ConnectionString);
        var settings = new MongoDatabaseSettings();
        if (value.UseMajorityReadConcern)
        {
            settings.ReadConcern = ReadConcern.Majority;
        }

        if (value.UseMajorityWriteConcern)
        {
            settings.WriteConcern = WriteConcern.WMajority;
        }

        Database = Client.GetDatabase(value.Database, settings);
        Options = value;
    }

    /// <summary>
    /// MongoDB client instance.
    /// </summary>
    public MongoClient Client { get; }

    /// <summary>
    /// MongoDB database instance with configured read/write concerns.
    /// </summary>
    public IMongoDatabase Database { get; }

    /// <summary>
    /// Policy Engine MongoDB options.
    /// </summary>
    public PolicyEngineMongoOptions Options { get; }
}
@@ -0,0 +1,44 @@
using Microsoft.Extensions.Logging;
using StellaOps.Policy.Engine.Storage.Mongo.Migrations;

namespace StellaOps.Policy.Engine.Storage.Mongo.Internal;

/// <summary>
/// Interface for Policy Engine MongoDB initialization.
/// </summary>
internal interface IPolicyEngineMongoInitializer
{
    /// <summary>
    /// Ensures all migrations are applied to the database.
    /// </summary>
    Task EnsureMigrationsAsync(CancellationToken cancellationToken = default);
}

/// <summary>
/// Initializes Policy Engine MongoDB storage by applying migrations.
/// </summary>
internal sealed class PolicyEngineMongoInitializer : IPolicyEngineMongoInitializer
{
    private readonly PolicyEngineMongoContext _context;
    private readonly PolicyEngineMigrationRunner _migrationRunner;
    private readonly ILogger<PolicyEngineMongoInitializer> _logger;

    public PolicyEngineMongoInitializer(
        PolicyEngineMongoContext context,
        PolicyEngineMigrationRunner migrationRunner,
        ILogger<PolicyEngineMongoInitializer> logger)
    {
        _context = context ?? throw new ArgumentNullException(nameof(context));
        _migrationRunner = migrationRunner ?? throw new ArgumentNullException(nameof(migrationRunner));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public async Task EnsureMigrationsAsync(CancellationToken cancellationToken = default)
    {
        _logger.LogInformation(
            "Ensuring Policy Engine Mongo migrations are applied for database {Database}.",
            _context.Options.Database);
        await _migrationRunner.RunAsync(cancellationToken).ConfigureAwait(false);
    }
}
@@ -0,0 +1,69 @@
using MongoDB.Driver;

namespace StellaOps.Policy.Engine.Storage.Mongo.Internal;

/// <summary>
/// Builds tenant-scoped filters for Policy Engine MongoDB queries.
/// Ensures all queries are properly scoped to the current tenant.
/// </summary>
internal static class TenantFilterBuilder
{
    /// <summary>
    /// Creates a filter that matches documents for the specified tenant.
    /// </summary>
    /// <typeparam name="TDocument">Document type with tenantId field.</typeparam>
    /// <param name="tenantId">Tenant identifier (will be normalized to lowercase).</param>
    /// <returns>A filter definition scoped to the tenant.</returns>
    public static FilterDefinition<TDocument> ForTenant<TDocument>(string tenantId)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        var normalizedTenantId = tenantId.ToLowerInvariant();
        return Builders<TDocument>.Filter.Eq("tenantId", normalizedTenantId);
    }

    /// <summary>
    /// Combines a tenant filter with an additional filter using AND.
    /// </summary>
    /// <typeparam name="TDocument">Document type with tenantId field.</typeparam>
    /// <param name="tenantId">Tenant identifier (will be normalized to lowercase).</param>
    /// <param name="additionalFilter">Additional filter to combine.</param>
    /// <returns>A combined filter definition.</returns>
    public static FilterDefinition<TDocument> ForTenantAnd<TDocument>(
        string tenantId,
        FilterDefinition<TDocument> additionalFilter)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentNullException.ThrowIfNull(additionalFilter);

        var tenantFilter = ForTenant<TDocument>(tenantId);
        return Builders<TDocument>.Filter.And(tenantFilter, additionalFilter);
    }

    /// <summary>
    /// Creates a filter that matches documents by ID within a tenant scope.
    /// </summary>
    /// <typeparam name="TDocument">Document type with tenantId and _id fields.</typeparam>
    /// <param name="tenantId">Tenant identifier (will be normalized to lowercase).</param>
    /// <param name="documentId">Document identifier.</param>
    /// <returns>A filter definition matching both tenant and ID.</returns>
    public static FilterDefinition<TDocument> ForTenantById<TDocument>(string tenantId, string documentId)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(documentId);

        var tenantFilter = ForTenant<TDocument>(tenantId);
        var idFilter = Builders<TDocument>.Filter.Eq("_id", documentId);
        return Builders<TDocument>.Filter.And(tenantFilter, idFilter);
    }

    /// <summary>
    /// Normalizes a tenant ID to lowercase for consistent storage and queries.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <returns>Normalized (lowercase) tenant identifier.</returns>
    public static string NormalizeTenantId(string tenantId)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        return tenantId.ToLowerInvariant();
    }
}
@@ -0,0 +1,283 @@
using Microsoft.Extensions.Logging;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Policy.Engine.Storage.Mongo.Internal;

namespace StellaOps.Policy.Engine.Storage.Mongo.Migrations;

/// <summary>
/// Initializes effective_finding_* and effective_finding_history_* collections for a policy.
/// Creates collections and indexes on-demand when a policy is first evaluated.
/// </summary>
internal interface IEffectiveFindingCollectionInitializer
{
    /// <summary>
    /// Ensures the effective finding collection and indexes exist for a policy.
    /// </summary>
    /// <param name="policyId">The policy identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    ValueTask EnsureCollectionAsync(string policyId, CancellationToken cancellationToken);
}

/// <inheritdoc />
internal sealed class EffectiveFindingCollectionInitializer : IEffectiveFindingCollectionInitializer
{
    private readonly PolicyEngineMongoContext _context;
    private readonly ILogger<EffectiveFindingCollectionInitializer> _logger;
    private readonly HashSet<string> _initializedCollections = new(StringComparer.OrdinalIgnoreCase);
    private readonly SemaphoreSlim _lock = new(1, 1);

    public EffectiveFindingCollectionInitializer(
        PolicyEngineMongoContext context,
        ILogger<EffectiveFindingCollectionInitializer> logger)
    {
        _context = context ?? throw new ArgumentNullException(nameof(context));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public async ValueTask EnsureCollectionAsync(string policyId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(policyId);

        var findingsCollectionName = _context.Options.GetEffectiveFindingsCollectionName(policyId);
        var historyCollectionName = _context.Options.GetEffectiveFindingsHistoryCollectionName(policyId);

        // Fast path: already initialized in memory
        if (_initializedCollections.Contains(findingsCollectionName))
        {
            return;
        }

        await _lock.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            // Double-check after acquiring lock
            if (_initializedCollections.Contains(findingsCollectionName))
            {
                return;
            }

            await EnsureEffectiveFindingCollectionAsync(findingsCollectionName, cancellationToken).ConfigureAwait(false);
            await EnsureEffectiveFindingHistoryCollectionAsync(historyCollectionName, cancellationToken).ConfigureAwait(false);

            _initializedCollections.Add(findingsCollectionName);
        }
        finally
        {
            _lock.Release();
        }
    }

    private async Task EnsureEffectiveFindingCollectionAsync(string collectionName, CancellationToken cancellationToken)
    {
        var cursor = await _context.Database
            .ListCollectionNamesAsync(cancellationToken: cancellationToken)
            .ConfigureAwait(false);

        var existing = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false);

        if (!existing.Contains(collectionName, StringComparer.Ordinal))
        {
            _logger.LogInformation("Creating effective finding collection '{CollectionName}'.", collectionName);
            await _context.Database.CreateCollectionAsync(collectionName, cancellationToken: cancellationToken).ConfigureAwait(false);
        }

        var collection = _context.Database.GetCollection<BsonDocument>(collectionName);

        // Unique constraint on (tenantId, componentPurl, advisoryId)
        var tenantComponentAdvisory = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("componentPurl")
                .Ascending("advisoryId"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_component_advisory_unique",
                Unique = true
            });

        // Tenant + severity for filtering by risk level
        var tenantSeverity = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("severity")
                .Descending("updatedAt"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_severity_updatedAt_desc"
            });

        // Tenant + status for filtering by policy status
        var tenantStatus = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("status")
                .Descending("updatedAt"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_status_updatedAt_desc"
            });

        // Product key lookup for SBOM-based queries
        var tenantProduct = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("productKey"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_product",
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("productKey", true)
            });

        // SBOM ID lookup
        var tenantSbom = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("sbomId"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_sbom",
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("sbomId", true)
            });

        // Component name lookup for search
        var tenantComponentName = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("componentName"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_componentName"
            });

        // Advisory ID lookup for cross-policy queries
        var tenantAdvisory = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("advisoryId"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_advisory"
            });

        // Policy run reference for traceability
        var policyRun = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("policyRunId"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "policyRun_lookup",
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("policyRunId", true)
            });

        // Content hash for deduplication checks
        var contentHash = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("contentHash"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "contentHash_lookup"
            });

        await collection.Indexes.CreateManyAsync(
            new[]
            {
                tenantComponentAdvisory,
                tenantSeverity,
                tenantStatus,
                tenantProduct,
                tenantSbom,
                tenantComponentName,
                tenantAdvisory,
                policyRun,
                contentHash
            },
            cancellationToken: cancellationToken).ConfigureAwait(false);

        _logger.LogInformation("Created indexes for effective finding collection '{CollectionName}'.", collectionName);
    }

    private async Task EnsureEffectiveFindingHistoryCollectionAsync(string collectionName, CancellationToken cancellationToken)
    {
        var cursor = await _context.Database
            .ListCollectionNamesAsync(cancellationToken: cancellationToken)
            .ConfigureAwait(false);

        var existing = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false);

        if (!existing.Contains(collectionName, StringComparer.Ordinal))
        {
            _logger.LogInformation("Creating effective finding history collection '{CollectionName}'.", collectionName);
            await _context.Database.CreateCollectionAsync(collectionName, cancellationToken: cancellationToken).ConfigureAwait(false);
        }

        var collection = _context.Database.GetCollection<BsonDocument>(collectionName);

        // Finding + version for retrieving history
        var findingVersion = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("findingId")
                .Descending("version"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "finding_version_desc"
            });

        // Tenant + occurred for chronological history
        var tenantOccurred = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Descending("occurredAt"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_occurredAt_desc"
            });

        // Change type lookup for filtering history events
        var tenantChangeType = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("changeType"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_changeType"
            });

        // Policy run reference
        var policyRun = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("policyRunId"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "policyRun_lookup",
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("policyRunId", true)
            });

        var models = new List<CreateIndexModel<BsonDocument>>
        {
            findingVersion,
            tenantOccurred,
            tenantChangeType,
            policyRun
        };

        // TTL index for automatic cleanup of old history entries
        if (_context.Options.EffectiveFindingsHistoryRetention > TimeSpan.Zero)
        {
            var ttlModel = new CreateIndexModel<BsonDocument>(
                Builders<BsonDocument>.IndexKeys.Ascending("expiresAt"),
                new CreateIndexOptions<BsonDocument>
                {
                    Name = "expiresAt_ttl",
                    ExpireAfter = TimeSpan.Zero
                });

            models.Add(ttlModel);
        }

        await collection.Indexes.CreateManyAsync(models, cancellationToken: cancellationToken).ConfigureAwait(false);

        _logger.LogInformation("Created indexes for effective finding history collection '{CollectionName}'.", collectionName);
    }
}
@@ -0,0 +1,345 @@
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Policy.Engine.Storage.Mongo.Internal;

namespace StellaOps.Policy.Engine.Storage.Mongo.Migrations;

/// <summary>
/// Migration to ensure all required indexes exist for exception collections.
/// Creates indexes for efficient tenant-scoped queries and status lookups.
/// </summary>
internal sealed class EnsureExceptionIndexesMigration : IPolicyEngineMongoMigration
{
    /// <inheritdoc />
    public string Id => "20251128_exception_indexes_v1";

    /// <inheritdoc />
    public async ValueTask ExecuteAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);

        await EnsureExceptionsIndexesAsync(context, cancellationToken).ConfigureAwait(false);
        await EnsureExceptionReviewsIndexesAsync(context, cancellationToken).ConfigureAwait(false);
        await EnsureExceptionBindingsIndexesAsync(context, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Creates indexes for the exceptions collection.
    /// </summary>
    private static async Task EnsureExceptionsIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken)
    {
        var collection = context.Database.GetCollection<BsonDocument>(context.Options.ExceptionsCollection);

        // Tenant + status for finding active/pending exceptions
        var tenantStatus = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("status"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_status"
            });

        // Tenant + type + status for filtering
        var tenantTypeStatus = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("exceptionType")
                .Ascending("status"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_type_status"
            });

        // Tenant + created descending for recent exceptions
        var tenantCreated = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Descending("createdAt"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_createdAt_desc"
            });

        // Tenant + tags for filtering by tag
        var tenantTags = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("tags"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_tags"
            });

        // Tenant + expiresAt for finding expiring exceptions
        var tenantExpires = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("status")
                .Ascending("expiresAt"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_status_expiresAt",
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("expiresAt", true)
            });

        // Tenant + effectiveFrom for finding pending activations
        var tenantEffectiveFrom = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("status")
                .Ascending("effectiveFrom"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_status_effectiveFrom",
                PartialFilterExpression = Builders<BsonDocument>.Filter.Eq("status", "approved")
            });

        // Scope advisory IDs for finding applicable exceptions
        var scopeAdvisoryIds = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("status")
                .Ascending("scope.advisoryIds"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_status_scope_advisoryIds"
            });

        // Scope asset IDs for finding applicable exceptions
        var scopeAssetIds = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("status")
                .Ascending("scope.assetIds"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_status_scope_assetIds"
            });

        // Scope CVE IDs for finding applicable exceptions
        var scopeCveIds = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("status")
                .Ascending("scope.cveIds"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_status_scope_cveIds"
            });

        // CreatedBy for audit queries
        var tenantCreatedBy = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("createdBy"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_createdBy"
            });

        // Priority for ordering applicable exceptions
        var tenantPriority = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("status")
                .Descending("priority"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_status_priority_desc"
            });

        // Correlation ID for tracing
        var correlationId = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("correlationId"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "correlationId_lookup",
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("correlationId", true)
            });

        await collection.Indexes.CreateManyAsync(
            new[]
            {
                tenantStatus,
                tenantTypeStatus,
                tenantCreated,
                tenantTags,
                tenantExpires,
                tenantEffectiveFrom,
                scopeAdvisoryIds,
                scopeAssetIds,
                scopeCveIds,
                tenantCreatedBy,
                tenantPriority,
                correlationId
            },
            cancellationToken: cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Creates indexes for the exception_reviews collection.
    /// </summary>
    private static async Task EnsureExceptionReviewsIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken)
    {
        var collection = context.Database.GetCollection<BsonDocument>(context.Options.ExceptionReviewsCollection);

        // Tenant + exception for finding reviews of an exception
        var tenantException = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("exceptionId")
                .Descending("requestedAt"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_exceptionId_requestedAt_desc"
            });

        // Tenant + status for finding pending reviews
        var tenantStatus = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("status"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_status"
            });

        // Tenant + designated reviewers for reviewer's queue
        var tenantReviewers = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("status")
                .Ascending("designatedReviewers"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_status_designatedReviewers"
            });

        // Deadline for finding overdue reviews
        var tenantDeadline = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("status")
                .Ascending("deadline"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_status_deadline",
                PartialFilterExpression = Builders<BsonDocument>.Filter.And(
                    Builders<BsonDocument>.Filter.Eq("status", "pending"),
                    Builders<BsonDocument>.Filter.Exists("deadline", true))
            });

        // RequestedBy for audit queries
        var tenantRequestedBy = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("requestedBy"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_requestedBy"
            });

        await collection.Indexes.CreateManyAsync(
            new[]
            {
                tenantException,
                tenantStatus,
                tenantReviewers,
                tenantDeadline,
                tenantRequestedBy
            },
            cancellationToken: cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Creates indexes for the exception_bindings collection.
    /// </summary>
    private static async Task EnsureExceptionBindingsIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken)
    {
        var collection = context.Database.GetCollection<BsonDocument>(context.Options.ExceptionBindingsCollection);

        // Tenant + exception for finding bindings of an exception
        var tenantException = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("exceptionId"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_exceptionId"
            });

        // Tenant + asset for finding bindings for an asset
        var tenantAsset = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("assetId")
                .Ascending("status"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_assetId_status"
            });

        // Tenant + advisory for finding bindings by advisory
        var tenantAdvisory = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("advisoryId")
                .Ascending("status"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_advisoryId_status",
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("advisoryId", true)
            });

        // Tenant + CVE for finding bindings by CVE
        var tenantCve = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("cveId")
                .Ascending("status"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_cveId_status",
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("cveId", true)
            });

        // Tenant + status + expiresAt for finding expired bindings
        var tenantExpires = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("status")
                .Ascending("expiresAt"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_status_expiresAt",
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("expiresAt", true)
            });

        // Effective time range for finding active bindings at a point in time
        var tenantEffectiveRange = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("assetId")
                .Ascending("status")
                .Ascending("effectiveFrom")
                .Ascending("expiresAt"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_asset_status_effectiveRange"
            });

        await collection.Indexes.CreateManyAsync(
            new[]
            {
                tenantException,
                tenantAsset,
                tenantAdvisory,
                tenantCve,
                tenantExpires,
                tenantEffectiveRange
            },
            cancellationToken: cancellationToken).ConfigureAwait(false);
    }
}
@@ -0,0 +1,54 @@
using Microsoft.Extensions.Logging;
using MongoDB.Driver;
using StellaOps.Policy.Engine.Storage.Mongo.Internal;

namespace StellaOps.Policy.Engine.Storage.Mongo.Migrations;

/// <summary>
/// Migration to ensure all required Policy Engine collections exist.
/// Creates: policies, policy_revisions, policy_bundles, policy_runs, policy_audit, _policy_migrations
/// Note: effective_finding_* and effective_finding_history_* collections are created dynamically per-policy.
/// </summary>
internal sealed class EnsurePolicyCollectionsMigration : IPolicyEngineMongoMigration
{
    private readonly ILogger<EnsurePolicyCollectionsMigration> _logger;

    public EnsurePolicyCollectionsMigration(ILogger<EnsurePolicyCollectionsMigration> logger)
        => _logger = logger ?? throw new ArgumentNullException(nameof(logger));

    /// <inheritdoc />
    public string Id => "20251128_policy_collections_v1";

    /// <inheritdoc />
    public async ValueTask ExecuteAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);

        var requiredCollections = new[]
        {
            context.Options.PoliciesCollection,
            context.Options.PolicyRevisionsCollection,
            context.Options.PolicyBundlesCollection,
            context.Options.PolicyRunsCollection,
            context.Options.AuditCollection,
            context.Options.MigrationsCollection
        };

        var cursor = await context.Database
            .ListCollectionNamesAsync(cancellationToken: cancellationToken)
            .ConfigureAwait(false);

        var existing = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false);

        foreach (var collection in requiredCollections)
        {
            if (existing.Contains(collection, StringComparer.Ordinal))
            {
                continue;
            }

            _logger.LogInformation("Creating Policy Engine Mongo collection '{CollectionName}'.", collection);
            await context.Database.CreateCollectionAsync(collection, cancellationToken: cancellationToken).ConfigureAwait(false);
        }
    }
}
@@ -0,0 +1,312 @@
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Policy.Engine.Storage.Mongo.Internal;

namespace StellaOps.Policy.Engine.Storage.Mongo.Migrations;

/// <summary>
/// Migration to ensure all required indexes exist for Policy Engine collections.
/// Creates indexes for efficient tenant-scoped queries and TTL cleanup.
/// </summary>
internal sealed class EnsurePolicyIndexesMigration : IPolicyEngineMongoMigration
{
    /// <inheritdoc />
    public string Id => "20251128_policy_indexes_v1";

    /// <inheritdoc />
    public async ValueTask ExecuteAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);

        await EnsurePoliciesIndexesAsync(context, cancellationToken).ConfigureAwait(false);
        await EnsurePolicyRevisionsIndexesAsync(context, cancellationToken).ConfigureAwait(false);
        await EnsurePolicyBundlesIndexesAsync(context, cancellationToken).ConfigureAwait(false);
        await EnsurePolicyRunsIndexesAsync(context, cancellationToken).ConfigureAwait(false);
        await EnsureAuditIndexesAsync(context, cancellationToken).ConfigureAwait(false);
        await EnsureExplainsIndexesAsync(context, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Creates indexes for the policies collection.
    /// </summary>
    private static async Task EnsurePoliciesIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken)
    {
        var collection = context.Database.GetCollection<BsonDocument>(context.Options.PoliciesCollection);

        // Tenant lookup with optional tag filtering
        var tenantTags = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("tags"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_tags"
            });

        // Tenant + updated for recent changes
        var tenantUpdated = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Descending("updatedAt"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_updatedAt_desc"
            });

        await collection.Indexes.CreateManyAsync(new[] { tenantTags, tenantUpdated }, cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>
    /// Creates indexes for the policy_revisions collection.
    /// </summary>
    private static async Task EnsurePolicyRevisionsIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken)
    {
        var collection = context.Database.GetCollection<BsonDocument>(context.Options.PolicyRevisionsCollection);

        // Tenant + pack for finding revisions of a policy
        var tenantPack = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("packId")
                .Descending("version"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_pack_version_desc"
            });

        // Status lookup for finding active/draft revisions
        var tenantStatus = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("status"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_status"
            });

        // Bundle digest lookup for integrity verification
        var bundleDigest = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("bundleDigest"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "bundleDigest_lookup",
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("bundleDigest", true)
            });

        await collection.Indexes.CreateManyAsync(new[] { tenantPack, tenantStatus, bundleDigest }, cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>
    /// Creates indexes for the policy_bundles collection.
    /// </summary>
    private static async Task EnsurePolicyBundlesIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken)
    {
        var collection = context.Database.GetCollection<BsonDocument>(context.Options.PolicyBundlesCollection);

        // Tenant + pack + version for finding specific bundles
        var tenantPackVersion = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("packId")
                .Ascending("version"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_pack_version",
                Unique = true
            });

        await collection.Indexes.CreateManyAsync(new[] { tenantPackVersion }, cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>
    /// Creates indexes for the policy_runs collection.
    /// </summary>
    private static async Task EnsurePolicyRunsIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken)
    {
        var collection = context.Database.GetCollection<BsonDocument>(context.Options.PolicyRunsCollection);

        // Tenant + policy + started for recent runs
        var tenantPolicyStarted = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("policyId")
                .Descending("startedAt"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_policy_startedAt_desc"
            });

        // Status lookup for finding pending/running evaluations
        var tenantStatus = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("status"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_status"
            });

        // Correlation ID lookup for tracing
        var correlationId = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("correlationId"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "correlationId_lookup",
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("correlationId", true)
            });

        // Trace ID lookup for OpenTelemetry
        var traceId = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("traceId"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "traceId_lookup",
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("traceId", true)
            });

        var models = new List<CreateIndexModel<BsonDocument>>
        {
            tenantPolicyStarted,
            tenantStatus,
            correlationId,
            traceId
        };

        // TTL index for automatic cleanup of completed runs
        if (context.Options.PolicyRunRetention > TimeSpan.Zero)
        {
            var ttlModel = new CreateIndexModel<BsonDocument>(
                Builders<BsonDocument>.IndexKeys.Ascending("expiresAt"),
                new CreateIndexOptions<BsonDocument>
                {
                    Name = "expiresAt_ttl",
                    ExpireAfter = TimeSpan.Zero
                });

            models.Add(ttlModel);
        }

        await collection.Indexes.CreateManyAsync(models, cancellationToken: cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Creates indexes for the policy_audit collection.
    /// </summary>
    private static async Task EnsureAuditIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken)
    {
        var collection = context.Database.GetCollection<BsonDocument>(context.Options.AuditCollection);

        // Tenant + occurred for chronological audit trail
        var tenantOccurred = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Descending("occurredAt"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_occurredAt_desc"
            });

        // Actor lookup for finding actions by user
        var tenantActor = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("actorId"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_actor"
            });

        // Resource lookup for finding actions on specific policy
        var tenantResource = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("resourceType")
                .Ascending("resourceId"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_resource"
            });

        await collection.Indexes.CreateManyAsync(new[] { tenantOccurred, tenantActor, tenantResource }, cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>
    /// Creates indexes for the policy_explains collection.
    /// </summary>
    private static async Task EnsureExplainsIndexesAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken)
    {
        var collection = context.Database.GetCollection<BsonDocument>(context.Options.PolicyExplainsCollection);

        // Tenant + run for finding all explains in a run
        var tenantRun = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("runId"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_runId"
            });

        // Tenant + policy + evaluated time for recent explains
        var tenantPolicyEvaluated = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("policyId")
                .Descending("evaluatedAt"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_policy_evaluatedAt_desc"
            });

        // Subject hash lookup for decision linkage
        var subjectHash = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("tenantId")
                .Ascending("subjectHash"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "tenant_subjectHash"
            });

        // AOC chain lookup for attestation queries
        var aocCompilation = new CreateIndexModel<BsonDocument>(
            Builders<BsonDocument>.IndexKeys
                .Ascending("aocChain.compilationId"),
            new CreateIndexOptions<BsonDocument>
            {
                Name = "aocChain_compilationId",
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("aocChain.compilationId", true)
            });

        var models = new List<CreateIndexModel<BsonDocument>>
        {
            tenantRun,
            tenantPolicyEvaluated,
            subjectHash,
            aocCompilation
        };

        // TTL index for automatic cleanup
        if (context.Options.ExplainTraceRetention > TimeSpan.Zero)
        {
            var ttlModel = new CreateIndexModel<BsonDocument>(
                Builders<BsonDocument>.IndexKeys.Ascending("expiresAt"),
                new CreateIndexOptions<BsonDocument>
                {
                    Name = "expiresAt_ttl",
                    ExpireAfter = TimeSpan.Zero
                });

            models.Add(ttlModel);
        }

        await collection.Indexes.CreateManyAsync(models, cancellationToken: cancellationToken).ConfigureAwait(false);
    }
}
@@ -0,0 +1,23 @@
using StellaOps.Policy.Engine.Storage.Mongo.Internal;

namespace StellaOps.Policy.Engine.Storage.Mongo.Migrations;

/// <summary>
/// Interface for Policy Engine MongoDB migrations.
/// Migrations are applied in lexical order by Id and tracked to ensure idempotency.
/// </summary>
internal interface IPolicyEngineMongoMigration
{
    /// <summary>
    /// Unique migration identifier.
    /// Format: YYYYMMDD_description_vN (e.g., "20251128_policy_collections_v1")
    /// </summary>
    string Id { get; }

    /// <summary>
    /// Executes the migration against the Policy Engine database.
    /// </summary>
    /// <param name="context">MongoDB context with database access.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    ValueTask ExecuteAsync(PolicyEngineMongoContext context, CancellationToken cancellationToken);
}
@@ -0,0 +1,30 @@
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;

namespace StellaOps.Policy.Engine.Storage.Mongo.Migrations;

/// <summary>
/// MongoDB document for tracking applied migrations.
/// Collection: _policy_migrations
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class PolicyEngineMigrationRecord
{
    /// <summary>
    /// MongoDB ObjectId.
    /// </summary>
    [BsonId]
    public ObjectId Id { get; set; }

    /// <summary>
    /// Unique migration identifier (matches IPolicyEngineMongoMigration.Id).
    /// </summary>
    [BsonElement("migrationId")]
    public string MigrationId { get; set; } = string.Empty;

    /// <summary>
    /// When the migration was applied.
    /// </summary>
    [BsonElement("appliedAt")]
    public DateTimeOffset AppliedAt { get; set; }
}
@@ -0,0 +1,85 @@
using Microsoft.Extensions.Logging;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Policy.Engine.Storage.Mongo.Internal;

namespace StellaOps.Policy.Engine.Storage.Mongo.Migrations;

/// <summary>
/// Executes Policy Engine MongoDB migrations in order.
/// Tracks applied migrations to ensure idempotency.
/// </summary>
internal sealed class PolicyEngineMigrationRunner
{
    private readonly PolicyEngineMongoContext _context;
    private readonly IReadOnlyList<IPolicyEngineMongoMigration> _migrations;
    private readonly ILogger<PolicyEngineMigrationRunner> _logger;

    public PolicyEngineMigrationRunner(
        PolicyEngineMongoContext context,
        IEnumerable<IPolicyEngineMongoMigration> migrations,
        ILogger<PolicyEngineMigrationRunner> logger)
    {
        _context = context ?? throw new ArgumentNullException(nameof(context));
        ArgumentNullException.ThrowIfNull(migrations);
        _migrations = migrations.OrderBy(m => m.Id, StringComparer.Ordinal).ToArray();
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Runs all pending migrations.
    /// </summary>
    public async ValueTask RunAsync(CancellationToken cancellationToken)
    {
        if (_migrations.Count == 0)
        {
            return;
        }

        var collection = _context.Database.GetCollection<PolicyEngineMigrationRecord>(_context.Options.MigrationsCollection);
        await EnsureMigrationIndexAsync(collection, cancellationToken).ConfigureAwait(false);

        var applied = await collection
            .Find(FilterDefinition<PolicyEngineMigrationRecord>.Empty)
            .Project(record => record.MigrationId)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        var appliedSet = applied.ToHashSet(StringComparer.Ordinal);

        foreach (var migration in _migrations)
        {
            if (appliedSet.Contains(migration.Id))
            {
                continue;
            }

            _logger.LogInformation("Applying Policy Engine Mongo migration {MigrationId}.", migration.Id);
            await migration.ExecuteAsync(_context, cancellationToken).ConfigureAwait(false);

            var record = new PolicyEngineMigrationRecord
            {
                Id = ObjectId.GenerateNewId(),
                MigrationId = migration.Id,
                AppliedAt = DateTimeOffset.UtcNow
            };

            await collection.InsertOneAsync(record, cancellationToken: cancellationToken).ConfigureAwait(false);
            _logger.LogInformation("Completed Policy Engine Mongo migration {MigrationId}.", migration.Id);
        }
    }

    private static async Task EnsureMigrationIndexAsync(
        IMongoCollection<PolicyEngineMigrationRecord> collection,
        CancellationToken cancellationToken)
    {
        var keys = Builders<PolicyEngineMigrationRecord>.IndexKeys.Ascending(record => record.MigrationId);
        var model = new CreateIndexModel<PolicyEngineMigrationRecord>(keys, new CreateIndexOptions
        {
            Name = "migrationId_unique",
            Unique = true
        });

        await collection.Indexes.CreateOneAsync(model, cancellationToken: cancellationToken).ConfigureAwait(false);
    }
}
@@ -0,0 +1,140 @@
namespace StellaOps.Policy.Engine.Storage.Mongo.Options;

/// <summary>
/// Configures MongoDB connectivity and collection names for Policy Engine storage.
/// </summary>
public sealed class PolicyEngineMongoOptions
{
    /// <summary>
    /// MongoDB connection string.
    /// </summary>
    public string ConnectionString { get; set; } = "mongodb://localhost:27017";

    /// <summary>
    /// Database name for policy storage.
    /// </summary>
    public string Database { get; set; } = "stellaops_policy";

    /// <summary>
    /// Collection name for policy packs.
    /// </summary>
    public string PoliciesCollection { get; set; } = "policies";

    /// <summary>
    /// Collection name for policy revisions.
    /// </summary>
    public string PolicyRevisionsCollection { get; set; } = "policy_revisions";

    /// <summary>
    /// Collection name for policy bundles (compiled artifacts).
    /// </summary>
    public string PolicyBundlesCollection { get; set; } = "policy_bundles";

    /// <summary>
    /// Collection name for policy evaluation runs.
    /// </summary>
    public string PolicyRunsCollection { get; set; } = "policy_runs";

    /// <summary>
    /// Collection prefix for effective findings (per-policy tenant-scoped).
    /// Final collection name: {prefix}_{policyId}
    /// </summary>
    public string EffectiveFindingsCollectionPrefix { get; set; } = "effective_finding";

    /// <summary>
    /// Collection prefix for effective findings history (append-only).
    /// Final collection name: {prefix}_{policyId}
    /// </summary>
    public string EffectiveFindingsHistoryCollectionPrefix { get; set; } = "effective_finding_history";

    /// <summary>
    /// Collection name for policy audit log.
    /// </summary>
    public string AuditCollection { get; set; } = "policy_audit";

    /// <summary>
    /// Collection name for policy explain traces.
    /// </summary>
    public string PolicyExplainsCollection { get; set; } = "policy_explains";

    /// <summary>
    /// Collection name for policy exceptions.
    /// </summary>
    public string ExceptionsCollection { get; set; } = "exceptions";

    /// <summary>
    /// Collection name for exception reviews.
    /// </summary>
    public string ExceptionReviewsCollection { get; set; } = "exception_reviews";

    /// <summary>
    /// Collection name for exception bindings.
    /// </summary>
    public string ExceptionBindingsCollection { get; set; } = "exception_bindings";

    /// <summary>
    /// Collection name for tracking applied migrations.
    /// </summary>
    public string MigrationsCollection { get; set; } = "_policy_migrations";

    /// <summary>
    /// TTL for completed policy runs. Zero or negative disables TTL.
    /// </summary>
    public TimeSpan PolicyRunRetention { get; set; } = TimeSpan.FromDays(90);

    /// <summary>
    /// TTL for effective findings history entries. Zero or negative disables TTL.
    /// </summary>
    public TimeSpan EffectiveFindingsHistoryRetention { get; set; } = TimeSpan.FromDays(365);

    /// <summary>
    /// TTL for explain traces. Zero or negative disables TTL.
    /// </summary>
    public TimeSpan ExplainTraceRetention { get; set; } = TimeSpan.FromDays(30);

    /// <summary>
    /// Use majority read concern for consistency.
    /// </summary>
    public bool UseMajorityReadConcern { get; set; } = true;

    /// <summary>
    /// Use majority write concern for durability.
    /// </summary>
    public bool UseMajorityWriteConcern { get; set; } = true;

    /// <summary>
    /// Command timeout in seconds.
    /// </summary>
    public int CommandTimeoutSeconds { get; set; } = 30;

    /// <summary>
    /// Gets the effective findings collection name for a policy.
    /// </summary>
    public string GetEffectiveFindingsCollectionName(string policyId)
    {
        var safePolicyId = SanitizeCollectionName(policyId);
        return $"{EffectiveFindingsCollectionPrefix}_{safePolicyId}";
    }

    /// <summary>
    /// Gets the effective findings history collection name for a policy.
    /// </summary>
    public string GetEffectiveFindingsHistoryCollectionName(string policyId)
    {
        var safePolicyId = SanitizeCollectionName(policyId);
        return $"{EffectiveFindingsHistoryCollectionPrefix}_{safePolicyId}";
    }

    private static string SanitizeCollectionName(string name)
    {
        // Replace invalid characters with underscores
        return string.Create(name.Length, name, (span, source) =>
        {
            for (int i = 0; i < source.Length; i++)
            {
                var c = source[i];
                span[i] = char.IsLetterOrDigit(c) || c == '_' || c == '-' ? c : '_';
            }
        }).ToLowerInvariant();
    }
}
@@ -0,0 +1,254 @@
|
||||
using System.Collections.Immutable;
|
||||
using StellaOps.Policy.Engine.Storage.Mongo.Documents;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Storage.Mongo.Repositories;
|
||||
|
||||
/// <summary>
|
||||
/// Repository interface for policy exception operations.
|
||||
/// </summary>
|
||||
internal interface IExceptionRepository
|
||||
{
|
||||
// Exception operations
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new exception.
|
||||
/// </summary>
|
||||
Task<PolicyExceptionDocument> CreateExceptionAsync(
|
||||
PolicyExceptionDocument exception,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Gets an exception by ID.
|
||||
/// </summary>
|
||||
Task<PolicyExceptionDocument?> GetExceptionAsync(
|
||||
string tenantId,
|
||||
string exceptionId,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Updates an existing exception.
|
||||
/// </summary>
|
||||
Task<PolicyExceptionDocument?> UpdateExceptionAsync(
|
||||
PolicyExceptionDocument exception,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Lists exceptions with filtering and pagination.
|
||||
/// </summary>
|
||||
Task<ImmutableArray<PolicyExceptionDocument>> ListExceptionsAsync(
|
||||
string tenantId,
|
||||
ExceptionQueryOptions options,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Finds active exceptions that apply to a specific asset/advisory.
|
||||
/// </summary>
|
||||
Task<ImmutableArray<PolicyExceptionDocument>> FindApplicableExceptionsAsync(
|
||||
string tenantId,
|
||||
string assetId,
|
||||
string? advisoryId,
|
||||
DateTimeOffset evaluationTime,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Updates exception status.
|
||||
/// </summary>
|
||||
Task<bool> UpdateExceptionStatusAsync(
|
||||
string tenantId,
|
||||
string exceptionId,
|
||||
string newStatus,
|
||||
DateTimeOffset timestamp,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Revokes an exception.
|
||||
/// </summary>
|
||||
Task<bool> RevokeExceptionAsync(
|
||||
string tenantId,
|
||||
string exceptionId,
|
||||
string revokedBy,
|
||||
string? reason,
|
||||
DateTimeOffset timestamp,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Gets exceptions expiring within a time window.
|
||||
/// </summary>
|
||||
Task<ImmutableArray<PolicyExceptionDocument>> GetExpiringExceptionsAsync(
|
||||
string tenantId,
|
||||
DateTimeOffset from,
|
||||
DateTimeOffset to,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Gets exceptions that should be auto-activated.
|
||||
/// </summary>
|
||||
Task<ImmutableArray<PolicyExceptionDocument>> GetPendingActivationsAsync(
|
||||
string tenantId,
|
||||
DateTimeOffset asOf,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
// Review operations
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new review for an exception.
|
||||
/// </summary>
|
||||
Task<ExceptionReviewDocument> CreateReviewAsync(
|
||||
ExceptionReviewDocument review,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Gets a review by ID.
|
||||
/// </summary>
|
||||
Task<ExceptionReviewDocument?> GetReviewAsync(
|
||||
string tenantId,
|
||||
string reviewId,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Adds a decision to a review.
|
||||
/// </summary>
|
||||
Task<ExceptionReviewDocument?> AddReviewDecisionAsync(
|
||||
string tenantId,
|
||||
string reviewId,
|
||||
ReviewDecisionDocument decision,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Completes a review with final status.
|
||||
/// </summary>
|
||||
Task<ExceptionReviewDocument?> CompleteReviewAsync(
|
||||
string tenantId,
|
||||
string reviewId,
|
||||
string finalStatus,
|
||||
DateTimeOffset completedAt,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Gets reviews for an exception.
|
||||
/// </summary>
|
||||
Task<ImmutableArray<ExceptionReviewDocument>> GetReviewsForExceptionAsync(
|
||||
string tenantId,
|
||||
string exceptionId,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Gets pending reviews for a reviewer.
|
||||
/// </summary>
|
||||
Task<ImmutableArray<ExceptionReviewDocument>> GetPendingReviewsAsync(
|
||||
string tenantId,
|
||||
string? reviewerId,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
// Binding operations
|
||||
|
||||
/// <summary>
|
||||
/// Creates or updates a binding.
|
||||
/// </summary>
|
||||
Task<ExceptionBindingDocument> UpsertBindingAsync(
|
||||
ExceptionBindingDocument binding,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Gets bindings for an exception.
|
||||
/// </summary>
|
||||
Task<ImmutableArray<ExceptionBindingDocument>> GetBindingsForExceptionAsync(
|
||||
string tenantId,
|
||||
string exceptionId,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Gets active bindings for an asset.
|
||||
/// </summary>
|
||||
Task<ImmutableArray<ExceptionBindingDocument>> GetActiveBindingsForAssetAsync(
|
||||
string tenantId,
|
||||
string assetId,
|
||||
DateTimeOffset asOf,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Deletes bindings for an exception.
|
||||
/// </summary>
|
||||
Task<long> DeleteBindingsForExceptionAsync(
|
||||
string tenantId,
|
||||
string exceptionId,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Updates binding status.
|
||||
/// </summary>
|
||||
Task<bool> UpdateBindingStatusAsync(
|
||||
string tenantId,
|
||||
string bindingId,
|
||||
string newStatus,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
/// <summary>
|
||||
/// Gets expired bindings for cleanup.
|
||||
/// </summary>
|
||||
Task<ImmutableArray<ExceptionBindingDocument>> GetExpiredBindingsAsync(
|
||||
string tenantId,
|
||||
DateTimeOffset asOf,
|
||||
int limit,
|
||||
CancellationToken cancellationToken);
|
||||
|
||||
// Statistics
|
||||
|
||||
/// <summary>
|
||||
/// Gets exception counts by status.
|
||||
/// </summary>
|
||||
Task<IReadOnlyDictionary<string, int>> GetExceptionCountsByStatusAsync(
|
||||
string tenantId,
|
||||
CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
/// Query options for listing exceptions.
/// </summary>
public sealed record ExceptionQueryOptions
{
    /// <summary>
    /// Filter by status.
    /// </summary>
    public ImmutableArray<string> Statuses { get; init; } = ImmutableArray<string>.Empty;

    /// <summary>
    /// Filter by exception type.
    /// </summary>
    public ImmutableArray<string> Types { get; init; } = ImmutableArray<string>.Empty;

    /// <summary>
    /// Filter by tag.
    /// </summary>
    public ImmutableArray<string> Tags { get; init; } = ImmutableArray<string>.Empty;

    /// <summary>
    /// Filter by creator.
    /// </summary>
    public string? CreatedBy { get; init; }

    /// <summary>
    /// Include expired exceptions.
    /// </summary>
    public bool IncludeExpired { get; init; }

    /// <summary>
    /// Skip count for pagination.
    /// </summary>
    public int Skip { get; init; }

    /// <summary>
    /// Limit for pagination (default 100).
    /// </summary>
    public int Limit { get; init; } = 100;

    /// <summary>
    /// Sort field.
    /// </summary>
    public string SortBy { get; init; } = "createdAt";

    /// <summary>
    /// Sort direction (asc or desc).
    /// </summary>
    public string SortDirection { get; init; } = "desc";
}
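For orientation, a caller-side sketch of how these options combine with IExceptionRepository.ListExceptionsAsync; the repository instance, tenant value, and cancellation token are illustrative assumptions rather than part of this change:

// Hypothetical call site; `repository` is an IExceptionRepository resolved from DI.
var options = new ExceptionQueryOptions
{
    Statuses = ImmutableArray.Create("active"),
    Skip = 0,
    Limit = 50,
    SortBy = "createdAt",
    SortDirection = "desc",
};

var page = await repository.ListExceptionsAsync("tenant-a", options, cancellationToken);
foreach (var exception in page)
{
    Console.WriteLine($"{exception.Id}: {exception.Status}");
}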
@@ -0,0 +1,611 @@
|
||||
using System.Collections.Immutable;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Driver;
|
||||
using StellaOps.Policy.Engine.Storage.Mongo.Documents;
|
||||
using StellaOps.Policy.Engine.Storage.Mongo.Options;
|
||||
using StellaOps.Policy.Engine.Telemetry;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Storage.Mongo.Repositories;
|
||||
|
||||
/// <summary>
|
||||
/// MongoDB implementation of the exception repository.
|
||||
/// </summary>
|
||||
internal sealed class MongoExceptionRepository : IExceptionRepository
|
||||
{
|
||||
private readonly IMongoDatabase _database;
|
||||
private readonly PolicyEngineMongoOptions _options;
|
||||
private readonly ILogger<MongoExceptionRepository> _logger;
|
||||
|
||||
public MongoExceptionRepository(
|
||||
IMongoClient mongoClient,
|
||||
IOptions<PolicyEngineMongoOptions> options,
|
||||
ILogger<MongoExceptionRepository> logger)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(mongoClient);
|
||||
ArgumentNullException.ThrowIfNull(options);
|
||||
_options = options.Value;
|
||||
_database = mongoClient.GetDatabase(_options.Database);
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
private IMongoCollection<PolicyExceptionDocument> Exceptions
|
||||
=> _database.GetCollection<PolicyExceptionDocument>(_options.ExceptionsCollection);
|
||||
|
||||
private IMongoCollection<ExceptionReviewDocument> Reviews
|
||||
=> _database.GetCollection<ExceptionReviewDocument>(_options.ExceptionReviewsCollection);
|
||||
|
||||
private IMongoCollection<ExceptionBindingDocument> Bindings
|
||||
=> _database.GetCollection<ExceptionBindingDocument>(_options.ExceptionBindingsCollection);
|
||||
|
||||
#region Exception Operations
|
||||
|
||||
public async Task<PolicyExceptionDocument> CreateExceptionAsync(
|
||||
PolicyExceptionDocument exception,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(exception);
|
||||
|
||||
exception.TenantId = exception.TenantId.ToLowerInvariant();
|
||||
await Exceptions.InsertOneAsync(exception, cancellationToken: cancellationToken).ConfigureAwait(false);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Created exception {ExceptionId} for tenant {TenantId}",
|
||||
exception.Id, exception.TenantId);
|
||||
|
||||
PolicyEngineTelemetry.RecordExceptionOperation(exception.TenantId, "create");
|
||||
|
||||
return exception;
|
||||
}
|
||||
|
||||
public async Task<PolicyExceptionDocument?> GetExceptionAsync(
|
||||
string tenantId,
|
||||
string exceptionId,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filter = Builders<PolicyExceptionDocument>.Filter.And(
|
||||
Builders<PolicyExceptionDocument>.Filter.Eq(e => e.TenantId, tenantId.ToLowerInvariant()),
|
||||
Builders<PolicyExceptionDocument>.Filter.Eq(e => e.Id, exceptionId));
|
||||
|
||||
return await Exceptions.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task<PolicyExceptionDocument?> UpdateExceptionAsync(
|
||||
PolicyExceptionDocument exception,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(exception);
|
||||
|
||||
var filter = Builders<PolicyExceptionDocument>.Filter.And(
|
||||
Builders<PolicyExceptionDocument>.Filter.Eq(e => e.TenantId, exception.TenantId.ToLowerInvariant()),
|
||||
Builders<PolicyExceptionDocument>.Filter.Eq(e => e.Id, exception.Id));
|
||||
|
||||
var result = await Exceptions.ReplaceOneAsync(filter, exception, cancellationToken: cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
if (result.ModifiedCount > 0)
|
||||
{
|
||||
_logger.LogInformation(
|
||||
"Updated exception {ExceptionId} for tenant {TenantId}",
|
||||
exception.Id, exception.TenantId);
|
||||
PolicyEngineTelemetry.RecordExceptionOperation(exception.TenantId, "update");
|
||||
return exception;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
public async Task<ImmutableArray<PolicyExceptionDocument>> ListExceptionsAsync(
|
||||
string tenantId,
|
||||
ExceptionQueryOptions options,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filterBuilder = Builders<PolicyExceptionDocument>.Filter;
|
||||
var filters = new List<FilterDefinition<PolicyExceptionDocument>>
|
||||
{
|
||||
filterBuilder.Eq(e => e.TenantId, tenantId.ToLowerInvariant())
|
||||
};
|
||||
|
||||
if (options.Statuses.Length > 0)
|
||||
{
|
||||
filters.Add(filterBuilder.In(e => e.Status, options.Statuses));
|
||||
}
|
||||
|
||||
if (options.Types.Length > 0)
|
||||
{
|
||||
filters.Add(filterBuilder.In(e => e.ExceptionType, options.Types));
|
||||
}
|
||||
|
||||
if (options.Tags.Length > 0)
|
||||
{
|
||||
filters.Add(filterBuilder.AnyIn(e => e.Tags, options.Tags));
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(options.CreatedBy))
|
||||
{
|
||||
filters.Add(filterBuilder.Eq(e => e.CreatedBy, options.CreatedBy));
|
||||
}
|
||||
|
||||
if (!options.IncludeExpired)
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
filters.Add(filterBuilder.Or(
|
||||
filterBuilder.Eq(e => e.ExpiresAt, null),
|
||||
filterBuilder.Gt(e => e.ExpiresAt, now)));
|
||||
}
|
||||
|
||||
var filter = filterBuilder.And(filters);
|
||||
|
||||
var sort = options.SortDirection.Equals("asc", StringComparison.OrdinalIgnoreCase)
|
||||
? Builders<PolicyExceptionDocument>.Sort.Ascending(options.SortBy)
|
||||
: Builders<PolicyExceptionDocument>.Sort.Descending(options.SortBy);
|
||||
|
||||
var results = await Exceptions
|
||||
.Find(filter)
|
||||
.Sort(sort)
|
||||
.Skip(options.Skip)
|
||||
.Limit(options.Limit)
|
||||
.ToListAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return results.ToImmutableArray();
|
||||
}
|
||||
|
||||
public async Task<ImmutableArray<PolicyExceptionDocument>> FindApplicableExceptionsAsync(
|
||||
string tenantId,
|
||||
string assetId,
|
||||
string? advisoryId,
|
||||
DateTimeOffset evaluationTime,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filterBuilder = Builders<PolicyExceptionDocument>.Filter;
|
||||
var filters = new List<FilterDefinition<PolicyExceptionDocument>>
|
||||
{
|
||||
filterBuilder.Eq(e => e.TenantId, tenantId.ToLowerInvariant()),
|
||||
filterBuilder.Eq(e => e.Status, "active"),
|
||||
filterBuilder.Or(
|
||||
filterBuilder.Eq(e => e.EffectiveFrom, null),
|
||||
filterBuilder.Lte(e => e.EffectiveFrom, evaluationTime)),
|
||||
filterBuilder.Or(
|
||||
filterBuilder.Eq(e => e.ExpiresAt, null),
|
||||
filterBuilder.Gt(e => e.ExpiresAt, evaluationTime))
|
||||
};
|
||||
|
||||
// Scope matching - must match at least one criterion
|
||||
var scopeFilters = new List<FilterDefinition<PolicyExceptionDocument>>
|
||||
{
|
||||
filterBuilder.Eq("scope.applyToAll", true),
|
||||
filterBuilder.AnyEq("scope.assetIds", assetId)
|
||||
};
|
||||
|
||||
// Add PURL pattern matching (simplified - would need regex in production)
|
||||
scopeFilters.Add(filterBuilder.Not(filterBuilder.Size("scope.purlPatterns", 0)));
|
||||
|
||||
if (!string.IsNullOrEmpty(advisoryId))
|
||||
{
|
||||
scopeFilters.Add(filterBuilder.AnyEq("scope.advisoryIds", advisoryId));
|
||||
}
|
||||
|
||||
filters.Add(filterBuilder.Or(scopeFilters));
|
||||
|
||||
var filter = filterBuilder.And(filters);
|
||||
|
||||
var results = await Exceptions
|
||||
.Find(filter)
|
||||
.Sort(Builders<PolicyExceptionDocument>.Sort.Descending(e => e.Priority))
|
||||
.ToListAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return results.ToImmutableArray();
|
||||
}
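The scope filter above only checks that scope.purlPatterns is non-empty; the inline comment notes that real matching would need regex support. A minimal post-filter sketch under that assumption (glob-style patterns matched in memory after the query; this helper is hypothetical, not part of the commit):

// Hypothetical helper: keep exceptions whose PURL patterns actually match the asset's PURL.
static bool MatchesPurlPattern(string purl, IEnumerable<string> patterns)
{
    foreach (var pattern in patterns)
    {
        // Translate a simple glob such as "pkg:npm/lodash@*" into an anchored regex.
        var regex = "^" + System.Text.RegularExpressions.Regex.Escape(pattern).Replace("\\*", ".*") + "$";
        if (System.Text.RegularExpressions.Regex.IsMatch(purl, regex))
        {
            return true;
        }
    }

    return false;
}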
|
||||
|
||||
public async Task<bool> UpdateExceptionStatusAsync(
|
||||
string tenantId,
|
||||
string exceptionId,
|
||||
string newStatus,
|
||||
DateTimeOffset timestamp,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filter = Builders<PolicyExceptionDocument>.Filter.And(
|
||||
Builders<PolicyExceptionDocument>.Filter.Eq(e => e.TenantId, tenantId.ToLowerInvariant()),
|
||||
Builders<PolicyExceptionDocument>.Filter.Eq(e => e.Id, exceptionId));
|
||||
|
||||
var updateBuilder = Builders<PolicyExceptionDocument>.Update;
|
||||
var updates = new List<UpdateDefinition<PolicyExceptionDocument>>
|
||||
{
|
||||
updateBuilder.Set(e => e.Status, newStatus),
|
||||
updateBuilder.Set(e => e.UpdatedAt, timestamp)
|
||||
};
|
||||
|
||||
if (newStatus == "active")
|
||||
{
|
||||
updates.Add(updateBuilder.Set(e => e.ActivatedAt, timestamp));
|
||||
}
|
||||
|
||||
var update = updateBuilder.Combine(updates);
|
||||
var result = await Exceptions.UpdateOneAsync(filter, update, cancellationToken: cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
if (result.ModifiedCount > 0)
|
||||
{
|
||||
_logger.LogInformation(
|
||||
"Updated exception {ExceptionId} status to {Status} for tenant {TenantId}",
|
||||
exceptionId, newStatus, tenantId);
|
||||
PolicyEngineTelemetry.RecordExceptionOperation(tenantId, $"status_{newStatus}");
|
||||
}
|
||||
|
||||
return result.ModifiedCount > 0;
|
||||
}
|
||||
|
||||
public async Task<bool> RevokeExceptionAsync(
|
||||
string tenantId,
|
||||
string exceptionId,
|
||||
string revokedBy,
|
||||
string? reason,
|
||||
DateTimeOffset timestamp,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filter = Builders<PolicyExceptionDocument>.Filter.And(
|
||||
Builders<PolicyExceptionDocument>.Filter.Eq(e => e.TenantId, tenantId.ToLowerInvariant()),
|
||||
Builders<PolicyExceptionDocument>.Filter.Eq(e => e.Id, exceptionId));
|
||||
|
||||
var update = Builders<PolicyExceptionDocument>.Update
|
||||
.Set(e => e.Status, "revoked")
|
||||
.Set(e => e.RevokedAt, timestamp)
|
||||
.Set(e => e.RevokedBy, revokedBy)
|
||||
.Set(e => e.RevocationReason, reason)
|
||||
.Set(e => e.UpdatedAt, timestamp);
|
||||
|
||||
var result = await Exceptions.UpdateOneAsync(filter, update, cancellationToken: cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
if (result.ModifiedCount > 0)
|
||||
{
|
||||
_logger.LogInformation(
|
||||
"Revoked exception {ExceptionId} by {RevokedBy} for tenant {TenantId}",
|
||||
exceptionId, revokedBy, tenantId);
|
||||
PolicyEngineTelemetry.RecordExceptionOperation(tenantId, "revoke");
|
||||
}
|
||||
|
||||
return result.ModifiedCount > 0;
|
||||
}
|
||||
|
||||
public async Task<ImmutableArray<PolicyExceptionDocument>> GetExpiringExceptionsAsync(
|
||||
string tenantId,
|
||||
DateTimeOffset from,
|
||||
DateTimeOffset to,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filter = Builders<PolicyExceptionDocument>.Filter.And(
|
||||
Builders<PolicyExceptionDocument>.Filter.Eq(e => e.TenantId, tenantId.ToLowerInvariant()),
|
||||
Builders<PolicyExceptionDocument>.Filter.Eq(e => e.Status, "active"),
|
||||
Builders<PolicyExceptionDocument>.Filter.Gte(e => e.ExpiresAt, from),
|
||||
Builders<PolicyExceptionDocument>.Filter.Lte(e => e.ExpiresAt, to));
|
||||
|
||||
var results = await Exceptions
|
||||
.Find(filter)
|
||||
.Sort(Builders<PolicyExceptionDocument>.Sort.Ascending(e => e.ExpiresAt))
|
||||
.ToListAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return results.ToImmutableArray();
|
||||
}
|
||||
|
||||
public async Task<ImmutableArray<PolicyExceptionDocument>> GetPendingActivationsAsync(
|
||||
string tenantId,
|
||||
DateTimeOffset asOf,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filter = Builders<PolicyExceptionDocument>.Filter.And(
|
||||
Builders<PolicyExceptionDocument>.Filter.Eq(e => e.TenantId, tenantId.ToLowerInvariant()),
|
||||
Builders<PolicyExceptionDocument>.Filter.Eq(e => e.Status, "approved"),
|
||||
Builders<PolicyExceptionDocument>.Filter.Lte(e => e.EffectiveFrom, asOf));
|
||||
|
||||
var results = await Exceptions
|
||||
.Find(filter)
|
||||
.ToListAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return results.ToImmutableArray();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Review Operations
|
||||
|
||||
public async Task<ExceptionReviewDocument> CreateReviewAsync(
|
||||
ExceptionReviewDocument review,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(review);
|
||||
|
||||
review.TenantId = review.TenantId.ToLowerInvariant();
|
||||
await Reviews.InsertOneAsync(review, cancellationToken: cancellationToken).ConfigureAwait(false);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Created review {ReviewId} for exception {ExceptionId}, tenant {TenantId}",
|
||||
review.Id, review.ExceptionId, review.TenantId);
|
||||
|
||||
PolicyEngineTelemetry.RecordExceptionOperation(review.TenantId, "review_create");
|
||||
|
||||
return review;
|
||||
}
|
||||
|
||||
public async Task<ExceptionReviewDocument?> GetReviewAsync(
|
||||
string tenantId,
|
||||
string reviewId,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filter = Builders<ExceptionReviewDocument>.Filter.And(
|
||||
Builders<ExceptionReviewDocument>.Filter.Eq(r => r.TenantId, tenantId.ToLowerInvariant()),
|
||||
Builders<ExceptionReviewDocument>.Filter.Eq(r => r.Id, reviewId));
|
||||
|
||||
return await Reviews.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task<ExceptionReviewDocument?> AddReviewDecisionAsync(
|
||||
string tenantId,
|
||||
string reviewId,
|
||||
ReviewDecisionDocument decision,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filter = Builders<ExceptionReviewDocument>.Filter.And(
|
||||
Builders<ExceptionReviewDocument>.Filter.Eq(r => r.TenantId, tenantId.ToLowerInvariant()),
|
||||
Builders<ExceptionReviewDocument>.Filter.Eq(r => r.Id, reviewId),
|
||||
Builders<ExceptionReviewDocument>.Filter.Eq(r => r.Status, "pending"));
|
||||
|
||||
var update = Builders<ExceptionReviewDocument>.Update
|
||||
.Push(r => r.Decisions, decision);
|
||||
|
||||
var options = new FindOneAndUpdateOptions<ExceptionReviewDocument>
|
||||
{
|
||||
ReturnDocument = ReturnDocument.After
|
||||
};
|
||||
|
||||
var result = await Reviews.FindOneAndUpdateAsync(filter, update, options, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
if (result is not null)
|
||||
{
|
||||
_logger.LogInformation(
|
||||
"Added decision from {ReviewerId} to review {ReviewId} for tenant {TenantId}",
|
||||
decision.ReviewerId, reviewId, tenantId);
|
||||
PolicyEngineTelemetry.RecordExceptionOperation(tenantId, $"review_decision_{decision.Decision}");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public async Task<ExceptionReviewDocument?> CompleteReviewAsync(
|
||||
string tenantId,
|
||||
string reviewId,
|
||||
string finalStatus,
|
||||
DateTimeOffset completedAt,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filter = Builders<ExceptionReviewDocument>.Filter.And(
|
||||
Builders<ExceptionReviewDocument>.Filter.Eq(r => r.TenantId, tenantId.ToLowerInvariant()),
|
||||
Builders<ExceptionReviewDocument>.Filter.Eq(r => r.Id, reviewId));
|
||||
|
||||
var update = Builders<ExceptionReviewDocument>.Update
|
||||
.Set(r => r.Status, finalStatus)
|
||||
.Set(r => r.CompletedAt, completedAt);
|
||||
|
||||
var options = new FindOneAndUpdateOptions<ExceptionReviewDocument>
|
||||
{
|
||||
ReturnDocument = ReturnDocument.After
|
||||
};
|
||||
|
||||
var result = await Reviews.FindOneAndUpdateAsync(filter, update, options, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
if (result is not null)
|
||||
{
|
||||
_logger.LogInformation(
|
||||
"Completed review {ReviewId} with status {Status} for tenant {TenantId}",
|
||||
reviewId, finalStatus, tenantId);
|
||||
PolicyEngineTelemetry.RecordExceptionOperation(tenantId, $"review_complete_{finalStatus}");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public async Task<ImmutableArray<ExceptionReviewDocument>> GetReviewsForExceptionAsync(
|
||||
string tenantId,
|
||||
string exceptionId,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filter = Builders<ExceptionReviewDocument>.Filter.And(
|
||||
Builders<ExceptionReviewDocument>.Filter.Eq(r => r.TenantId, tenantId.ToLowerInvariant()),
|
||||
Builders<ExceptionReviewDocument>.Filter.Eq(r => r.ExceptionId, exceptionId));
|
||||
|
||||
var results = await Reviews
|
||||
.Find(filter)
|
||||
.Sort(Builders<ExceptionReviewDocument>.Sort.Descending(r => r.RequestedAt))
|
||||
.ToListAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return results.ToImmutableArray();
|
||||
}
|
||||
|
||||
public async Task<ImmutableArray<ExceptionReviewDocument>> GetPendingReviewsAsync(
|
||||
string tenantId,
|
||||
string? reviewerId,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filterBuilder = Builders<ExceptionReviewDocument>.Filter;
|
||||
var filters = new List<FilterDefinition<ExceptionReviewDocument>>
|
||||
{
|
||||
filterBuilder.Eq(r => r.TenantId, tenantId.ToLowerInvariant()),
|
||||
filterBuilder.Eq(r => r.Status, "pending")
|
||||
};
|
||||
|
||||
if (!string.IsNullOrEmpty(reviewerId))
|
||||
{
|
||||
filters.Add(filterBuilder.AnyEq(r => r.DesignatedReviewers, reviewerId));
|
||||
}
|
||||
|
||||
var filter = filterBuilder.And(filters);
|
||||
|
||||
var results = await Reviews
|
||||
.Find(filter)
|
||||
.Sort(Builders<ExceptionReviewDocument>.Sort.Ascending(r => r.Deadline))
|
||||
.ToListAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return results.ToImmutableArray();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Binding Operations
|
||||
|
||||
public async Task<ExceptionBindingDocument> UpsertBindingAsync(
|
||||
ExceptionBindingDocument binding,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(binding);
|
||||
|
||||
binding.TenantId = binding.TenantId.ToLowerInvariant();
|
||||
|
||||
var filter = Builders<ExceptionBindingDocument>.Filter.And(
|
||||
Builders<ExceptionBindingDocument>.Filter.Eq(b => b.TenantId, binding.TenantId),
|
||||
Builders<ExceptionBindingDocument>.Filter.Eq(b => b.Id, binding.Id));
|
||||
|
||||
var options = new ReplaceOptions { IsUpsert = true };
|
||||
await Bindings.ReplaceOneAsync(filter, binding, options, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Upserted binding {BindingId} for tenant {TenantId}",
|
||||
binding.Id, binding.TenantId);
|
||||
|
||||
return binding;
|
||||
}
|
||||
|
||||
public async Task<ImmutableArray<ExceptionBindingDocument>> GetBindingsForExceptionAsync(
|
||||
string tenantId,
|
||||
string exceptionId,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filter = Builders<ExceptionBindingDocument>.Filter.And(
|
||||
Builders<ExceptionBindingDocument>.Filter.Eq(b => b.TenantId, tenantId.ToLowerInvariant()),
|
||||
Builders<ExceptionBindingDocument>.Filter.Eq(b => b.ExceptionId, exceptionId));
|
||||
|
||||
var results = await Bindings
|
||||
.Find(filter)
|
||||
.ToListAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return results.ToImmutableArray();
|
||||
}
|
||||
|
||||
public async Task<ImmutableArray<ExceptionBindingDocument>> GetActiveBindingsForAssetAsync(
|
||||
string tenantId,
|
||||
string assetId,
|
||||
DateTimeOffset asOf,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filter = Builders<ExceptionBindingDocument>.Filter.And(
|
||||
Builders<ExceptionBindingDocument>.Filter.Eq(b => b.TenantId, tenantId.ToLowerInvariant()),
|
||||
Builders<ExceptionBindingDocument>.Filter.Eq(b => b.AssetId, assetId),
|
||||
Builders<ExceptionBindingDocument>.Filter.Eq(b => b.Status, "active"),
|
||||
Builders<ExceptionBindingDocument>.Filter.Lte(b => b.EffectiveFrom, asOf),
|
||||
Builders<ExceptionBindingDocument>.Filter.Or(
|
||||
Builders<ExceptionBindingDocument>.Filter.Eq(b => b.ExpiresAt, null),
|
||||
Builders<ExceptionBindingDocument>.Filter.Gt(b => b.ExpiresAt, asOf)));
|
||||
|
||||
var results = await Bindings
|
||||
.Find(filter)
|
||||
.ToListAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return results.ToImmutableArray();
|
||||
}
|
||||
|
||||
public async Task<long> DeleteBindingsForExceptionAsync(
|
||||
string tenantId,
|
||||
string exceptionId,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filter = Builders<ExceptionBindingDocument>.Filter.And(
|
||||
Builders<ExceptionBindingDocument>.Filter.Eq(b => b.TenantId, tenantId.ToLowerInvariant()),
|
||||
Builders<ExceptionBindingDocument>.Filter.Eq(b => b.ExceptionId, exceptionId));
|
||||
|
||||
var result = await Bindings.DeleteManyAsync(filter, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Deleted {Count} bindings for exception {ExceptionId} tenant {TenantId}",
|
||||
result.DeletedCount, exceptionId, tenantId);
|
||||
|
||||
return result.DeletedCount;
|
||||
}
|
||||
|
||||
public async Task<bool> UpdateBindingStatusAsync(
|
||||
string tenantId,
|
||||
string bindingId,
|
||||
string newStatus,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filter = Builders<ExceptionBindingDocument>.Filter.And(
|
||||
Builders<ExceptionBindingDocument>.Filter.Eq(b => b.TenantId, tenantId.ToLowerInvariant()),
|
||||
Builders<ExceptionBindingDocument>.Filter.Eq(b => b.Id, bindingId));
|
||||
|
||||
var update = Builders<ExceptionBindingDocument>.Update.Set(b => b.Status, newStatus);
|
||||
|
||||
var result = await Bindings.UpdateOneAsync(filter, update, cancellationToken: cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return result.ModifiedCount > 0;
|
||||
}
|
||||
|
||||
public async Task<ImmutableArray<ExceptionBindingDocument>> GetExpiredBindingsAsync(
|
||||
string tenantId,
|
||||
DateTimeOffset asOf,
|
||||
int limit,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var filter = Builders<ExceptionBindingDocument>.Filter.And(
|
||||
Builders<ExceptionBindingDocument>.Filter.Eq(b => b.TenantId, tenantId.ToLowerInvariant()),
|
||||
Builders<ExceptionBindingDocument>.Filter.Eq(b => b.Status, "active"),
|
||||
Builders<ExceptionBindingDocument>.Filter.Lt(b => b.ExpiresAt, asOf));
|
||||
|
||||
var results = await Bindings
|
||||
.Find(filter)
|
||||
.Limit(limit)
|
||||
.ToListAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return results.ToImmutableArray();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
    #region Statistics

    public async Task<IReadOnlyDictionary<string, int>> GetExceptionCountsByStatusAsync(
        string tenantId,
        CancellationToken cancellationToken)
    {
        var pipeline = new BsonDocument[]
        {
            new("$match", new BsonDocument("tenantId", tenantId.ToLowerInvariant())),
            new("$group", new BsonDocument
            {
                { "_id", "$status" },
                { "count", new BsonDocument("$sum", 1) }
            })
        };

        var results = await Exceptions
            .Aggregate<BsonDocument>(pipeline, cancellationToken: cancellationToken)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return results.ToDictionary(
            r => r["_id"].AsString,
            r => r["count"].AsInt32);
    }

    #endregion
}
|
||||
@@ -0,0 +1,496 @@
|
||||
using System.Collections.Immutable;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using MongoDB.Driver;
|
||||
using StellaOps.Policy.Engine.Domain;
|
||||
using StellaOps.Policy.Engine.Services;
|
||||
using StellaOps.Policy.Engine.Storage.Mongo.Internal;
|
||||
|
||||
// Alias to disambiguate from StellaOps.Policy.PolicyDocument (compiled policy IR)
|
||||
using PolicyPackDocument = StellaOps.Policy.Engine.Storage.Mongo.Documents.PolicyDocument;
|
||||
using PolicyRevisionDoc = StellaOps.Policy.Engine.Storage.Mongo.Documents.PolicyRevisionDocument;
|
||||
using PolicyBundleDoc = StellaOps.Policy.Engine.Storage.Mongo.Documents.PolicyBundleDocument;
|
||||
using PolicyApprovalRec = StellaOps.Policy.Engine.Storage.Mongo.Documents.PolicyApprovalRecord;
|
||||
using PolicyAocMetadataDoc = StellaOps.Policy.Engine.Storage.Mongo.Documents.PolicyAocMetadataDocument;
|
||||
using PolicyProvenanceDoc = StellaOps.Policy.Engine.Storage.Mongo.Documents.PolicyProvenanceDocument;
|
||||
using PolicyAttestationRefDoc = StellaOps.Policy.Engine.Storage.Mongo.Documents.PolicyAttestationRefDocument;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Storage.Mongo.Repositories;
|
||||
|
||||
/// <summary>
|
||||
/// MongoDB implementation of policy pack repository with tenant scoping.
|
||||
/// </summary>
|
||||
internal sealed class MongoPolicyPackRepository : IPolicyPackRepository
|
||||
{
|
||||
private readonly PolicyEngineMongoContext _context;
|
||||
private readonly ILogger<MongoPolicyPackRepository> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly string _tenantId;
|
||||
|
||||
public MongoPolicyPackRepository(
|
||||
PolicyEngineMongoContext context,
|
||||
ILogger<MongoPolicyPackRepository> logger,
|
||||
TimeProvider timeProvider,
|
||||
string tenantId)
|
||||
{
|
||||
_context = context ?? throw new ArgumentNullException(nameof(context));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
_tenantId = tenantId?.ToLowerInvariant() ?? throw new ArgumentNullException(nameof(tenantId));
|
||||
}
|
||||
|
||||
private IMongoCollection<PolicyPackDocument> Policies =>
|
||||
_context.Database.GetCollection<PolicyPackDocument>(_context.Options.PoliciesCollection);
|
||||
|
||||
private IMongoCollection<PolicyRevisionDoc> Revisions =>
|
||||
_context.Database.GetCollection<PolicyRevisionDoc>(_context.Options.PolicyRevisionsCollection);
|
||||
|
||||
private IMongoCollection<PolicyBundleDoc> Bundles =>
|
||||
_context.Database.GetCollection<PolicyBundleDoc>(_context.Options.PolicyBundlesCollection);
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<PolicyPackRecord> CreateAsync(string packId, string? displayName, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(packId);
|
||||
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
var document = new PolicyPackDocument
|
||||
{
|
||||
Id = packId,
|
||||
TenantId = _tenantId,
|
||||
DisplayName = displayName,
|
||||
LatestVersion = 0,
|
||||
CreatedAt = now,
|
||||
UpdatedAt = now
|
||||
};
|
||||
|
||||
try
|
||||
{
|
||||
await Policies.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
|
||||
_logger.LogDebug("Created policy pack {PackId} for tenant {TenantId}", packId, _tenantId);
|
||||
}
|
||||
catch (MongoWriteException ex) when (ex.WriteError.Category == ServerErrorCategory.DuplicateKey)
|
||||
{
|
||||
_logger.LogDebug("Policy pack {PackId} already exists for tenant {TenantId}", packId, _tenantId);
|
||||
var existing = await Policies.Find(p => p.Id == packId && p.TenantId == _tenantId)
|
||||
.FirstOrDefaultAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
if (existing is null)
|
||||
{
|
||||
throw new InvalidOperationException($"Policy pack {packId} exists but not for tenant {_tenantId}");
|
||||
}
|
||||
|
||||
return ToDomain(existing);
|
||||
}
|
||||
|
||||
return ToDomain(document);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<IReadOnlyList<PolicyPackRecord>> ListAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
var documents = await Policies
|
||||
.Find(p => p.TenantId == _tenantId)
|
||||
.SortBy(p => p.Id)
|
||||
.ToListAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return documents.Select(ToDomain).ToList().AsReadOnly();
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<PolicyRevisionRecord> UpsertRevisionAsync(
|
||||
string packId,
|
||||
int version,
|
||||
bool requiresTwoPersonApproval,
|
||||
PolicyRevisionStatus initialStatus,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
|
||||
// Ensure pack exists
|
||||
var pack = await Policies.Find(p => p.Id == packId && p.TenantId == _tenantId)
|
||||
.FirstOrDefaultAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
if (pack is null)
|
||||
{
|
||||
pack = new PolicyPackDocument
|
||||
{
|
||||
Id = packId,
|
||||
TenantId = _tenantId,
|
||||
LatestVersion = 0,
|
||||
CreatedAt = now,
|
||||
UpdatedAt = now
|
||||
};
|
||||
|
||||
try
|
||||
{
|
||||
await Policies.InsertOneAsync(pack, cancellationToken: cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (MongoWriteException ex) when (ex.WriteError.Category == ServerErrorCategory.DuplicateKey)
|
||||
{
|
||||
pack = await Policies.Find(p => p.Id == packId && p.TenantId == _tenantId)
|
||||
.FirstAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
|
||||
// Determine version
|
||||
var targetVersion = version > 0 ? version : pack.LatestVersion + 1;
|
||||
var revisionId = PolicyRevisionDoc.CreateId(packId, targetVersion);
|
||||
|
||||
// Upsert revision
|
||||
var filter = Builders<PolicyRevisionDoc>.Filter.Eq(r => r.Id, revisionId);
|
||||
var update = Builders<PolicyRevisionDoc>.Update
|
||||
.SetOnInsert(r => r.Id, revisionId)
|
||||
.SetOnInsert(r => r.TenantId, _tenantId)
|
||||
.SetOnInsert(r => r.PackId, packId)
|
||||
.SetOnInsert(r => r.Version, targetVersion)
|
||||
.SetOnInsert(r => r.RequiresTwoPersonApproval, requiresTwoPersonApproval)
|
||||
.SetOnInsert(r => r.CreatedAt, now)
|
||||
.Set(r => r.Status, initialStatus.ToString());
|
||||
|
||||
var options = new FindOneAndUpdateOptions<PolicyRevisionDoc>
|
||||
{
|
||||
IsUpsert = true,
|
||||
ReturnDocument = ReturnDocument.After
|
||||
};
|
||||
|
||||
var revision = await Revisions.FindOneAndUpdateAsync(filter, update, options, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
// Update pack latest version
|
||||
if (targetVersion > pack.LatestVersion)
|
||||
{
|
||||
await Policies.UpdateOneAsync(
|
||||
p => p.Id == packId && p.TenantId == _tenantId,
|
||||
Builders<PolicyPackDocument>.Update
|
||||
.Set(p => p.LatestVersion, targetVersion)
|
||||
.Set(p => p.UpdatedAt, now),
|
||||
cancellationToken: cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
|
||||
_logger.LogDebug(
|
||||
"Upserted revision {PackId}:{Version} for tenant {TenantId}",
|
||||
packId, targetVersion, _tenantId);
|
||||
|
||||
return ToDomain(revision);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<PolicyRevisionRecord?> GetRevisionAsync(string packId, int version, CancellationToken cancellationToken)
|
||||
{
|
||||
var revisionId = PolicyRevisionDoc.CreateId(packId, version);
|
||||
var revision = await Revisions
|
||||
.Find(r => r.Id == revisionId && r.TenantId == _tenantId)
|
||||
.FirstOrDefaultAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
if (revision is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Load bundle if referenced
|
||||
PolicyBundleDoc? bundle = null;
|
||||
if (!string.IsNullOrEmpty(revision.BundleId))
|
||||
{
|
||||
bundle = await Bundles
|
||||
.Find(b => b.Id == revision.BundleId && b.TenantId == _tenantId)
|
||||
.FirstOrDefaultAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
|
||||
return ToDomain(revision, bundle);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<PolicyActivationResult> RecordActivationAsync(
|
||||
string packId,
|
||||
int version,
|
||||
string actorId,
|
||||
DateTimeOffset timestamp,
|
||||
string? comment,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var revisionId = PolicyRevisionDoc.CreateId(packId, version);
|
||||
|
||||
// Get current revision
|
||||
var revision = await Revisions
|
||||
.Find(r => r.Id == revisionId && r.TenantId == _tenantId)
|
||||
.FirstOrDefaultAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
if (revision is null)
|
||||
{
|
||||
var pack = await Policies.Find(p => p.Id == packId && p.TenantId == _tenantId)
|
||||
.FirstOrDefaultAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return pack is null
|
||||
? new PolicyActivationResult(PolicyActivationResultStatus.PackNotFound, null)
|
||||
: new PolicyActivationResult(PolicyActivationResultStatus.RevisionNotFound, null);
|
||||
}
|
||||
|
||||
if (revision.Status == PolicyRevisionStatus.Active.ToString())
|
||||
{
|
||||
return new PolicyActivationResult(PolicyActivationResultStatus.AlreadyActive, ToDomain(revision));
|
||||
}
|
||||
|
||||
if (revision.Status != PolicyRevisionStatus.Approved.ToString())
|
||||
{
|
||||
return new PolicyActivationResult(PolicyActivationResultStatus.NotApproved, ToDomain(revision));
|
||||
}
|
||||
|
||||
// Check for duplicate approval
|
||||
if (revision.Approvals.Any(a => a.ActorId.Equals(actorId, StringComparison.OrdinalIgnoreCase)))
|
||||
{
|
||||
return new PolicyActivationResult(PolicyActivationResultStatus.DuplicateApproval, ToDomain(revision));
|
||||
}
|
||||
|
||||
// Add approval
|
||||
var approval = new PolicyApprovalRec
|
||||
{
|
||||
ActorId = actorId,
|
||||
ApprovedAt = timestamp,
|
||||
Comment = comment
|
||||
};
|
||||
|
||||
var approvalUpdate = Builders<PolicyRevisionDoc>.Update.Push(r => r.Approvals, approval);
|
||||
await Revisions.UpdateOneAsync(r => r.Id == revisionId, approvalUpdate, cancellationToken: cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
revision.Approvals.Add(approval);
|
||||
|
||||
// Check if we have enough approvals
|
||||
var approvalCount = revision.Approvals.Count;
|
||||
if (revision.RequiresTwoPersonApproval && approvalCount < 2)
|
||||
{
|
||||
return new PolicyActivationResult(PolicyActivationResultStatus.PendingSecondApproval, ToDomain(revision));
|
||||
}
|
||||
|
||||
// Activate
|
||||
var activateUpdate = Builders<PolicyRevisionDoc>.Update
|
||||
.Set(r => r.Status, PolicyRevisionStatus.Active.ToString())
|
||||
.Set(r => r.ActivatedAt, timestamp);
|
||||
|
||||
await Revisions.UpdateOneAsync(r => r.Id == revisionId, activateUpdate, cancellationToken: cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
// Update pack active version
|
||||
await Policies.UpdateOneAsync(
|
||||
p => p.Id == packId && p.TenantId == _tenantId,
|
||||
Builders<PolicyPackDocument>.Update
|
||||
.Set(p => p.ActiveVersion, version)
|
||||
.Set(p => p.UpdatedAt, timestamp),
|
||||
cancellationToken: cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
revision.Status = PolicyRevisionStatus.Active.ToString();
|
||||
revision.ActivatedAt = timestamp;
|
||||
|
||||
_logger.LogInformation(
|
||||
"Activated revision {PackId}:{Version} for tenant {TenantId} by {ActorId}",
|
||||
packId, version, _tenantId, actorId);
|
||||
|
||||
return new PolicyActivationResult(PolicyActivationResultStatus.Activated, ToDomain(revision));
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<PolicyBundleRecord> StoreBundleAsync(
|
||||
string packId,
|
||||
int version,
|
||||
PolicyBundleRecord bundle,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(bundle);
|
||||
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
|
||||
// Ensure revision exists
|
||||
await UpsertRevisionAsync(packId, version, requiresTwoPersonApproval: false, PolicyRevisionStatus.Draft, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
// Create bundle document
|
||||
var bundleDoc = new PolicyBundleDoc
|
||||
{
|
||||
Id = bundle.Digest,
|
||||
TenantId = _tenantId,
|
||||
PackId = packId,
|
||||
Version = version,
|
||||
Signature = bundle.Signature,
|
||||
SizeBytes = bundle.Size,
|
||||
Payload = bundle.Payload.ToArray(),
|
||||
CreatedAt = bundle.CreatedAt,
|
||||
AocMetadata = bundle.AocMetadata is not null ? ToDocument(bundle.AocMetadata) : null
|
||||
};
|
||||
|
||||
// Upsert bundle
|
||||
await Bundles.ReplaceOneAsync(
|
||||
b => b.Id == bundle.Digest && b.TenantId == _tenantId,
|
||||
bundleDoc,
|
||||
new ReplaceOptions { IsUpsert = true },
|
||||
cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
// Link revision to bundle
|
||||
var revisionId = PolicyRevisionDoc.CreateId(packId, version);
|
||||
await Revisions.UpdateOneAsync(
|
||||
r => r.Id == revisionId && r.TenantId == _tenantId,
|
||||
Builders<PolicyRevisionDoc>.Update
|
||||
.Set(r => r.BundleId, bundle.Digest)
|
||||
.Set(r => r.BundleDigest, bundle.Digest),
|
||||
cancellationToken: cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Stored bundle {Digest} for {PackId}:{Version} tenant {TenantId}",
|
||||
bundle.Digest, packId, version, _tenantId);
|
||||
|
||||
return bundle;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<PolicyBundleRecord?> GetBundleAsync(string packId, int version, CancellationToken cancellationToken)
|
||||
{
|
||||
var bundle = await Bundles
|
||||
.Find(b => b.PackId == packId && b.Version == version && b.TenantId == _tenantId)
|
||||
.FirstOrDefaultAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
return bundle is null ? null : ToDomain(bundle);
|
||||
}
|
||||
|
||||
#region Mapping
|
||||
|
||||
private static PolicyPackRecord ToDomain(PolicyPackDocument doc)
|
||||
{
|
||||
return new PolicyPackRecord(doc.Id, doc.DisplayName, doc.CreatedAt);
|
||||
}
|
||||
|
||||
private static PolicyRevisionRecord ToDomain(PolicyRevisionDoc doc, PolicyBundleDoc? bundleDoc = null)
|
||||
{
|
||||
var status = Enum.TryParse<PolicyRevisionStatus>(doc.Status, ignoreCase: true, out var s)
|
||||
? s
|
||||
: PolicyRevisionStatus.Draft;
|
||||
|
||||
var revision = new PolicyRevisionRecord(doc.Version, doc.RequiresTwoPersonApproval, status, doc.CreatedAt);
|
||||
|
||||
if (doc.ActivatedAt.HasValue)
|
||||
{
|
||||
revision.SetStatus(PolicyRevisionStatus.Active, doc.ActivatedAt.Value);
|
||||
}
|
||||
|
||||
foreach (var approval in doc.Approvals)
|
||||
{
|
||||
revision.AddApproval(new PolicyActivationApproval(approval.ActorId, approval.ApprovedAt, approval.Comment));
|
||||
}
|
||||
|
||||
if (bundleDoc is not null)
|
||||
{
|
||||
revision.SetBundle(ToDomain(bundleDoc));
|
||||
}
|
||||
|
||||
return revision;
|
||||
}
|
||||
|
||||
private static PolicyBundleRecord ToDomain(PolicyBundleDoc doc)
|
||||
{
|
||||
PolicyAocMetadata? aocMetadata = null;
|
||||
if (doc.AocMetadata is not null)
|
||||
{
|
||||
var aoc = doc.AocMetadata;
|
||||
PolicyProvenance? provenance = null;
|
||||
if (aoc.Provenance is not null)
|
||||
{
|
||||
var p = aoc.Provenance;
|
||||
provenance = new PolicyProvenance(
|
||||
p.SourceType,
|
||||
p.SourceUrl,
|
||||
p.Submitter,
|
||||
p.CommitSha,
|
||||
p.Branch,
|
||||
p.IngestedAt);
|
||||
}
|
||||
|
||||
PolicyAttestationRef? attestationRef = null;
|
||||
if (aoc.AttestationRef is not null)
|
||||
{
|
||||
var a = aoc.AttestationRef;
|
||||
attestationRef = new PolicyAttestationRef(
|
||||
a.AttestationId,
|
||||
a.EnvelopeDigest,
|
||||
a.Uri,
|
||||
a.SigningKeyId,
|
||||
a.CreatedAt);
|
||||
}
|
||||
|
||||
aocMetadata = new PolicyAocMetadata(
|
||||
aoc.CompilationId,
|
||||
aoc.CompilerVersion,
|
||||
aoc.CompiledAt,
|
||||
aoc.SourceDigest,
|
||||
aoc.ArtifactDigest,
|
||||
aoc.ComplexityScore,
|
||||
aoc.RuleCount,
|
||||
aoc.DurationMilliseconds,
|
||||
provenance,
|
||||
attestationRef);
|
||||
}
|
||||
|
||||
return new PolicyBundleRecord(
|
||||
doc.Id,
|
||||
doc.Signature,
|
||||
doc.SizeBytes,
|
||||
doc.CreatedAt,
|
||||
doc.Payload.ToImmutableArray(),
|
||||
CompiledDocument: null, // Cannot serialize IR document to/from Mongo
|
||||
aocMetadata);
|
||||
}
|
||||
|
||||
private static PolicyAocMetadataDoc ToDocument(PolicyAocMetadata aoc)
|
||||
{
|
||||
return new PolicyAocMetadataDoc
|
||||
{
|
||||
CompilationId = aoc.CompilationId,
|
||||
CompilerVersion = aoc.CompilerVersion,
|
||||
CompiledAt = aoc.CompiledAt,
|
||||
SourceDigest = aoc.SourceDigest,
|
||||
ArtifactDigest = aoc.ArtifactDigest,
|
||||
ComplexityScore = aoc.ComplexityScore,
|
||||
RuleCount = aoc.RuleCount,
|
||||
DurationMilliseconds = aoc.DurationMilliseconds,
|
||||
Provenance = aoc.Provenance is not null ? ToDocument(aoc.Provenance) : null,
|
||||
AttestationRef = aoc.AttestationRef is not null ? ToDocument(aoc.AttestationRef) : null
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyProvenanceDoc ToDocument(PolicyProvenance p)
|
||||
{
|
||||
return new PolicyProvenanceDoc
|
||||
{
|
||||
SourceType = p.SourceType,
|
||||
SourceUrl = p.SourceUrl,
|
||||
Submitter = p.Submitter,
|
||||
CommitSha = p.CommitSha,
|
||||
Branch = p.Branch,
|
||||
IngestedAt = p.IngestedAt
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyAttestationRefDoc ToDocument(PolicyAttestationRef a)
|
||||
{
|
||||
return new PolicyAttestationRefDoc
|
||||
{
|
||||
AttestationId = a.AttestationId,
|
||||
EnvelopeDigest = a.EnvelopeDigest,
|
||||
Uri = a.Uri,
|
||||
SigningKeyId = a.SigningKeyId,
|
||||
CreatedAt = a.CreatedAt
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
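To make the approval semantics above concrete, here is a sketch of the two-person activation flow against this repository; the pack ID, actor names, and timestamps are illustrative, and `repository` is assumed to be an IPolicyPackRepository obtained from DI:

// Hypothetical flow: create a pack, stage an approved revision, then collect two approvals.
await repository.CreateAsync("default-pack", "Default policy pack", cancellationToken);
await repository.UpsertRevisionAsync("default-pack", version: 1, requiresTwoPersonApproval: true, PolicyRevisionStatus.Approved, cancellationToken);

var first = await repository.RecordActivationAsync("default-pack", 1, "alice", DateTimeOffset.UtcNow, comment: null, cancellationToken);
// With two-person approval enabled, the first call reports PendingSecondApproval.

var second = await repository.RecordActivationAsync("default-pack", 1, "bob", DateTimeOffset.UtcNow, comment: "LGTM", cancellationToken);
// The second approval activates the revision and bumps the pack's active version to 1.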
|
||||
@@ -0,0 +1,72 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Policy.Engine.Storage.Mongo.Internal;
using StellaOps.Policy.Engine.Storage.Mongo.Migrations;
using StellaOps.Policy.Engine.Storage.Mongo.Options;
using StellaOps.Policy.Engine.Storage.Mongo.Repositories;

namespace StellaOps.Policy.Engine.Storage.Mongo;

/// <summary>
/// Extension methods for registering Policy Engine MongoDB storage services.
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Adds Policy Engine MongoDB storage services to the service collection.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configure">Optional configuration action for PolicyEngineMongoOptions.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddPolicyEngineMongoStorage(
        this IServiceCollection services,
        Action<PolicyEngineMongoOptions>? configure = null)
    {
        ArgumentNullException.ThrowIfNull(services);

        // Register options
        if (configure is not null)
        {
            services.Configure(configure);
        }

        // Register context (singleton for connection pooling)
        services.AddSingleton<PolicyEngineMongoContext>();

        // Register migrations
        services.AddSingleton<IPolicyEngineMongoMigration, EnsurePolicyCollectionsMigration>();
        services.AddSingleton<IPolicyEngineMongoMigration, EnsurePolicyIndexesMigration>();
        services.AddSingleton<IPolicyEngineMongoMigration, EnsureExceptionIndexesMigration>();

        // Register migration runner
        services.AddSingleton<PolicyEngineMigrationRunner>();

        // Register initializer
        services.AddSingleton<IPolicyEngineMongoInitializer, PolicyEngineMongoInitializer>();

        // Register dynamic collection initializer for effective findings
        services.AddSingleton<IEffectiveFindingCollectionInitializer, EffectiveFindingCollectionInitializer>();

        // Register repositories
        services.AddSingleton<IExceptionRepository, MongoExceptionRepository>();

        return services;
    }

    /// <summary>
    /// Adds Policy Engine MongoDB storage services with configuration binding from a configuration section.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configuration">Configuration section containing PolicyEngineMongoOptions.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddPolicyEngineMongoStorage(
        this IServiceCollection services,
        Microsoft.Extensions.Configuration.IConfigurationSection configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);

        services.Configure<PolicyEngineMongoOptions>(configuration);

        return services.AddPolicyEngineMongoStorage(configure: null);
    }
}
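A short registration sketch; the host builder wiring and the "PolicyEngine:Mongo" configuration key are assumptions for illustration, not prescribed by this commit:

// Hypothetical Program.cs wiring.
var builder = WebApplication.CreateBuilder(args);

// Bind PolicyEngineMongoOptions from configuration, then register the Mongo storage services.
builder.Services.AddPolicyEngineMongoStorage(
    builder.Configuration.GetSection("PolicyEngine:Mongo"));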
|
||||
@@ -291,6 +291,90 @@ public static class PolicyEngineTelemetry
|
||||
/// </summary>
|
||||
public static Counter<long> ProfileEventsPublished => ProfileEventsPublishedCounter;
|
||||
|
||||
// Counter: policy_events_processed_total
|
||||
private static readonly Counter<long> PolicyEventsProcessedCounter =
|
||||
Meter.CreateCounter<long>(
|
||||
"policy_events_processed_total",
|
||||
unit: "events",
|
||||
description: "Total policy change events processed.");
|
||||
|
||||
/// <summary>
|
||||
/// Counter for policy change events processed.
|
||||
/// </summary>
|
||||
public static Counter<long> PolicyEventsProcessed => PolicyEventsProcessedCounter;
|
||||
|
||||
// Counter: policy_effective_events_published_total
|
||||
private static readonly Counter<long> PolicyEffectiveEventsPublishedCounter =
|
||||
Meter.CreateCounter<long>(
|
||||
"policy_effective_events_published_total",
|
||||
unit: "events",
|
||||
description: "Total policy.effective.* events published.");
|
||||
|
||||
/// <summary>
|
||||
/// Counter for policy effective events published.
|
||||
/// </summary>
|
||||
public static Counter<long> PolicyEffectiveEventsPublished => PolicyEffectiveEventsPublishedCounter;
|
||||
|
||||
// Counter: policy_reevaluation_jobs_scheduled_total
|
||||
private static readonly Counter<long> ReEvaluationJobsScheduledCounter =
|
||||
Meter.CreateCounter<long>(
|
||||
"policy_reevaluation_jobs_scheduled_total",
|
||||
unit: "jobs",
|
||||
description: "Total re-evaluation jobs scheduled.");
|
||||
|
||||
/// <summary>
|
||||
/// Counter for re-evaluation jobs scheduled.
|
||||
/// </summary>
|
||||
public static Counter<long> ReEvaluationJobsScheduled => ReEvaluationJobsScheduledCounter;
|
||||
|
||||
// Counter: policy_explain_traces_stored_total
|
||||
private static readonly Counter<long> ExplainTracesStoredCounter =
|
||||
Meter.CreateCounter<long>(
|
||||
"policy_explain_traces_stored_total",
|
||||
unit: "traces",
|
||||
description: "Total explain traces stored for decision audit.");
|
||||
|
||||
/// <summary>
|
||||
/// Counter for explain traces stored.
|
||||
/// </summary>
|
||||
public static Counter<long> ExplainTracesStored => ExplainTracesStoredCounter;
|
||||
|
||||
// Counter: policy_effective_decision_map_operations_total
|
||||
private static readonly Counter<long> EffectiveDecisionMapOperationsCounter =
|
||||
Meter.CreateCounter<long>(
|
||||
"policy_effective_decision_map_operations_total",
|
||||
unit: "operations",
|
||||
description: "Total effective decision map operations (set, get, invalidate).");
|
||||
|
||||
/// <summary>
|
||||
/// Counter for effective decision map operations.
|
||||
/// </summary>
|
||||
public static Counter<long> EffectiveDecisionMapOperations => EffectiveDecisionMapOperationsCounter;
|
||||
|
||||
// Counter: policy_exception_operations_total{tenant,operation}
|
||||
private static readonly Counter<long> ExceptionOperationsCounter =
|
||||
Meter.CreateCounter<long>(
|
||||
"policy_exception_operations_total",
|
||||
unit: "operations",
|
||||
description: "Total policy exception operations (create, update, revoke, review_*).");
|
||||
|
||||
/// <summary>
|
||||
/// Counter for policy exception operations.
|
||||
/// </summary>
|
||||
public static Counter<long> ExceptionOperations => ExceptionOperationsCounter;
|
||||
|
||||
// Counter: policy_exception_cache_operations_total{tenant,operation}
|
||||
private static readonly Counter<long> ExceptionCacheOperationsCounter =
|
||||
Meter.CreateCounter<long>(
|
||||
"policy_exception_cache_operations_total",
|
||||
unit: "operations",
|
||||
description: "Total exception cache operations (hit, miss, set, warm, invalidate).");
|
||||
|
||||
/// <summary>
|
||||
/// Counter for exception cache operations.
|
||||
/// </summary>
|
||||
public static Counter<long> ExceptionCacheOperations => ExceptionCacheOperationsCounter;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Reachability Metrics
|
||||
@@ -506,6 +590,38 @@ public static class PolicyEngineTelemetry
|
||||
PolicySimulationCounter.Add(1, tags);
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Records a policy exception operation.
    /// </summary>
    /// <param name="tenant">Tenant identifier.</param>
    /// <param name="operation">Operation type (create, update, revoke, review_create, review_decision_*, etc.).</param>
    public static void RecordExceptionOperation(string tenant, string operation)
    {
        var tags = new TagList
        {
            { "tenant", NormalizeTenant(tenant) },
            { "operation", NormalizeTag(operation) },
        };

        ExceptionOperationsCounter.Add(1, tags);
    }

    /// <summary>
    /// Records an exception cache operation.
    /// </summary>
    /// <param name="tenant">Tenant identifier.</param>
    /// <param name="operation">Operation type (hit, miss, set, warm, invalidate_*, event_*).</param>
    public static void RecordExceptionCacheOperation(string tenant, string operation)
    {
        var tags = new TagList
        {
            { "tenant", NormalizeTenant(tenant) },
            { "operation", NormalizeTag(operation) },
        };

        ExceptionCacheOperationsCounter.Add(1, tags);
    }
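Call sites pass the tenant plus a short operation tag, for example (values are illustrative):

PolicyEngineTelemetry.RecordExceptionOperation("tenant-a", "create");
PolicyEngineTelemetry.RecordExceptionCacheOperation("tenant-a", "hit");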
|
||||
|
||||
#region Golden Signals - Recording Methods
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -127,7 +127,7 @@ public sealed class PolicyEvaluationPredicate
|
||||
    /// Environment information.
    /// </summary>
    [JsonPropertyName("environment")]
    public required PolicyEvaluationEnvironment Environment { get; init; }
    public required AttestationEnvironment Environment { get; init; }
}

/// <summary>
@@ -167,9 +167,9 @@ public sealed class PolicyEvaluationMetrics
}

/// <summary>
/// Environment information for the evaluation.
/// Environment information for the attestation.
/// </summary>
public sealed class PolicyEvaluationEnvironment
public sealed class AttestationEnvironment
{
    [JsonPropertyName("serviceVersion")]
    public required string ServiceVersion { get; init; }
@@ -243,7 +243,7 @@ public sealed class PolicyEvaluationAttestationService
            VexOverridesApplied = vexOverridesApplied,
            DurationSeconds = durationSeconds,
        },
        Environment = new PolicyEvaluationEnvironment
        Environment = new AttestationEnvironment
        {
            ServiceVersion = serviceVersion,
            HostId = Environment.MachineName,
@@ -338,7 +338,7 @@ public sealed class DsseEnvelopeRequest
[JsonSerializable(typeof(InTotoSubject))]
[JsonSerializable(typeof(EvidenceBundleRef))]
[JsonSerializable(typeof(PolicyEvaluationMetrics))]
[JsonSerializable(typeof(PolicyEvaluationEnvironment))]
[JsonSerializable(typeof(AttestationEnvironment))]
[JsonSourceGenerationOptions(
    WriteIndented = false,
    DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
|
||||
@@ -0,0 +1,371 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;

namespace StellaOps.Policy.Engine.WhatIfSimulation;

/// <summary>
/// Request for what-if simulation supporting hypothetical SBOM diffs and draft policies.
/// </summary>
public sealed record WhatIfSimulationRequest
{
    /// <summary>
    /// Tenant identifier.
    /// </summary>
    [JsonPropertyName("tenant_id")]
    public required string TenantId { get; init; }

    /// <summary>
    /// Base snapshot ID to apply diffs to.
    /// </summary>
    [JsonPropertyName("base_snapshot_id")]
    public required string BaseSnapshotId { get; init; }

    /// <summary>
    /// Active policy pack ID to use as baseline.
    /// If DraftPolicy is provided, this will be compared against.
    /// </summary>
    [JsonPropertyName("baseline_pack_id")]
    public string? BaselinePackId { get; init; }

    /// <summary>
    /// Baseline policy version. If null, uses active version.
    /// </summary>
    [JsonPropertyName("baseline_pack_version")]
    public int? BaselinePackVersion { get; init; }

    /// <summary>
    /// Draft policy to simulate (not yet activated).
    /// If null, uses baseline policy.
    /// </summary>
    [JsonPropertyName("draft_policy")]
    public WhatIfDraftPolicy? DraftPolicy { get; init; }

    /// <summary>
    /// SBOM diffs to apply hypothetically.
    /// </summary>
    [JsonPropertyName("sbom_diffs")]
    public ImmutableArray<WhatIfSbomDiff> SbomDiffs { get; init; } = ImmutableArray<WhatIfSbomDiff>.Empty;

    /// <summary>
    /// Specific component PURLs to evaluate. If empty, evaluates affected by diffs.
    /// </summary>
    [JsonPropertyName("target_purls")]
    public ImmutableArray<string> TargetPurls { get; init; } = ImmutableArray<string>.Empty;

    /// <summary>
    /// Maximum number of components to evaluate.
    /// </summary>
    [JsonPropertyName("limit")]
    public int Limit { get; init; } = 1000;

    /// <summary>
    /// Whether to include detailed explanations for each decision.
    /// </summary>
    [JsonPropertyName("include_explanations")]
    public bool IncludeExplanations { get; init; } = false;

    /// <summary>
    /// Correlation ID for tracing.
    /// </summary>
    [JsonPropertyName("correlation_id")]
    public string? CorrelationId { get; init; }
}

/// <summary>
/// Draft policy definition for simulation.
/// </summary>
public sealed record WhatIfDraftPolicy
{
    /// <summary>
    /// Draft policy pack ID.
    /// </summary>
    [JsonPropertyName("pack_id")]
    public required string PackId { get; init; }

    /// <summary>
    /// Draft policy version.
    /// </summary>
    [JsonPropertyName("version")]
    public int Version { get; init; }

    /// <summary>
    /// Raw YAML policy definition to compile and evaluate.
    /// If provided, this is compiled on-the-fly.
    /// </summary>
    [JsonPropertyName("policy_yaml")]
    public string? PolicyYaml { get; init; }

    /// <summary>
    /// Pre-compiled bundle digest if available.
    /// </summary>
    [JsonPropertyName("bundle_digest")]
    public string? BundleDigest { get; init; }
}

/// <summary>
/// Hypothetical SBOM modification for what-if simulation.
/// </summary>
public sealed record WhatIfSbomDiff
{
    /// <summary>
    /// Type of modification: add, remove, upgrade, downgrade.
    /// </summary>
    [JsonPropertyName("operation")]
    public required string Operation { get; init; }

    /// <summary>
    /// Component PURL being modified.
    /// </summary>
    [JsonPropertyName("purl")]
    public required string Purl { get; init; }

    /// <summary>
    /// New version for upgrade/downgrade operations.
    /// </summary>
    [JsonPropertyName("new_version")]
    public string? NewVersion { get; init; }

    /// <summary>
    /// Original version (for reference in upgrades/downgrades).
    /// </summary>
    [JsonPropertyName("original_version")]
    public string? OriginalVersion { get; init; }

    /// <summary>
    /// Hypothetical advisory IDs affecting this component.
    /// </summary>
    [JsonPropertyName("advisory_ids")]
    public ImmutableArray<string> AdvisoryIds { get; init; } = ImmutableArray<string>.Empty;

    /// <summary>
    /// Hypothetical VEX status for this component.
    /// </summary>
    [JsonPropertyName("vex_status")]
    public string? VexStatus { get; init; }

    /// <summary>
    /// Hypothetical reachability state.
    /// </summary>
    [JsonPropertyName("reachability")]
    public string? Reachability { get; init; }
}

/// <summary>
/// Response from what-if simulation.
/// </summary>
public sealed record WhatIfSimulationResponse
{
    /// <summary>
    /// Simulation identifier.
    /// </summary>
    [JsonPropertyName("simulation_id")]
    public required string SimulationId { get; init; }

    /// <summary>
    /// Tenant identifier.
    /// </summary>
    [JsonPropertyName("tenant_id")]
    public required string TenantId { get; init; }

    /// <summary>
    /// Base snapshot ID used.
    /// </summary>
    [JsonPropertyName("base_snapshot_id")]
    public required string BaseSnapshotId { get; init; }

    /// <summary>
    /// Baseline policy used for comparison.
    /// </summary>
    [JsonPropertyName("baseline_policy")]
    public required WhatIfPolicyRef BaselinePolicy { get; init; }

    /// <summary>
    /// Simulated policy (draft or modified).
    /// </summary>
    [JsonPropertyName("simulated_policy")]
    public WhatIfPolicyRef? SimulatedPolicy { get; init; }

    /// <summary>
    /// Decision changes between baseline and simulation.
    /// </summary>
    [JsonPropertyName("decision_changes")]
    public required ImmutableArray<WhatIfDecisionChange> DecisionChanges { get; init; }

    /// <summary>
    /// Summary of changes.
    /// </summary>
    [JsonPropertyName("summary")]
    public required WhatIfSummary Summary { get; init; }

    /// <summary>
    /// When the simulation was executed.
    /// </summary>
    [JsonPropertyName("executed_at")]
    public required DateTimeOffset ExecutedAt { get; init; }

    /// <summary>
    /// Execution duration in milliseconds.
    /// </summary>
    [JsonPropertyName("duration_ms")]
    public long DurationMs { get; init; }

    /// <summary>
    /// Correlation ID.
    /// </summary>
    [JsonPropertyName("correlation_id")]
    public string? CorrelationId { get; init; }
}

/// <summary>
/// Policy reference in simulation.
/// </summary>
public sealed record WhatIfPolicyRef(
    [property: JsonPropertyName("pack_id")] string PackId,
    [property: JsonPropertyName("version")] int Version,
    [property: JsonPropertyName("bundle_digest")] string? BundleDigest,
    [property: JsonPropertyName("is_draft")] bool IsDraft);

/// <summary>
/// A decision change detected in what-if simulation.
/// </summary>
public sealed record WhatIfDecisionChange
{
    /// <summary>
    /// Component PURL.
    /// </summary>
    [JsonPropertyName("purl")]
    public required string Purl { get; init; }

    /// <summary>
    /// Advisory ID if applicable.
    /// </summary>
    [JsonPropertyName("advisory_id")]
    public string? AdvisoryId { get; init; }

    /// <summary>
    /// Type of change: new, removed, status_changed, severity_changed.
    /// </summary>
    [JsonPropertyName("change_type")]
    public required string ChangeType { get; init; }

    /// <summary>
    /// Baseline decision.
    /// </summary>
    [JsonPropertyName("baseline")]
    public WhatIfDecision? Baseline { get; init; }

    /// <summary>
    /// Simulated decision.
    /// </summary>
    [JsonPropertyName("simulated")]
    public WhatIfDecision? Simulated { get; init; }

    /// <summary>
    /// SBOM diff that caused this change, if any.
    /// </summary>
    [JsonPropertyName("caused_by_diff")]
    public WhatIfSbomDiff? CausedByDiff { get; init; }

    /// <summary>
    /// Explanation for the change.
    /// </summary>
    [JsonPropertyName("explanation")]
    public WhatIfExplanation? Explanation { get; init; }
}

/// <summary>
/// A decision in what-if simulation.
/// </summary>
public sealed record WhatIfDecision(
    [property: JsonPropertyName("status")] string Status,
    [property: JsonPropertyName("severity")] string? Severity,
    [property: JsonPropertyName("rule_name")] string? RuleName,
    [property: JsonPropertyName("priority")] int? Priority,
    [property: JsonPropertyName("exception_applied")] bool ExceptionApplied);

/// <summary>
/// Explanation for a what-if decision.
/// </summary>
public sealed record WhatIfExplanation
{
    /// <summary>
    /// Rules that matched.
    /// </summary>
    [JsonPropertyName("matched_rules")]
    public ImmutableArray<string> MatchedRules { get; init; } = ImmutableArray<string>.Empty;

    /// <summary>
    /// Key factors in the decision.
    /// </summary>
    [JsonPropertyName("factors")]
    public ImmutableArray<string> Factors { get; init; } = ImmutableArray<string>.Empty;

    /// <summary>
    /// VEX evidence considered.
    /// </summary>
    [JsonPropertyName("vex_evidence")]
    public string? VexEvidence { get; init; }

    /// <summary>
    /// Reachability state.
    /// </summary>
    [JsonPropertyName("reachability")]
    public string? Reachability { get; init; }
}

/// <summary>
/// Summary of what-if simulation results.
/// </summary>
public sealed record WhatIfSummary
{
    /// <summary>
    /// Total components evaluated.
    /// </summary>
    [JsonPropertyName("total_evaluated")]
    public int TotalEvaluated { get; init; }

    /// <summary>
    /// Components with changed decisions.
    /// </summary>
    [JsonPropertyName("total_changed")]
    public int TotalChanged { get; init; }

    /// <summary>
    /// Components newly affected.
    /// </summary>
    [JsonPropertyName("newly_affected")]
    public int NewlyAffected { get; init; }

    /// <summary>
    /// Components no longer affected.
    /// </summary>
    [JsonPropertyName("no_longer_affected")]
    public int NoLongerAffected { get; init; }

    /// <summary>
    /// Status changes by type.
    /// </summary>
    [JsonPropertyName("status_changes")]
    public required ImmutableDictionary<string, int> StatusChanges { get; init; }

    /// <summary>
    /// Severity changes by type (e.g., "low_to_high").
    /// </summary>
    [JsonPropertyName("severity_changes")]
    public required ImmutableDictionary<string, int> SeverityChanges { get; init; }

    /// <summary>
    /// Impact assessment.
    /// </summary>
    [JsonPropertyName("impact")]
    public required WhatIfImpact Impact { get; init; }
}

/// <summary>
/// Impact assessment from what-if simulation.
/// </summary>
public sealed record WhatIfImpact(
    [property: JsonPropertyName("risk_delta")] string RiskDelta, // increased, decreased, unchanged
    [property: JsonPropertyName("blocked_count_delta")] int BlockedCountDelta,
    [property: JsonPropertyName("warning_count_delta")] int WarningCountDelta,
    [property: JsonPropertyName("recommendation")] string? Recommendation);
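Reviewer note: a minimal sketch of how a caller might populate the request record above, purely for orientation. The tenant, snapshot, pack, and PURL values are hypothetical placeholders, not values taken from this change; only the property names and types come from the records in the diff.

// Requires: using System.Collections.Immutable;
var request = new WhatIfSimulationRequest
{
    TenantId = "tenant-a",                      // hypothetical tenant
    BaseSnapshotId = "snap-2025-10-01",         // hypothetical snapshot
    DraftPolicy = new WhatIfDraftPolicy
    {
        PackId = "pack-default",                // hypothetical pack ID
        Version = 2,
        PolicyYaml = "rules: []",               // placeholder YAML, compiled on the fly
    },
    SbomDiffs = ImmutableArray.Create(new WhatIfSbomDiff
    {
        Operation = "upgrade",
        Purl = "pkg:npm/lodash@4.17.20",        // hypothetical component
        OriginalVersion = "4.17.20",
        NewVersion = "4.17.21",
    }),
    IncludeExplanations = true,
};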
@@ -0,0 +1,548 @@
using System.Collections.Immutable;
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using StellaOps.Policy.Engine.Domain;
using StellaOps.Policy.Engine.EffectiveDecisionMap;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.Engine.Telemetry;

namespace StellaOps.Policy.Engine.WhatIfSimulation;

/// <summary>
/// Service for Graph What-if API simulations.
/// Supports hypothetical SBOM diffs and draft policies without persisting results.
/// </summary>
internal sealed class WhatIfSimulationService
{
    private readonly IEffectiveDecisionMap _decisionMap;
    private readonly IPolicyPackRepository _policyRepository;
    private readonly PolicyCompilationService _compilationService;
    private readonly ILogger<WhatIfSimulationService> _logger;
    private readonly TimeProvider _timeProvider;

    public WhatIfSimulationService(
        IEffectiveDecisionMap decisionMap,
        IPolicyPackRepository policyRepository,
        PolicyCompilationService compilationService,
        ILogger<WhatIfSimulationService> logger,
        TimeProvider timeProvider)
    {
        _decisionMap = decisionMap ?? throw new ArgumentNullException(nameof(decisionMap));
        _policyRepository = policyRepository ?? throw new ArgumentNullException(nameof(policyRepository));
        _compilationService = compilationService ?? throw new ArgumentNullException(nameof(compilationService));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
    }

    /// <summary>
    /// Executes a what-if simulation without persisting results.
    /// </summary>
    public async Task<WhatIfSimulationResponse> SimulateAsync(
        WhatIfSimulationRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity(
            "policy.whatif.simulate", ActivityKind.Internal);
        activity?.SetTag("tenant_id", request.TenantId);
        activity?.SetTag("base_snapshot_id", request.BaseSnapshotId);
        activity?.SetTag("has_draft_policy", request.DraftPolicy is not null);
        activity?.SetTag("sbom_diff_count", request.SbomDiffs.Length);

        var sw = Stopwatch.StartNew();
        var simulationId = GenerateSimulationId(request);
        var executedAt = _timeProvider.GetUtcNow();

        _logger.LogInformation(
            "Starting what-if simulation {SimulationId} for tenant {TenantId}, snapshot {SnapshotId}",
            simulationId, request.TenantId, request.BaseSnapshotId);

        try
        {
            // Get baseline policy info
            var baselinePolicy = await GetBaselinePolicyAsync(request, cancellationToken).ConfigureAwait(false);

            // Get simulated policy info (draft or same as baseline)
            var simulatedPolicy = await GetSimulatedPolicyAsync(request, cancellationToken).ConfigureAwait(false);

            // Determine which components to evaluate
            var targetPurls = await DetermineTargetPurlsAsync(request, cancellationToken).ConfigureAwait(false);

            // Get baseline decisions from effective decision map
            var baselineDecisions = await GetBaselineDecisionsAsync(
                request.TenantId, request.BaseSnapshotId, targetPurls, cancellationToken).ConfigureAwait(false);

            // Simulate decisions with hypothetical changes
            var simulatedDecisions = await SimulateDecisionsAsync(
                request, targetPurls, simulatedPolicy, cancellationToken).ConfigureAwait(false);

            // Compute changes between baseline and simulated
            var changes = ComputeChanges(
                targetPurls, baselineDecisions, simulatedDecisions, request.SbomDiffs, request.IncludeExplanations);

            // Compute summary
            var summary = ComputeSummary(changes, baselineDecisions, simulatedDecisions);

            sw.Stop();

            _logger.LogInformation(
                "Completed what-if simulation {SimulationId}: {Evaluated} evaluated, {Changed} changed in {Duration}ms",
                simulationId, summary.TotalEvaluated, summary.TotalChanged, sw.ElapsedMilliseconds);

            PolicyEngineTelemetry.RecordSimulation(request.TenantId, "success");

            return new WhatIfSimulationResponse
            {
                SimulationId = simulationId,
                TenantId = request.TenantId,
                BaseSnapshotId = request.BaseSnapshotId,
                BaselinePolicy = baselinePolicy,
                SimulatedPolicy = simulatedPolicy,
                DecisionChanges = changes,
                Summary = summary,
                ExecutedAt = executedAt,
                DurationMs = sw.ElapsedMilliseconds,
                CorrelationId = request.CorrelationId,
            };
        }
        catch (Exception ex)
        {
            sw.Stop();
            _logger.LogError(ex, "What-if simulation {SimulationId} failed", simulationId);
            PolicyEngineTelemetry.RecordSimulation(request.TenantId, "failure");
            PolicyEngineTelemetry.RecordError("whatif_simulation", request.TenantId);
            activity?.SetStatus(ActivityStatusCode.Error, ex.Message);
            throw;
        }
    }

    private async Task<WhatIfPolicyRef> GetBaselinePolicyAsync(
        WhatIfSimulationRequest request,
        CancellationToken cancellationToken)
    {
        if (request.BaselinePackId is not null)
        {
            var version = request.BaselinePackVersion ?? 1;

            // If no version specified, try to get the latest revision to find the active version
            if (request.BaselinePackVersion is null)
            {
                var revision = await _policyRepository.GetRevisionAsync(request.BaselinePackId, 1, cancellationToken)
                    .ConfigureAwait(false);
                if (revision?.Status == PolicyRevisionStatus.Active)
                {
                    version = revision.Version;
                }
            }

            var bundle = await _policyRepository.GetBundleAsync(request.BaselinePackId, version, cancellationToken)
                .ConfigureAwait(false);

            return new WhatIfPolicyRef(
                request.BaselinePackId,
                version,
                bundle?.Digest,
                IsDraft: false);
        }

        // Return a placeholder for "current effective policy"
        return new WhatIfPolicyRef("default", 1, null, IsDraft: false);
    }

    private async Task<WhatIfPolicyRef?> GetSimulatedPolicyAsync(
        WhatIfSimulationRequest request,
        CancellationToken cancellationToken)
    {
        if (request.DraftPolicy is null)
        {
            return null; // No draft - comparison is baseline vs hypothetical SBOM changes
        }

        string? bundleDigest = request.DraftPolicy.BundleDigest;

        // If we have YAML, we could compile it on-the-fly (not persisting)
        // For now, we just reference the draft
        if (request.DraftPolicy.PolicyYaml is not null && bundleDigest is null)
        {
            // Compute a digest from the YAML for reference
            bundleDigest = ComputeYamlDigest(request.DraftPolicy.PolicyYaml);
        }

        return new WhatIfPolicyRef(
            request.DraftPolicy.PackId,
            request.DraftPolicy.Version,
            bundleDigest,
            IsDraft: true);
    }

    private async Task<ImmutableArray<string>> DetermineTargetPurlsAsync(
        WhatIfSimulationRequest request,
        CancellationToken cancellationToken)
    {
        if (request.TargetPurls.Length > 0)
        {
            return request.TargetPurls.Take(request.Limit).ToImmutableArray();
        }

        // Get PURLs from SBOM diffs
        var diffPurls = request.SbomDiffs.Select(d => d.Purl).Distinct().ToList();

        if (diffPurls.Count > 0)
        {
            return diffPurls.Take(request.Limit).ToImmutableArray();
        }

        // Get from effective decision map
        var allDecisions = await _decisionMap.GetAllForSnapshotAsync(
            request.TenantId,
            request.BaseSnapshotId,
            new EffectiveDecisionFilter { Limit = request.Limit },
            cancellationToken).ConfigureAwait(false);

        return allDecisions.Select(d => d.AssetId).ToImmutableArray();
    }

    private async Task<Dictionary<string, WhatIfDecision>> GetBaselineDecisionsAsync(
        string tenantId,
        string snapshotId,
        ImmutableArray<string> purls,
        CancellationToken cancellationToken)
    {
        var result = await _decisionMap.GetBatchAsync(tenantId, snapshotId, purls.ToList(), cancellationToken)
            .ConfigureAwait(false);

        var decisions = new Dictionary<string, WhatIfDecision>(StringComparer.OrdinalIgnoreCase);

        foreach (var (purl, entry) in result.Entries)
        {
            decisions[purl] = new WhatIfDecision(
                entry.Status,
                entry.Severity,
                entry.RuleName,
                entry.Priority,
                entry.ExceptionId is not null);
        }

        return decisions;
    }

    private Task<Dictionary<string, WhatIfDecision>> SimulateDecisionsAsync(
        WhatIfSimulationRequest request,
        ImmutableArray<string> targetPurls,
        WhatIfPolicyRef? simulatedPolicy,
        CancellationToken cancellationToken)
    {
        // In a full implementation, this would:
        // 1. Apply SBOM diffs to compute hypothetical component states
        // 2. If draft policy, compile and evaluate against the draft
        // 3. Otherwise, re-evaluate with hypothetical context changes
        //
        // For now, we compute simulated decisions based on the diffs

        var decisions = new Dictionary<string, WhatIfDecision>(StringComparer.OrdinalIgnoreCase);
        var diffsByPurl = request.SbomDiffs.ToDictionary(d => d.Purl, StringComparer.OrdinalIgnoreCase);

        foreach (var purl in targetPurls)
        {
            cancellationToken.ThrowIfCancellationRequested();

            if (diffsByPurl.TryGetValue(purl, out var diff))
            {
                var decision = SimulateDecisionForDiff(diff, simulatedPolicy);
                decisions[purl] = decision;
            }
            else
            {
                // No diff for this PURL - simulate based on policy change if any
                decisions[purl] = SimulateDecisionWithoutDiff(purl, simulatedPolicy);
            }
        }

        return Task.FromResult(decisions);
    }

    private static WhatIfDecision SimulateDecisionForDiff(WhatIfSbomDiff diff, WhatIfPolicyRef? policy)
    {
        // Simulate based on diff operation and properties
        return diff.Operation.ToLowerInvariant() switch
        {
            "remove" => new WhatIfDecision("allow", null, null, null, false),
            "add" => SimulateNewComponentDecision(diff),
            "upgrade" => SimulateUpgradeDecision(diff),
            "downgrade" => SimulateDowngradeDecision(diff),
            _ => new WhatIfDecision("allow", null, null, null, false),
        };
    }

    private static WhatIfDecision SimulateNewComponentDecision(WhatIfSbomDiff diff)
    {
        // New components are evaluated based on advisory presence
        if (diff.AdvisoryIds.Length > 0)
        {
            var severity = DetermineSeverityFromAdvisories(diff.AdvisoryIds);
            var status = severity switch
            {
                "critical" or "high" => "deny",
                "medium" => "warn",
                _ => "allow"
            };

            // VEX can override
            if (diff.VexStatus?.Equals("not_affected", StringComparison.OrdinalIgnoreCase) == true)
            {
                status = "allow";
            }

            // Reachability can downgrade
            if (diff.Reachability?.Equals("unreachable", StringComparison.OrdinalIgnoreCase) == true &&
                status == "deny")
            {
                status = "warn";
            }

            return new WhatIfDecision(status, severity, "simulated_rule", 100, false);
        }

        return new WhatIfDecision("allow", null, null, null, false);
    }

    private static WhatIfDecision SimulateUpgradeDecision(WhatIfSbomDiff diff)
    {
        // Upgrades typically fix vulnerabilities
        if (diff.AdvisoryIds.Length > 0)
        {
            // Some advisories remain
            return new WhatIfDecision("warn", "low", "simulated_upgrade_rule", 50, false);
        }

        // Upgrade fixed all issues
        return new WhatIfDecision("allow", null, "simulated_upgrade_rule", 50, false);
    }

    private static WhatIfDecision SimulateDowngradeDecision(WhatIfSbomDiff diff)
    {
        // Downgrades may introduce vulnerabilities
        if (diff.AdvisoryIds.Length > 0)
        {
            var severity = DetermineSeverityFromAdvisories(diff.AdvisoryIds);
            return new WhatIfDecision("deny", severity, "simulated_downgrade_rule", 150, false);
        }

        return new WhatIfDecision("warn", "low", "simulated_downgrade_rule", 150, false);
    }

    private static WhatIfDecision SimulateDecisionWithoutDiff(string purl, WhatIfPolicyRef? policy)
    {
        // If there's a draft policy, simulate potential changes from policy modification
        if (policy?.IsDraft == true)
        {
            // Draft policies might change thresholds - simulate a potential change
            return new WhatIfDecision("warn", "medium", "draft_policy_rule", 100, false);
        }

        // No change - return unchanged placeholder
        return new WhatIfDecision("allow", null, null, null, false);
    }

    private static string DetermineSeverityFromAdvisories(ImmutableArray<string> advisoryIds)
    {
        // In reality, would look up actual severity from advisories
        // For simulation, use a heuristic based on advisory count
        if (advisoryIds.Length >= 5) return "critical";
        if (advisoryIds.Length >= 3) return "high";
        if (advisoryIds.Length >= 1) return "medium";
        return "low";
    }

    private static ImmutableArray<WhatIfDecisionChange> ComputeChanges(
        ImmutableArray<string> targetPurls,
        Dictionary<string, WhatIfDecision> baseline,
        Dictionary<string, WhatIfDecision> simulated,
        ImmutableArray<WhatIfSbomDiff> diffs,
        bool includeExplanations)
    {
        var changes = new List<WhatIfDecisionChange>();
        var diffsByPurl = diffs.ToDictionary(d => d.Purl, StringComparer.OrdinalIgnoreCase);

        foreach (var purl in targetPurls)
        {
            var hasBaseline = baseline.TryGetValue(purl, out var baselineDecision);
            var hasSimulated = simulated.TryGetValue(purl, out var simulatedDecision);
            diffsByPurl.TryGetValue(purl, out var diff);

            string? changeType = null;

            if (!hasBaseline && hasSimulated)
            {
                changeType = "new";
            }
            else if (hasBaseline && !hasSimulated)
            {
                changeType = "removed";
            }
            else if (hasBaseline && hasSimulated)
            {
                if (baselineDecision!.Status != simulatedDecision!.Status)
                {
                    changeType = "status_changed";
                }
                else if (baselineDecision.Severity != simulatedDecision.Severity)
                {
                    changeType = "severity_changed";
                }
            }

            if (changeType is not null)
            {
                var explanation = includeExplanations
                    ? BuildExplanation(diff, baselineDecision, simulatedDecision)
                    : null;

                changes.Add(new WhatIfDecisionChange
                {
                    Purl = purl,
                    AdvisoryId = diff?.AdvisoryIds.FirstOrDefault(),
                    ChangeType = changeType,
                    Baseline = baselineDecision,
                    Simulated = simulatedDecision,
                    CausedByDiff = diff,
                    Explanation = explanation,
                });
            }
        }

        return changes.ToImmutableArray();
    }

    private static WhatIfExplanation BuildExplanation(
        WhatIfSbomDiff? diff,
        WhatIfDecision? baseline,
        WhatIfDecision? simulated)
    {
        var factors = new List<string>();
        var rules = new List<string>();

        if (diff is not null)
        {
            factors.Add($"SBOM {diff.Operation}: {diff.Purl}");

            if (diff.NewVersion is not null)
            {
                factors.Add($"Version change: {diff.OriginalVersion ?? "unknown"} -> {diff.NewVersion}");
            }

            if (diff.AdvisoryIds.Length > 0)
            {
                factors.Add($"Advisories: {string.Join(", ", diff.AdvisoryIds.Take(3))}");
            }
        }

        if (baseline?.RuleName is not null)
        {
            rules.Add($"baseline:{baseline.RuleName}");
        }

        if (simulated?.RuleName is not null)
        {
            rules.Add($"simulated:{simulated.RuleName}");
        }

        return new WhatIfExplanation
        {
            MatchedRules = rules.ToImmutableArray(),
            Factors = factors.ToImmutableArray(),
            VexEvidence = diff?.VexStatus,
            Reachability = diff?.Reachability,
        };
    }

    private static WhatIfSummary ComputeSummary(
        ImmutableArray<WhatIfDecisionChange> changes,
        Dictionary<string, WhatIfDecision> baseline,
        Dictionary<string, WhatIfDecision> simulated)
    {
        var statusChanges = new Dictionary<string, int>();
        var severityChanges = new Dictionary<string, int>();
        var newlyAffected = 0;
        var noLongerAffected = 0;
        var blockedDelta = 0;
        var warningDelta = 0;

        foreach (var change in changes)
        {
            switch (change.ChangeType)
            {
                case "new":
                    newlyAffected++;
                    if (change.Simulated?.Status == "deny") blockedDelta++;
                    if (change.Simulated?.Status == "warn") warningDelta++;
                    break;

                case "removed":
                    noLongerAffected++;
                    if (change.Baseline?.Status == "deny") blockedDelta--;
                    if (change.Baseline?.Status == "warn") warningDelta--;
                    break;

                case "status_changed":
                    var statusKey = $"{change.Baseline?.Status ?? "none"}_to_{change.Simulated?.Status ?? "none"}";
                    statusChanges.TryGetValue(statusKey, out var statusCount);
                    statusChanges[statusKey] = statusCount + 1;

                    // Update deltas
                    if (change.Baseline?.Status == "deny") blockedDelta--;
                    if (change.Simulated?.Status == "deny") blockedDelta++;
                    if (change.Baseline?.Status == "warn") warningDelta--;
                    if (change.Simulated?.Status == "warn") warningDelta++;
                    break;

                case "severity_changed":
                    var sevKey = $"{change.Baseline?.Severity ?? "none"}_to_{change.Simulated?.Severity ?? "none"}";
                    severityChanges.TryGetValue(sevKey, out var sevCount);
                    severityChanges[sevKey] = sevCount + 1;
                    break;
            }
        }

        var riskDelta = blockedDelta switch
        {
            > 0 => "increased",
            < 0 => "decreased",
            _ => warningDelta > 0 ? "increased" : warningDelta < 0 ? "decreased" : "unchanged"
        };

        var recommendation = riskDelta switch
        {
            "increased" => "Review changes before applying - risk profile increases",
            "decreased" => "Changes appear safe - risk profile improves",
            _ => "Neutral impact - proceed with caution"
        };

        return new WhatIfSummary
        {
            TotalEvaluated = baseline.Count + simulated.Count(kv => !baseline.ContainsKey(kv.Key)),
            TotalChanged = changes.Length,
            NewlyAffected = newlyAffected,
            NoLongerAffected = noLongerAffected,
            StatusChanges = statusChanges.ToImmutableDictionary(),
            SeverityChanges = severityChanges.ToImmutableDictionary(),
            Impact = new WhatIfImpact(riskDelta, blockedDelta, warningDelta, recommendation),
        };
    }

    private static string GenerateSimulationId(WhatIfSimulationRequest request)
    {
        var seed = $"{request.TenantId}|{request.BaseSnapshotId}|{request.DraftPolicy?.PackId}|{Guid.NewGuid()}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(seed));
        return $"whatif-{Convert.ToHexStringLower(hash)[..16]}";
    }

    private static string ComputeYamlDigest(string yaml)
    {
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(yaml));
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }
}
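Reviewer note: a usage sketch for the service above, assuming a WhatIfSimulationService instance (the class is internal, so this would run inside the Policy Engine assembly or its tests) and an already-built request and cancellation token; none of these locals appear in the commit itself.

// Sketch only: `simulationService`, `request`, and `cancellationToken` are assumed to exist.
var response = await simulationService.SimulateAsync(request, cancellationToken);

Console.WriteLine($"{response.Summary.TotalChanged} of {response.Summary.TotalEvaluated} decisions changed");
Console.WriteLine($"Risk delta: {response.Summary.Impact.RiskDelta} ({response.Summary.Impact.Recommendation})");

foreach (var change in response.DecisionChanges)
{
    // Baseline/Simulated can be null for "new" or "removed" changes, hence the fallbacks.
    Console.WriteLine($"{change.Purl}: {change.ChangeType} " +
                      $"{change.Baseline?.Status ?? "-"} -> {change.Simulated?.Status ?? "-"}");
}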
@@ -0,0 +1,112 @@
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Engine.Options;

namespace StellaOps.Policy.Engine.Workers;

/// <summary>
/// Background service host for policy evaluation worker.
/// Continuously processes re-evaluation jobs from the queue.
/// </summary>
internal sealed class PolicyEvaluationWorkerHost : BackgroundService
{
    private readonly PolicyEvaluationWorkerService _workerService;
    private readonly PolicyEngineWorkerOptions _options;
    private readonly ILogger<PolicyEvaluationWorkerHost> _logger;

    public PolicyEvaluationWorkerHost(
        PolicyEvaluationWorkerService workerService,
        IOptions<PolicyEngineOptions> options,
        ILogger<PolicyEvaluationWorkerHost> logger)
    {
        _workerService = workerService ?? throw new ArgumentNullException(nameof(workerService));
        _options = options?.Value.Workers ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        var pollInterval = TimeSpan.FromSeconds(_options.SchedulerIntervalSeconds);
        var maxConcurrency = _options.MaxConcurrentEvaluations;

        _logger.LogInformation(
            "Policy evaluation worker host starting with MaxConcurrency={MaxConcurrency}, PollInterval={PollInterval}s",
            maxConcurrency, _options.SchedulerIntervalSeconds);

        // Create worker tasks for concurrent processing
        var workerTasks = new List<Task>();
        for (int i = 0; i < maxConcurrency; i++)
        {
            var workerId = i + 1;
            workerTasks.Add(RunWorkerAsync(workerId, maxConcurrency, pollInterval, stoppingToken));
        }

        try
        {
            await Task.WhenAll(workerTasks).ConfigureAwait(false);
        }
        catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
        {
            _logger.LogInformation("Policy evaluation worker host stopping");
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Policy evaluation worker host encountered an error");
            throw;
        }
    }

    private async Task RunWorkerAsync(
        int workerId,
        int maxConcurrency,
        TimeSpan pollInterval,
        CancellationToken stoppingToken)
    {
        _logger.LogDebug("Worker {WorkerId} starting", workerId);

        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                var result = await _workerService.TryExecuteNextAsync(maxConcurrency, stoppingToken)
                    .ConfigureAwait(false);

                if (result is null)
                {
                    // No job available, wait before polling again
                    await Task.Delay(pollInterval, stoppingToken).ConfigureAwait(false);
                }
                else
                {
                    _logger.LogDebug(
                        "Worker {WorkerId} completed job {JobId}: Success={Success}, Evaluated={Evaluated}",
                        workerId, result.JobId, result.Success, result.ItemsEvaluated);
                }
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                break;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Worker {WorkerId} encountered an error processing job", workerId);
                // Wait before retrying to avoid tight error loop
                await Task.Delay(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false);
            }
        }

        _logger.LogDebug("Worker {WorkerId} stopped", workerId);
    }

    public override async Task StopAsync(CancellationToken cancellationToken)
    {
        _logger.LogInformation(
            "Policy evaluation worker host stopping. Pending jobs: {PendingCount}, Running: {RunningCount}",
            _workerService.GetPendingJobCount(), _workerService.GetRunningJobCount());

        await base.StopAsync(cancellationToken).ConfigureAwait(false);

        _logger.LogInformation("Policy evaluation worker host stopped");
    }
}
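Reviewer note: the host class above is a standard BackgroundService, so it would be wired into the Policy Engine's composition root roughly as sketched below. The actual registration lives outside this diff and may differ; the configuration section name and the singleton lifetime of PolicyEvaluationWorkerService are assumptions, not facts from the commit. AddHostedService<T>() and Configure<TOptions>() are the stock Microsoft.Extensions.DependencyInjection / Options calls.

// Hypothetical composition-root wiring inside the Policy Engine host project.
services.Configure<PolicyEngineOptions>(configuration.GetSection("PolicyEngine")); // assumed section name
services.AddSingleton<PolicyEvaluationWorkerService>();                            // assumed lifetime
services.AddHostedService<PolicyEvaluationWorkerHost>();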
Some files were not shown because too many files have changed in this diff.