Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Build Test Deploy / build-test (push) Has been cancelled
Build Test Deploy / authority-container (push) Has been cancelled
Build Test Deploy / docs (push) Has been cancelled
Build Test Deploy / deploy (push) Has been cancelled
@@ -34,18 +34,24 @@ jobs:
        with:
          node-version: ${{ env.NODE_VERSION }}

      - name: Install markdown linters
        run: |
          npm install markdown-link-check remark-cli remark-preset-lint-recommended
      - name: Install documentation toolchain
        run: |
          npm install --no-save markdown-link-check remark-cli remark-preset-lint-recommended ajv ajv-cli ajv-formats

      - name: Link check
        run: |
          find docs -name '*.md' -print0 | \
            xargs -0 -n1 -I{} npx markdown-link-check --quiet '{}'

      - name: Remark lint
        run: |
          npx remark docs -qf

      - name: Validate event schemas
        run: |
          for schema in docs/events/*.json; do
            npx ajv compile -c ajv-formats -s "$schema"
          done

      - name: Setup Python
        uses: actions/setup-python@v5
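To reproduce these documentation checks locally before pushing, a minimal sketch (it assumes Node.js and `npx` are available and simply mirrors the workflow steps above):

```bash
# Install the documentation toolchain without persisting it to package.json
npm install --no-save markdown-link-check remark-cli remark-preset-lint-recommended ajv ajv-cli ajv-formats

# Check every Markdown link under docs/
find docs -name '*.md' -print0 | xargs -0 -n1 -I{} npx markdown-link-check --quiet '{}'

# Lint Markdown with the recommended remark preset (quiet, fail on warnings)
npx remark docs -qf

# Compile each event schema so malformed JSON Schema fails fast
for schema in docs/events/*.json; do
  npx ajv compile -c ajv-formats -s "$schema"
done
```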
SPRINTS.md (50 lines changed)
@@ -168,38 +168,38 @@ This file describe implementation of Stella Ops (docs/README.md). Implementation
| Sprint 8 | Mirror Distribution | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-08-201 | Mirror distribution endpoints – expose domain-scoped index/download APIs with auth/quota. |
| Sprint 8 | Mirror Distribution | src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md | TODO | BE-Conn-Stella | FEEDCONN-STELLA-08-001 | Concelier mirror connector – fetch mirror manifest, verify signatures, and hydrate canonical DTOs with resume support. |
| Sprint 8 | Mirror Distribution | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-MIRROR-08-001 | Managed mirror deployments for `*.stella-ops.org` – Helm/Compose overlays, CDN, runbooks. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Core/TASKS.md | TODO | Team Scanner Core | SCANNER-CORE-09-501 | Define shared DTOs (ScanJob, ProgressEvent), error taxonomy, and deterministic ID/timestamp helpers aligning with `ARCHITECTURE_SCANNER.md` §3–§4. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Core/TASKS.md | TODO | Team Scanner Core | SCANNER-CORE-09-502 | Observability helpers (correlation IDs, logging scopes, metric namespacing, deterministic hashes) consumed by WebService/Worker. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Core/TASKS.md | TODO | Team Scanner Core | SCANNER-CORE-09-503 | Security utilities: Authority client factory, OpTok caching, DPoP verifier, restart-time plug-in guardrails for scanner components. |
| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | TODO | BuildX Guild | SP9-BLDX-09-001 | Buildx driver scaffold + handshake with Scanner.Emit (local CAS). |
| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | TODO | BuildX Guild | SP9-BLDX-09-002 | OCI annotations + provenance hand-off to Attestor. |
| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | TODO | BuildX Guild | SP9-BLDX-09-003 | CI demo: minimal SBOM push & backend report wiring. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | TODO | Team Scanner WebService | SCANNER-WEB-09-101 | Minimal API host with Authority enforcement, health/ready endpoints, and restart-time plug-in loader per architecture §1, §4. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | TODO | Team Scanner WebService | SCANNER-WEB-09-102 | `/api/v1/scans` submission/status endpoints with deterministic IDs, validation, and cancellation support. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Core/TASKS.md | DONE (2025-10-18) | Team Scanner Core | SCANNER-CORE-09-501 | Define shared DTOs (ScanJob, ProgressEvent), error taxonomy, and deterministic ID/timestamp helpers aligning with `ARCHITECTURE_SCANNER.md` §3–§4. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Core/TASKS.md | DONE (2025-10-18) | Team Scanner Core | SCANNER-CORE-09-502 | Observability helpers (correlation IDs, logging scopes, metric namespacing, deterministic hashes) consumed by WebService/Worker. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Core/TASKS.md | DONE (2025-10-18) | Team Scanner Core | SCANNER-CORE-09-503 | Security utilities: Authority client factory, OpTok caching, DPoP verifier, restart-time plug-in guardrails for scanner components. |
| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE | BuildX Guild | SP9-BLDX-09-001 | Buildx driver scaffold + handshake with Scanner.Emit (local CAS). |
| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE | BuildX Guild | SP9-BLDX-09-002 | OCI annotations + provenance hand-off to Attestor. |
| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE | BuildX Guild | SP9-BLDX-09-003 | CI demo: minimal SBOM push & backend report wiring. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-18) | Team Scanner WebService | SCANNER-WEB-09-101 | Minimal API host with Authority enforcement, health/ready endpoints, and restart-time plug-in loader per architecture §1, §4. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-18) | Team Scanner WebService | SCANNER-WEB-09-102 | `/api/v1/scans` submission/status endpoints with deterministic IDs, validation, and cancellation support. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | TODO | Team Scanner WebService | SCANNER-WEB-09-103 | Progress streaming (SSE/JSONL) with correlation IDs and ISO-8601 UTC timestamps, documented in API reference. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | TODO | Team Scanner WebService | SCANNER-WEB-09-104 | Configuration binding for Mongo, MinIO, queue, feature flags; startup diagnostics and fail-fast policy. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-WEB-09-104 | Configuration binding for Mongo, MinIO, queue, feature flags; startup diagnostics and fail-fast policy. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | TODO | Team Scanner WebService | SCANNER-POLICY-09-105 | Policy snapshot loader + schema + OpenAPI (YAML ignore rules, VEX include/exclude, vendor precedence). |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | TODO | Team Scanner WebService | SCANNER-POLICY-09-106 | `/reports` verdict assembly (Feedser+Vexer+Policy) + signed response envelope. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | TODO | Team Scanner WebService | SCANNER-POLICY-09-107 | Expose score inputs, config version, and quiet provenance in `/reports` JSON and signed payload. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | TODO | Team Scanner Worker | SCANNER-WORKER-09-201 | Worker host bootstrap with Authority auth, hosted services, and graceful shutdown semantics. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | TODO | Team Scanner Worker | SCANNER-WORKER-09-202 | Lease/heartbeat loop with retry+jitter, poison-job quarantine, structured logging. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | TODO | Team Scanner Worker | SCANNER-WORKER-09-203 | Analyzer dispatch skeleton emitting deterministic stage progress and honoring cancellation tokens. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | TODO | Team Scanner Worker | SCANNER-WORKER-09-204 | Worker metrics (queue latency, stage duration, failure counts) with OpenTelemetry resource wiring. |
| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-CORE-09-001 | Policy schema + binder + diagnostics. |
| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-CORE-09-002 | Policy snapshot store + revision digests. |
| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-CORE-09-003 | `/policy/preview` API (image digest → projected verdict diff). |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-201 | Worker host bootstrap with Authority auth, hosted services, and graceful shutdown semantics. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-202 | Lease/heartbeat loop with retry+jitter, poison-job quarantine, structured logging. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-203 | Analyzer dispatch skeleton emitting deterministic stage progress and honoring cancellation tokens. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-204 | Worker metrics (queue latency, stage duration, failure counts) with OpenTelemetry resource wiring. |
| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | DONE | Policy Guild | POLICY-CORE-09-001 | Policy schema + binder + diagnostics. |
| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | DONE | Policy Guild | POLICY-CORE-09-002 | Policy snapshot store + revision digests. |
| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | DONE | Policy Guild | POLICY-CORE-09-003 | `/policy/preview` API (image digest → projected verdict diff). |
| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-CORE-09-004 | Versioned scoring config with schema validation, trust table, and golden fixtures. |
| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-CORE-09-005 | Scoring/quiet engine – compute score, enforce VEX-only quiet rules, emit inputs and provenance. |
| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-CORE-09-006 | Unknown state & confidence decay – deterministic bands surfaced in policy outputs. |
| Sprint 9 | DevOps Foundations | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-HELM-09-001 | Helm/Compose environment profiles (dev/staging/airgap) with deterministic digests. |
| Sprint 9 | Docs & Governance | docs/TASKS.md | TODO | Docs Guild, DevEx | DOCS-ADR-09-001 | Establish ADR process and template. |
| Sprint 9 | Docs & Governance | docs/TASKS.md | TODO | Docs Guild, Platform Events | DOCS-EVENTS-09-002 | Publish event schema catalog (`docs/events/`) for critical envelopes. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Storage/TASKS.md | TODO | Team Scanner Storage | SCANNER-STORAGE-09-301 | Mongo catalog schemas/indexes for images, layers, artifacts, jobs, lifecycle rules plus migrations. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Storage/TASKS.md | TODO | Team Scanner Storage | SCANNER-STORAGE-09-302 | MinIO layout, immutability policies, client abstraction, and configuration binding. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Storage/TASKS.md | TODO | Team Scanner Storage | SCANNER-STORAGE-09-303 | Repositories/services with dual-write feature flag, deterministic digests, TTL enforcement tests. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Queue/TASKS.md | TODO | Team Scanner Queue | SCANNER-QUEUE-09-401 | Queue abstraction + Redis Streams adapter with ack/claim APIs and idempotency tokens. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Queue/TASKS.md | TODO | Team Scanner Queue | SCANNER-QUEUE-09-402 | Pluggable backend support (Redis, NATS) with configuration binding, health probes, failover docs. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Queue/TASKS.md | TODO | Team Scanner Queue | SCANNER-QUEUE-09-403 | Retry + dead-letter strategy with structured logs/metrics for offline deployments. |
| Sprint 9 | DevOps Foundations | ops/devops/TASKS.md | DONE (2025-10-19) | DevOps Guild | DEVOPS-HELM-09-001 | Helm/Compose environment profiles (dev/staging/airgap) with deterministic digests. |
| Sprint 9 | Docs & Governance | docs/TASKS.md | DONE (2025-10-19) | Docs Guild, DevEx | DOCS-ADR-09-001 | Establish ADR process and template. |
| Sprint 9 | Docs & Governance | docs/TASKS.md | DONE (2025-10-19) | Docs Guild, Platform Events | DOCS-EVENTS-09-002 | Publish event schema catalog (`docs/events/`) for critical envelopes. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Storage/TASKS.md | DONE (2025-10-19) | Team Scanner Storage | SCANNER-STORAGE-09-301 | Mongo catalog schemas/indexes for images, layers, artifacts, jobs, lifecycle rules plus migrations. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Storage/TASKS.md | DONE (2025-10-19) | Team Scanner Storage | SCANNER-STORAGE-09-302 | MinIO layout, immutability policies, client abstraction, and configuration binding. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Storage/TASKS.md | DONE (2025-10-19) | Team Scanner Storage | SCANNER-STORAGE-09-303 | Repositories/services with dual-write feature flag, deterministic digests, TTL enforcement tests. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Queue/TASKS.md | DONE (2025-10-19) | Team Scanner Queue | SCANNER-QUEUE-09-401 | Queue abstraction + Redis Streams adapter with ack/claim APIs and idempotency tokens. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Queue/TASKS.md | DONE (2025-10-19) | Team Scanner Queue | SCANNER-QUEUE-09-402 | Pluggable backend support (Redis, NATS) with configuration binding, health probes, failover docs. |
| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Queue/TASKS.md | DONE (2025-10-19) | Team Scanner Queue | SCANNER-QUEUE-09-403 | Retry + dead-letter strategy with structured logs/metrics for offline deployments. |
| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Cache/TASKS.md | TODO | Scanner Cache Guild | SCANNER-CACHE-10-101 | Implement layer cache store keyed by layer digest with metadata retention per architecture §3.3. |
| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Cache/TASKS.md | TODO | Scanner Cache Guild | SCANNER-CACHE-10-102 | Build file CAS with dedupe, TTL enforcement, and offline import/export hooks. |
| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Cache/TASKS.md | TODO | Scanner Cache Guild | SCANNER-CACHE-10-103 | Expose cache metrics/logging and configuration toggles for warm/cold thresholds. |
@@ -24,41 +24,47 @@ Durations are estimated work sizes (1 d ≈ one focused engineer day). Milesto
- Tasks: SCANNER-QUEUE-09-401 (3 d), -402 (2 d), -403 (2 d) · `/src/StellaOps.Scanner.Queue/TASKS.md`
- Acceptance: dequeue latency p95 ≤20 ms at 40 rps; chaos test retains leases.
- Gate: Redis/NATS adapters docs + `QueueLeaseIntegrationTests` passing.
- Status: **DONE (2025-10-19)** – Gate satisfied via Redis/NATS adapter docs and `QueueLeaseIntegrationTests` run under fake clock.

### Group SP9-G3 — Storage Backbone (src/StellaOps.Scanner.Storage) ~1 w
- Tasks: SCANNER-STORAGE-09-301 (3 d), -302 (2 d), -303 (2 d)
- Acceptance: majority write/read ≤50 ms; TTL verified.
- Gate: migrations checked in; `StorageDualWriteFixture` passes.
- Status: **DONE (2025-10-19)** – Mongo bootstrapper + migrations committed; MinIO dual-write service wired; `StorageDualWriteFixture` green on Mongo2Go.

### Group SP9-G4 — WebService Host & Policy Surfacing (src/StellaOps.Scanner.WebService) ~1.2 w
- Tasks: SCANNER-WEB-09-101 (2 d), -102 (3 d), -103 (2 d), -104 (2 d), SCANNER-POLICY-09-105 (3 d), SCANNER-POLICY-09-106 (4 d)
- Acceptance: `/api/v1/scans` enqueue p95 ≤50 ms under synthetic load; policy validation errors actionable; `/reports` response signed.
- Gate SP9-G4 → SP10/SP11: `/reports` OpenAPI frozen; sample signed envelope committed in `samples/api/reports/`.
- Status: **IN PROGRESS (2025-10-19)** – Minimal host and `/api/v1/scans` endpoints delivered (SCANNER-WEB-09-101/102 done); progress streaming and policy/report surfaces remain.

### Group SP9-G5 — Worker Host (src/StellaOps.Scanner.Worker) ~1 w
- Tasks: SCANNER-WORKER-09-201 (3 d), -202 (3 d), -203 (2 d), -204 (2 d)
- Acceptance: job lease never drops <3× heartbeat; progress events deterministic.
- Gate: `WorkerBasicScanScenario` integration recorded.
- Status: **DONE (2025-10-19)** – Host bootstrap + authority wiring, heartbeat loop, deterministic stage pipeline, and metrics landed; `WorkerBasicScanScenarioTests` green.

### Group SP9-G6 — Buildx Plug-in (src/StellaOps.Scanner.Sbomer.BuildXPlugin) ~0.8 w
- Tasks: SP9-BLDX-09-001 (3 d), SP9-BLDX-09-002 (2 d), SP9-BLDX-09-003 (2 d)
- Acceptance: build-time overhead ≤300 ms/layer on 4 vCPU; CAS handshake reliable in CI sample.
- Gate: buildx demo workflow artifact + quickstart doc.
- Status: **DONE** (2025-10-19) — manifest+CAS scaffold, descriptor/Attestor hand-off, GitHub demo workflow, and quickstart committed.

### Group SP9-G7 — Policy Engine Core (src/StellaOps.Policy) ~1 w
- Tasks: POLICY-CORE-09-001 (2 d), -002 (3 d), -003 (3 d), -004 (3 d), -005 (4 d), -006 (2 d)
- Tasks: POLICY-CORE-09-001 (2 d) ✅, -002 (3 d) ✅, -003 (3 d) ✅, -004 (3 d), -005 (4 d), -006 (2 d)
- Acceptance: policy parsing ≥200 files/s; preview diff response <200 ms for 500-component SBOM; quieting logic audited.
- Gate: `policy-schema@1` published; revision digests stored; preview API doc updated.

### Group SP9-G8 — DevOps Early Guardrails (ops/devops) ~0.4 w
- Tasks: DEVOPS-HELM-09-001 (3 d)
- Tasks: DEVOPS-HELM-09-001 (3 d) — **DONE (2025-10-19)**
- Acceptance: helm/compose profiles for dev/stage/airgap lint + dry-run clean; manifests pinned to digest.
- Gate: profiles merged under `deploy/`; install guide cross-link.
- Gate: profiles merged under `deploy/`; install guide cross-link satisfied via `deploy/compose/` bundles and `docs/21_INSTALL_GUIDE.md`.

### Group SP9-G9 — Documentation & Events (docs/) ~0.4 w
- Tasks: DOCS-ADR-09-001 (2 d), DOCS-EVENTS-09-002 (2 d)
- Acceptance: ADR process broadcast; event schemas validated via CI.
- Gate: `docs/adr/index.md` linking template; `docs/events/README.md` referencing schemas.
- Status: **DONE (2025-10-19)** – ADR contribution guide + template updates merged, Docs CI Ajv validation wired, events catalog documented, guild announcement recorded.

---
deploy/README.md (new file, 19 lines)
@@ -0,0 +1,19 @@
# Deployment Profiles

This directory contains deterministic deployment bundles for the core Stella Ops stack. All manifests reference immutable image digests and map 1:1 to the release manifests stored under `deploy/releases/`.

## Structure

- `releases/` – canonical release manifests (edge, stable, airgap) used to source image digests.
- `compose/` – Docker Compose bundles for dev/stage/airgap targets plus `.env` seed files.
- `helm/stellaops/` – multi-profile Helm chart with values files for dev/stage/airgap.
- `tools/validate-profiles.sh` – helper that runs `docker compose config` and `helm lint/template` for every profile.

## Workflow

1. Update or add a release manifest under `releases/` with the new digests.
2. Mirror the digests into the Compose and Helm profiles that correspond to that channel.
3. Run `deploy/tools/validate-profiles.sh` (requires Docker CLI and Helm) to ensure the bundles lint and template cleanly.
4. Commit the change alongside any documentation updates (e.g. install guide cross-links).

Maintaining the digest linkage keeps offline/air-gapped installs reproducible and avoids tag drift between environments.
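As a concrete illustration of that workflow, a hedged sketch (the release manifest and per-profile values file names are illustrative, not confirmed by this commit; `deploy/tools/validate-profiles.sh` is the helper described above and needs Docker CLI plus Helm on the PATH):

```bash
# 1. Add or update the release manifest with the new image digests (filename is illustrative)
$EDITOR deploy/releases/2025.10.1-edge.yaml

# 2. Mirror the digests into the matching Compose and Helm profiles
$EDITOR deploy/compose/docker-compose.dev.yaml
$EDITOR deploy/helm/stellaops/values-dev.yaml   # assumed per-profile values file

# 3. Lint and template every profile before committing
./deploy/tools/validate-profiles.sh
```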
deploy/compose/README.md (new file, 30 lines)
@@ -0,0 +1,30 @@
# Stella Ops Compose Profiles

These Compose bundles ship the minimum services required to exercise the scanner pipeline plus control-plane dependencies. Every profile is pinned to immutable image digests sourced from `deploy/releases/*.yaml` and is linted via `docker compose config` in CI.

## Layout

| Path | Purpose |
| ---- | ------- |
| `docker-compose.dev.yaml` | Edge/nightly stack tuned for laptops and iterative work. |
| `docker-compose.stage.yaml` | Stable channel stack mirroring pre-production clusters. |
| `docker-compose.airgap.yaml` | Stable stack with air-gapped defaults (no outbound hostnames). |
| `env/*.env.example` | Seed `.env` files that document required secrets and ports per profile. |

## Usage

```bash
cp env/dev.env.example dev.env
docker compose --env-file dev.env -f docker-compose.dev.yaml config
docker compose --env-file dev.env -f docker-compose.dev.yaml up -d
```

The stage and airgap variants behave the same way—swap the file names accordingly. All profiles expose 443/8443 for the UI and REST APIs, and they share a `stellaops` Docker network scoped to the compose project.
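For example, the air-gapped profile changes only the file names and the seed env file (its `env/airgap.env.example` remaps ports such as the MinIO console and NATS client):

```bash
cp env/airgap.env.example airgap.env
docker compose --env-file airgap.env -f docker-compose.airgap.yaml config
docker compose --env-file airgap.env -f docker-compose.airgap.yaml up -d
```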
### Updating to a new release

1. Import the new manifest into `deploy/releases/` (see `deploy/README.md`).
2. Update image digests in the relevant Compose file(s).
3. Re-run `docker compose config` to confirm the bundle is deterministic.

Keep digests synchronized between Compose, Helm, and the release manifest to preserve reproducibility guarantees. `deploy/tools/validate-profiles.sh` performs a quick audit.
deploy/compose/docker-compose.airgap.yaml (new file, 190 lines)
@@ -0,0 +1,190 @@
version: "3.9"

x-release-labels: &release-labels
  com.stellaops.release.version: "2025.09.2-airgap"
  com.stellaops.release.channel: "airgap"
  com.stellaops.profile: "airgap"

networks:
  stellaops:
    driver: bridge

volumes:
  mongo-data:
  minio-data:
  concelier-jobs:
  nats-data:

services:
  mongo:
    image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49
    command: ["mongod", "--bind_ip_all"]
    restart: unless-stopped
    environment:
      MONGO_INITDB_ROOT_USERNAME: "${MONGO_INITDB_ROOT_USERNAME}"
      MONGO_INITDB_ROOT_PASSWORD: "${MONGO_INITDB_ROOT_PASSWORD}"
    volumes:
      - mongo-data:/data/db
    networks:
      - stellaops
    labels: *release-labels

  minio:
    image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
    command: ["server", "/data", "--console-address", ":9001"]
    restart: unless-stopped
    environment:
      MINIO_ROOT_USER: "${MINIO_ROOT_USER}"
      MINIO_ROOT_PASSWORD: "${MINIO_ROOT_PASSWORD}"
    volumes:
      - minio-data:/data
    ports:
      - "${MINIO_CONSOLE_PORT:-29001}:9001"
    networks:
      - stellaops
    labels: *release-labels

  nats:
    image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e
    command:
      - "-js"
      - "-sd"
      - /data
    restart: unless-stopped
    ports:
      - "${NATS_CLIENT_PORT:-24222}:4222"
    volumes:
      - nats-data:/data
    networks:
      - stellaops
    labels: *release-labels

  authority:
    image: registry.stella-ops.org/stellaops/authority@sha256:5551a3269b7008cd5aceecf45df018c67459ed519557ccbe48b093b926a39bcc
    restart: unless-stopped
    depends_on:
      - mongo
    environment:
      STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
      STELLAOPS_AUTHORITY__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
      STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
    volumes:
      - ../../etc/authority.yaml:/etc/authority.yaml:ro
      - ../../etc/authority.plugins:/app/etc/authority.plugins:ro
    ports:
      - "${AUTHORITY_PORT:-8440}:8440"
    networks:
      - stellaops
    labels: *release-labels

  signer:
    image: registry.stella-ops.org/stellaops/signer@sha256:ddbbd664a42846cea6b40fca6465bc679b30f72851158f300d01a8571c5478fc
    restart: unless-stopped
    depends_on:
      - authority
    environment:
      SIGNER__AUTHORITY__BASEURL: "https://authority:8440"
      SIGNER__POE__INTROSPECTURL: "${SIGNER_POE_INTROSPECT_URL}"
      SIGNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
    ports:
      - "${SIGNER_PORT:-8441}:8441"
    networks:
      - stellaops
    labels: *release-labels

  attestor:
    image: registry.stella-ops.org/stellaops/attestor@sha256:1ff0a3124d66d3a2702d8e421df40fbd98cc75cb605d95510598ebbae1433c50
    restart: unless-stopped
    depends_on:
      - signer
    environment:
      ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
      ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
    ports:
      - "${ATTESTOR_PORT:-8442}:8442"
    networks:
      - stellaops
    labels: *release-labels

  concelier:
    image: registry.stella-ops.org/stellaops/concelier@sha256:29e2e1a0972707e092cbd3d370701341f9fec2aa9316fb5d8100480f2a1c76b5
    restart: unless-stopped
    depends_on:
      - mongo
      - minio
    environment:
      CONCELIER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      CONCELIER__STORAGE__S3__ENDPOINT: "http://minio:9000"
      CONCELIER__STORAGE__S3__ACCESSKEYID: "${MINIO_ROOT_USER}"
      CONCELIER__STORAGE__S3__SECRETACCESSKEY: "${MINIO_ROOT_PASSWORD}"
      CONCELIER__AUTHORITY__BASEURL: "https://authority:8440"
      CONCELIER__AUTHORITY__RESILIENCE__ALLOWOFFLINECACHEFALLBACK: "true"
      CONCELIER__AUTHORITY__RESILIENCE__OFFLINECACHETOLERANCE: "${AUTHORITY_OFFLINE_CACHE_TOLERANCE:-00:30:00}"
    volumes:
      - concelier-jobs:/var/lib/concelier/jobs
    ports:
      - "${CONCELIER_PORT:-8445}:8445"
    networks:
      - stellaops
    labels: *release-labels

  scanner-web:
    image: registry.stella-ops.org/stellaops/scanner-web@sha256:3df8ca21878126758203c1a0444e39fd97f77ddacf04a69685cda9f1e5e94718
    restart: unless-stopped
    depends_on:
      - concelier
      - minio
      - nats
    environment:
      SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      SCANNER__STORAGE__S3__ENDPOINT: "http://minio:9000"
      SCANNER__STORAGE__S3__ACCESSKEYID: "${MINIO_ROOT_USER}"
      SCANNER__STORAGE__S3__SECRETACCESSKEY: "${MINIO_ROOT_PASSWORD}"
      SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}"
    ports:
      - "${SCANNER_WEB_PORT:-8444}:8444"
    networks:
      - stellaops
    labels: *release-labels

  scanner-worker:
    image: registry.stella-ops.org/stellaops/scanner-worker@sha256:eea5d6cfe7835950c5ec7a735a651f2f0d727d3e470cf9027a4a402ea89c4fb5
    restart: unless-stopped
    depends_on:
      - scanner-web
      - nats
    environment:
      SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      SCANNER__STORAGE__S3__ENDPOINT: "http://minio:9000"
      SCANNER__STORAGE__S3__ACCESSKEYID: "${MINIO_ROOT_USER}"
      SCANNER__STORAGE__S3__SECRETACCESSKEY: "${MINIO_ROOT_PASSWORD}"
      SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}"
    networks:
      - stellaops
    labels: *release-labels

  excititor:
    image: registry.stella-ops.org/stellaops/excititor@sha256:65c0ee13f773efe920d7181512349a09d363ab3f3e177d276136bd2742325a68
    restart: unless-stopped
    depends_on:
      - concelier
    environment:
      EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445"
      EXCITITOR__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
    networks:
      - stellaops
    labels: *release-labels

  web-ui:
    image: registry.stella-ops.org/stellaops/web-ui@sha256:bee9668011ff414572131dc777faab4da24473fe12c230893f161cabee092a1d
    restart: unless-stopped
    depends_on:
      - scanner-web
    environment:
      STELLAOPS_UI__BACKEND__BASEURL: "https://scanner-web:8444"
    ports:
      - "${UI_PORT:-9443}:8443"
    networks:
      - stellaops
    labels: *release-labels
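One way to double-check the digest-pinning guarantee for this bundle (a sketch; the grep pattern is illustrative and assumes the rendered config is the source of truth):

```bash
# Render the effective config, then flag any image reference that is not pinned to a sha256 digest
docker compose --env-file airgap.env -f docker-compose.airgap.yaml config \
  | grep 'image:' | grep -v '@sha256:' \
  && echo "unpinned image found" || echo "all images digest-pinned"
```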
deploy/compose/docker-compose.dev.yaml (new file, 188 lines)
@@ -0,0 +1,188 @@
version: "3.9"

x-release-labels: &release-labels
  com.stellaops.release.version: "2025.10.0-edge"
  com.stellaops.release.channel: "edge"
  com.stellaops.profile: "dev"

networks:
  stellaops:
    driver: bridge

volumes:
  mongo-data:
  minio-data:
  concelier-jobs:
  nats-data:

services:
  mongo:
    image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49
    command: ["mongod", "--bind_ip_all"]
    restart: unless-stopped
    environment:
      MONGO_INITDB_ROOT_USERNAME: "${MONGO_INITDB_ROOT_USERNAME}"
      MONGO_INITDB_ROOT_PASSWORD: "${MONGO_INITDB_ROOT_PASSWORD}"
    volumes:
      - mongo-data:/data/db
    networks:
      - stellaops
    labels: *release-labels

  minio:
    image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
    command: ["server", "/data", "--console-address", ":9001"]
    restart: unless-stopped
    environment:
      MINIO_ROOT_USER: "${MINIO_ROOT_USER}"
      MINIO_ROOT_PASSWORD: "${MINIO_ROOT_PASSWORD}"
    volumes:
      - minio-data:/data
    ports:
      - "${MINIO_CONSOLE_PORT:-9001}:9001"
    networks:
      - stellaops
    labels: *release-labels

  nats:
    image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e
    command:
      - "-js"
      - "-sd"
      - /data
    restart: unless-stopped
    ports:
      - "${NATS_CLIENT_PORT:-4222}:4222"
    volumes:
      - nats-data:/data
    networks:
      - stellaops
    labels: *release-labels

  authority:
    image: registry.stella-ops.org/stellaops/authority@sha256:a8e8faec44a579aa5714e58be835f25575710430b1ad2ccd1282a018cd9ffcdd
    restart: unless-stopped
    depends_on:
      - mongo
    environment:
      STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
      STELLAOPS_AUTHORITY__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
      STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
    volumes:
      - ../../etc/authority.yaml:/etc/authority.yaml:ro
      - ../../etc/authority.plugins:/app/etc/authority.plugins:ro
    ports:
      - "${AUTHORITY_PORT:-8440}:8440"
    networks:
      - stellaops
    labels: *release-labels

  signer:
    image: registry.stella-ops.org/stellaops/signer@sha256:8bfef9a75783883d49fc18e3566553934e970b00ee090abee9cb110d2d5c3298
    restart: unless-stopped
    depends_on:
      - authority
    environment:
      SIGNER__AUTHORITY__BASEURL: "https://authority:8440"
      SIGNER__POE__INTROSPECTURL: "${SIGNER_POE_INTROSPECT_URL}"
      SIGNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
    ports:
      - "${SIGNER_PORT:-8441}:8441"
    networks:
      - stellaops
    labels: *release-labels

  attestor:
    image: registry.stella-ops.org/stellaops/attestor@sha256:5cc417948c029da01dccf36e4645d961a3f6d8de7e62fe98d845f07cd2282114
    restart: unless-stopped
    depends_on:
      - signer
    environment:
      ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
      ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
    ports:
      - "${ATTESTOR_PORT:-8442}:8442"
    networks:
      - stellaops
    labels: *release-labels

  concelier:
    image: registry.stella-ops.org/stellaops/concelier@sha256:dafef3954eb4b837e2c424dd2d23e1e4d60fa83794840fac9cd3dea1d43bd085
    restart: unless-stopped
    depends_on:
      - mongo
      - minio
    environment:
      CONCELIER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      CONCELIER__STORAGE__S3__ENDPOINT: "http://minio:9000"
      CONCELIER__STORAGE__S3__ACCESSKEYID: "${MINIO_ROOT_USER}"
      CONCELIER__STORAGE__S3__SECRETACCESSKEY: "${MINIO_ROOT_PASSWORD}"
      CONCELIER__AUTHORITY__BASEURL: "https://authority:8440"
    volumes:
      - concelier-jobs:/var/lib/concelier/jobs
    ports:
      - "${CONCELIER_PORT:-8445}:8445"
    networks:
      - stellaops
    labels: *release-labels

  scanner-web:
    image: registry.stella-ops.org/stellaops/scanner-web@sha256:e0dfdb087e330585a5953029fb4757f5abdf7610820a085bd61b457dbead9a11
    restart: unless-stopped
    depends_on:
      - concelier
      - minio
      - nats
    environment:
      SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      SCANNER__STORAGE__S3__ENDPOINT: "http://minio:9000"
      SCANNER__STORAGE__S3__ACCESSKEYID: "${MINIO_ROOT_USER}"
      SCANNER__STORAGE__S3__SECRETACCESSKEY: "${MINIO_ROOT_PASSWORD}"
      SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}"
    ports:
      - "${SCANNER_WEB_PORT:-8444}:8444"
    networks:
      - stellaops
    labels: *release-labels

  scanner-worker:
    image: registry.stella-ops.org/stellaops/scanner-worker@sha256:92dda42f6f64b2d9522104a5c9ffb61d37b34dd193132b68457a259748008f37
    restart: unless-stopped
    depends_on:
      - scanner-web
      - nats
    environment:
      SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      SCANNER__STORAGE__S3__ENDPOINT: "http://minio:9000"
      SCANNER__STORAGE__S3__ACCESSKEYID: "${MINIO_ROOT_USER}"
      SCANNER__STORAGE__S3__SECRETACCESSKEY: "${MINIO_ROOT_PASSWORD}"
      SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}"
    networks:
      - stellaops
    labels: *release-labels

  excititor:
    image: registry.stella-ops.org/stellaops/excititor@sha256:d9bd5cadf1eab427447ce3df7302c30ded837239771cc6433b9befb895054285
    restart: unless-stopped
    depends_on:
      - concelier
    environment:
      EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445"
      EXCITITOR__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
    networks:
      - stellaops
    labels: *release-labels

  web-ui:
    image: registry.stella-ops.org/stellaops/web-ui@sha256:38b225fa7767a5b94ebae4dae8696044126aac429415e93de514d5dd95748dcf
    restart: unless-stopped
    depends_on:
      - scanner-web
    environment:
      STELLAOPS_UI__BACKEND__BASEURL: "https://scanner-web:8444"
    ports:
      - "${UI_PORT:-8443}:8443"
    networks:
      - stellaops
    labels: *release-labels
deploy/compose/docker-compose.stage.yaml (new file, 188 lines)
@@ -0,0 +1,188 @@
version: "3.9"

x-release-labels: &release-labels
  com.stellaops.release.version: "2025.09.2"
  com.stellaops.release.channel: "stable"
  com.stellaops.profile: "stage"

networks:
  stellaops:
    driver: bridge

volumes:
  mongo-data:
  minio-data:
  concelier-jobs:
  nats-data:

services:
  mongo:
    image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49
    command: ["mongod", "--bind_ip_all"]
    restart: unless-stopped
    environment:
      MONGO_INITDB_ROOT_USERNAME: "${MONGO_INITDB_ROOT_USERNAME}"
      MONGO_INITDB_ROOT_PASSWORD: "${MONGO_INITDB_ROOT_PASSWORD}"
    volumes:
      - mongo-data:/data/db
    networks:
      - stellaops
    labels: *release-labels

  minio:
    image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
    command: ["server", "/data", "--console-address", ":9001"]
    restart: unless-stopped
    environment:
      MINIO_ROOT_USER: "${MINIO_ROOT_USER}"
      MINIO_ROOT_PASSWORD: "${MINIO_ROOT_PASSWORD}"
    volumes:
      - minio-data:/data
    ports:
      - "${MINIO_CONSOLE_PORT:-9001}:9001"
    networks:
      - stellaops
    labels: *release-labels

  nats:
    image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e
    command:
      - "-js"
      - "-sd"
      - /data
    restart: unless-stopped
    ports:
      - "${NATS_CLIENT_PORT:-4222}:4222"
    volumes:
      - nats-data:/data
    networks:
      - stellaops
    labels: *release-labels

  authority:
    image: registry.stella-ops.org/stellaops/authority@sha256:b0348bad1d0b401cc3c71cb40ba034c8043b6c8874546f90d4783c9dbfcc0bf5
    restart: unless-stopped
    depends_on:
      - mongo
    environment:
      STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
      STELLAOPS_AUTHORITY__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
      STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
    volumes:
      - ../../etc/authority.yaml:/etc/authority.yaml:ro
      - ../../etc/authority.plugins:/app/etc/authority.plugins:ro
    ports:
      - "${AUTHORITY_PORT:-8440}:8440"
    networks:
      - stellaops
    labels: *release-labels

  signer:
    image: registry.stella-ops.org/stellaops/signer@sha256:8ad574e61f3a9e9bda8a58eb2700ae46813284e35a150b1137bc7c2b92ac0f2e
    restart: unless-stopped
    depends_on:
      - authority
    environment:
      SIGNER__AUTHORITY__BASEURL: "https://authority:8440"
      SIGNER__POE__INTROSPECTURL: "${SIGNER_POE_INTROSPECT_URL}"
      SIGNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
    ports:
      - "${SIGNER_PORT:-8441}:8441"
    networks:
      - stellaops
    labels: *release-labels

  attestor:
    image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f
    restart: unless-stopped
    depends_on:
      - signer
    environment:
      ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
      ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
    ports:
      - "${ATTESTOR_PORT:-8442}:8442"
    networks:
      - stellaops
    labels: *release-labels

  concelier:
    image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5
    restart: unless-stopped
    depends_on:
      - mongo
      - minio
    environment:
      CONCELIER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      CONCELIER__STORAGE__S3__ENDPOINT: "http://minio:9000"
      CONCELIER__STORAGE__S3__ACCESSKEYID: "${MINIO_ROOT_USER}"
      CONCELIER__STORAGE__S3__SECRETACCESSKEY: "${MINIO_ROOT_PASSWORD}"
      CONCELIER__AUTHORITY__BASEURL: "https://authority:8440"
    volumes:
      - concelier-jobs:/var/lib/concelier/jobs
    ports:
      - "${CONCELIER_PORT:-8445}:8445"
    networks:
      - stellaops
    labels: *release-labels

  scanner-web:
    image: registry.stella-ops.org/stellaops/scanner-web@sha256:14b23448c3f9586a9156370b3e8c1991b61907efa666ca37dd3aaed1e79fe3b7
    restart: unless-stopped
    depends_on:
      - concelier
      - minio
      - nats
    environment:
      SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      SCANNER__STORAGE__S3__ENDPOINT: "http://minio:9000"
      SCANNER__STORAGE__S3__ACCESSKEYID: "${MINIO_ROOT_USER}"
      SCANNER__STORAGE__S3__SECRETACCESSKEY: "${MINIO_ROOT_PASSWORD}"
      SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}"
    ports:
      - "${SCANNER_WEB_PORT:-8444}:8444"
    networks:
      - stellaops
    labels: *release-labels

  scanner-worker:
    image: registry.stella-ops.org/stellaops/scanner-worker@sha256:32e25e76386eb9ea8bee0a1ad546775db9a2df989fab61ac877e351881960dab
    restart: unless-stopped
    depends_on:
      - scanner-web
      - nats
    environment:
      SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
      SCANNER__STORAGE__S3__ENDPOINT: "http://minio:9000"
      SCANNER__STORAGE__S3__ACCESSKEYID: "${MINIO_ROOT_USER}"
      SCANNER__STORAGE__S3__SECRETACCESSKEY: "${MINIO_ROOT_PASSWORD}"
      SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}"
    networks:
      - stellaops
    labels: *release-labels

  excititor:
    image: registry.stella-ops.org/stellaops/excititor@sha256:59022e2016aebcef5c856d163ae705755d3f81949d41195256e935ef40a627fa
    restart: unless-stopped
    depends_on:
      - concelier
    environment:
      EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445"
      EXCITITOR__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
    networks:
      - stellaops
    labels: *release-labels

  web-ui:
    image: registry.stella-ops.org/stellaops/web-ui@sha256:10d924808c48e4353e3a241da62eb7aefe727a1d6dc830eb23a8e181013b3a23
    restart: unless-stopped
    depends_on:
      - scanner-web
    environment:
      STELLAOPS_UI__BACKEND__BASEURL: "https://scanner-web:8444"
    ports:
      - "${UI_PORT:-8443}:8443"
    networks:
      - stellaops
    labels: *release-labels
deploy/compose/env/airgap.env.example (new file, vendored, 17 lines)
@@ -0,0 +1,17 @@
# Substitutions for docker-compose.airgap.yaml
MONGO_INITDB_ROOT_USERNAME=stellaops
MONGO_INITDB_ROOT_PASSWORD=airgap-password
MINIO_ROOT_USER=stellaops-offline
MINIO_ROOT_PASSWORD=airgap-minio-secret
MINIO_CONSOLE_PORT=29001
AUTHORITY_ISSUER=https://authority.airgap.local
AUTHORITY_PORT=8440
SIGNER_POE_INTROSPECT_URL=file:///offline/poe/introspect.json
SIGNER_PORT=8441
ATTESTOR_PORT=8442
CONCELIER_PORT=8445
SCANNER_WEB_PORT=8444
UI_PORT=9443
NATS_CLIENT_PORT=24222
SCANNER_QUEUE_BROKER=nats://nats:4222
AUTHORITY_OFFLINE_CACHE_TOLERANCE=00:45:00
deploy/compose/env/dev.env.example (new file, vendored, 16 lines)
@@ -0,0 +1,16 @@
# Substitutions for docker-compose.dev.yaml
MONGO_INITDB_ROOT_USERNAME=stellaops
MONGO_INITDB_ROOT_PASSWORD=dev-password
MINIO_ROOT_USER=stellaops
MINIO_ROOT_PASSWORD=dev-minio-secret
MINIO_CONSOLE_PORT=9001
AUTHORITY_ISSUER=https://authority.localtest.me
AUTHORITY_PORT=8440
SIGNER_POE_INTROSPECT_URL=https://licensing.svc.local/introspect
SIGNER_PORT=8441
ATTESTOR_PORT=8442
CONCELIER_PORT=8445
SCANNER_WEB_PORT=8444
UI_PORT=8443
NATS_CLIENT_PORT=4222
SCANNER_QUEUE_BROKER=nats://nats:4222
deploy/compose/env/stage.env.example (new file, vendored, 16 lines)
@@ -0,0 +1,16 @@
# Substitutions for docker-compose.stage.yaml
MONGO_INITDB_ROOT_USERNAME=stellaops
MONGO_INITDB_ROOT_PASSWORD=stage-password
MINIO_ROOT_USER=stellaops-stage
MINIO_ROOT_PASSWORD=stage-minio-secret
MINIO_CONSOLE_PORT=19001
AUTHORITY_ISSUER=https://authority.stage.stella-ops.internal
AUTHORITY_PORT=8440
SIGNER_POE_INTROSPECT_URL=https://licensing.stage.stella-ops.internal/introspect
SIGNER_PORT=8441
ATTESTOR_PORT=8442
CONCELIER_PORT=8445
SCANNER_WEB_PORT=8444
UI_PORT=8443
NATS_CLIENT_PORT=4222
SCANNER_QUEUE_BROKER=nats://nats:4222
deploy/helm/stellaops/Chart.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
apiVersion: v2
name: stellaops
description: Stella Ops core stack (authority, signing, scanner, UI) with infrastructure primitives.
type: application
version: 0.1.0
appVersion: "2025.10.0"
deploy/helm/stellaops/templates/_helpers.tpl (new file, 31 lines)
@@ -0,0 +1,31 @@
{{- define "stellaops.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}}
{{- end -}}

{{- define "stellaops.fullname" -}}
{{- $name := default .root.Chart.Name .root.Values.fullnameOverride -}}
{{- printf "%s-%s" $name .name | trunc 63 | trimSuffix "-" -}}
{{- end -}}

{{- define "stellaops.selectorLabels" -}}
app.kubernetes.io/name: {{ include "stellaops.name" .root | quote }}
app.kubernetes.io/instance: {{ .root.Release.Name | quote }}
app.kubernetes.io/component: {{ .name | quote }}
{{- if .svc.class }}
app.kubernetes.io/part-of: {{ printf "stellaops-%s" .svc.class | quote }}
{{- else }}
app.kubernetes.io/part-of: "stellaops-core"
{{- end }}
{{- end -}}

{{- define "stellaops.labels" -}}
{{ include "stellaops.selectorLabels" . }}
helm.sh/chart: {{ printf "%s-%s" .root.Chart.Name .root.Chart.Version | quote }}
app.kubernetes.io/version: {{ .root.Values.global.release.version | quote }}
app.kubernetes.io/managed-by: {{ .root.Release.Service | quote }}
stellaops.release/channel: {{ .root.Values.global.release.channel | quote }}
stellaops.profile: {{ .root.Values.global.profile | quote }}
{{- range $k, $v := .root.Values.global.labels }}
{{ $k }}: {{ $v | quote }}
{{- end }}
{{- end -}}
deploy/helm/stellaops/templates/configmap-release.yaml (new file, 10 lines)
@@ -0,0 +1,10 @@
apiVersion: v1
kind: ConfigMap
metadata:
  name: {{ include "stellaops.fullname" (dict "root" . "name" "release") }}
  labels:
    {{- include "stellaops.labels" (dict "root" . "name" "release" "svc" (dict "class" "meta")) | nindent 4 }}
data:
  version: {{ .Values.global.release.version | quote }}
  channel: {{ .Values.global.release.channel | quote }}
  manifestSha256: {{ default "" .Values.global.release.manifestSha256 | quote }}
deploy/helm/stellaops/templates/core.yaml (new file, 125 lines)
@@ -0,0 +1,125 @@
{{- $root := . -}}
{{- range $name, $svc := .Values.services }}
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "stellaops.fullname" (dict "root" $root "name" $name) }}
  labels:
    {{- include "stellaops.labels" (dict "root" $root "name" $name "svc" $svc) | nindent 4 }}
spec:
  replicas: {{ default 1 $svc.replicas }}
  selector:
    matchLabels:
      {{- include "stellaops.selectorLabels" (dict "root" $root "name" $name "svc" $svc) | nindent 6 }}
  template:
    metadata:
      labels:
        {{- include "stellaops.selectorLabels" (dict "root" $root "name" $name "svc" $svc) | nindent 8 }}
      annotations:
        stellaops.release/version: {{ $root.Values.global.release.version | quote }}
        stellaops.release/channel: {{ $root.Values.global.release.channel | quote }}
    spec:
      containers:
        - name: {{ $name }}
          image: {{ $svc.image | quote }}
          imagePullPolicy: {{ default $root.Values.global.image.pullPolicy $svc.imagePullPolicy }}
          {{- if $svc.command }}
          command:
            {{- range $cmd := $svc.command }}
            - {{ $cmd | quote }}
            {{- end }}
          {{- end }}
          {{- if $svc.args }}
          args:
            {{- range $arg := $svc.args }}
            - {{ $arg | quote }}
            {{- end }}
          {{- end }}
          {{- if $svc.env }}
          env:
            {{- range $envName, $envValue := $svc.env }}
            - name: {{ $envName }}
              value: {{ $envValue | quote }}
            {{- end }}
          {{- end }}
          {{- if $svc.envFrom }}
          envFrom:
            {{ toYaml $svc.envFrom | nindent 12 }}
          {{- end }}
          {{- if $svc.ports }}
          ports:
            {{- range $port := $svc.ports }}
            - name: {{ default (printf "%s-%v" $name $port.containerPort) $port.name | trunc 63 | trimSuffix "-" }}
              containerPort: {{ $port.containerPort }}
              protocol: {{ default "TCP" $port.protocol }}
            {{- end }}
          {{- else if $svc.service.port }}
          ports:
            - name: {{ printf "%s-http" $name | trunc 63 | trimSuffix "-" }}
              containerPort: {{ $svc.service.targetPort | default $svc.service.port }}
              protocol: TCP
          {{- end }}
          {{- if $svc.resources }}
          resources:
            {{ toYaml $svc.resources | nindent 12 }}
          {{- end }}
          {{- if $svc.livenessProbe }}
          livenessProbe:
            {{ toYaml $svc.livenessProbe | nindent 12 }}
          {{- end }}
          {{- if $svc.readinessProbe }}
          readinessProbe:
            {{ toYaml $svc.readinessProbe | nindent 12 }}
          {{- end }}
          {{- if $svc.volumeMounts }}
          volumeMounts:
            {{ toYaml $svc.volumeMounts | nindent 12 }}
          {{- end }}
      {{- if or $svc.volumes $svc.volumeClaims }}
      volumes:
        {{- if $svc.volumes }}
        {{ toYaml $svc.volumes | nindent 8 }}
        {{- end }}
        {{- if $svc.volumeClaims }}
        {{- range $claim := $svc.volumeClaims }}
        - name: {{ $claim.name }}
          persistentVolumeClaim:
            claimName: {{ $claim.claimName }}
        {{- end }}
        {{- end }}
      {{- end }}
      {{- if $svc.serviceAccount }}
      serviceAccountName: {{ $svc.serviceAccount | quote }}
      {{- end }}
      {{- if $svc.nodeSelector }}
      nodeSelector:
        {{ toYaml $svc.nodeSelector | nindent 8 }}
      {{- end }}
      {{- if $svc.affinity }}
      affinity:
        {{ toYaml $svc.affinity | nindent 8 }}
      {{- end }}
      {{- if $svc.tolerations }}
      tolerations:
        {{ toYaml $svc.tolerations | nindent 8 }}
      {{- end }}
---
{{- if $svc.service }}
apiVersion: v1
kind: Service
metadata:
  name: {{ include "stellaops.fullname" (dict "root" $root "name" $name) }}
  labels:
    {{- include "stellaops.labels" (dict "root" $root "name" $name "svc" $svc) | nindent 4 }}
spec:
  type: {{ default "ClusterIP" $svc.service.type }}
  selector:
    {{- include "stellaops.selectorLabels" (dict "root" $root "name" $name "svc" $svc) | nindent 4 }}
  ports:
    - name: {{ default "http" $svc.service.portName }}
      port: {{ $svc.service.port }}
      targetPort: {{ $svc.service.targetPort | default $svc.service.port }}
      protocol: {{ default "TCP" $svc.service.protocol }}
---
{{- end }}
{{- end }}
deploy/helm/stellaops/values-airgap.yaml (new file, 133 lines)
@@ -0,0 +1,133 @@
global:
  profile: airgap
  release:
    version: "2025.09.2-airgap"
    channel: airgap
    manifestSha256: "b787b833dddd73960c31338279daa0b0a0dce2ef32bd32ef1aaf953d66135f94"
  image:
    pullPolicy: IfNotPresent
  labels:
    stellaops.io/channel: airgap
services:
  authority:
    image: registry.stella-ops.org/stellaops/authority@sha256:5551a3269b7008cd5aceecf45df018c67459ed519557ccbe48b093b926a39bcc
    service:
      port: 8440
    env:
      STELLAOPS_AUTHORITY__ISSUER: "https://stellaops-authority:8440"
      STELLAOPS_AUTHORITY__MONGO__CONNECTIONSTRING: "mongodb://stellaops-airgap:stellaops-airgap@stellaops-mongo:27017"
      STELLAOPS_AUTHORITY__ALLOWANONYMOUSFALLBACK: "false"
  signer:
    image: registry.stella-ops.org/stellaops/signer@sha256:ddbbd664a42846cea6b40fca6465bc679b30f72851158f300d01a8571c5478fc
    service:
      port: 8441
    env:
      SIGNER__AUTHORITY__BASEURL: "https://stellaops-authority:8440"
      SIGNER__POE__INTROSPECTURL: "file:///offline/poe/introspect.json"
      SIGNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops-airgap:stellaops-airgap@stellaops-mongo:27017"
  attestor:
    image: registry.stella-ops.org/stellaops/attestor@sha256:1ff0a3124d66d3a2702d8e421df40fbd98cc75cb605d95510598ebbae1433c50
    service:
      port: 8442
    env:
      ATTESTOR__SIGNER__BASEURL: "https://stellaops-signer:8441"
      ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://stellaops-airgap:stellaops-airgap@stellaops-mongo:27017"
  concelier:
    image: registry.stella-ops.org/stellaops/concelier@sha256:29e2e1a0972707e092cbd3d370701341f9fec2aa9316fb5d8100480f2a1c76b5
    service:
      port: 8445
    env:
      CONCELIER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops-airgap:stellaops-airgap@stellaops-mongo:27017"
      CONCELIER__STORAGE__S3__ENDPOINT: "http://stellaops-minio:9000"
      CONCELIER__STORAGE__S3__ACCESSKEYID: "stellaops-airgap"
      CONCELIER__STORAGE__S3__SECRETACCESSKEY: "airgap-minio-secret"
      CONCELIER__AUTHORITY__BASEURL: "https://stellaops-authority:8440"
      CONCELIER__AUTHORITY__RESILIENCE__ALLOWOFFLINECACHEFALLBACK: "true"
      CONCELIER__AUTHORITY__RESILIENCE__OFFLINECACHETOLERANCE: "00:45:00"
    volumeMounts:
      - name: concelier-jobs
        mountPath: /var/lib/concelier/jobs
    volumeClaims:
      - name: concelier-jobs
        claimName: stellaops-concelier-jobs
  scanner-web:
    image: registry.stella-ops.org/stellaops/scanner-web@sha256:3df8ca21878126758203c1a0444e39fd97f77ddacf04a69685cda9f1e5e94718
    service:
      port: 8444
    env:
      SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops-airgap:stellaops-airgap@stellaops-mongo:27017"
      SCANNER__STORAGE__S3__ENDPOINT: "http://stellaops-minio:9000"
      SCANNER__STORAGE__S3__ACCESSKEYID: "stellaops-airgap"
      SCANNER__STORAGE__S3__SECRETACCESSKEY: "airgap-minio-secret"
      SCANNER__QUEUE__BROKER: "nats://stellaops-nats:4222"
  scanner-worker:
    image: registry.stella-ops.org/stellaops/scanner-worker@sha256:eea5d6cfe7835950c5ec7a735a651f2f0d727d3e470cf9027a4a402ea89c4fb5
    env:
      SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops-airgap:stellaops-airgap@stellaops-mongo:27017"
      SCANNER__STORAGE__S3__ENDPOINT: "http://stellaops-minio:9000"
      SCANNER__STORAGE__S3__ACCESSKEYID: "stellaops-airgap"
      SCANNER__STORAGE__S3__SECRETACCESSKEY: "airgap-minio-secret"
      SCANNER__QUEUE__BROKER: "nats://stellaops-nats:4222"
  excititor:
    image: registry.stella-ops.org/stellaops/excititor@sha256:65c0ee13f773efe920d7181512349a09d363ab3f3e177d276136bd2742325a68
    env:
      EXCITITOR__CONCELIER__BASEURL: "https://stellaops-concelier:8445"
      EXCITITOR__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops-airgap:stellaops-airgap@stellaops-mongo:27017"
  web-ui:
    image: registry.stella-ops.org/stellaops/web-ui@sha256:bee9668011ff414572131dc777faab4da24473fe12c230893f161cabee092a1d
    service:
      port: 9443
      targetPort: 8443
    env:
      STELLAOPS_UI__BACKEND__BASEURL: "https://stellaops-scanner-web:8444"
  mongo:
    class: infrastructure
    image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49
    service:
      port: 27017
    command:
      - mongod
      - --bind_ip_all
    env:
      MONGO_INITDB_ROOT_USERNAME: stellaops-airgap
      MONGO_INITDB_ROOT_PASSWORD: stellaops-airgap
    volumeMounts:
      - name: mongo-data
        mountPath: /data/db
    volumeClaims:
      - name: mongo-data
        claimName: stellaops-mongo-data
  minio:
    class: infrastructure
    image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
    service:
      port: 9000
    command:
      - server
      - /data
      - --console-address
      - :9001
    env:
      MINIO_ROOT_USER: stellaops-airgap
      MINIO_ROOT_PASSWORD: airgap-minio-secret
    volumeMounts:
      - name: minio-data
        mountPath: /data
    volumeClaims:
      - name: minio-data
        claimName: stellaops-minio-data
  nats:
    class: infrastructure
    image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e
    service:
      port: 4222
    command:
      - -js
      - -sd
      - /data
    volumeMounts:
      - name: nats-data
        mountPath: /data
    volumeClaims:
      - name: nats-data
        claimName: stellaops-nats-data
deploy/helm/stellaops/values-dev.yaml (new file, 131 lines)
@@ -0,0 +1,131 @@
|
||||
global:
|
||||
profile: dev
|
||||
release:
|
||||
version: "2025.10.0-edge"
|
||||
channel: edge
|
||||
manifestSha256: "822f82987529ea38d2321dbdd2ef6874a4062a117116a20861c26a8df1807beb"
|
||||
image:
|
||||
pullPolicy: IfNotPresent
|
||||
labels:
|
||||
stellaops.io/channel: edge
|
||||
services:
|
||||
authority:
|
||||
image: registry.stella-ops.org/stellaops/authority@sha256:a8e8faec44a579aa5714e58be835f25575710430b1ad2ccd1282a018cd9ffcdd
|
||||
service:
|
||||
port: 8440
|
||||
env:
|
||||
STELLAOPS_AUTHORITY__ISSUER: "https://stellaops-authority:8440"
|
||||
STELLAOPS_AUTHORITY__MONGO__CONNECTIONSTRING: "mongodb://stellaops:stellaops@stellaops-mongo:27017"
|
||||
STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
|
||||
STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
|
||||
signer:
|
||||
image: registry.stella-ops.org/stellaops/signer@sha256:8bfef9a75783883d49fc18e3566553934e970b00ee090abee9cb110d2d5c3298
|
||||
service:
|
||||
port: 8441
|
||||
env:
|
||||
SIGNER__AUTHORITY__BASEURL: "https://stellaops-authority:8440"
|
||||
SIGNER__POE__INTROSPECTURL: "https://licensing.svc.local/introspect"
|
||||
SIGNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops:stellaops@stellaops-mongo:27017"
|
||||
attestor:
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:5cc417948c029da01dccf36e4645d961a3f6d8de7e62fe98d845f07cd2282114
|
||||
service:
|
||||
port: 8442
|
||||
env:
|
||||
ATTESTOR__SIGNER__BASEURL: "https://stellaops-signer:8441"
|
||||
ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://stellaops:stellaops@stellaops-mongo:27017"
|
||||
concelier:
|
||||
image: registry.stella-ops.org/stellaops/concelier@sha256:dafef3954eb4b837e2c424dd2d23e1e4d60fa83794840fac9cd3dea1d43bd085
|
||||
service:
|
||||
port: 8445
|
||||
env:
|
||||
CONCELIER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops:stellaops@stellaops-mongo:27017"
|
||||
CONCELIER__STORAGE__S3__ENDPOINT: "http://stellaops-minio:9000"
|
||||
CONCELIER__STORAGE__S3__ACCESSKEYID: "stellaops"
|
||||
CONCELIER__STORAGE__S3__SECRETACCESSKEY: "dev-minio-secret"
|
||||
CONCELIER__AUTHORITY__BASEURL: "https://stellaops-authority:8440"
|
||||
volumeMounts:
|
||||
- name: concelier-jobs
|
||||
mountPath: /var/lib/concelier/jobs
|
||||
volumes:
|
||||
- name: concelier-jobs
|
||||
emptyDir: {}
|
||||
scanner-web:
|
||||
image: registry.stella-ops.org/stellaops/scanner-web@sha256:e0dfdb087e330585a5953029fb4757f5abdf7610820a085bd61b457dbead9a11
|
||||
service:
|
||||
port: 8444
|
||||
env:
|
||||
SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops:stellaops@stellaops-mongo:27017"
|
||||
SCANNER__STORAGE__S3__ENDPOINT: "http://stellaops-minio:9000"
|
||||
SCANNER__STORAGE__S3__ACCESSKEYID: "stellaops"
|
||||
SCANNER__STORAGE__S3__SECRETACCESSKEY: "dev-minio-secret"
|
||||
SCANNER__QUEUE__BROKER: "nats://stellaops-nats:4222"
|
||||
scanner-worker:
|
||||
image: registry.stella-ops.org/stellaops/scanner-worker@sha256:92dda42f6f64b2d9522104a5c9ffb61d37b34dd193132b68457a259748008f37
|
||||
env:
|
||||
SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops:stellaops@stellaops-mongo:27017"
|
||||
SCANNER__STORAGE__S3__ENDPOINT: "http://stellaops-minio:9000"
|
||||
SCANNER__STORAGE__S3__ACCESSKEYID: "stellaops"
|
||||
SCANNER__STORAGE__S3__SECRETACCESSKEY: "dev-minio-secret"
|
||||
SCANNER__QUEUE__BROKER: "nats://stellaops-nats:4222"
|
||||
excititor:
|
||||
image: registry.stella-ops.org/stellaops/excititor@sha256:d9bd5cadf1eab427447ce3df7302c30ded837239771cc6433b9befb895054285
|
||||
env:
|
||||
EXCITITOR__CONCELIER__BASEURL: "https://stellaops-concelier:8445"
|
||||
EXCITITOR__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops:stellaops@stellaops-mongo:27017"
|
||||
web-ui:
|
||||
image: registry.stella-ops.org/stellaops/web-ui@sha256:38b225fa7767a5b94ebae4dae8696044126aac429415e93de514d5dd95748dcf
|
||||
service:
|
||||
port: 8443
|
||||
env:
|
||||
STELLAOPS_UI__BACKEND__BASEURL: "https://stellaops-scanner-web:8444"
|
||||
mongo:
|
||||
class: infrastructure
|
||||
image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49
|
||||
service:
|
||||
port: 27017
|
||||
command:
|
||||
- mongod
|
||||
- --bind_ip_all
|
||||
env:
|
||||
MONGO_INITDB_ROOT_USERNAME: stellaops
|
||||
MONGO_INITDB_ROOT_PASSWORD: stellaops
|
||||
volumeMounts:
|
||||
- name: mongo-data
|
||||
mountPath: /data/db
|
||||
volumes:
|
||||
- name: mongo-data
|
||||
emptyDir: {}
|
||||
minio:
|
||||
class: infrastructure
|
||||
image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
|
||||
service:
|
||||
port: 9000
|
||||
command:
|
||||
- server
|
||||
- /data
|
||||
- --console-address
|
||||
- :9001
|
||||
env:
|
||||
MINIO_ROOT_USER: stellaops
|
||||
MINIO_ROOT_PASSWORD: dev-minio-secret
|
||||
volumeMounts:
|
||||
- name: minio-data
|
||||
mountPath: /data
|
||||
volumes:
|
||||
- name: minio-data
|
||||
emptyDir: {}
|
||||
nats:
|
||||
class: infrastructure
|
||||
image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e
|
||||
service:
|
||||
port: 4222
|
||||
command:
|
||||
- -js
|
||||
- -sd
|
||||
- /data
|
||||
volumeMounts:
|
||||
- name: nats-data
|
||||
mountPath: /data
|
||||
volumes:
|
||||
- name: nats-data
|
||||
emptyDir: {}
|
||||
deploy/helm/stellaops/values-stage.yaml (new file, 132 lines)
@@ -0,0 +1,132 @@
|
||||
global:
|
||||
profile: stage
|
||||
release:
|
||||
version: "2025.09.2"
|
||||
channel: stable
|
||||
manifestSha256: "dc3c8fe1ab83941c838ccc5a8a5862f7ddfa38c2078e580b5649db26554565b7"
|
||||
image:
|
||||
pullPolicy: IfNotPresent
|
||||
labels:
|
||||
stellaops.io/channel: stable
|
||||
services:
|
||||
authority:
|
||||
image: registry.stella-ops.org/stellaops/authority@sha256:b0348bad1d0b401cc3c71cb40ba034c8043b6c8874546f90d4783c9dbfcc0bf5
|
||||
service:
|
||||
port: 8440
|
||||
env:
|
||||
STELLAOPS_AUTHORITY__ISSUER: "https://stellaops-authority:8440"
|
||||
STELLAOPS_AUTHORITY__MONGO__CONNECTIONSTRING: "mongodb://stellaops-stage:stellaops-stage@stellaops-mongo:27017"
|
||||
STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
|
||||
STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
|
||||
signer:
|
||||
image: registry.stella-ops.org/stellaops/signer@sha256:8ad574e61f3a9e9bda8a58eb2700ae46813284e35a150b1137bc7c2b92ac0f2e
|
||||
service:
|
||||
port: 8441
|
||||
env:
|
||||
SIGNER__AUTHORITY__BASEURL: "https://stellaops-authority:8440"
|
||||
SIGNER__POE__INTROSPECTURL: "https://licensing.stage.stella-ops.internal/introspect"
|
||||
SIGNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops-stage:stellaops-stage@stellaops-mongo:27017"
|
||||
attestor:
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f
|
||||
service:
|
||||
port: 8442
|
||||
env:
|
||||
ATTESTOR__SIGNER__BASEURL: "https://stellaops-signer:8441"
|
||||
ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://stellaops-stage:stellaops-stage@stellaops-mongo:27017"
|
||||
concelier:
|
||||
image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5
|
||||
service:
|
||||
port: 8445
|
||||
env:
|
||||
CONCELIER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops-stage:stellaops-stage@stellaops-mongo:27017"
|
||||
CONCELIER__STORAGE__S3__ENDPOINT: "http://stellaops-minio:9000"
|
||||
CONCELIER__STORAGE__S3__ACCESSKEYID: "stellaops-stage"
|
||||
CONCELIER__STORAGE__S3__SECRETACCESSKEY: "stage-minio-secret"
|
||||
CONCELIER__AUTHORITY__BASEURL: "https://stellaops-authority:8440"
|
||||
volumeMounts:
|
||||
- name: concelier-jobs
|
||||
mountPath: /var/lib/concelier/jobs
|
||||
volumeClaims:
|
||||
- name: concelier-jobs
|
||||
claimName: stellaops-concelier-jobs
|
||||
scanner-web:
|
||||
image: registry.stella-ops.org/stellaops/scanner-web@sha256:14b23448c3f9586a9156370b3e8c1991b61907efa666ca37dd3aaed1e79fe3b7
|
||||
service:
|
||||
port: 8444
|
||||
env:
|
||||
SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops-stage:stellaops-stage@stellaops-mongo:27017"
|
||||
SCANNER__STORAGE__S3__ENDPOINT: "http://stellaops-minio:9000"
|
||||
SCANNER__STORAGE__S3__ACCESSKEYID: "stellaops-stage"
|
||||
SCANNER__STORAGE__S3__SECRETACCESSKEY: "stage-minio-secret"
|
||||
SCANNER__QUEUE__BROKER: "nats://stellaops-nats:4222"
|
||||
scanner-worker:
|
||||
image: registry.stella-ops.org/stellaops/scanner-worker@sha256:32e25e76386eb9ea8bee0a1ad546775db9a2df989fab61ac877e351881960dab
|
||||
replicas: 2
|
||||
env:
|
||||
SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops-stage:stellaops-stage@stellaops-mongo:27017"
|
||||
SCANNER__STORAGE__S3__ENDPOINT: "http://stellaops-minio:9000"
|
||||
SCANNER__STORAGE__S3__ACCESSKEYID: "stellaops-stage"
|
||||
SCANNER__STORAGE__S3__SECRETACCESSKEY: "stage-minio-secret"
|
||||
SCANNER__QUEUE__BROKER: "nats://stellaops-nats:4222"
|
||||
excititor:
|
||||
image: registry.stella-ops.org/stellaops/excititor@sha256:59022e2016aebcef5c856d163ae705755d3f81949d41195256e935ef40a627fa
|
||||
env:
|
||||
EXCITITOR__CONCELIER__BASEURL: "https://stellaops-concelier:8445"
|
||||
EXCITITOR__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://stellaops-stage:stellaops-stage@stellaops-mongo:27017"
|
||||
web-ui:
|
||||
image: registry.stella-ops.org/stellaops/web-ui@sha256:10d924808c48e4353e3a241da62eb7aefe727a1d6dc830eb23a8e181013b3a23
|
||||
service:
|
||||
port: 8443
|
||||
env:
|
||||
STELLAOPS_UI__BACKEND__BASEURL: "https://stellaops-scanner-web:8444"
|
||||
mongo:
|
||||
class: infrastructure
|
||||
image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49
|
||||
service:
|
||||
port: 27017
|
||||
command:
|
||||
- mongod
|
||||
- --bind_ip_all
|
||||
env:
|
||||
MONGO_INITDB_ROOT_USERNAME: stellaops-stage
|
||||
MONGO_INITDB_ROOT_PASSWORD: stellaops-stage
|
||||
volumeMounts:
|
||||
- name: mongo-data
|
||||
mountPath: /data/db
|
||||
volumeClaims:
|
||||
- name: mongo-data
|
||||
claimName: stellaops-mongo-data
|
||||
minio:
|
||||
class: infrastructure
|
||||
image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
|
||||
service:
|
||||
port: 9000
|
||||
command:
|
||||
- server
|
||||
- /data
|
||||
- --console-address
|
||||
- :9001
|
||||
env:
|
||||
MINIO_ROOT_USER: stellaops-stage
|
||||
MINIO_ROOT_PASSWORD: stage-minio-secret
|
||||
volumeMounts:
|
||||
- name: minio-data
|
||||
mountPath: /data
|
||||
volumeClaims:
|
||||
- name: minio-data
|
||||
claimName: stellaops-minio-data
|
||||
nats:
|
||||
class: infrastructure
|
||||
image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e
|
||||
service:
|
||||
port: 4222
|
||||
command:
|
||||
- -js
|
||||
- -sd
|
||||
- /data
|
||||
volumeMounts:
|
||||
- name: nats-data
|
||||
mountPath: /data
|
||||
volumeClaims:
|
||||
- name: nats-data
|
||||
claimName: stellaops-nats-data
|
||||
deploy/helm/stellaops/values.yaml (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
global:
|
||||
release:
|
||||
version: ""
|
||||
channel: ""
|
||||
manifestSha256: ""
|
||||
profile: ""
|
||||
image:
|
||||
pullPolicy: IfNotPresent
|
||||
labels: {}
|
||||
services: {}
|
||||
deploy/releases/2025.09-airgap.yaml (new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
release:
|
||||
version: "2025.09.2-airgap"
|
||||
channel: "airgap"
|
||||
date: "2025-09-20T00:00:00Z"
|
||||
calendar: "2025.09"
|
||||
components:
|
||||
- name: authority
|
||||
image: registry.stella-ops.org/stellaops/authority@sha256:5551a3269b7008cd5aceecf45df018c67459ed519557ccbe48b093b926a39bcc
|
||||
- name: signer
|
||||
image: registry.stella-ops.org/stellaops/signer@sha256:ddbbd664a42846cea6b40fca6465bc679b30f72851158f300d01a8571c5478fc
|
||||
- name: attestor
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:1ff0a3124d66d3a2702d8e421df40fbd98cc75cb605d95510598ebbae1433c50
|
||||
- name: scanner-web
|
||||
image: registry.stella-ops.org/stellaops/scanner-web@sha256:3df8ca21878126758203c1a0444e39fd97f77ddacf04a69685cda9f1e5e94718
|
||||
- name: scanner-worker
|
||||
image: registry.stella-ops.org/stellaops/scanner-worker@sha256:eea5d6cfe7835950c5ec7a735a651f2f0d727d3e470cf9027a4a402ea89c4fb5
|
||||
- name: concelier
|
||||
image: registry.stella-ops.org/stellaops/concelier@sha256:29e2e1a0972707e092cbd3d370701341f9fec2aa9316fb5d8100480f2a1c76b5
|
||||
- name: excititor
|
||||
image: registry.stella-ops.org/stellaops/excititor@sha256:65c0ee13f773efe920d7181512349a09d363ab3f3e177d276136bd2742325a68
|
||||
- name: web-ui
|
||||
image: registry.stella-ops.org/stellaops/web-ui@sha256:bee9668011ff414572131dc777faab4da24473fe12c230893f161cabee092a1d
|
||||
infrastructure:
|
||||
mongo:
|
||||
image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49
|
||||
minio:
|
||||
image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
|
||||
checksums:
|
||||
releaseManifestSha256: b787b833dddd73960c31338279daa0b0a0dce2ef32bd32ef1aaf953d66135f94
|
||||
deploy/releases/2025.09-stable.yaml (new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
release:
|
||||
version: "2025.09.2"
|
||||
channel: "stable"
|
||||
date: "2025-09-20T00:00:00Z"
|
||||
calendar: "2025.09"
|
||||
components:
|
||||
- name: authority
|
||||
image: registry.stella-ops.org/stellaops/authority@sha256:b0348bad1d0b401cc3c71cb40ba034c8043b6c8874546f90d4783c9dbfcc0bf5
|
||||
- name: signer
|
||||
image: registry.stella-ops.org/stellaops/signer@sha256:8ad574e61f3a9e9bda8a58eb2700ae46813284e35a150b1137bc7c2b92ac0f2e
|
||||
- name: attestor
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f
|
||||
- name: scanner-web
|
||||
image: registry.stella-ops.org/stellaops/scanner-web@sha256:14b23448c3f9586a9156370b3e8c1991b61907efa666ca37dd3aaed1e79fe3b7
|
||||
- name: scanner-worker
|
||||
image: registry.stella-ops.org/stellaops/scanner-worker@sha256:32e25e76386eb9ea8bee0a1ad546775db9a2df989fab61ac877e351881960dab
|
||||
- name: concelier
|
||||
image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5
|
||||
- name: excititor
|
||||
image: registry.stella-ops.org/stellaops/excititor@sha256:59022e2016aebcef5c856d163ae705755d3f81949d41195256e935ef40a627fa
|
||||
- name: web-ui
|
||||
image: registry.stella-ops.org/stellaops/web-ui@sha256:10d924808c48e4353e3a241da62eb7aefe727a1d6dc830eb23a8e181013b3a23
|
||||
infrastructure:
|
||||
mongo:
|
||||
image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49
|
||||
minio:
|
||||
image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
|
||||
checksums:
|
||||
releaseManifestSha256: dc3c8fe1ab83941c838ccc5a8a5862f7ddfa38c2078e580b5649db26554565b7
|
||||
deploy/releases/2025.10-edge.yaml (new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
release:
|
||||
version: "2025.10.0-edge"
|
||||
channel: "edge"
|
||||
date: "2025-10-01T00:00:00Z"
|
||||
calendar: "2025.10"
|
||||
components:
|
||||
- name: authority
|
||||
image: registry.stella-ops.org/stellaops/authority@sha256:a8e8faec44a579aa5714e58be835f25575710430b1ad2ccd1282a018cd9ffcdd
|
||||
- name: signer
|
||||
image: registry.stella-ops.org/stellaops/signer@sha256:8bfef9a75783883d49fc18e3566553934e970b00ee090abee9cb110d2d5c3298
|
||||
- name: attestor
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:5cc417948c029da01dccf36e4645d961a3f6d8de7e62fe98d845f07cd2282114
|
||||
- name: scanner-web
|
||||
image: registry.stella-ops.org/stellaops/scanner-web@sha256:e0dfdb087e330585a5953029fb4757f5abdf7610820a085bd61b457dbead9a11
|
||||
- name: scanner-worker
|
||||
image: registry.stella-ops.org/stellaops/scanner-worker@sha256:92dda42f6f64b2d9522104a5c9ffb61d37b34dd193132b68457a259748008f37
|
||||
- name: concelier
|
||||
image: registry.stella-ops.org/stellaops/concelier@sha256:dafef3954eb4b837e2c424dd2d23e1e4d60fa83794840fac9cd3dea1d43bd085
|
||||
- name: excititor
|
||||
image: registry.stella-ops.org/stellaops/excititor@sha256:d9bd5cadf1eab427447ce3df7302c30ded837239771cc6433b9befb895054285
|
||||
- name: web-ui
|
||||
image: registry.stella-ops.org/stellaops/web-ui@sha256:38b225fa7767a5b94ebae4dae8696044126aac429415e93de514d5dd95748dcf
|
||||
infrastructure:
|
||||
mongo:
|
||||
image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49
|
||||
minio:
|
||||
image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
|
||||
checksums:
|
||||
releaseManifestSha256: 822f82987529ea38d2321dbdd2ef6874a4062a117116a20861c26a8df1807beb
|
||||
deploy/tools/validate-profiles.sh (new file, 42 lines)
@@ -0,0 +1,42 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
|
||||
COMPOSE_DIR="$ROOT_DIR/compose"
|
||||
HELM_DIR="$ROOT_DIR/helm/stellaops"
|
||||
|
||||
compose_profiles=(
|
||||
"docker-compose.dev.yaml:env/dev.env.example"
|
||||
"docker-compose.stage.yaml:env/stage.env.example"
|
||||
"docker-compose.airgap.yaml:env/airgap.env.example"
|
||||
)
|
||||
|
||||
if command -v docker >/dev/null 2>&1; then
|
||||
for entry in "${compose_profiles[@]}"; do
|
||||
IFS=":" read -r compose_file env_file <<<"$entry"
|
||||
printf '→ validating %s with %s\n' "$compose_file" "$env_file"
|
||||
docker compose \
|
||||
--env-file "$COMPOSE_DIR/$env_file" \
|
||||
-f "$COMPOSE_DIR/$compose_file" config >/dev/null
|
||||
done
|
||||
else
|
||||
echo "⚠️ docker CLI not found; skipping compose validation" >&2
|
||||
fi
|
||||
|
||||
helm_values=(
|
||||
"$HELM_DIR/values-dev.yaml"
|
||||
"$HELM_DIR/values-stage.yaml"
|
||||
"$HELM_DIR/values-airgap.yaml"
|
||||
)
|
||||
|
||||
if command -v helm >/dev/null 2>&1; then
|
||||
for values in "${helm_values[@]}"; do
|
||||
printf '→ linting Helm chart with %s\n' "$(basename "$values")"
|
||||
helm lint "$HELM_DIR" -f "$values"
|
||||
helm template test-release "$HELM_DIR" -f "$values" >/dev/null
|
||||
done
|
||||
else
|
||||
echo "⚠️ helm CLI not found; skipping Helm lint/template" >&2
|
||||
fi
|
||||
|
||||
printf 'Profiles validated (where tooling was available).\n'
|
||||
@@ -158,6 +158,90 @@ Client then generates SBOM **only** for the `missing` layers and re‑posts `/sc
|
||||
| `POST` | `/policy/validate` | Lint only; returns 400 on error |
|
||||
| `GET` | `/policy/history` | Paginated change log (audit trail) |
|
||||
|
||||
### 2.4 Scanner – Queue a Scan Job *(SP9 milestone)*
|
||||
|
||||
```
|
||||
POST /api/v1/scans
|
||||
Authorization: Bearer <token with scanner.scans.enqueue>
|
||||
Content-Type: application/json
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"image": {
|
||||
"reference": "registry.example.com/acme/app:1.2.3"
|
||||
},
|
||||
"force": false,
|
||||
"clientRequestId": "ci-build-1845",
|
||||
"metadata": {
|
||||
"pipeline": "github",
|
||||
"trigger": "pull-request"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
| Field | Required | Notes |
|
||||
| ------------------- | -------- | ------------------------------------------------------------------------------------------------ |
|
||||
| `image.reference` | no\* | Full repo/tag (`registry/repo:tag`). Provide **either** `reference` or `digest` (sha256:…). |
|
||||
| `image.digest` | no\* | OCI digest (e.g. `sha256:…`). |
|
||||
| `force` | no | `true` forces a re-run even if an identical scan (`scanId`) already exists. Default **false**. |
|
||||
| `clientRequestId` | no | Free-form string surfaced in audit logs. |
|
||||
| `metadata` | no | Optional string map stored with the job and surfaced in observability feeds. |
|
||||
|
||||
\* At least one of `image.reference` or `image.digest` must be supplied.
|
||||
|
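As a quick illustration, the request above can be submitted with `curl`. The host and token below are placeholders for your deployment, not shipped values:

```bash
# Hypothetical endpoint and token; substitute your own deployment values.
SCANNER_URL="https://scanner-web.example.internal:8444"
TOKEN="<OpTok with scanner.scans.enqueue>"

curl -sS -i -X POST "$SCANNER_URL/api/v1/scans" \
  -H "Authorization: Bearer $TOKEN" \
  -H "Content-Type: application/json" \
  -d '{
        "image": { "reference": "registry.example.com/acme/app:1.2.3" },
        "force": false,
        "clientRequestId": "ci-build-1845"
      }'
```

The `-i` flag keeps the response headers visible so the `Location` value can be captured for polling.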
||||
**Response 202** – job accepted (idempotent):
|
||||
|
||||
```http
|
||||
HTTP/1.1 202 Accepted
|
||||
Location: /api/v1/scans/2f6c17f9b3f548e2a28b9c412f4d63f8
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"scanId": "2f6c17f9b3f548e2a28b9c412f4d63f8",
|
||||
"status": "Pending",
|
||||
"location": "/api/v1/scans/2f6c17f9b3f548e2a28b9c412f4d63f8",
|
||||
"created": true
|
||||
}
|
||||
```
|
||||
|
||||
- `scanId` is deterministic – resubmitting an identical payload returns the same identifier with `"created": false`.
|
||||
- API is cancellation-aware; aborting the HTTP request cancels the submission attempt.
|
||||
- Required scope: **`scanner.scans.enqueue`**.
|
||||
|
||||
**Response 400** – validation problem (`Content-Type: application/problem+json`) when both `image.reference` and `image.digest` are blank.
|
||||
|
||||
### 2.5 Scanner – Fetch Scan Status
|
||||
|
||||
```
|
||||
GET /api/v1/scans/{scanId}
|
||||
Authorization: Bearer <token with scanner.scans.read>
|
||||
Accept: application/json
|
||||
```
|
||||
|
||||
**Response 200**:
|
||||
|
||||
```json
|
||||
{
|
||||
"scanId": "2f6c17f9b3f548e2a28b9c412f4d63f8",
|
||||
"status": "Pending",
|
||||
"image": {
|
||||
"reference": "registry.example.com/acme/app:1.2.3",
|
||||
"digest": null
|
||||
},
|
||||
"createdAt": "2025-10-18T20:15:12.482Z",
|
||||
"updatedAt": "2025-10-18T20:15:12.482Z",
|
||||
"failureReason": null
|
||||
}
|
||||
```
|
||||
|
||||
Statuses: `Pending`, `Running`, `Succeeded`, `Failed`, `Cancelled`.
|
||||
|
||||
**Response 404** – `application/problem+json` payload with type `https://stellaops.org/problems/not-found` when the scan identifier is unknown.
|
||||
|
||||
> **Tip** – poll `Location` from the submission call until `status` transitions away from `Pending`/`Running`.
|
||||
|
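A minimal polling sketch, assuming the same placeholder host/token and `jq` available on the runner:

```bash
TOKEN="<OpTok with scanner.scans.read>"
# Value of the Location header returned by the submission call.
SCAN_URL="https://scanner-web.example.internal:8444/api/v1/scans/2f6c17f9b3f548e2a28b9c412f4d63f8"

while :; do
  status=$(curl -sS -H "Authorization: Bearer $TOKEN" "$SCAN_URL" | jq -r '.status')
  case "$status" in
    Pending|Running) sleep 5 ;;
    *) printf 'scan finished with status: %s\n' "$status"; break ;;
  esac
done
```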
||||
```yaml
|
||||
# Example import payload (YAML)
|
||||
version: "1.0"
|
||||
@@ -181,6 +265,23 @@ Validation errors come back as:
|
||||
}
|
||||
```
|
||||
|
||||
```json
|
||||
# Preview response excerpt
|
||||
{
|
||||
"success": true,
|
||||
"policyDigest": "9c5e...",
|
||||
"revisionId": "rev-12",
|
||||
"changed": 1,
|
||||
"diffs": [
|
||||
{
|
||||
"baseline": {"findingId": "finding-1", "status": "pass"},
|
||||
"projected": {"findingId": "finding-1", "status": "blocked", "ruleName": "Block Critical"},
|
||||
"changed": true
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 2.6 Attestation (Planned – Q1‑2026)
|
||||
|
||||
@@ -120,7 +120,18 @@ rules:
|
||||
action: escalate
|
||||
```
|
||||
|
||||
Validation is performed by `policy:mapping.yaml` JSON‑Schema embedded in backend.
|
||||
Validation is performed by `policy:mapping.yaml` JSON‑Schema embedded in backend.
|
||||
|
||||
Canonical schema source: `src/StellaOps.Policy/Schemas/policy-schema@1.json` (embedded into `StellaOps.Policy`).
|
||||
`PolicyValidationCli` (see `src/StellaOps.Policy/PolicyValidationCli.cs`) provides the reusable command handler that the main CLI wires up; in the interim it can be invoked from a short host like:
|
||||
|
||||
```csharp
|
||||
await new PolicyValidationCli().RunAsync(new PolicyValidationCliOptions
|
||||
{
|
||||
Inputs = new[] { "policies/root.yaml" },
|
||||
Strict = true,
|
||||
});
|
||||
```
|
||||
|
||||
### 4.1 Rego Variant (Advanced – TODO)
|
||||
|
||||
|
||||
@@ -76,6 +76,12 @@ UI: [https://\<host\>:8443](https://<host>:8443) (self‑signed cert
|
||||
> `stella-ops:latest` with the immutable digest printed by
|
||||
> `docker images --digests`.
|
||||
|
||||
> **Repo bundles** – Development, staging, and air‑gapped Compose profiles live
|
||||
> under `deploy/compose/`, already tied to the release manifests in
|
||||
> `deploy/releases/`. Helm users can pull the same channel overlays from
|
||||
> `deploy/helm/stellaops/values-*.yaml` and validate everything with
|
||||
> `deploy/tools/validate-profiles.sh`.
|
||||
|
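For example, a Helm deployment against the dev overlay might look like the sketch below; the release name and namespace are illustrative:

```bash
# Validate the bundled profiles first (the script skips tools that are not installed).
./deploy/tools/validate-profiles.sh

# Install or upgrade using the dev channel overlay; adjust release name/namespace to taste.
helm upgrade --install stellaops deploy/helm/stellaops \
  --namespace stellaops --create-namespace \
  -f deploy/helm/stellaops/values-dev.yaml
```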
||||
### 1.1 · Concelier authority configuration
|
||||
|
||||
The Concelier container reads configuration from `etc/concelier.yaml` plus
|
||||
|
||||
@@ -234,6 +234,11 @@ release:
|
||||
|
||||
The manifest is **cosign‑signed**; UI/CLI can verify a bundle without talking to registries.
|
||||
|
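For reference, a detached manifest signature can be checked offline with `cosign verify-blob`; the key and signature file names below are assumptions for illustration, not the exact artefact names shipped with a release:

```bash
# Assumed artefact names; substitute the key and signature from your release bundle.
cosign verify-blob \
  --key stellaops-release.pub \
  --signature 2025.09-stable.yaml.sig \
  deploy/releases/2025.09-stable.yaml
```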
||||
> Deployment guardrails – The repository keeps channel-aligned Compose bundles
|
||||
> in `deploy/compose/` and Helm overlays in `deploy/helm/stellaops/`. Both sets
|
||||
> pull their digests from `deploy/releases/` and are validated by
|
||||
> `deploy/tools/validate-profiles.sh` to guarantee lint/dry-run cleanliness.
|
||||
|
||||
### 6.2 Image labels (release metadata)
|
||||
|
||||
Each image sets OCI labels:
|
||||
|
||||
@@ -42,6 +42,35 @@ src/
|
||||
|
||||
Analyzer assemblies and buildx generators are packaged as **restart-time plug-ins** under `plugins/scanner/**` with manifests; services must restart to activate new plug-ins.
|
||||
|
||||
### 1.1 Queue backbone (Redis / NATS)
|
||||
|
||||
`StellaOps.Scanner.Queue` exposes a transport-agnostic contract (`IScanQueue`/`IScanQueueLease`) used by the WebService producer and Worker consumers. Sprint 9 introduces two first-party transports:
|
||||
|
||||
- **Redis Streams** (default). Uses consumer groups, deterministic idempotency keys (`scanner:jobs:idemp:*`), and supports lease claim (`XCLAIM`), renewal, exponential-backoff retries, and a `scanner:jobs:dead` stream for exhausted attempts.
|
||||
- **NATS JetStream**. Provisions the `SCANNER_JOBS` work-queue stream + durable consumer `scanner-workers`, publishes with `MsgId` for dedupe, applies backoff via `NAK` delays, and routes dead-lettered jobs to `SCANNER_JOBS_DEAD`.
|
||||
|
||||
Metrics are emitted via `Meter` counters (`scanner_queue_enqueued_total`, `scanner_queue_retry_total`, `scanner_queue_deadletter_total`), and `ScannerQueueHealthCheck` pings the active backend (Redis `PING`, NATS `PING`). Configuration is bound from `scanner.queue`:
|
||||
|
||||
```yaml
|
||||
scanner:
|
||||
queue:
|
||||
kind: redis # or nats
|
||||
redis:
|
||||
connectionString: "redis://queue:6379/0"
|
||||
streamName: "scanner:jobs"
|
||||
nats:
|
||||
url: "nats://queue:4222"
|
||||
stream: "SCANNER_JOBS"
|
||||
subject: "scanner.jobs"
|
||||
durableConsumer: "scanner-workers"
|
||||
deadLetterSubject: "scanner.jobs.dead"
|
||||
maxDeliveryAttempts: 5
|
||||
retryInitialBackoff: 00:00:05
|
||||
retryMaxBackoff: 00:02:00
|
||||
```
|
||||
|
||||
The DI extension (`AddScannerQueue`) wires the selected transport, so future additions (e.g., RabbitMQ) only implement the same contract and register.
|
||||
|
||||
**Runtime form‑factor:** two deployables
|
||||
|
||||
* **Scanner.WebService** (stateless REST)
|
||||
|
||||
@@ -31,12 +31,13 @@ Everything here is open‑source and versioned — when you check out a git ta
|
||||
- **03 – [Vision & Road‑map](03_VISION.md)**
|
||||
- **04 – [Feature Matrix](04_FEATURE_MATRIX.md)**
|
||||
|
||||
### Reference & concepts
|
||||
- **05 – [System Requirements Specification](05_SYSTEM_REQUIREMENTS_SPEC.md)**
|
||||
- **07 – [High‑Level Architecture](07_HIGH_LEVEL_ARCHITECTURE.md)**
|
||||
- **08 – Module Architecture Dossiers**
|
||||
- [Scanner](ARCHITECTURE_SCANNER.md)
|
||||
- [Concelier](ARCHITECTURE_CONCELIER.md)
|
||||
### Reference & concepts
|
||||
- **05 – [System Requirements Specification](05_SYSTEM_REQUIREMENTS_SPEC.md)**
|
||||
- **07 – [High‑Level Architecture](07_HIGH_LEVEL_ARCHITECTURE.md)**
|
||||
- **08 – [Architecture Decision Records](adr/index.md)**
|
||||
- **08 – Module Architecture Dossiers**
|
||||
- [Scanner](ARCHITECTURE_SCANNER.md)
|
||||
- [Concelier](ARCHITECTURE_CONCELIER.md)
|
||||
- [Excititor](ARCHITECTURE_EXCITITOR.md)
|
||||
- [Signer](ARCHITECTURE_SIGNER.md)
|
||||
- [Attestor](ARCHITECTURE_ATTESTOR.md)
|
||||
@@ -48,8 +49,9 @@ Everything here is open‑source and versioned — when you check out a git ta
|
||||
- [Zastava Runtime](ARCHITECTURE_ZASTAVA.md)
|
||||
- [Release & Operations](ARCHITECTURE_DEVOPS.md)
|
||||
- **09 – [API & CLI Reference](09_API_CLI_REFERENCE.md)**
|
||||
- **10 – [Plug‑in SDK Guide](10_PLUGIN_SDK_GUIDE.md)**
|
||||
- **10 – [Concelier CLI Quickstart](10_CONCELIER_CLI_QUICKSTART.md)**
|
||||
- **10 – [Plug‑in SDK Guide](10_PLUGIN_SDK_GUIDE.md)**
|
||||
- **10 – [Concelier CLI Quickstart](10_CONCELIER_CLI_QUICKSTART.md)**
|
||||
- **10 – [BuildX Generator Quickstart](dev/BUILDX_PLUGIN_QUICKSTART.md)**
|
||||
- **30 – [Excititor Connector Packaging Guide](dev/30_EXCITITOR_CONNECTOR_GUIDE.md)**
|
||||
- **30 – Developer Templates**
|
||||
- [Excititor Connector Skeleton](dev/templates/excititor-connector/)
|
||||
|
||||
@@ -9,8 +9,8 @@
|
||||
| DOC5.Concelier-Runbook | DONE (2025-10-12) | Docs Guild | DOC3.Concelier-Authority | Produce dedicated Concelier authority audit runbook covering log fields, monitoring recommendations, and troubleshooting steps. | ✅ Runbook published; ✅ linked from DOC3/DOC5; ✅ alerting guidance included. |
|
||||
| FEEDDOCS-DOCS-05-001 | DONE (2025-10-11) | Docs Guild | FEEDMERGE-ENGINE-04-001, FEEDMERGE-ENGINE-04-002 | Publish Concelier conflict resolution runbook covering precedence workflow, merge-event auditing, and Sprint 3 metrics. | ✅ `docs/ops/concelier-conflict-resolution.md` committed; ✅ metrics/log tables align with latest merge code; ✅ Ops alert guidance handed to Concelier team. |
|
||||
| FEEDDOCS-DOCS-05-002 | DONE (2025-10-16) | Docs Guild, Concelier Ops | FEEDDOCS-DOCS-05-001 | Ops sign-off captured: conflict runbook circulated, alert thresholds tuned, and rollout decisions documented in change log. | ✅ Ops review recorded; ✅ alert thresholds finalised using `docs/ops/concelier-authority-audit-runbook.md`; ✅ change-log entry linked from runbook once GHSA/NVD/OSV regression fixtures land. |
|
||||
| DOCS-ADR-09-001 | TODO | Docs Guild, DevEx | — | Establish ADR process (`docs/adr/0000-template.md`) and document usage guidelines. | Template published; README snippet linking ADR process; announcement posted. |
|
||||
| DOCS-EVENTS-09-002 | TODO | Docs Guild, Platform Events | SCANNER-EVENTS-15-201 | Publish event schema catalog (`docs/events/`) for `scanner.report.ready@1`, `scheduler.rescan.delta@1`, `attestor.logged@1`. | Schemas validated; docs/events/README summarises usage; Notify/Scheduler teams acknowledge. |
|
||||
| DOCS-ADR-09-001 | DONE (2025-10-19) | Docs Guild, DevEx | — | Establish ADR process (`docs/adr/0000-template.md`) and document usage guidelines. | Template published; README snippet linking ADR process; announcement posted (`docs/updates/2025-10-18-docs-guild.md`). |
|
||||
| DOCS-EVENTS-09-002 | DONE (2025-10-19) | Docs Guild, Platform Events | SCANNER-EVENTS-15-201 | Publish event schema catalog (`docs/events/`) for `scanner.report.ready@1`, `scheduler.rescan.delta@1`, `attestor.logged@1`. | Schemas validated (Ajv CI hooked); docs/events/README summarises usage; Platform Events notified via `docs/updates/2025-10-18-docs-guild.md`. |
|
||||
| DOCS-RUNTIME-17-004 | TODO | Docs Guild, Runtime Guild | SCANNER-EMIT-17-701, ZASTAVA-OBS-17-005, DEVOPS-REL-17-002 | Document build-id workflows: SBOM exposure, runtime event payloads, debug-store layout, and operator guidance for symbol retrieval. | Architecture + operator docs updated with build-id sections, examples show `readelf` output + debuginfod usage, references linked from Offline Kit/Release guides. |
|
||||
|
||||
> Update statuses (TODO/DOING/REVIEW/DONE/BLOCKED) as progress changes. Keep guides in sync with configuration samples under `etc/`.
|
||||
|
||||
@@ -3,16 +3,32 @@
|
||||
## Status
|
||||
Proposed
|
||||
|
||||
## Date
|
||||
YYYY-MM-DD
|
||||
|
||||
## Authors
|
||||
- Name (team)
|
||||
|
||||
## Deciders
|
||||
- Names of approvers / reviewers
|
||||
|
||||
## Context
|
||||
- What decision needs to be made?
|
||||
- What are the forces (requirements, constraints, stakeholders)?
|
||||
- Why now? What triggers the ADR?
|
||||
|
||||
## Decision
|
||||
- Summary of the chosen option.
|
||||
- Key rationale points.
|
||||
|
||||
## Consequences
|
||||
- Positive/negative consequences.
|
||||
- Follow-up actions or tasks.
|
||||
- Rollback plan or re-evaluation criteria.
|
||||
|
||||
## Alternatives Considered
|
||||
- Option A — pros/cons.
|
||||
- Option B — pros/cons.
|
||||
|
||||
## References
|
||||
- Links to related ADRs, issues, documents.
|
||||
|
||||
docs/adr/index.md (new file, 41 lines)
@@ -0,0 +1,41 @@
|
||||
# Architecture Decision Records (ADRs)
|
||||
|
||||
Architecture Decision Records document long-lived choices that shape StellaOps architecture, security posture, and operator experience. They complement RFCs by capturing the final call and the context that led to it.
|
||||
|
||||
## When to file an ADR
|
||||
- Decisions that affect cross-module contracts, persistence models, or external interfaces.
|
||||
- Security or compliance controls with on-going operational ownership.
|
||||
- Rollout strategies that require coordination across guilds or sprints.
|
||||
- Reversals or deprecations of previously accepted ADRs.
|
||||
|
||||
Small, module-local refactors that do not modify public behaviour can live in commit messages instead.
|
||||
|
||||
## Workflow at a glance
|
||||
1. Copy `docs/adr/0000-template.md` to `docs/adr/NNNN-short-slug.md` with a zero-padded sequence (see **Numbering**). A helper sketch follows this list.
|
||||
2. Fill in context, decision, consequences, and alternatives. Include links to RFCs, issues, benchmarks, or experiments.
|
||||
3. Request async review from the impacted guilds. Capture sign-offs in the **Deciders** field.
|
||||
4. Merge the ADR with the code/config changes (or in a preparatory PR).
|
||||
5. Announce the accepted ADR in the Docs Guild channel or sprint notes so downstream teams can consume it.
|
||||
|
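A small helper sketch for step 1; the slug is a placeholder:

```bash
# Compute the next zero-padded ADR number and copy the template.
last=$(ls docs/adr/[0-9][0-9][0-9][0-9]-*.md 2>/dev/null \
  | sed -E 's#.*/([0-9]{4})-.*#\1#' | sort -n | tail -1)
printf -v next '%04d' $((10#${last:-0} + 1))
cp docs/adr/0000-template.md "docs/adr/${next}-short-slug.md"
```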
||||
## Numbering and status
|
||||
- Use zero-padded integers (e.g., `0001`, `0002`) in file names and the document header. Increment from the highest existing number.
|
||||
- Valid statuses: `Proposed`, `Accepted`, `Rejected`, `Deprecated`, `Superseded`. Update the status when follow-up work lands.
|
||||
- When an ADR supersedes another, link them in both documents’ **References** sections.
|
||||
|
||||
## Review expectations
|
||||
- Highlight edge-case handling, trade-offs, and determinism requirements.
|
||||
- Include operational checklists for any new runtime path (quota updates, schema migrations, credential rotation, etc.).
|
||||
- Attach diagrams under `docs/adr/assets/` when visuals improve comprehension.
|
||||
- Add TODO tasks for follow-up work in the relevant module’s `TASKS.md` and link them from the ADR.
|
||||
|
||||
## Verification checklist
|
||||
- [ ] `Status`, `Date`, `Authors`, and `Deciders` populated.
|
||||
- [ ] Links to code/config PRs or experiments recorded under **References**.
|
||||
- [ ] Consequences call out migration or rollback steps.
|
||||
- [ ] Announcement posted to Docs Guild updates (or sprint log).
|
||||
|
||||
## Related resources
|
||||
- [Docs Guild Task Board](../TASKS.md)
|
||||
- [High-Level Architecture Overview](../07_HIGH_LEVEL_ARCHITECTURE.md)
|
||||
- [Coding Standards](../18_CODING_STANDARDS.md)
|
||||
- [Release Engineering Playbook](../13_RELEASE_ENGINEERING_PLAYBOOK.md)
|
||||
@@ -241,7 +241,59 @@ jobs:
|
||||
|
||||
---
|
||||
|
||||
## 4 · Troubleshooting cheat‑sheet
|
||||
## 4 · Docs CI (Gitea Actions & Offline Mirror)
|
||||
|
||||
StellaOps ships a dedicated Docs workflow at `.gitea/workflows/docs.yml`. When mirroring the pipeline offline or running it locally, install the same toolchain so markdown linting, schema validation, and HTML preview stay deterministic.
|
||||
|
||||
### 4.1 Toolchain bootstrap
|
||||
|
||||
```bash
|
||||
# Node.js 20.x is required; install once per runner
|
||||
npm install --no-save \
|
||||
markdown-link-check \
|
||||
remark-cli \
|
||||
remark-preset-lint-recommended \
|
||||
ajv \
|
||||
ajv-cli \
|
||||
ajv-formats
|
||||
|
||||
# Python 3.11+ powers the preview renderer
|
||||
python -m pip install --upgrade pip
|
||||
python -m pip install markdown pygments
|
||||
```
|
||||
|
||||
**Offline tip.** Add the packages above to your artifact mirror (for example `ops/devops/offline-kit.json`) so runners can install them via `npm install --offline` / `pip install --no-index`.
|
||||
|
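One way to stage that mirror, assuming a connected build host and an `offline-kit/` staging directory (the paths are illustrative):

```bash
# On a connected machine: capture the npm tarballs and Python packages.
mkdir -p offline-kit/npm offline-kit/pip
npm pack markdown-link-check remark-cli remark-preset-lint-recommended \
  ajv ajv-cli ajv-formats --pack-destination offline-kit/npm
pip download markdown pygments -d offline-kit/pip

# On the air-gapped runner: install strictly from the mirrored artefacts.
npm install --no-save offline-kit/npm/*.tgz
pip install --no-index --find-links offline-kit/pip markdown pygments
```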
||||
### 4.2 Schema validation step
|
||||
|
||||
Ajv compiles every event schema to guard against syntax or format regressions. The workflow uses `ajv-formats` for UUID/date-time support.
|
||||
|
||||
```bash
|
||||
for schema in docs/events/*.json; do
|
||||
npx ajv compile -c ajv-formats -s "$schema"
|
||||
done
|
||||
```
|
||||
|
||||
Run this loop before committing schema changes. For new references, append `-r additional-file.json` so CI and local runs stay aligned.
|
||||
|
||||
### 4.3 Preview build
|
||||
|
||||
```bash
|
||||
python scripts/render_docs.py --source docs --output artifacts/docs-preview --clean
|
||||
```
|
||||
|
||||
Host the resulting bundle via any static file server for review (for example `python -m http.server`).
|
||||
|
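For a quick local review, Python's built-in server is enough (the port is arbitrary):

```bash
python -m http.server 8081 --directory artifacts/docs-preview
```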
||||
### 4.4 Publishing checklist
|
||||
|
||||
- [ ] Toolchain installs succeed without hitting the public internet (mirror or cached tarballs).
|
||||
- [ ] Ajv validation passes for `scanner.report.ready@1`, `scheduler.rescan.delta@1`, `attestor.logged@1`.
|
||||
- [ ] Markdown link check (`npx markdown-link-check`) reports no broken references.
|
||||
- [ ] Preview bundle archived (or attached) for stakeholders.
|
||||
|
||||
---
|
||||
|
||||
## 5 · Troubleshooting cheat‑sheet
|
||||
|
||||
| Symptom | Root cause | First things to try |
|
||||
| ------------------------------------- | --------------------------- | --------------------------------------------------------------- |
|
||||
@@ -253,6 +305,7 @@ jobs:
|
||||
|
||||
---
|
||||
|
||||
### Change log
|
||||
|
||||
* **2025‑08‑04** – Variable clean‑up, removed Docker‑socket & cache mounts, added Jenkins / CircleCI / Gitea examples, clarified Option B comment.
|
||||
### Change log
|
||||
|
||||
* **2025‑10‑18** – Documented Docs CI toolchain (Ajv validation, static preview) and offline checklist.
|
||||
* **2025‑08‑04** – Variable clean‑up, removed Docker‑socket & cache mounts, added Jenkins / CircleCI / Gitea examples, clarified Option B comment.
|
||||
|
||||
docs/dev/BUILDX_PLUGIN_QUICKSTART.md (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
# BuildX Generator Quickstart
|
||||
|
||||
This quickstart explains how to run the StellaOps **BuildX SBOM generator** offline, verify the CAS handshake, and emit OCI descriptors that downstream services can attest.
|
||||
|
||||
## 1. Prerequisites
|
||||
|
||||
- Docker 25+ with BuildKit enabled (`docker buildx` available).
|
||||
- .NET 10 (preview) SDK matching the repository `global.json`.
|
||||
- Optional: network access to a StellaOps Attestor endpoint (the quickstart uses a mock service).
|
||||
|
||||
## 2. Publish the plug-in binaries
|
||||
|
||||
The BuildX generator publishes as a .NET self-contained executable with its manifest under `plugins/scanner/buildx/`.
|
||||
|
||||
```bash
|
||||
# From the repository root
|
||||
DOTNET_CLI_HOME="${PWD}/.dotnet" \
|
||||
dotnet publish src/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj \
|
||||
-c Release \
|
||||
-o out/buildx
|
||||
```
|
||||
|
||||
- `out/buildx/` now contains `StellaOps.Scanner.Sbomer.BuildXPlugin.dll` and the manifest `stellaops.sbom-indexer.manifest.json`.
|
||||
- `plugins/scanner/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin/` receives the same artefacts for release packaging.
|
||||
|
||||
## 3. Verify the CAS handshake
|
||||
|
||||
```bash
|
||||
dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll handshake \
|
||||
--manifest out/buildx \
|
||||
--cas out/cas
|
||||
```
|
||||
|
||||
The command performs a deterministic probe write (`sha256`) into the provided CAS directory and prints the resolved path.
|
||||
|
||||
## 4. Emit a descriptor + provenance placeholder
|
||||
|
||||
1. Build or identify the image you want to describe and capture its digest:
|
||||
|
||||
```bash
|
||||
docker buildx build --load -t stellaops/buildx-demo:ci samples/ci/buildx-demo
|
||||
DIGEST=$(docker image inspect stellaops/buildx-demo:ci --format '{{index .RepoDigests 0}}')
|
||||
```
|
||||
|
||||
2. Generate a CycloneDX SBOM for the built image (any tool works; here we use `docker sbom`):
|
||||
|
||||
```bash
|
||||
docker sbom stellaops/buildx-demo:ci --format cyclonedx-json > out/buildx-sbom.cdx.json
|
||||
```
|
||||
|
||||
3. Invoke the `descriptor` command, pointing at the SBOM file and optional metadata:
|
||||
|
||||
```bash
|
||||
dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll descriptor \
|
||||
--manifest out/buildx \
|
||||
--image "$DIGEST" \
|
||||
--sbom out/buildx-sbom.cdx.json \
|
||||
--sbom-name buildx-sbom.cdx.json \
|
||||
--artifact-type application/vnd.stellaops.sbom.layer+json \
|
||||
--sbom-format cyclonedx-json \
|
||||
--sbom-kind inventory \
|
||||
--repository git.stella-ops.org/stellaops/buildx-demo \
|
||||
--build-ref $(git rev-parse HEAD) \
|
||||
> out/buildx-descriptor.json
|
||||
```
|
||||
|
||||
The output JSON captures:
|
||||
|
||||
- OCI artifact descriptor including size, digest, and annotations (`org.stellaops.*`).
|
||||
- Provenance placeholder (`expectedDsseSha256`, `nonce`, `attestorUri` when provided).
|
||||
- Generator metadata and deterministic timestamps.
|
||||
|
||||
## 5. (Optional) Send the placeholder to an Attestor
|
||||
|
||||
The plug-in can POST the descriptor metadata to an Attestor endpoint, returning once it receives an HTTP 202.
|
||||
|
||||
```bash
|
||||
python3 - <<'PY' &
|
||||
from http.server import BaseHTTPRequestHandler, HTTPServer
|
||||
class Handler(BaseHTTPRequestHandler):
|
||||
def do_POST(self):
|
||||
_ = self.rfile.read(int(self.headers.get('Content-Length', 0)))
|
||||
self.send_response(202); self.end_headers(); self.wfile.write(b'accepted')
|
||||
def log_message(self, fmt, *args):
|
||||
return
|
||||
server = HTTPServer(('127.0.0.1', 8085), Handler)
|
||||
try:
|
||||
server.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
finally:
|
||||
server.server_close()
|
||||
PY
|
||||
MOCK_PID=$!
|
||||
|
||||
dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll descriptor \
|
||||
--manifest out/buildx \
|
||||
--image "$DIGEST" \
|
||||
--sbom out/buildx-sbom.cdx.json \
|
||||
--attestor http://127.0.0.1:8085/provenance \
|
||||
--attestor-token "$STELLAOPS_ATTESTOR_TOKEN" \
|
||||
> out/buildx-descriptor.json
|
||||
|
||||
kill $MOCK_PID
|
||||
```
|
||||
|
||||
Set `STELLAOPS_ATTESTOR_TOKEN` (or pass `--attestor-token`) when the Attestor requires bearer authentication. Use `--attestor-insecure` for lab environments with self-signed certificates.
|
||||
|
||||
## 6. CI workflow example
|
||||
|
||||
A reusable GitHub Actions workflow is provided under `samples/ci/buildx-demo/github-actions-buildx-demo.yml`. It publishes the plug-in, runs the handshake, builds the demo image, emits a descriptor, and uploads both the descriptor and the mock-Attestor request as artefacts.
|
||||
|
||||
Add the workflow to your repository (or call it via `workflow_call`) and adjust the SBOM path + Attestor URL as needed.
|
||||
|
||||
---
|
||||
|
||||
For deeper integration guidance (custom SBOM builders, exporting DSSE bundles), track ADRs in `docs/ARCHITECTURE_SCANNER.md` §7 and follow upcoming Attestor API releases.
|
||||
@@ -1,9 +1,30 @@
|
||||
# Event Envelope Schemas
|
||||
|
||||
Versioned JSON Schemas for platform events consumed by Scheduler, Notify, and UI.
|
||||
Platform services publish strongly typed events; the JSON Schemas in this directory define those envelopes. File names follow `<event-name>@<version>.json` so producers and consumers can negotiate contracts explicitly.
|
||||
|
||||
- `scanner.report.ready@1.json`
|
||||
- `scheduler.rescan.delta@1.json`
|
||||
- `attestor.logged@1.json`
|
||||
## Catalog
|
||||
- `scanner.report.ready@1.json` — emitted by Scanner.WebService once a signed report is persisted. Consumers: Notify, UI timeline.
|
||||
- `scheduler.rescan.delta@1.json` — emitted by Scheduler when BOM-Index diffs require fresh scans. Consumers: Notify, Policy Engine.
|
||||
- `attestor.logged@1.json` — emitted by Attestor after storing the Rekor inclusion proof. Consumers: UI attestation panel, Governance exports.
|
||||
|
||||
Producers must bump the version suffix when introducing breaking changes; consumers validate incoming payloads against these schemas.
|
||||
Additive payload changes (new optional fields) can stay within the same version. Any breaking change (removing a field, tightening validation, altering semantics) must increment the `@<version>` suffix and update downstream consumers.
|
||||
|
||||
## CI validation
|
||||
The Docs CI workflow (`.gitea/workflows/docs.yml`) installs `ajv-cli` and compiles every schema on pull requests. Run the same check locally before opening a PR:
|
||||
|
||||
```bash
|
||||
for schema in docs/events/*.json; do
|
||||
npx ajv compile -c ajv-formats -s "$schema"
|
||||
done
|
||||
```
|
||||
|
||||
Tip: run `npm install --no-save ajv ajv-cli ajv-formats` once per clone so `npx` resolves the tooling locally; pair it with a mirrored registry or cached tarballs when working offline.
|
||||
|
||||
If a schema references additional files, include `-r` flags so CI and local runs stay consistent.
|
||||
|
||||
## Working with schemas
|
||||
- Producers should validate outbound payloads using the matching schema during unit tests.
|
||||
- Consumers should pin to a specific version and log when encountering unknown versions to catch missing migrations early.
|
||||
- Store real payload samples under `samples/events/` (mirrors the schema version) to aid contract testing.
|
||||
|
||||
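A minimal contract check against a stored sample, assuming the sample path convention above (`ajv validate` exits non-zero on failure; the sample file name is hypothetical):

```bash
npx ajv validate -c ajv-formats \
  -s "docs/events/scanner.report.ready@1.json" \
  -d "samples/events/scanner.report.ready@1/sample.json"
```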
Contact the Platform Events group in Docs Guild if you need help shaping a new event or version strategy.
|
||||
|
||||
docs/scanner-core-contracts.md (new file, 42 lines)
@@ -0,0 +1,42 @@
|
||||
# Scanner Core Contracts
|
||||
|
||||
The **Scanner Core** library provides shared contracts, observability helpers, and security utilities consumed by `Scanner.WebService`, `Scanner.Worker`, analyzers, and tooling. These primitives guarantee deterministic identifiers, timestamps, and log context for all scanning flows.
|
||||
|
||||
## DTOs
|
||||
|
||||
- `ScanJob` & `ScanJobStatus` – canonical job metadata (image reference/digest, tenant, correlation ID, timestamps, failure details). Constructors normalise timestamps to UTC microsecond precision and canonicalise image digests. Round-trips with `JsonSerializerDefaults.Web` using `ScannerJsonOptions`.
|
||||
- `ScanProgressEvent` & `ScanStage`/`ScanProgressEventKind` – stage-level progress surface for queue/stream consumers. Includes deterministic sequence numbers, optional progress percentage, attributes, and attached `ScannerError`.
|
||||
- `ScannerError` & `ScannerErrorCode` – shared error taxonomy spanning queue, analyzers, storage, exporters, and signing. Carries severity, retryability, structured details, and microsecond-precision timestamps.
|
||||
- `ScanJobId` – strongly-typed identifier rendered as `Guid` (lowercase `N` format) with deterministic parsing.
|
||||
|
||||
## Deterministic helpers
|
||||
|
||||
- `ScannerIdentifiers` – derives `ScanJobId`, correlation IDs, and SHA-256 hashes from normalised inputs (image reference/digest, tenant, salt). Ensures case-insensitive stability and reproducible metric keys.
|
||||
- `ScannerTimestamps` – trims to microsecond precision, provides ISO-8601 (`yyyy-MM-ddTHH:mm:ss.ffffffZ`) rendering, and parsing helpers.
|
||||
- `ScannerJsonOptions` – standard JSON options (web defaults, camel-case enums) shared by services/tests.
|
||||
|
||||
## Observability primitives
|
||||
|
||||
- `ScannerDiagnostics` – global `ActivitySource`/`Meter` for scanner components. `StartActivity` seeds deterministic tags (`job_id`, `stage`, `component`, `correlation_id`).
|
||||
- `ScannerMetricNames` – centralises metric prefixes (`stellaops.scanner.*`) and deterministic job/event tag builders.
|
||||
- `ScannerCorrelationContext` & `ScannerCorrelationContextAccessor` – ambient correlation propagation via `AsyncLocal` for log scopes, metrics, and diagnostics.
|
||||
- `ScannerLogExtensions` – `ILogger` scopes for jobs/progress events with automatic correlation context push, minimal allocations, and consistent structured fields.
|
||||
|
||||
## Security utilities
|
||||
|
||||
- `AuthorityTokenSource` – caches short-lived OpToks per audience+scope using deterministic keys and refresh skew (default 30 s). Integrates with `StellaOps.Auth.Client`.
|
||||
- `DpopProofValidator` – validates DPoP proofs (alg allowlist, `htm`/`htu`, nonce, replay window, signature) backed by pluggable `IDpopReplayCache`. Ships with `InMemoryDpopReplayCache` for restart-only deployments.
|
||||
- `RestartOnlyPluginGuard` – enforces restart-time plug-in registration (deterministic path normalisation; throws if new plug-ins added post-seal).
|
||||
- `ServiceCollectionExtensions.AddScannerAuthorityCore` – DI helper wiring Authority client, OpTok source, DPoP validation, replay cache, and plug-in guard.
|
||||
|
||||
## Testing guarantees
|
||||
|
||||
Unit tests (`StellaOps.Scanner.Core.Tests`) assert:
|
||||
|
||||
- DTO JSON round-trips are stable and deterministic.
|
||||
- Identifier/hash helpers ignore case and emit lowercase hex.
|
||||
- Timestamp normalisation retains UTC semantics.
|
||||
- Log scopes push/pop correlation context predictably.
|
||||
- Authority token caching honours refresh skew and invalidation.
|
||||
- DPoP validator accepts valid proofs, rejects nonce mismatch/replay, and enforces signature validation.
|
||||
- Restart-only plug-in guard blocks runtime additions post-seal.
|
||||
docs/updates/2025-10-18-docs-guild.md (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
# Docs Guild Update — 2025-10-18
|
||||
|
||||
**Subject:** ADR process + events schema validation shipped
|
||||
**Audience:** Docs Guild, DevEx, Platform Events
|
||||
|
||||
- Published the ADR contribution guide at `docs/adr/index.md` and enriched the template to capture authorship, deciders, and alternatives. All new cross-module decisions should follow this workflow.
|
||||
- Linked the ADR hub from `docs/README.md` so operators and engineers can discover the process without digging through directories.
|
||||
- Extended Docs CI (`.gitea/workflows/docs.yml`) to compile event schemas with Ajv (including `ajv-formats`) and documented the local loop in `docs/events/README.md`.
|
||||
- Captured the mirror/offline workflow in `docs/ci/20_CI_RECIPES.md` so runners know how to install the Ajv toolchain and publish previews without internet access.
|
||||
- Validated `scanner.report.ready@1`, `scheduler.rescan.delta@1`, and `attestor.logged@1` schemas locally to unblock Platform Events acknowledgements.
|
||||
|
||||
Next steps:
|
||||
- Platform Events to confirm Notify/Scheduler consumers have visibility into the schema docs.
|
||||
- DevEx to add ADR announcement blurb to the next sprint recap if broader broadcast is needed.
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| DEVOPS-HELM-09-001 | TODO | DevOps Guild | SCANNER-WEB-09-101 | Create Helm/Compose environment profiles (dev, staging, airgap) with deterministic digests. | Profiles committed under `deploy/`; docs updated; CI smoke deploy passes. |
|
||||
| DEVOPS-HELM-09-001 | DONE | DevOps Guild | SCANNER-WEB-09-101 | Create Helm/Compose environment profiles (dev, staging, airgap) with deterministic digests. | Profiles committed under `deploy/`; docs updated; CI smoke deploy passes. |
|
||||
| DEVOPS-PERF-10-001 | TODO | DevOps Guild | BENCH-SCANNER-10-001 | Add perf smoke job (SBOM compose <5 s target) to CI. | CI job runs sample build verifying <5 s; alerts configured. |
|
||||
| DEVOPS-REL-14-001 | TODO | DevOps Guild | SIGNER-API-11-101, ATTESTOR-API-11-201 | Deterministic build/release pipeline with SBOM/provenance, signing, manifest generation. | CI pipeline produces signed images + SBOM/attestations, manifests published with verified hashes, docs updated. |
|
||||
| DEVOPS-REL-17-002 | TODO | DevOps Guild | DEVOPS-REL-14-001, SCANNER-EMIT-17-701 | Persist stripped-debug artifacts organised by GNU build-id and bundle them into release/offline kits with checksum manifests. | CI job writes `.debug` files under `artifacts/debug/.build-id/`, manifest + checksums published, offline kit includes cache, smoke job proves symbol lookup via build-id. |
|
||||
|
||||

@@ -0,0 +1,35 @@
{
  "schemaVersion": "1.0",
  "id": "stellaops.sbom-indexer",
  "displayName": "StellaOps SBOM BuildX Generator",
  "version": "0.1.0-alpha",
  "requiresRestart": true,
  "entryPoint": {
    "type": "dotnet",
    "executable": "StellaOps.Scanner.Sbomer.BuildXPlugin.dll",
    "arguments": [
      "handshake"
    ]
  },
  "capabilities": [
    "generator",
    "sbom"
  ],
  "cas": {
    "protocol": "filesystem",
    "defaultRoot": "cas",
    "compression": "zstd"
  },
  "image": {
    "name": "stellaops/sbom-indexer",
    "digest": null,
    "platforms": [
      "linux/amd64",
      "linux/arm64"
    ]
  },
  "metadata": {
    "org.stellaops.plugin.kind": "buildx-generator",
    "org.stellaops.restart.required": "true"
  }
}

4  samples/ci/buildx-demo/Dockerfile  Normal file
@@ -0,0 +1,4 @@
FROM alpine:3.20
RUN adduser -S stella && echo "hello" > /app.txt
USER stella
CMD ["/bin/sh","-c","cat /app.txt"]

42  samples/ci/buildx-demo/README.md  Normal file
@@ -0,0 +1,42 @@
# Buildx SBOM Demo Workflow

This sample GitHub Actions workflow shows how to run the StellaOps BuildX generator alongside a container build.

## What it does

1. Publishes the `StellaOps.Scanner.Sbomer.BuildXPlugin` with the manifest copied beside the binaries.
2. Calls the plug-in `handshake` command to verify the local CAS directory.
3. Builds a tiny Alpine-based image via `docker buildx`.
4. Generates a CycloneDX SBOM from the built image with `docker sbom`.
5. Emits a descriptor + provenance placeholder referencing the freshly generated SBOM with the `descriptor` command.
6. Sends the placeholder to a mock Attestor endpoint and uploads the descriptor, SBOM, and captured request as artefacts. (Swap the mock step with your real Attestor URL + `STELLAOPS_ATTESTOR_TOKEN` secret when ready.)

## Files

- `github-actions-buildx-demo.yml` – workflow definition (`workflow_dispatch` + `demo/buildx` branch trigger); captures a real SBOM via `docker sbom`.
- `Dockerfile` – minimal demo image.

## Running locally

```bash
dotnet publish src/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj -c Release -o out/buildx

dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll handshake \
  --manifest out/buildx \
  --cas out/cas

docker buildx build --load -t stellaops/buildx-demo:ci samples/ci/buildx-demo
DIGEST=$(docker image inspect stellaops/buildx-demo:ci --format '{{index .RepoDigests 0}}')

docker sbom stellaops/buildx-demo:ci --format cyclonedx-json > out/buildx-sbom.cdx.json

dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll descriptor \
  --manifest out/buildx \
  --image "$DIGEST" \
  --sbom out/buildx-sbom.cdx.json \
  --sbom-name buildx-sbom.cdx.json \
  > out/buildx-descriptor.json
```

The descriptor JSON contains deterministic annotations and provenance placeholders ready for the Attestor.

120  samples/ci/buildx-demo/github-actions-buildx-demo.yml  Normal file
@@ -0,0 +1,120 @@
name: Buildx SBOM Demo
on:
  workflow_dispatch:
  push:
    branches: [ demo/buildx ]

jobs:
  buildx-sbom:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Set up .NET 10 preview
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.x'

      - name: Publish StellaOps BuildX generator
        run: |
          dotnet publish src/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj \
            -c Release \
            -o out/buildx

      - name: Handshake CAS
        run: |
          dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll handshake \
            --manifest out/buildx \
            --cas out/cas

      - name: Build demo container image
        run: |
          docker buildx build --load -t stellaops/buildx-demo:ci samples/ci/buildx-demo

      - name: Capture image digest
        id: digest
        run: |
          DIGEST=$(docker image inspect stellaops/buildx-demo:ci --format '{{index .RepoDigests 0}}')
          echo "digest=$DIGEST" >> "$GITHUB_OUTPUT"

      - name: Generate SBOM from built image
        run: |
          mkdir -p out
          docker sbom stellaops/buildx-demo:ci --format cyclonedx-json > out/buildx-sbom.cdx.json

      - name: Start mock Attestor
        id: attestor
        run: |
          mkdir -p out
          cat <<'PY' > out/mock-attestor.py
          import json
          import os
          from http.server import BaseHTTPRequestHandler, HTTPServer

          class Handler(BaseHTTPRequestHandler):
              def do_POST(self):
                  length = int(self.headers.get('Content-Length') or 0)
                  body = self.rfile.read(length)
                  with open(os.path.join('out', 'provenance-request.json'), 'wb') as fp:
                      fp.write(body)
                  self.send_response(202)
                  self.end_headers()
                  self.wfile.write(b'accepted')

              def log_message(self, format, *args):
                  return

          if __name__ == '__main__':
              server = HTTPServer(('127.0.0.1', 8085), Handler)
              try:
                  server.serve_forever()
              except KeyboardInterrupt:
                  pass
              finally:
                  server.server_close()
          PY
          touch out/provenance-request.json
          python3 out/mock-attestor.py &
          echo $! > out/mock-attestor.pid

      - name: Emit descriptor with provenance placeholder
        env:
          IMAGE_DIGEST: ${{ steps.digest.outputs.digest }}
          # Uncomment the next line and remove the mock Attestor block to hit a real service.
          # STELLAOPS_ATTESTOR_TOKEN: ${{ secrets.STELLAOPS_ATTESTOR_TOKEN }}
        run: |
          dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll descriptor \
            --manifest out/buildx \
            --image "$IMAGE_DIGEST" \
            --sbom out/buildx-sbom.cdx.json \
            --sbom-name buildx-sbom.cdx.json \
            --artifact-type application/vnd.stellaops.sbom.layer+json \
            --sbom-format cyclonedx-json \
            --sbom-kind inventory \
            --repository ${{ github.repository }} \
            --build-ref ${{ github.sha }} \
            --attestor http://127.0.0.1:8085/provenance \
            > out/buildx-descriptor.json

      - name: Stop mock Attestor
        if: always()
        run: |
          if [ -f out/mock-attestor.pid ]; then
            kill $(cat out/mock-attestor.pid)
          fi

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: stellaops-buildx-demo
          path: |
            out/buildx-descriptor.json
            out/buildx-sbom.cdx.json
            out/provenance-request.json

      - name: Show descriptor summary
        run: |
          cat out/buildx-descriptor.json

@@ -7,6 +7,8 @@
    <IsConcelierPlugin Condition="'$(IsConcelierPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Concelier.Connector.'))">true</IsConcelierPlugin>
    <IsConcelierPlugin Condition="'$(IsConcelierPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Concelier.Exporter.'))">true</IsConcelierPlugin>
    <IsAuthorityPlugin Condition="'$(IsAuthorityPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Authority.Plugin.'))">true</IsAuthorityPlugin>
    <ScannerBuildxPluginOutputRoot Condition="'$(ScannerBuildxPluginOutputRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\scanner\buildx\'))</ScannerBuildxPluginOutputRoot>
    <IsScannerBuildxPlugin Condition="'$(IsScannerBuildxPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)')) == 'StellaOps.Scanner.Sbomer.BuildXPlugin'">true</IsScannerBuildxPlugin>
  </PropertyGroup>

  <ItemGroup>

@@ -30,4 +30,21 @@

    <Copy SourceFiles="@(AuthorityPluginArtifacts)" DestinationFolder="$(AuthorityPluginOutputDirectory)" SkipUnchangedFiles="true" />
  </Target>

  <Target Name="ScannerCopyBuildxPluginArtifacts" AfterTargets="Build" Condition="'$(IsScannerBuildxPlugin)' == 'true'">
    <PropertyGroup>
      <ScannerBuildxPluginOutputDirectory>$(ScannerBuildxPluginOutputRoot)\$(MSBuildProjectName)</ScannerBuildxPluginOutputDirectory>
    </PropertyGroup>

    <MakeDir Directories="$(ScannerBuildxPluginOutputDirectory)" />

    <ItemGroup>
      <ScannerBuildxPluginArtifacts Include="$(TargetPath)" />
      <ScannerBuildxPluginArtifacts Include="$(TargetPath).deps.json" Condition="Exists('$(TargetPath).deps.json')" />
      <ScannerBuildxPluginArtifacts Include="$(TargetDir)$(TargetName).pdb" Condition="Exists('$(TargetDir)$(TargetName).pdb')" />
      <ScannerBuildxPluginArtifacts Include="$(ProjectDir)stellaops.sbom-indexer.manifest.json" Condition="Exists('$(ProjectDir)stellaops.sbom-indexer.manifest.json')" />
    </ItemGroup>

    <Copy SourceFiles="@(ScannerBuildxPluginArtifacts)" DestinationFolder="$(ScannerBuildxPluginOutputDirectory)" SkipUnchangedFiles="true" />
  </Target>
</Project>

86  src/StellaOps.Policy.Tests/PolicyBinderTests.cs  Normal file
@@ -0,0 +1,86 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Tests;
|
||||
|
||||
public sealed class PolicyBinderTests
|
||||
{
|
||||
[Fact]
|
||||
public void Bind_ValidYaml_ReturnsSuccess()
|
||||
{
|
||||
const string yaml = """
|
||||
version: "1.0"
|
||||
rules:
|
||||
- name: Block Critical
|
||||
severity: [Critical]
|
||||
sources: [NVD]
|
||||
action: block
|
||||
""";
|
||||
|
||||
var result = PolicyBinder.Bind(yaml, PolicyDocumentFormat.Yaml);
|
||||
|
||||
Assert.True(result.Success);
|
||||
Assert.Equal("1.0", result.Document.Version);
|
||||
Assert.Single(result.Document.Rules);
|
||||
Assert.Empty(result.Issues);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Bind_InvalidSeverity_ReturnsError()
|
||||
{
|
||||
const string yaml = """
|
||||
version: "1.0"
|
||||
rules:
|
||||
- name: Invalid Severity
|
||||
severity: [Nope]
|
||||
action: block
|
||||
""";
|
||||
|
||||
var result = PolicyBinder.Bind(yaml, PolicyDocumentFormat.Yaml);
|
||||
|
||||
Assert.False(result.Success);
|
||||
Assert.Contains(result.Issues, issue => issue.Code == "policy.severity.invalid");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Cli_StrictMode_FailsOnWarnings()
|
||||
{
|
||||
const string yaml = """
|
||||
version: "1.0"
|
||||
rules:
|
||||
- name: Quiet Warning
|
||||
sources: ["", "NVD"]
|
||||
action: ignore
|
||||
""";
|
||||
|
||||
var path = Path.Combine(Path.GetTempPath(), $"policy-{Guid.NewGuid():N}.yaml");
|
||||
await File.WriteAllTextAsync(path, yaml);
|
||||
|
||||
try
|
||||
{
|
||||
using var output = new StringWriter();
|
||||
using var error = new StringWriter();
|
||||
var cli = new PolicyValidationCli(output, error);
|
||||
var options = new PolicyValidationCliOptions
|
||||
{
|
||||
Inputs = new[] { path },
|
||||
Strict = true,
|
||||
};
|
||||
|
||||
var exitCode = await cli.RunAsync(options, CancellationToken.None);
|
||||
|
||||
Assert.Equal(2, exitCode);
|
||||
Assert.Contains("WARNING", output.ToString());
|
||||
}
|
||||
finally
|
||||
{
|
||||
if (File.Exists(path))
|
||||
{
|
||||
File.Delete(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||

166  src/StellaOps.Policy.Tests/PolicyPreviewServiceTests.cs  Normal file
@@ -0,0 +1,166 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Tests;
|
||||
|
||||
public sealed class PolicyPreviewServiceTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task PreviewAsync_ComputesDiffs_ForBlockingRule()
|
||||
{
|
||||
const string yaml = """
|
||||
version: "1.0"
|
||||
rules:
|
||||
- name: Block Critical
|
||||
severity: [Critical]
|
||||
action: block
|
||||
""";
|
||||
|
||||
var snapshotRepo = new InMemoryPolicySnapshotRepository();
|
||||
var auditRepo = new InMemoryPolicyAuditRepository();
|
||||
var timeProvider = new FakeTimeProvider();
|
||||
var store = new PolicySnapshotStore(snapshotRepo, auditRepo, timeProvider, NullLogger<PolicySnapshotStore>.Instance);
|
||||
|
||||
await store.SaveAsync(new PolicySnapshotContent(yaml, PolicyDocumentFormat.Yaml, "tester", null, null), CancellationToken.None);
|
||||
|
||||
var service = new PolicyPreviewService(store, NullLogger<PolicyPreviewService>.Instance);
|
||||
|
||||
var findings = ImmutableArray.Create(
|
||||
PolicyFinding.Create("finding-1", PolicySeverity.Critical, environment: "prod", source: "NVD"),
|
||||
PolicyFinding.Create("finding-2", PolicySeverity.Low));
|
||||
|
||||
var baseline = ImmutableArray.Create(
|
||||
new PolicyVerdict("finding-1", PolicyVerdictStatus.Pass),
|
||||
new PolicyVerdict("finding-2", PolicyVerdictStatus.Pass));
|
||||
|
||||
var response = await service.PreviewAsync(new PolicyPreviewRequest(
|
||||
"sha256:abc",
|
||||
findings,
|
||||
baseline),
|
||||
CancellationToken.None);
|
||||
|
||||
Assert.True(response.Success);
|
||||
Assert.Equal(1, response.ChangedCount);
|
||||
var diff1 = Assert.Single(response.Diffs.Where(diff => diff.Projected.FindingId == "finding-1"));
|
||||
Assert.Equal(PolicyVerdictStatus.Pass, diff1.Baseline.Status);
|
||||
Assert.Equal(PolicyVerdictStatus.Blocked, diff1.Projected.Status);
|
||||
Assert.Equal("Block Critical", diff1.Projected.RuleName);
|
||||
Assert.True(diff1.Projected.Score > 0);
|
||||
Assert.Equal(PolicyScoringConfig.Default.Version, diff1.Projected.ConfigVersion);
|
||||
Assert.Equal(PolicyVerdictStatus.Pass, response.Diffs.First(diff => diff.Projected.FindingId == "finding-2").Projected.Status);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task PreviewAsync_UsesProposedPolicy_WhenProvided()
|
||||
{
|
||||
const string yaml = """
|
||||
version: "1.0"
|
||||
rules:
|
||||
- name: Ignore Dev
|
||||
environments: [dev]
|
||||
action:
|
||||
type: ignore
|
||||
justification: dev waiver
|
||||
""";
|
||||
|
||||
var snapshotRepo = new InMemoryPolicySnapshotRepository();
|
||||
var auditRepo = new InMemoryPolicyAuditRepository();
|
||||
var store = new PolicySnapshotStore(snapshotRepo, auditRepo, TimeProvider.System, NullLogger<PolicySnapshotStore>.Instance);
|
||||
var service = new PolicyPreviewService(store, NullLogger<PolicyPreviewService>.Instance);
|
||||
|
||||
var findings = ImmutableArray.Create(
|
||||
PolicyFinding.Create("finding-1", PolicySeverity.Medium, environment: "dev"));
|
||||
|
||||
var baseline = ImmutableArray.Create(new PolicyVerdict("finding-1", PolicyVerdictStatus.Blocked));
|
||||
|
||||
var response = await service.PreviewAsync(new PolicyPreviewRequest(
|
||||
"sha256:def",
|
||||
findings,
|
||||
baseline,
|
||||
SnapshotOverride: null,
|
||||
ProposedPolicy: new PolicySnapshotContent(yaml, PolicyDocumentFormat.Yaml, "tester", null, "dev override")),
|
||||
CancellationToken.None);
|
||||
|
||||
Assert.True(response.Success);
|
||||
var diff = Assert.Single(response.Diffs);
|
||||
Assert.Equal(PolicyVerdictStatus.Blocked, diff.Baseline.Status);
|
||||
Assert.Equal(PolicyVerdictStatus.Ignored, diff.Projected.Status);
|
||||
Assert.Equal("Ignore Dev", diff.Projected.RuleName);
|
||||
Assert.True(diff.Projected.Score >= 0);
|
||||
Assert.Equal(1, response.ChangedCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task PreviewAsync_ReturnsIssues_WhenPolicyInvalid()
|
||||
{
|
||||
var snapshotRepo = new InMemoryPolicySnapshotRepository();
|
||||
var auditRepo = new InMemoryPolicyAuditRepository();
|
||||
var store = new PolicySnapshotStore(snapshotRepo, auditRepo, TimeProvider.System, NullLogger<PolicySnapshotStore>.Instance);
|
||||
var service = new PolicyPreviewService(store, NullLogger<PolicyPreviewService>.Instance);
|
||||
|
||||
const string invalid = "version: 1.0";
|
||||
var request = new PolicyPreviewRequest(
|
||||
"sha256:ghi",
|
||||
ImmutableArray<PolicyFinding>.Empty,
|
||||
ImmutableArray<PolicyVerdict>.Empty,
|
||||
SnapshotOverride: null,
|
||||
ProposedPolicy: new PolicySnapshotContent(invalid, PolicyDocumentFormat.Yaml, null, null, null));
|
||||
|
||||
var response = await service.PreviewAsync(request, CancellationToken.None);
|
||||
|
||||
Assert.False(response.Success);
|
||||
Assert.NotEmpty(response.Issues);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task PreviewAsync_QuietWithoutVexDowngradesToWarn()
|
||||
{
|
||||
const string yaml = """
|
||||
version: "1.0"
|
||||
rules:
|
||||
- name: Quiet Without VEX
|
||||
severity: [Low]
|
||||
quiet: true
|
||||
action:
|
||||
type: ignore
|
||||
""";
|
||||
|
||||
var binding = PolicyBinder.Bind(yaml, PolicyDocumentFormat.Yaml);
|
||||
Assert.True(binding.Success);
|
||||
Assert.Empty(binding.Issues);
|
||||
Assert.False(binding.Document.Rules[0].Metadata.ContainsKey("quiet"));
|
||||
Assert.True(binding.Document.Rules[0].Action.Quiet);
|
||||
|
||||
var store = new PolicySnapshotStore(new InMemoryPolicySnapshotRepository(), new InMemoryPolicyAuditRepository(), TimeProvider.System, NullLogger<PolicySnapshotStore>.Instance);
|
||||
await store.SaveAsync(new PolicySnapshotContent(yaml, PolicyDocumentFormat.Yaml, "tester", null, "quiet test"), CancellationToken.None);
|
||||
var snapshot = await store.GetLatestAsync();
|
||||
Assert.NotNull(snapshot);
|
||||
Assert.True(snapshot!.Document.Rules[0].Action.Quiet);
|
||||
Assert.Null(snapshot.Document.Rules[0].Action.RequireVex);
|
||||
Assert.Equal(PolicyActionType.Ignore, snapshot.Document.Rules[0].Action.Type);
|
||||
var manualVerdict = PolicyEvaluation.EvaluateFinding(snapshot.Document, snapshot.ScoringConfig, PolicyFinding.Create("finding-quiet", PolicySeverity.Low));
|
||||
Assert.Equal(PolicyVerdictStatus.Warned, manualVerdict.Status);
|
||||
|
||||
var service = new PolicyPreviewService(store, NullLogger<PolicyPreviewService>.Instance);
|
||||
|
||||
var findings = ImmutableArray.Create(PolicyFinding.Create("finding-quiet", PolicySeverity.Low));
|
||||
var baseline = ImmutableArray<PolicyVerdict>.Empty;
|
||||
|
||||
var response = await service.PreviewAsync(new PolicyPreviewRequest(
|
||||
"sha256:quiet",
|
||||
findings,
|
||||
baseline),
|
||||
CancellationToken.None);
|
||||
|
||||
Assert.True(response.Success);
|
||||
var verdict = Assert.Single(response.Diffs).Projected;
|
||||
Assert.Equal(PolicyVerdictStatus.Warned, verdict.Status);
|
||||
Assert.Contains("requireVex", verdict.Notes, System.StringComparison.OrdinalIgnoreCase);
|
||||
Assert.True(verdict.Score >= 0);
|
||||
}
|
||||
}
|
||||

26  src/StellaOps.Policy.Tests/PolicyScoringConfigTests.cs  Normal file
@@ -0,0 +1,26 @@
using System.Threading.Tasks;
using Xunit;

namespace StellaOps.Policy.Tests;

public sealed class PolicyScoringConfigTests
{
    [Fact]
    public void LoadDefaultReturnsConfig()
    {
        var config = PolicyScoringConfigBinder.LoadDefault();
        Assert.NotNull(config);
        Assert.Equal("1.0", config.Version);
        Assert.NotEmpty(config.SeverityWeights);
        Assert.True(config.SeverityWeights.ContainsKey(PolicySeverity.Critical));
        Assert.True(config.QuietPenalty > 0);
    }

    [Fact]
    public void BindRejectsEmptyContent()
    {
        var result = PolicyScoringConfigBinder.Bind(string.Empty, PolicyDocumentFormat.Json);
        Assert.False(result.Success);
        Assert.NotEmpty(result.Issues);
    }
}

94  src/StellaOps.Policy.Tests/PolicySnapshotStoreTests.cs  Normal file
@@ -0,0 +1,94 @@
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Tests;
|
||||
|
||||
public sealed class PolicySnapshotStoreTests
|
||||
{
|
||||
private const string BasePolicyYaml = """
|
||||
version: "1.0"
|
||||
rules:
|
||||
- name: Block Critical
|
||||
severity: [Critical]
|
||||
action: block
|
||||
""";
|
||||
|
||||
[Fact]
|
||||
public async Task SaveAsync_CreatesNewSnapshotAndAuditEntry()
|
||||
{
|
||||
var snapshotRepo = new InMemoryPolicySnapshotRepository();
|
||||
var auditRepo = new InMemoryPolicyAuditRepository();
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 10, 0, 0, TimeSpan.Zero));
|
||||
var store = new PolicySnapshotStore(snapshotRepo, auditRepo, timeProvider, NullLogger<PolicySnapshotStore>.Instance);
|
||||
|
||||
var content = new PolicySnapshotContent(BasePolicyYaml, PolicyDocumentFormat.Yaml, "cli", "test", null);
|
||||
|
||||
var result = await store.SaveAsync(content, CancellationToken.None);
|
||||
|
||||
Assert.True(result.Success);
|
||||
Assert.True(result.Created);
|
||||
Assert.NotNull(result.Snapshot);
|
||||
Assert.Equal("rev-1", result.Snapshot!.RevisionId);
|
||||
Assert.Equal(result.Digest, result.Snapshot.Digest);
|
||||
Assert.Equal(timeProvider.GetUtcNow(), result.Snapshot.CreatedAt);
|
||||
Assert.Equal(PolicyScoringConfig.Default.Version, result.Snapshot.ScoringConfig.Version);
|
||||
|
||||
var latest = await store.GetLatestAsync();
|
||||
Assert.Equal(result.Snapshot, latest);
|
||||
|
||||
var audits = await auditRepo.ListAsync(10);
|
||||
Assert.Single(audits);
|
||||
Assert.Equal(result.Digest, audits[0].Digest);
|
||||
Assert.Equal("snapshot.created", audits[0].Action);
|
||||
Assert.Equal("rev-1", audits[0].RevisionId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SaveAsync_DoesNotCreateNewRevisionWhenDigestUnchanged()
|
||||
{
|
||||
var snapshotRepo = new InMemoryPolicySnapshotRepository();
|
||||
var auditRepo = new InMemoryPolicyAuditRepository();
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 10, 0, 0, TimeSpan.Zero));
|
||||
var store = new PolicySnapshotStore(snapshotRepo, auditRepo, timeProvider, NullLogger<PolicySnapshotStore>.Instance);
|
||||
|
||||
var content = new PolicySnapshotContent(BasePolicyYaml, PolicyDocumentFormat.Yaml, "cli", "test", null);
|
||||
var first = await store.SaveAsync(content, CancellationToken.None);
|
||||
Assert.True(first.Created);
|
||||
|
||||
timeProvider.Advance(TimeSpan.FromHours(1));
|
||||
var second = await store.SaveAsync(content, CancellationToken.None);
|
||||
|
||||
Assert.True(second.Success);
|
||||
Assert.False(second.Created);
|
||||
Assert.Equal(first.Digest, second.Digest);
|
||||
Assert.Equal("rev-1", second.Snapshot!.RevisionId);
|
||||
Assert.Equal(PolicyScoringConfig.Default.Version, second.Snapshot.ScoringConfig.Version);
|
||||
|
||||
var audits = await auditRepo.ListAsync(10);
|
||||
Assert.Single(audits);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SaveAsync_ReturnsFailureWhenValidationFails()
|
||||
{
|
||||
var snapshotRepo = new InMemoryPolicySnapshotRepository();
|
||||
var auditRepo = new InMemoryPolicyAuditRepository();
|
||||
var store = new PolicySnapshotStore(snapshotRepo, auditRepo, TimeProvider.System, NullLogger<PolicySnapshotStore>.Instance);
|
||||
|
||||
const string invalidYaml = "version: '1.0'\nrules: []";
|
||||
var content = new PolicySnapshotContent(invalidYaml, PolicyDocumentFormat.Yaml, null, null, null);
|
||||
|
||||
var result = await store.SaveAsync(content, CancellationToken.None);
|
||||
|
||||
Assert.False(result.Success);
|
||||
Assert.False(result.Created);
|
||||
Assert.Null(result.Snapshot);
|
||||
|
||||
var audits = await auditRepo.ListAsync(5);
|
||||
Assert.Empty(audits);
|
||||
}
|
||||
}
|
||||

13  src/StellaOps.Policy.Tests/StellaOps.Policy.Tests.csproj  Normal file
@@ -0,0 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Policy\StellaOps.Policy.csproj" />
  </ItemGroup>

</Project>

12  src/StellaOps.Policy/Audit/IPolicyAuditRepository.cs  Normal file
@@ -0,0 +1,12 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

namespace StellaOps.Policy;

public interface IPolicyAuditRepository
{
    Task AddAsync(PolicyAuditEntry entry, CancellationToken cancellationToken = default);

    Task<IReadOnlyList<PolicyAuditEntry>> ListAsync(int limit, CancellationToken cancellationToken = default);
}

52  src/StellaOps.Policy/Audit/InMemoryPolicyAuditRepository.cs  Normal file
@@ -0,0 +1,52 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

namespace StellaOps.Policy;

public sealed class InMemoryPolicyAuditRepository : IPolicyAuditRepository
{
    private readonly List<PolicyAuditEntry> _entries = new();
    private readonly SemaphoreSlim _mutex = new(1, 1);

    public async Task AddAsync(PolicyAuditEntry entry, CancellationToken cancellationToken = default)
    {
        if (entry is null)
        {
            throw new ArgumentNullException(nameof(entry));
        }

        await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            _entries.Add(entry);
            _entries.Sort(static (left, right) => left.CreatedAt.CompareTo(right.CreatedAt));
        }
        finally
        {
            _mutex.Release();
        }
    }

    public async Task<IReadOnlyList<PolicyAuditEntry>> ListAsync(int limit, CancellationToken cancellationToken = default)
    {
        await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            IEnumerable<PolicyAuditEntry> query = _entries;
            if (limit > 0)
            {
                query = query.TakeLast(limit);
            }

            return query.ToImmutableArray();
        }
        finally
        {
            _mutex.Release();
        }
    }
}

12  src/StellaOps.Policy/PolicyAuditEntry.cs  Normal file
@@ -0,0 +1,12 @@
using System;

namespace StellaOps.Policy;

public sealed record PolicyAuditEntry(
    Guid Id,
    DateTimeOffset CreatedAt,
    string Action,
    string RevisionId,
    string Digest,
    string? Actor,
    string Message);
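A quick illustrative sketch (not part of this commit) of how the audit pieces above fit together: entries are appended through `IPolicyAuditRepository.AddAsync` and read back in `CreatedAt` order via `ListAsync`. The identifiers, digest, and message values below are made up for the example.

```csharp
using System;
using System.Threading.Tasks;
using StellaOps.Policy;

public static class PolicyAuditExample
{
    public static async Task RunAsync()
    {
        IPolicyAuditRepository repository = new InMemoryPolicyAuditRepository();

        // Record a snapshot-created event; the Guid, timestamp, and digest are illustrative only.
        await repository.AddAsync(new PolicyAuditEntry(
            Guid.NewGuid(),
            DateTimeOffset.UtcNow,
            "snapshot.created",
            "rev-1",
            "sha256:1111",
            "cli",
            "Initial policy snapshot"));

        // ListAsync returns at most `limit` entries, ordered by CreatedAt ascending.
        var recent = await repository.ListAsync(limit: 10);
        foreach (var entry in recent)
        {
            Console.WriteLine($"{entry.CreatedAt:O} {entry.Action} {entry.RevisionId} {entry.Digest}");
        }
    }
}
```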

913  src/StellaOps.Policy/PolicyBinder.cs  Normal file
@@ -0,0 +1,913 @@
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
using System.Text.Json.Serialization;
|
||||
using YamlDotNet.Serialization;
|
||||
using YamlDotNet.Serialization.NamingConventions;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
public enum PolicyDocumentFormat
|
||||
{
|
||||
Json,
|
||||
Yaml,
|
||||
}
|
||||
|
||||
public sealed record PolicyBindingResult(
|
||||
bool Success,
|
||||
PolicyDocument Document,
|
||||
ImmutableArray<PolicyIssue> Issues,
|
||||
PolicyDocumentFormat Format);
|
||||
|
||||
public static class PolicyBinder
|
||||
{
|
||||
private static readonly JsonSerializerOptions SerializerOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
ReadCommentHandling = JsonCommentHandling.Skip,
|
||||
AllowTrailingCommas = true,
|
||||
NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString,
|
||||
Converters =
|
||||
{
|
||||
new JsonStringEnumConverter()
|
||||
},
|
||||
};
|
||||
|
||||
private static readonly IDeserializer YamlDeserializer = new DeserializerBuilder()
|
||||
.WithNamingConvention(CamelCaseNamingConvention.Instance)
|
||||
.IgnoreUnmatchedProperties()
|
||||
.Build();
|
||||
|
||||
public static PolicyBindingResult Bind(string content, PolicyDocumentFormat format)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(content))
|
||||
{
|
||||
var issues = ImmutableArray.Create(
|
||||
PolicyIssue.Error("policy.empty", "Policy document is empty.", "$"));
|
||||
return new PolicyBindingResult(false, PolicyDocument.Empty, issues, format);
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var node = ParseToNode(content, format);
|
||||
if (node is not JsonObject obj)
|
||||
{
|
||||
var issues = ImmutableArray.Create(
|
||||
PolicyIssue.Error("policy.document.invalid", "Policy document must be an object.", "$"));
|
||||
return new PolicyBindingResult(false, PolicyDocument.Empty, issues, format);
|
||||
}
|
||||
|
||||
var model = obj.Deserialize<PolicyDocumentModel>(SerializerOptions) ?? new PolicyDocumentModel();
|
||||
var normalization = PolicyNormalizer.Normalize(model);
|
||||
var success = normalization.Issues.All(static issue => issue.Severity != PolicyIssueSeverity.Error);
|
||||
return new PolicyBindingResult(success, normalization.Document, normalization.Issues, format);
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
var issues = ImmutableArray.Create(
|
||||
PolicyIssue.Error("policy.parse.json", $"Failed to parse policy JSON: {ex.Message}", "$"));
|
||||
return new PolicyBindingResult(false, PolicyDocument.Empty, issues, format);
|
||||
}
|
||||
catch (YamlDotNet.Core.YamlException ex)
|
||||
{
|
||||
var issues = ImmutableArray.Create(
|
||||
PolicyIssue.Error("policy.parse.yaml", $"Failed to parse policy YAML: {ex.Message}", "$"));
|
||||
return new PolicyBindingResult(false, PolicyDocument.Empty, issues, format);
|
||||
}
|
||||
}
|
||||
|
||||
public static PolicyBindingResult Bind(Stream stream, PolicyDocumentFormat format, Encoding? encoding = null)
|
||||
{
|
||||
if (stream is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(stream));
|
||||
}
|
||||
|
||||
encoding ??= Encoding.UTF8;
|
||||
using var reader = new StreamReader(stream, encoding, detectEncodingFromByteOrderMarks: true, leaveOpen: true);
|
||||
var content = reader.ReadToEnd();
|
||||
return Bind(content, format);
|
||||
}
|
||||
|
||||
private static JsonNode? ParseToNode(string content, PolicyDocumentFormat format)
|
||||
{
|
||||
return format switch
|
||||
{
|
||||
PolicyDocumentFormat.Json => JsonNode.Parse(content, documentOptions: new JsonDocumentOptions
|
||||
{
|
||||
AllowTrailingCommas = true,
|
||||
CommentHandling = JsonCommentHandling.Skip,
|
||||
}),
|
||||
PolicyDocumentFormat.Yaml => ConvertYamlToJsonNode(content),
|
||||
_ => throw new ArgumentOutOfRangeException(nameof(format), format, "Unsupported policy document format."),
|
||||
};
|
||||
}
|
||||
|
||||
private static JsonNode? ConvertYamlToJsonNode(string content)
|
||||
{
|
||||
var yamlObject = YamlDeserializer.Deserialize<object?>(content);
|
||||
return ConvertYamlObject(yamlObject);
|
||||
}
|
||||
|
||||
private static JsonNode? ConvertYamlObject(object? value)
|
||||
{
|
||||
switch (value)
|
||||
{
|
||||
case null:
|
||||
return null;
|
||||
case string s:
|
||||
return JsonValue.Create(s);
|
||||
case bool b:
|
||||
return JsonValue.Create(b);
|
||||
case sbyte or byte or short or ushort or int or uint or long or ulong or float or double or decimal:
|
||||
return JsonValue.Create(Convert.ToDecimal(value, CultureInfo.InvariantCulture));
|
||||
case DateTime dt:
|
||||
return JsonValue.Create(dt.ToString("O", CultureInfo.InvariantCulture));
|
||||
case DateTimeOffset dto:
|
||||
return JsonValue.Create(dto.ToString("O", CultureInfo.InvariantCulture));
|
||||
case Enum e:
|
||||
return JsonValue.Create(e.ToString());
|
||||
case IDictionary dictionary:
|
||||
{
|
||||
var obj = new JsonObject();
|
||||
foreach (DictionaryEntry entry in dictionary)
|
||||
{
|
||||
if (entry.Key is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var key = Convert.ToString(entry.Key, CultureInfo.InvariantCulture);
|
||||
if (string.IsNullOrWhiteSpace(key))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
obj[key!] = ConvertYamlObject(entry.Value);
|
||||
}
|
||||
|
||||
return obj;
|
||||
}
|
||||
case IEnumerable enumerable:
|
||||
{
|
||||
var array = new JsonArray();
|
||||
foreach (var item in enumerable)
|
||||
{
|
||||
array.Add(ConvertYamlObject(item));
|
||||
}
|
||||
|
||||
return array;
|
||||
}
|
||||
default:
|
||||
return JsonValue.Create(value.ToString());
|
||||
}
|
||||
}
|
||||
|
||||
private sealed record PolicyDocumentModel
|
||||
{
|
||||
[JsonPropertyName("version")]
|
||||
public JsonNode? Version { get; init; }
|
||||
|
||||
[JsonPropertyName("description")]
|
||||
public string? Description { get; init; }
|
||||
|
||||
[JsonPropertyName("metadata")]
|
||||
public Dictionary<string, JsonNode?>? Metadata { get; init; }
|
||||
|
||||
[JsonPropertyName("rules")]
|
||||
public List<PolicyRuleModel>? Rules { get; init; }
|
||||
|
||||
[JsonExtensionData]
|
||||
public Dictionary<string, JsonElement>? Extensions { get; init; }
|
||||
}
|
||||
|
||||
private sealed record PolicyRuleModel
|
||||
{
|
||||
[JsonPropertyName("id")]
|
||||
public string? Identifier { get; init; }
|
||||
|
||||
[JsonPropertyName("name")]
|
||||
public string? Name { get; init; }
|
||||
|
||||
[JsonPropertyName("description")]
|
||||
public string? Description { get; init; }
|
||||
|
||||
[JsonPropertyName("severity")]
|
||||
public List<string>? Severity { get; init; }
|
||||
|
||||
[JsonPropertyName("sources")]
|
||||
public List<string>? Sources { get; init; }
|
||||
|
||||
[JsonPropertyName("vendors")]
|
||||
public List<string>? Vendors { get; init; }
|
||||
|
||||
[JsonPropertyName("licenses")]
|
||||
public List<string>? Licenses { get; init; }
|
||||
|
||||
[JsonPropertyName("tags")]
|
||||
public List<string>? Tags { get; init; }
|
||||
|
||||
[JsonPropertyName("environments")]
|
||||
public List<string>? Environments { get; init; }
|
||||
|
||||
[JsonPropertyName("images")]
|
||||
public List<string>? Images { get; init; }
|
||||
|
||||
[JsonPropertyName("repositories")]
|
||||
public List<string>? Repositories { get; init; }
|
||||
|
||||
[JsonPropertyName("packages")]
|
||||
public List<string>? Packages { get; init; }
|
||||
|
||||
[JsonPropertyName("purls")]
|
||||
public List<string>? Purls { get; init; }
|
||||
|
||||
[JsonPropertyName("cves")]
|
||||
public List<string>? Cves { get; init; }
|
||||
|
||||
[JsonPropertyName("paths")]
|
||||
public List<string>? Paths { get; init; }
|
||||
|
||||
[JsonPropertyName("layerDigests")]
|
||||
public List<string>? LayerDigests { get; init; }
|
||||
|
||||
[JsonPropertyName("usedByEntrypoint")]
|
||||
public List<string>? UsedByEntrypoint { get; init; }
|
||||
|
||||
[JsonPropertyName("action")]
|
||||
public JsonNode? Action { get; init; }
|
||||
|
||||
[JsonPropertyName("expires")]
|
||||
public JsonNode? Expires { get; init; }
|
||||
|
||||
[JsonPropertyName("until")]
|
||||
public JsonNode? Until { get; init; }
|
||||
|
||||
[JsonPropertyName("justification")]
|
||||
public string? Justification { get; init; }
|
||||
|
||||
[JsonPropertyName("quiet")]
|
||||
public bool? Quiet { get; init; }
|
||||
|
||||
[JsonPropertyName("metadata")]
|
||||
public Dictionary<string, JsonNode?>? Metadata { get; init; }
|
||||
|
||||
[JsonExtensionData]
|
||||
public Dictionary<string, JsonElement>? Extensions { get; init; }
|
||||
}
|
||||
|
||||
private sealed class PolicyNormalizer
|
||||
{
|
||||
private static readonly ImmutableDictionary<string, PolicySeverity> SeverityMap =
|
||||
new Dictionary<string, PolicySeverity>(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
["critical"] = PolicySeverity.Critical,
|
||||
["high"] = PolicySeverity.High,
|
||||
["medium"] = PolicySeverity.Medium,
|
||||
["moderate"] = PolicySeverity.Medium,
|
||||
["low"] = PolicySeverity.Low,
|
||||
["informational"] = PolicySeverity.Informational,
|
||||
["info"] = PolicySeverity.Informational,
|
||||
["none"] = PolicySeverity.None,
|
||||
["unknown"] = PolicySeverity.Unknown,
|
||||
}.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
public static (PolicyDocument Document, ImmutableArray<PolicyIssue> Issues) Normalize(PolicyDocumentModel model)
|
||||
{
|
||||
var issues = ImmutableArray.CreateBuilder<PolicyIssue>();
|
||||
|
||||
var version = NormalizeVersion(model.Version, issues);
|
||||
var metadata = NormalizeMetadata(model.Metadata, "$.metadata", issues);
|
||||
var rules = NormalizeRules(model.Rules, issues);
|
||||
|
||||
if (model.Extensions is { Count: > 0 })
|
||||
{
|
||||
foreach (var pair in model.Extensions)
|
||||
{
|
||||
issues.Add(PolicyIssue.Warning(
|
||||
"policy.document.extension",
|
||||
$"Unrecognized document property '{pair.Key}' has been ignored.",
|
||||
$"$.{pair.Key}"));
|
||||
}
|
||||
}
|
||||
|
||||
var document = new PolicyDocument(
|
||||
version ?? PolicySchema.CurrentVersion,
|
||||
rules,
|
||||
metadata);
|
||||
|
||||
var orderedIssues = SortIssues(issues);
|
||||
return (document, orderedIssues);
|
||||
}
|
||||
|
||||
private static string? NormalizeVersion(JsonNode? versionNode, ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
if (versionNode is null)
|
||||
{
|
||||
issues.Add(PolicyIssue.Warning("policy.version.missing", "Policy version not specified; defaulting to 1.0.", "$.version"));
|
||||
return PolicySchema.CurrentVersion;
|
||||
}
|
||||
|
||||
if (versionNode is JsonValue value)
|
||||
{
|
||||
if (value.TryGetValue(out string? versionText))
|
||||
{
|
||||
versionText = versionText?.Trim();
|
||||
if (string.IsNullOrEmpty(versionText))
|
||||
{
|
||||
issues.Add(PolicyIssue.Error("policy.version.empty", "Policy version is empty.", "$.version"));
|
||||
return null;
|
||||
}
|
||||
|
||||
if (IsSupportedVersion(versionText))
|
||||
{
|
||||
return CanonicalizeVersion(versionText);
|
||||
}
|
||||
|
||||
issues.Add(PolicyIssue.Error("policy.version.unsupported", $"Unsupported policy version '{versionText}'. Expected '{PolicySchema.CurrentVersion}'.", "$.version"));
|
||||
return null;
|
||||
}
|
||||
|
||||
if (value.TryGetValue(out double numericVersion))
|
||||
{
|
||||
var numericText = numericVersion.ToString("0.0###", CultureInfo.InvariantCulture);
|
||||
if (IsSupportedVersion(numericText))
|
||||
{
|
||||
return CanonicalizeVersion(numericText);
|
||||
}
|
||||
|
||||
issues.Add(PolicyIssue.Error("policy.version.unsupported", $"Unsupported policy version '{numericText}'.", "$.version"));
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
var raw = versionNode.ToJsonString();
|
||||
issues.Add(PolicyIssue.Error("policy.version.invalid", $"Policy version must be a string. Received: {raw}", "$.version"));
|
||||
return null;
|
||||
}
|
||||
|
||||
private static bool IsSupportedVersion(string versionText)
|
||||
=> string.Equals(versionText, "1", StringComparison.OrdinalIgnoreCase)
|
||||
|| string.Equals(versionText, "1.0", StringComparison.OrdinalIgnoreCase)
|
||||
|| string.Equals(versionText, PolicySchema.CurrentVersion, StringComparison.OrdinalIgnoreCase);
|
||||
|
||||
private static string CanonicalizeVersion(string versionText)
|
||||
=> string.Equals(versionText, "1", StringComparison.OrdinalIgnoreCase)
|
||||
? "1.0"
|
||||
: versionText;
|
||||
|
||||
private static ImmutableDictionary<string, string> NormalizeMetadata(
|
||||
Dictionary<string, JsonNode?>? metadata,
|
||||
string path,
|
||||
ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
if (metadata is null || metadata.Count == 0)
|
||||
{
|
||||
return ImmutableDictionary<string, string>.Empty;
|
||||
}
|
||||
|
||||
var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
|
||||
foreach (var pair in metadata)
|
||||
{
|
||||
var key = pair.Key?.Trim();
|
||||
if (string.IsNullOrEmpty(key))
|
||||
{
|
||||
issues.Add(PolicyIssue.Warning("policy.metadata.key.empty", "Metadata keys must be non-empty strings.", path));
|
||||
continue;
|
||||
}
|
||||
|
||||
var value = ConvertNodeToString(pair.Value);
|
||||
builder[key] = value;
|
||||
}
|
||||
|
||||
return builder.ToImmutable();
|
||||
}
|
||||
|
||||
private static ImmutableArray<PolicyRule> NormalizeRules(
|
||||
List<PolicyRuleModel>? rules,
|
||||
ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
if (rules is null || rules.Count == 0)
|
||||
{
|
||||
issues.Add(PolicyIssue.Error("policy.rules.empty", "At least one rule must be defined.", "$.rules"));
|
||||
return ImmutableArray<PolicyRule>.Empty;
|
||||
}
|
||||
|
||||
var normalized = new List<(PolicyRule Rule, int Index)>(rules.Count);
|
||||
var seenNames = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
for (var index = 0; index < rules.Count; index++)
|
||||
{
|
||||
var model = rules[index];
|
||||
var normalizedRule = NormalizeRule(model, index, issues);
|
||||
if (normalizedRule is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!seenNames.Add(normalizedRule.Name))
|
||||
{
|
||||
issues.Add(PolicyIssue.Warning(
|
||||
"policy.rules.duplicateName",
|
||||
$"Duplicate rule name '{normalizedRule.Name}' detected; evaluation order may be ambiguous.",
|
||||
$"$.rules[{index}].name"));
|
||||
}
|
||||
|
||||
normalized.Add((normalizedRule, index));
|
||||
}
|
||||
|
||||
return normalized
|
||||
.OrderBy(static tuple => tuple.Rule.Name, StringComparer.OrdinalIgnoreCase)
|
||||
.ThenBy(static tuple => tuple.Rule.Identifier ?? string.Empty, StringComparer.OrdinalIgnoreCase)
|
||||
.ThenBy(static tuple => tuple.Index)
|
||||
.Select(static tuple => tuple.Rule)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
private static PolicyRule? NormalizeRule(
|
||||
PolicyRuleModel model,
|
||||
int index,
|
||||
ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
var basePath = $"$.rules[{index}]";
|
||||
|
||||
var name = NormalizeRequiredString(model.Name, $"{basePath}.name", "Rule name", issues);
|
||||
if (name is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var identifier = NormalizeOptionalString(model.Identifier);
|
||||
var description = NormalizeOptionalString(model.Description);
|
||||
var metadata = NormalizeMetadata(model.Metadata, $"{basePath}.metadata", issues);
|
||||
|
||||
var severities = NormalizeSeverityList(model.Severity, $"{basePath}.severity", issues);
|
||||
var environments = NormalizeStringList(model.Environments, $"{basePath}.environments", issues);
|
||||
var sources = NormalizeStringList(model.Sources, $"{basePath}.sources", issues);
|
||||
var vendors = NormalizeStringList(model.Vendors, $"{basePath}.vendors", issues);
|
||||
var licenses = NormalizeStringList(model.Licenses, $"{basePath}.licenses", issues);
|
||||
var tags = NormalizeStringList(model.Tags, $"{basePath}.tags", issues);
|
||||
|
||||
var match = new PolicyRuleMatchCriteria(
|
||||
NormalizeStringList(model.Images, $"{basePath}.images", issues),
|
||||
NormalizeStringList(model.Repositories, $"{basePath}.repositories", issues),
|
||||
NormalizeStringList(model.Packages, $"{basePath}.packages", issues),
|
||||
NormalizeStringList(model.Purls, $"{basePath}.purls", issues),
|
||||
NormalizeStringList(model.Cves, $"{basePath}.cves", issues),
|
||||
NormalizeStringList(model.Paths, $"{basePath}.paths", issues),
|
||||
NormalizeStringList(model.LayerDigests, $"{basePath}.layerDigests", issues),
|
||||
NormalizeStringList(model.UsedByEntrypoint, $"{basePath}.usedByEntrypoint", issues));
|
||||
|
||||
var action = NormalizeAction(model, basePath, issues);
|
||||
var justification = NormalizeOptionalString(model.Justification);
|
||||
var expires = NormalizeTemporal(model.Expires ?? model.Until, $"{basePath}.expires", issues);
|
||||
|
||||
if (model.Extensions is { Count: > 0 })
|
||||
{
|
||||
foreach (var pair in model.Extensions)
|
||||
{
|
||||
issues.Add(PolicyIssue.Warning(
|
||||
"policy.rule.extension",
|
||||
$"Unrecognized rule property '{pair.Key}' has been ignored.",
|
||||
$"{basePath}.{pair.Key}"));
|
||||
}
|
||||
}
|
||||
|
||||
return PolicyRule.Create(
|
||||
name,
|
||||
action,
|
||||
severities,
|
||||
environments,
|
||||
sources,
|
||||
vendors,
|
||||
licenses,
|
||||
tags,
|
||||
match,
|
||||
expires,
|
||||
justification,
|
||||
identifier,
|
||||
description,
|
||||
metadata);
|
||||
}
|
||||
|
||||
private static PolicyAction NormalizeAction(
|
||||
PolicyRuleModel model,
|
||||
string basePath,
|
||||
ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
var actionNode = model.Action;
|
||||
var quiet = model.Quiet ?? false;
|
||||
if (!quiet && model.Extensions is not null && model.Extensions.TryGetValue("quiet", out var quietExtension) && quietExtension.ValueKind == JsonValueKind.True)
|
||||
{
|
||||
quiet = true;
|
||||
}
|
||||
string? justification = NormalizeOptionalString(model.Justification);
|
||||
DateTimeOffset? until = NormalizeTemporal(model.Until, $"{basePath}.until", issues);
|
||||
DateTimeOffset? expires = NormalizeTemporal(model.Expires, $"{basePath}.expires", issues);
|
||||
|
||||
var effectiveUntil = until ?? expires;
|
||||
|
||||
if (actionNode is null)
|
||||
{
|
||||
issues.Add(PolicyIssue.Error("policy.action.missing", "Rule action is required.", $"{basePath}.action"));
|
||||
return new PolicyAction(PolicyActionType.Block, null, null, null, Quiet: false);
|
||||
}
|
||||
|
||||
string? actionType = null;
|
||||
JsonObject? actionObject = null;
|
||||
|
||||
switch (actionNode)
|
||||
{
|
||||
case JsonValue value when value.TryGetValue(out string? text):
|
||||
actionType = text;
|
||||
break;
|
||||
case JsonValue value when value.TryGetValue(out bool booleanValue):
|
||||
actionType = booleanValue ? "block" : "ignore";
|
||||
break;
|
||||
case JsonObject obj:
|
||||
actionObject = obj;
|
||||
if (obj.TryGetPropertyValue("type", out var typeNode) && typeNode is JsonValue typeValue && typeValue.TryGetValue(out string? typeText))
|
||||
{
|
||||
actionType = typeText;
|
||||
}
|
||||
else
|
||||
{
|
||||
issues.Add(PolicyIssue.Error("policy.action.type", "Action object must contain a 'type' property.", $"{basePath}.action.type"));
|
||||
}
|
||||
|
||||
if (obj.TryGetPropertyValue("quiet", out var quietNode) && quietNode is JsonValue quietValue && quietValue.TryGetValue(out bool quietFlag))
|
||||
{
|
||||
quiet = quietFlag;
|
||||
}
|
||||
|
||||
if (obj.TryGetPropertyValue("until", out var untilNode))
|
||||
{
|
||||
effectiveUntil ??= NormalizeTemporal(untilNode, $"{basePath}.action.until", issues);
|
||||
}
|
||||
|
||||
if (obj.TryGetPropertyValue("justification", out var justificationNode) && justificationNode is JsonValue justificationValue && justificationValue.TryGetValue(out string? justificationText))
|
||||
{
|
||||
justification = NormalizeOptionalString(justificationText);
|
||||
}
|
||||
|
||||
break;
|
||||
default:
|
||||
actionType = actionNode.ToString();
|
||||
break;
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(actionType))
|
||||
{
|
||||
issues.Add(PolicyIssue.Error("policy.action.type", "Action type is required.", $"{basePath}.action"));
|
||||
return new PolicyAction(PolicyActionType.Block, null, null, null, Quiet: quiet);
|
||||
}
|
||||
|
||||
actionType = actionType.Trim();
|
||||
var (type, typeIssues) = MapActionType(actionType, $"{basePath}.action");
|
||||
foreach (var issue in typeIssues)
|
||||
{
|
||||
issues.Add(issue);
|
||||
}
|
||||
|
||||
PolicyIgnoreOptions? ignoreOptions = null;
|
||||
PolicyEscalateOptions? escalateOptions = null;
|
||||
PolicyRequireVexOptions? requireVexOptions = null;
|
||||
|
||||
if (type == PolicyActionType.Ignore)
|
||||
{
|
||||
ignoreOptions = new PolicyIgnoreOptions(effectiveUntil, justification);
|
||||
}
|
||||
else if (type == PolicyActionType.Escalate)
|
||||
{
|
||||
escalateOptions = NormalizeEscalateOptions(actionObject, $"{basePath}.action", issues);
|
||||
}
|
||||
else if (type == PolicyActionType.RequireVex)
|
||||
{
|
||||
requireVexOptions = NormalizeRequireVexOptions(actionObject, $"{basePath}.action", issues);
|
||||
}
|
||||
|
||||
return new PolicyAction(type, ignoreOptions, escalateOptions, requireVexOptions, quiet);
|
||||
}
|
||||
|
||||
private static (PolicyActionType Type, ImmutableArray<PolicyIssue> Issues) MapActionType(string value, string path)
|
||||
{
|
||||
var issues = ImmutableArray<PolicyIssue>.Empty;
|
||||
var lower = value.ToLowerInvariant();
|
||||
return lower switch
|
||||
{
|
||||
"block" or "fail" or "deny" => (PolicyActionType.Block, issues),
|
||||
"ignore" or "mute" => (PolicyActionType.Ignore, issues),
|
||||
"warn" or "warning" => (PolicyActionType.Warn, issues),
|
||||
"defer" => (PolicyActionType.Defer, issues),
|
||||
"escalate" => (PolicyActionType.Escalate, issues),
|
||||
"requirevex" or "require_vex" or "require-vex" => (PolicyActionType.RequireVex, issues),
|
||||
_ => (PolicyActionType.Block, ImmutableArray.Create(PolicyIssue.Warning(
|
||||
"policy.action.unknown",
|
||||
$"Unknown action '{value}' encountered. Defaulting to 'block'.",
|
||||
path))),
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyEscalateOptions? NormalizeEscalateOptions(
|
||||
JsonObject? actionObject,
|
||||
string path,
|
||||
ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
if (actionObject is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
PolicySeverity? minSeverity = null;
|
||||
bool requireKev = false;
|
||||
double? minEpss = null;
|
||||
|
||||
if (actionObject.TryGetPropertyValue("severity", out var severityNode) && severityNode is JsonValue severityValue && severityValue.TryGetValue(out string? severityText))
|
||||
{
|
||||
if (SeverityMap.TryGetValue(severityText ?? string.Empty, out var mapped))
|
||||
{
|
||||
minSeverity = mapped;
|
||||
}
|
||||
else
|
||||
{
|
||||
issues.Add(PolicyIssue.Warning("policy.action.escalate.severity", $"Unknown escalate severity '{severityText}'.", $"{path}.severity"));
|
||||
}
|
||||
}
|
||||
|
||||
if (actionObject.TryGetPropertyValue("kev", out var kevNode) && kevNode is JsonValue kevValue && kevValue.TryGetValue(out bool kevFlag))
|
||||
{
|
||||
requireKev = kevFlag;
|
||||
}
|
||||
|
||||
if (actionObject.TryGetPropertyValue("epss", out var epssNode))
|
||||
{
|
||||
var parsed = ParseDouble(epssNode, $"{path}.epss", issues);
|
||||
if (parsed is { } epssValue)
|
||||
{
|
||||
if (epssValue < 0 || epssValue > 1)
|
||||
{
|
||||
issues.Add(PolicyIssue.Warning("policy.action.escalate.epssRange", "EPS score must be between 0 and 1.", $"{path}.epss"));
|
||||
}
|
||||
else
|
||||
{
|
||||
minEpss = epssValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return new PolicyEscalateOptions(minSeverity, requireKev, minEpss);
|
||||
}
|
||||
|
||||
private static PolicyRequireVexOptions? NormalizeRequireVexOptions(
|
||||
JsonObject? actionObject,
|
||||
string path,
|
||||
ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
if (actionObject is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var vendors = ImmutableArray<string>.Empty;
|
||||
var justifications = ImmutableArray<string>.Empty;
|
||||
|
||||
if (actionObject.TryGetPropertyValue("vendors", out var vendorsNode))
|
||||
{
|
||||
vendors = NormalizeJsonStringArray(vendorsNode, $"{path}.vendors", issues);
|
||||
}
|
||||
|
||||
if (actionObject.TryGetPropertyValue("justifications", out var justificationsNode))
|
||||
{
|
||||
justifications = NormalizeJsonStringArray(justificationsNode, $"{path}.justifications", issues);
|
||||
}
|
||||
|
||||
return new PolicyRequireVexOptions(vendors, justifications);
|
||||
}
|
||||
|
||||
private static ImmutableArray<string> NormalizeStringList(
|
||||
List<string>? values,
|
||||
string path,
|
||||
ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
if (values is null || values.Count == 0)
|
||||
{
|
||||
return ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
var builder = ImmutableHashSet.CreateBuilder<string>(StringComparer.OrdinalIgnoreCase);
|
||||
foreach (var value in values)
|
||||
{
|
||||
var normalized = NormalizeOptionalString(value);
|
||||
if (string.IsNullOrEmpty(normalized))
|
||||
{
|
||||
issues.Add(PolicyIssue.Warning("policy.list.blank", $"Blank entry detected; ignoring value at {path}.", path));
|
||||
continue;
|
||||
}
|
||||
|
||||
builder.Add(normalized);
|
||||
}
|
||||
|
||||
return builder.ToImmutable()
|
||||
.OrderBy(static item => item, StringComparer.OrdinalIgnoreCase)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
private static ImmutableArray<PolicySeverity> NormalizeSeverityList(
|
||||
List<string>? values,
|
||||
string path,
|
||||
ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
if (values is null || values.Count == 0)
|
||||
{
|
||||
return ImmutableArray<PolicySeverity>.Empty;
|
||||
}
|
||||
|
||||
var builder = ImmutableArray.CreateBuilder<PolicySeverity>();
|
||||
foreach (var value in values)
|
||||
{
|
||||
var normalized = NormalizeOptionalString(value);
|
||||
if (string.IsNullOrEmpty(normalized))
|
||||
{
|
||||
issues.Add(PolicyIssue.Warning("policy.severity.blank", "Blank severity was ignored.", path));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (SeverityMap.TryGetValue(normalized, out var severity))
|
||||
{
|
||||
builder.Add(severity);
|
||||
}
|
||||
else
|
||||
{
|
||||
issues.Add(PolicyIssue.Error("policy.severity.invalid", $"Unknown severity '{value}'.", path));
|
||||
}
|
||||
}
|
||||
|
||||
return builder.Distinct().OrderBy(static sev => sev).ToImmutableArray();
|
||||
}
|
||||
|
||||
private static ImmutableArray<string> NormalizeJsonStringArray(
|
||||
JsonNode? node,
|
||||
string path,
|
||||
ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
if (node is null)
|
||||
{
|
||||
return ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
if (node is JsonArray array)
|
||||
{
|
||||
var values = new List<string>(array.Count);
|
||||
foreach (var element in array)
|
||||
{
|
||||
var text = ConvertNodeToString(element);
|
||||
if (string.IsNullOrWhiteSpace(text))
|
||||
{
|
||||
issues.Add(PolicyIssue.Warning("policy.list.blank", $"Blank entry detected; ignoring value at {path}.", path));
|
||||
}
|
||||
else
|
||||
{
|
||||
values.Add(text);
|
||||
}
|
||||
}
|
||||
|
||||
return values
|
||||
.Distinct(StringComparer.OrdinalIgnoreCase)
|
||||
.OrderBy(static entry => entry, StringComparer.OrdinalIgnoreCase)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
var single = ConvertNodeToString(node);
|
||||
return ImmutableArray.Create(single);
|
||||
}
|
||||
|
||||
private static double? ParseDouble(JsonNode? node, string path, ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
if (node is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (node is JsonValue value)
|
||||
{
|
||||
if (value.TryGetValue(out double numeric))
|
||||
{
|
||||
return numeric;
|
||||
}
|
||||
|
||||
if (value.TryGetValue(out string? text) && double.TryParse(text, NumberStyles.Float, CultureInfo.InvariantCulture, out numeric))
|
||||
{
|
||||
return numeric;
|
||||
}
|
||||
}
|
||||
|
||||
issues.Add(PolicyIssue.Warning("policy.number.invalid", $"Value '{node.ToJsonString()}' is not a valid number.", path));
|
||||
return null;
|
||||
}
|
||||
|
||||
private static DateTimeOffset? NormalizeTemporal(JsonNode? node, string path, ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
if (node is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (node is JsonValue value)
|
||||
{
|
||||
if (value.TryGetValue(out DateTimeOffset dto))
|
||||
{
|
||||
return dto;
|
||||
}
|
||||
|
||||
if (value.TryGetValue(out DateTime dt))
|
||||
{
|
||||
return new DateTimeOffset(DateTime.SpecifyKind(dt, DateTimeKind.Utc));
|
||||
}
|
||||
|
||||
if (value.TryGetValue(out string? text))
|
||||
{
|
||||
if (DateTimeOffset.TryParse(text, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed))
|
||||
{
|
||||
return parsed;
|
||||
}
|
||||
|
||||
if (DateTime.TryParse(text, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsedDate))
|
||||
{
|
||||
return new DateTimeOffset(parsedDate);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
issues.Add(PolicyIssue.Warning("policy.date.invalid", $"Value '{node.ToJsonString()}' is not a valid ISO-8601 timestamp.", path));
|
||||
return null;
|
||||
}
|
||||
|
||||
private static string? NormalizeRequiredString(
|
||||
string? value,
|
||||
string path,
|
||||
string fieldDescription,
|
||||
ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
var normalized = NormalizeOptionalString(value);
|
||||
if (!string.IsNullOrEmpty(normalized))
|
||||
{
|
||||
return normalized;
|
||||
}
|
||||
|
||||
issues.Add(PolicyIssue.Error(
|
||||
"policy.required",
|
||||
$"{fieldDescription} is required.",
|
||||
path));
|
||||
return null;
|
||||
}
|
||||
|
||||
private static string? NormalizeOptionalString(string? value)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(value))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return value.Trim();
|
||||
}
|
||||
|
||||
private static string ConvertNodeToString(JsonNode? node)
|
||||
{
|
||||
if (node is null)
|
||||
{
|
||||
return string.Empty;
|
||||
}
|
||||
|
||||
return node switch
|
||||
{
|
||||
JsonValue value when value.TryGetValue(out string? text) => text ?? string.Empty,
|
||||
JsonValue value when value.TryGetValue(out bool boolean) => boolean ? "true" : "false",
|
||||
JsonValue value when value.TryGetValue(out double numeric) => numeric.ToString(CultureInfo.InvariantCulture),
|
||||
JsonObject obj => obj.ToJsonString(),
|
||||
JsonArray array => array.ToJsonString(),
|
||||
_ => node.ToJsonString(),
|
||||
};
|
||||
}
|
||||
|
||||
private static ImmutableArray<PolicyIssue> SortIssues(ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
return issues.ToImmutable()
|
||||
.OrderBy(static issue => issue.Severity switch
|
||||
{
|
||||
PolicyIssueSeverity.Error => 0,
|
||||
PolicyIssueSeverity.Warning => 1,
|
||||
_ => 2,
|
||||
})
|
||||
.ThenBy(static issue => issue.Path, StringComparer.Ordinal)
|
||||
.ThenBy(static issue => issue.Code, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
}
|
||||
}
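The helpers above give the binder its deterministic behaviour: list entries are trimmed, de-duplicated case-insensitively and sorted, unknown severities surface as errors, and SortIssues orders everything errors first, then warnings, then info, tie-broken by path and code. A minimal sketch of how that surfaces through PolicyBinder.Bind (the same entry point used by the snapshot store and CLI below); the YAML shape here is illustrative only, not the authoritative schema:

using System;
using StellaOps.Policy;

// Illustrative policy text: one blank vendor entry and one unknown severity value.
var yaml = """
version: "1.0"
rules:
  - name: Warn on high severity
    severity: [High, NotASeverity]
    vendors: ["  ", "ExampleVendor"]
    action: warn
""";

var result = PolicyBinder.Bind(yaml, PolicyDocumentFormat.Yaml);

// Issues arrive pre-sorted: errors first, then warnings, ordered by path and code.
foreach (var issue in result.Issues)
{
    Console.WriteLine($"{issue.Severity} {issue.Code} @ {issue.Path}: {issue.Message}");
}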
|
||||
77
src/StellaOps.Policy/PolicyDiagnostics.cs
Normal file
@@ -0,0 +1,77 @@
|
||||
using System;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
public sealed record PolicyDiagnosticsReport(
|
||||
string Version,
|
||||
int RuleCount,
|
||||
int ErrorCount,
|
||||
int WarningCount,
|
||||
DateTimeOffset GeneratedAt,
|
||||
ImmutableArray<PolicyIssue> Issues,
|
||||
ImmutableArray<string> Recommendations);
|
||||
|
||||
public static class PolicyDiagnostics
|
||||
{
|
||||
public static PolicyDiagnosticsReport Create(PolicyBindingResult bindingResult, TimeProvider? timeProvider = null)
|
||||
{
|
||||
if (bindingResult is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(bindingResult));
|
||||
}
|
||||
|
||||
var time = (timeProvider ?? TimeProvider.System).GetUtcNow();
|
||||
var errorCount = bindingResult.Issues.Count(static issue => issue.Severity == PolicyIssueSeverity.Error);
|
||||
var warningCount = bindingResult.Issues.Count(static issue => issue.Severity == PolicyIssueSeverity.Warning);
|
||||
|
||||
var recommendations = BuildRecommendations(bindingResult.Document, errorCount, warningCount);
|
||||
|
||||
return new PolicyDiagnosticsReport(
|
||||
bindingResult.Document.Version,
|
||||
bindingResult.Document.Rules.Length,
|
||||
errorCount,
|
||||
warningCount,
|
||||
time,
|
||||
bindingResult.Issues,
|
||||
recommendations);
|
||||
}
|
||||
|
||||
private static ImmutableArray<string> BuildRecommendations(PolicyDocument document, int errorCount, int warningCount)
|
||||
{
|
||||
var messages = ImmutableArray.CreateBuilder<string>();
|
||||
|
||||
if (errorCount > 0)
|
||||
{
|
||||
messages.Add("Resolve policy errors before promoting the revision; fallback rules may be applied while errors remain.");
|
||||
}
|
||||
|
||||
if (warningCount > 0)
|
||||
{
|
||||
messages.Add("Review policy warnings and ensure intentional overrides are documented.");
|
||||
}
|
||||
|
||||
if (document.Rules.Length == 0)
|
||||
{
|
||||
messages.Add("Add at least one policy rule to enforce gating logic.");
|
||||
}
|
||||
|
||||
var quietRules = document.Rules
|
||||
.Where(static rule => rule.Action.Quiet)
|
||||
.Select(static rule => rule.Name)
|
||||
.ToArray();
|
||||
|
||||
if (quietRules.Length > 0)
|
||||
{
|
||||
messages.Add($"Quiet rules detected ({string.Join(", ", quietRules)}); verify scoring behaviour aligns with expectations.");
|
||||
}
|
||||
|
||||
if (messages.Count == 0)
|
||||
{
|
||||
messages.Add("Policy validated successfully; no additional action required.");
|
||||
}
|
||||
|
||||
return messages.ToImmutable();
|
||||
}
|
||||
}
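A short usage sketch for the report above. PolicyBinder.Bind is the entry point used by PolicySnapshotStore later in this change, and the policy.yaml path is only an example:

using System;
using System.IO;
using StellaOps.Policy;

var binding = PolicyBinder.Bind(File.ReadAllText("policy.yaml"), PolicyDocumentFormat.Yaml);
var report = PolicyDiagnostics.Create(binding);

Console.WriteLine($"policy {report.Version}: {report.RuleCount} rules, {report.ErrorCount} errors, {report.WarningCount} warnings");
foreach (var recommendation in report.Recommendations)
{
    Console.WriteLine($"  - {recommendation}");
}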
|
||||
211
src/StellaOps.Policy/PolicyDigest.cs
Normal file
@@ -0,0 +1,211 @@
|
||||
using System;
|
||||
using System.Buffers;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
public static class PolicyDigest
|
||||
{
|
||||
public static string Compute(PolicyDocument document)
|
||||
{
|
||||
if (document is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(document));
|
||||
}
|
||||
|
||||
var buffer = new ArrayBufferWriter<byte>();
|
||||
using (var writer = new Utf8JsonWriter(buffer, new JsonWriterOptions
|
||||
{
|
||||
SkipValidation = true,
|
||||
}))
|
||||
{
|
||||
WriteDocument(writer, document);
|
||||
}
|
||||
|
||||
var hash = SHA256.HashData(buffer.WrittenSpan);
|
||||
return Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
|
||||
|
||||
private static void WriteDocument(Utf8JsonWriter writer, PolicyDocument document)
|
||||
{
|
||||
writer.WriteStartObject();
|
||||
writer.WriteString("version", document.Version);
|
||||
|
||||
if (!document.Metadata.IsEmpty)
|
||||
{
|
||||
writer.WritePropertyName("metadata");
|
||||
writer.WriteStartObject();
|
||||
foreach (var pair in document.Metadata.OrderBy(static kvp => kvp.Key, StringComparer.Ordinal))
|
||||
{
|
||||
writer.WriteString(pair.Key, pair.Value);
|
||||
}
|
||||
writer.WriteEndObject();
|
||||
}
|
||||
|
||||
writer.WritePropertyName("rules");
|
||||
writer.WriteStartArray();
|
||||
foreach (var rule in document.Rules)
|
||||
{
|
||||
WriteRule(writer, rule);
|
||||
}
|
||||
writer.WriteEndArray();
|
||||
|
||||
writer.WriteEndObject();
|
||||
writer.Flush();
|
||||
}
|
||||
|
||||
private static void WriteRule(Utf8JsonWriter writer, PolicyRule rule)
|
||||
{
|
||||
writer.WriteStartObject();
|
||||
writer.WriteString("name", rule.Name);
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(rule.Identifier))
|
||||
{
|
||||
writer.WriteString("id", rule.Identifier);
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(rule.Description))
|
||||
{
|
||||
writer.WriteString("description", rule.Description);
|
||||
}
|
||||
|
||||
WriteMetadata(writer, rule.Metadata);
|
||||
WriteSeverities(writer, rule.Severities);
|
||||
WriteStringArray(writer, "environments", rule.Environments);
|
||||
WriteStringArray(writer, "sources", rule.Sources);
|
||||
WriteStringArray(writer, "vendors", rule.Vendors);
|
||||
WriteStringArray(writer, "licenses", rule.Licenses);
|
||||
WriteStringArray(writer, "tags", rule.Tags);
|
||||
|
||||
if (!rule.Match.IsEmpty)
|
||||
{
|
||||
writer.WritePropertyName("match");
|
||||
writer.WriteStartObject();
|
||||
WriteStringArray(writer, "images", rule.Match.Images);
|
||||
WriteStringArray(writer, "repositories", rule.Match.Repositories);
|
||||
WriteStringArray(writer, "packages", rule.Match.Packages);
|
||||
WriteStringArray(writer, "purls", rule.Match.Purls);
|
||||
WriteStringArray(writer, "cves", rule.Match.Cves);
|
||||
WriteStringArray(writer, "paths", rule.Match.Paths);
|
||||
WriteStringArray(writer, "layerDigests", rule.Match.LayerDigests);
|
||||
WriteStringArray(writer, "usedByEntrypoint", rule.Match.UsedByEntrypoint);
|
||||
writer.WriteEndObject();
|
||||
}
|
||||
|
||||
WriteAction(writer, rule.Action);
|
||||
|
||||
if (rule.Expires is DateTimeOffset expires)
|
||||
{
|
||||
writer.WriteString("expires", expires.ToUniversalTime().ToString("O"));
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(rule.Justification))
|
||||
{
|
||||
writer.WriteString("justification", rule.Justification);
|
||||
}
|
||||
|
||||
writer.WriteEndObject();
|
||||
}
|
||||
|
||||
private static void WriteAction(Utf8JsonWriter writer, PolicyAction action)
|
||||
{
|
||||
writer.WritePropertyName("action");
|
||||
writer.WriteStartObject();
|
||||
writer.WriteString("type", action.Type.ToString().ToLowerInvariant());
|
||||
|
||||
if (action.Quiet)
|
||||
{
|
||||
writer.WriteBoolean("quiet", true);
|
||||
}
|
||||
|
||||
if (action.Ignore is { } ignore)
|
||||
{
|
||||
if (ignore.Until is DateTimeOffset until)
|
||||
{
|
||||
writer.WriteString("until", until.ToUniversalTime().ToString("O"));
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(ignore.Justification))
|
||||
{
|
||||
writer.WriteString("justification", ignore.Justification);
|
||||
}
|
||||
}
|
||||
|
||||
if (action.Escalate is { } escalate)
|
||||
{
|
||||
if (escalate.MinimumSeverity is { } severity)
|
||||
{
|
||||
writer.WriteString("severity", severity.ToString());
|
||||
}
|
||||
|
||||
if (escalate.RequireKev)
|
||||
{
|
||||
writer.WriteBoolean("kev", true);
|
||||
}
|
||||
|
||||
if (escalate.MinimumEpss is double epss)
|
||||
{
|
||||
writer.WriteNumber("epss", epss);
|
||||
}
|
||||
}
|
||||
|
||||
if (action.RequireVex is { } requireVex)
|
||||
{
|
||||
WriteStringArray(writer, "vendors", requireVex.Vendors);
|
||||
WriteStringArray(writer, "justifications", requireVex.Justifications);
|
||||
}
|
||||
|
||||
writer.WriteEndObject();
|
||||
}
|
||||
|
||||
private static void WriteMetadata(Utf8JsonWriter writer, ImmutableDictionary<string, string> metadata)
|
||||
{
|
||||
if (metadata.IsEmpty)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
writer.WritePropertyName("metadata");
|
||||
writer.WriteStartObject();
|
||||
foreach (var pair in metadata.OrderBy(static kvp => kvp.Key, StringComparer.Ordinal))
|
||||
{
|
||||
writer.WriteString(pair.Key, pair.Value);
|
||||
}
|
||||
writer.WriteEndObject();
|
||||
}
|
||||
|
||||
private static void WriteSeverities(Utf8JsonWriter writer, ImmutableArray<PolicySeverity> severities)
|
||||
{
|
||||
if (severities.IsDefaultOrEmpty)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
writer.WritePropertyName("severity");
|
||||
writer.WriteStartArray();
|
||||
foreach (var severity in severities)
|
||||
{
|
||||
writer.WriteStringValue(severity.ToString());
|
||||
}
|
||||
writer.WriteEndArray();
|
||||
}
|
||||
|
||||
private static void WriteStringArray(Utf8JsonWriter writer, string propertyName, ImmutableArray<string> values)
|
||||
{
|
||||
if (values.IsDefaultOrEmpty)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
writer.WritePropertyName(propertyName);
|
||||
writer.WriteStartArray();
|
||||
foreach (var value in values)
|
||||
{
|
||||
writer.WriteStringValue(value);
|
||||
}
|
||||
writer.WriteEndArray();
|
||||
}
|
||||
}
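Compute serializes the document through the canonical writer above (sorted metadata, fixed property order, empty sections omitted), so two sources that bind to the same rules share a digest regardless of input format or key order. A sketch under that assumption, where yamlText and jsonText stand in for equivalent policy sources:

using System;
using StellaOps.Policy;

// yamlText and jsonText: equivalent rule sets expressed in different formats / key orders.
var fromYaml = PolicyBinder.Bind(yamlText, PolicyDocumentFormat.Yaml);
var fromJson = PolicyBinder.Bind(jsonText, PolicyDocumentFormat.Json);

var a = PolicyDigest.Compute(fromYaml.Document);
var b = PolicyDigest.Compute(fromJson.Document);

Console.WriteLine(a == b ? $"stable digest {a}" : "documents differ semantically");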
|
||||
192
src/StellaOps.Policy/PolicyDocument.cs
Normal file
@@ -0,0 +1,192 @@
|
||||
using System;
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
/// <summary>
|
||||
/// Canonical representation of a StellaOps policy document.
|
||||
/// </summary>
|
||||
public sealed record PolicyDocument(
|
||||
string Version,
|
||||
ImmutableArray<PolicyRule> Rules,
|
||||
ImmutableDictionary<string, string> Metadata)
|
||||
{
|
||||
public static PolicyDocument Empty { get; } = new(
|
||||
PolicySchema.CurrentVersion,
|
||||
ImmutableArray<PolicyRule>.Empty,
|
||||
ImmutableDictionary<string, string>.Empty);
|
||||
}
|
||||
|
||||
public static class PolicySchema
|
||||
{
|
||||
public const string SchemaId = "https://schemas.stella-ops.org/policy/policy-schema@1.json";
|
||||
public const string CurrentVersion = "1.0";
|
||||
|
||||
public static PolicyDocumentFormat DetectFormat(string fileName)
|
||||
{
|
||||
if (fileName is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(fileName));
|
||||
}
|
||||
|
||||
var lower = fileName.Trim().ToLowerInvariant();
|
||||
if (lower.EndsWith(".yaml", StringComparison.Ordinal) || lower.EndsWith(".yml", StringComparison.Ordinal))
|
||||
{
|
||||
return PolicyDocumentFormat.Yaml;
|
||||
}
|
||||
|
||||
return PolicyDocumentFormat.Json;
|
||||
}
|
||||
}
|
||||
|
||||
public sealed record PolicyRule(
|
||||
string Name,
|
||||
string? Identifier,
|
||||
string? Description,
|
||||
PolicyAction Action,
|
||||
ImmutableArray<PolicySeverity> Severities,
|
||||
ImmutableArray<string> Environments,
|
||||
ImmutableArray<string> Sources,
|
||||
ImmutableArray<string> Vendors,
|
||||
ImmutableArray<string> Licenses,
|
||||
ImmutableArray<string> Tags,
|
||||
PolicyRuleMatchCriteria Match,
|
||||
DateTimeOffset? Expires,
|
||||
string? Justification,
|
||||
ImmutableDictionary<string, string> Metadata)
|
||||
{
|
||||
public static PolicyRuleMatchCriteria EmptyMatch { get; } = new(
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty);
|
||||
|
||||
public static PolicyRule Create(
|
||||
string name,
|
||||
PolicyAction action,
|
||||
ImmutableArray<PolicySeverity> severities,
|
||||
ImmutableArray<string> environments,
|
||||
ImmutableArray<string> sources,
|
||||
ImmutableArray<string> vendors,
|
||||
ImmutableArray<string> licenses,
|
||||
ImmutableArray<string> tags,
|
||||
PolicyRuleMatchCriteria match,
|
||||
DateTimeOffset? expires,
|
||||
string? justification,
|
||||
string? identifier = null,
|
||||
string? description = null,
|
||||
ImmutableDictionary<string, string>? metadata = null)
|
||||
{
|
||||
metadata ??= ImmutableDictionary<string, string>.Empty;
|
||||
return new PolicyRule(
|
||||
name,
|
||||
identifier,
|
||||
description,
|
||||
action,
|
||||
severities,
|
||||
environments,
|
||||
sources,
|
||||
vendors,
|
||||
licenses,
|
||||
tags,
|
||||
match,
|
||||
expires,
|
||||
justification,
|
||||
metadata);
|
||||
}
|
||||
|
||||
public bool MatchesAnyEnvironment => Environments.IsDefaultOrEmpty;
|
||||
}
|
||||
|
||||
public sealed record PolicyRuleMatchCriteria(
|
||||
ImmutableArray<string> Images,
|
||||
ImmutableArray<string> Repositories,
|
||||
ImmutableArray<string> Packages,
|
||||
ImmutableArray<string> Purls,
|
||||
ImmutableArray<string> Cves,
|
||||
ImmutableArray<string> Paths,
|
||||
ImmutableArray<string> LayerDigests,
|
||||
ImmutableArray<string> UsedByEntrypoint)
|
||||
{
|
||||
public static PolicyRuleMatchCriteria Create(
|
||||
ImmutableArray<string> images,
|
||||
ImmutableArray<string> repositories,
|
||||
ImmutableArray<string> packages,
|
||||
ImmutableArray<string> purls,
|
||||
ImmutableArray<string> cves,
|
||||
ImmutableArray<string> paths,
|
||||
ImmutableArray<string> layerDigests,
|
||||
ImmutableArray<string> usedByEntrypoint)
|
||||
=> new(
|
||||
images,
|
||||
repositories,
|
||||
packages,
|
||||
purls,
|
||||
cves,
|
||||
paths,
|
||||
layerDigests,
|
||||
usedByEntrypoint);
|
||||
|
||||
public static PolicyRuleMatchCriteria Empty { get; } = new(
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty);
|
||||
|
||||
public bool IsEmpty =>
|
||||
Images.IsDefaultOrEmpty &&
|
||||
Repositories.IsDefaultOrEmpty &&
|
||||
Packages.IsDefaultOrEmpty &&
|
||||
Purls.IsDefaultOrEmpty &&
|
||||
Cves.IsDefaultOrEmpty &&
|
||||
Paths.IsDefaultOrEmpty &&
|
||||
LayerDigests.IsDefaultOrEmpty &&
|
||||
UsedByEntrypoint.IsDefaultOrEmpty;
|
||||
}
|
||||
|
||||
public sealed record PolicyAction(
|
||||
PolicyActionType Type,
|
||||
PolicyIgnoreOptions? Ignore,
|
||||
PolicyEscalateOptions? Escalate,
|
||||
PolicyRequireVexOptions? RequireVex,
|
||||
bool Quiet);
|
||||
|
||||
public enum PolicyActionType
|
||||
{
|
||||
Block,
|
||||
Ignore,
|
||||
Warn,
|
||||
Defer,
|
||||
Escalate,
|
||||
RequireVex,
|
||||
}
|
||||
|
||||
public sealed record PolicyIgnoreOptions(DateTimeOffset? Until, string? Justification);
|
||||
|
||||
public sealed record PolicyEscalateOptions(
|
||||
PolicySeverity? MinimumSeverity,
|
||||
bool RequireKev,
|
||||
double? MinimumEpss);
|
||||
|
||||
public sealed record PolicyRequireVexOptions(
|
||||
ImmutableArray<string> Vendors,
|
||||
ImmutableArray<string> Justifications);
|
||||
|
||||
public enum PolicySeverity
|
||||
{
|
||||
Critical,
|
||||
High,
|
||||
Medium,
|
||||
Low,
|
||||
Informational,
|
||||
None,
|
||||
Unknown,
|
||||
}
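All of the types needed to assemble a document in code are defined above; a minimal sketch building one blocking rule for critical findings in production:

using System.Collections.Immutable;
using StellaOps.Policy;

var rule = PolicyRule.Create(
    name: "Block critical findings",
    action: new PolicyAction(
        PolicyActionType.Block,
        Ignore: null,
        Escalate: null,
        RequireVex: null,
        Quiet: false),
    severities: ImmutableArray.Create(PolicySeverity.Critical),
    environments: ImmutableArray.Create("prod"),
    sources: ImmutableArray<string>.Empty,
    vendors: ImmutableArray<string>.Empty,
    licenses: ImmutableArray<string>.Empty,
    tags: ImmutableArray<string>.Empty,
    match: PolicyRuleMatchCriteria.Empty,
    expires: null,
    justification: null);

var document = new PolicyDocument(
    PolicySchema.CurrentVersion,
    ImmutableArray.Create(rule),
    ImmutableDictionary<string, string>.Empty);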
|
||||
270
src/StellaOps.Policy/PolicyEvaluation.cs
Normal file
@@ -0,0 +1,270 @@
|
||||
using System;
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
public static class PolicyEvaluation
|
||||
{
|
||||
public static PolicyVerdict EvaluateFinding(PolicyDocument document, PolicyScoringConfig scoringConfig, PolicyFinding finding)
|
||||
{
|
||||
if (document is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(document));
|
||||
}
|
||||
|
||||
if (scoringConfig is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(scoringConfig));
|
||||
}
|
||||
|
||||
if (finding is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(finding));
|
||||
}
|
||||
|
||||
var severityWeight = scoringConfig.SeverityWeights.TryGetValue(finding.Severity, out var weight)
|
||||
? weight
|
||||
: scoringConfig.SeverityWeights.GetValueOrDefault(PolicySeverity.Unknown, 0);
|
||||
|
||||
foreach (var rule in document.Rules)
|
||||
{
|
||||
if (!RuleMatches(rule, finding))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
return BuildVerdict(rule, finding, scoringConfig, severityWeight);
|
||||
}
|
||||
|
||||
return PolicyVerdict.CreateBaseline(finding.FindingId, scoringConfig);
|
||||
}
|
||||
|
||||
private static PolicyVerdict BuildVerdict(
|
||||
PolicyRule rule,
|
||||
PolicyFinding finding,
|
||||
PolicyScoringConfig config,
|
||||
double severityWeight)
|
||||
{
|
||||
var action = rule.Action;
|
||||
var status = MapAction(action);
|
||||
var notes = BuildNotes(action);
|
||||
var inputs = ImmutableDictionary.CreateBuilder<string, double>(StringComparer.OrdinalIgnoreCase);
|
||||
inputs["severityWeight"] = severityWeight;
|
||||
|
||||
double score = severityWeight;
|
||||
string? quietedBy = null;
|
||||
var quiet = false;
|
||||
|
||||
switch (status)
|
||||
{
|
||||
case PolicyVerdictStatus.Ignored:
|
||||
score = Math.Max(0, severityWeight - config.IgnorePenalty);
|
||||
inputs["ignorePenalty"] = config.IgnorePenalty;
|
||||
break;
|
||||
case PolicyVerdictStatus.Warned:
|
||||
score = Math.Max(0, severityWeight - config.WarnPenalty);
|
||||
inputs["warnPenalty"] = config.WarnPenalty;
|
||||
break;
|
||||
case PolicyVerdictStatus.Deferred:
|
||||
score = Math.Max(0, severityWeight - (config.WarnPenalty / 2));
|
||||
inputs["deferPenalty"] = config.WarnPenalty / 2;
|
||||
break;
|
||||
}
|
||||
|
||||
if (action.Quiet)
|
||||
{
|
||||
var quietAllowed = action.RequireVex is not null || action.Type == PolicyActionType.RequireVex;
|
||||
if (quietAllowed)
|
||||
{
|
||||
score = Math.Max(0, score - config.QuietPenalty);
|
||||
inputs["quietPenalty"] = config.QuietPenalty;
|
||||
quietedBy = rule.Name;
|
||||
quiet = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
inputs.Remove("ignorePenalty");
|
||||
var warnScore = Math.Max(0, severityWeight - config.WarnPenalty);
|
||||
inputs["warnPenalty"] = config.WarnPenalty;
|
||||
var warnNotes = AppendNote(notes, "Quiet flag ignored: rule must specify requireVex justifications.");
|
||||
|
||||
return new PolicyVerdict(
|
||||
finding.FindingId,
|
||||
PolicyVerdictStatus.Warned,
|
||||
rule.Name,
|
||||
action.Type.ToString(),
|
||||
warnNotes,
|
||||
warnScore,
|
||||
config.Version,
|
||||
inputs.ToImmutable(),
|
||||
QuietedBy: null,
|
||||
Quiet: false);
|
||||
}
|
||||
}
|
||||
|
||||
return new PolicyVerdict(
|
||||
finding.FindingId,
|
||||
status,
|
||||
rule.Name,
|
||||
action.Type.ToString(),
|
||||
notes,
|
||||
score,
|
||||
config.Version,
|
||||
inputs.ToImmutable(),
|
||||
quietedBy,
|
||||
quiet);
|
||||
}
|
||||
|
||||
private static bool RuleMatches(PolicyRule rule, PolicyFinding finding)
|
||||
{
|
||||
if (!rule.Severities.IsDefaultOrEmpty && !rule.Severities.Contains(finding.Severity))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!Matches(rule.Environments, finding.Environment))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!Matches(rule.Sources, finding.Source))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!Matches(rule.Vendors, finding.Vendor))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!Matches(rule.Licenses, finding.License))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!RuleMatchCriteria(rule.Match, finding))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private static bool Matches(ImmutableArray<string> ruleValues, string? candidate)
|
||||
{
|
||||
if (ruleValues.IsDefaultOrEmpty)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(candidate))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return ruleValues.Contains(candidate, StringComparer.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
private static bool RuleMatchCriteria(PolicyRuleMatchCriteria criteria, PolicyFinding finding)
|
||||
{
|
||||
if (!criteria.Images.IsDefaultOrEmpty && !ContainsValue(criteria.Images, finding.Image, StringComparer.OrdinalIgnoreCase))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!criteria.Repositories.IsDefaultOrEmpty && !ContainsValue(criteria.Repositories, finding.Repository, StringComparer.OrdinalIgnoreCase))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!criteria.Packages.IsDefaultOrEmpty && !ContainsValue(criteria.Packages, finding.Package, StringComparer.OrdinalIgnoreCase))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!criteria.Purls.IsDefaultOrEmpty && !ContainsValue(criteria.Purls, finding.Purl, StringComparer.OrdinalIgnoreCase))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!criteria.Cves.IsDefaultOrEmpty && !ContainsValue(criteria.Cves, finding.Cve, StringComparer.OrdinalIgnoreCase))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!criteria.Paths.IsDefaultOrEmpty && !ContainsValue(criteria.Paths, finding.Path, StringComparer.Ordinal))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!criteria.LayerDigests.IsDefaultOrEmpty && !ContainsValue(criteria.LayerDigests, finding.LayerDigest, StringComparer.OrdinalIgnoreCase))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!criteria.UsedByEntrypoint.IsDefaultOrEmpty)
|
||||
{
|
||||
var match = false;
|
||||
foreach (var tag in criteria.UsedByEntrypoint)
|
||||
{
|
||||
if (finding.Tags.Contains(tag, StringComparer.OrdinalIgnoreCase))
|
||||
{
|
||||
match = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!match)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private static bool ContainsValue(ImmutableArray<string> values, string? candidate, StringComparer comparer)
|
||||
{
|
||||
if (values.IsDefaultOrEmpty)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(candidate))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return values.Contains(candidate, comparer);
|
||||
}
|
||||
|
||||
private static PolicyVerdictStatus MapAction(PolicyAction action)
|
||||
=> action.Type switch
|
||||
{
|
||||
PolicyActionType.Block => PolicyVerdictStatus.Blocked,
|
||||
PolicyActionType.Ignore => PolicyVerdictStatus.Ignored,
|
||||
PolicyActionType.Warn => PolicyVerdictStatus.Warned,
|
||||
PolicyActionType.Defer => PolicyVerdictStatus.Deferred,
|
||||
PolicyActionType.Escalate => PolicyVerdictStatus.Escalated,
|
||||
PolicyActionType.RequireVex => PolicyVerdictStatus.RequiresVex,
|
||||
_ => PolicyVerdictStatus.Pass,
|
||||
};
|
||||
|
||||
private static string? BuildNotes(PolicyAction action)
|
||||
{
|
||||
if (action.Ignore is { } ignore && !string.IsNullOrWhiteSpace(ignore.Justification))
|
||||
{
|
||||
return ignore.Justification;
|
||||
}
|
||||
|
||||
if (action.Escalate is { } escalate && escalate.MinimumSeverity is { } severity)
|
||||
{
|
||||
return $"Escalate >= {severity}";
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static string? AppendNote(string? existing, string addition)
|
||||
=> string.IsNullOrWhiteSpace(existing) ? addition : string.Concat(existing, " | ", addition);
|
||||
}
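A sketch of evaluating a single finding against a bound document with the default scoring config. PolicyVerdict is defined elsewhere; the Status and Score members used below are assumed from the constructor calls above:

using System;
using StellaOps.Policy;

var finding = PolicyFinding.Create(
    "finding-001",
    PolicySeverity.Critical,
    environment: "prod",
    cve: "CVE-2024-0001");

// document: any PolicyDocument, e.g. one assembled in code or produced by PolicyBinder.Bind.
var verdict = PolicyEvaluation.EvaluateFinding(document, PolicyScoringConfig.Default, finding);

// Status and Score are assumed members of PolicyVerdict, mirroring the constructor usage above.
Console.WriteLine($"{verdict.FindingId}: {verdict.Status} (score {verdict.Score})");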
|
||||
51
src/StellaOps.Policy/PolicyFinding.cs
Normal file
@@ -0,0 +1,51 @@
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
public sealed record PolicyFinding(
|
||||
string FindingId,
|
||||
PolicySeverity Severity,
|
||||
string? Environment,
|
||||
string? Source,
|
||||
string? Vendor,
|
||||
string? License,
|
||||
string? Image,
|
||||
string? Repository,
|
||||
string? Package,
|
||||
string? Purl,
|
||||
string? Cve,
|
||||
string? Path,
|
||||
string? LayerDigest,
|
||||
ImmutableArray<string> Tags)
|
||||
{
|
||||
public static PolicyFinding Create(
|
||||
string findingId,
|
||||
PolicySeverity severity,
|
||||
string? environment = null,
|
||||
string? source = null,
|
||||
string? vendor = null,
|
||||
string? license = null,
|
||||
string? image = null,
|
||||
string? repository = null,
|
||||
string? package = null,
|
||||
string? purl = null,
|
||||
string? cve = null,
|
||||
string? path = null,
|
||||
string? layerDigest = null,
|
||||
ImmutableArray<string>? tags = null)
|
||||
=> new(
|
||||
findingId,
|
||||
severity,
|
||||
environment,
|
||||
source,
|
||||
vendor,
|
||||
license,
|
||||
image,
|
||||
repository,
|
||||
package,
|
||||
purl,
|
||||
cve,
|
||||
path,
|
||||
layerDigest,
|
||||
tags ?? ImmutableArray<string>.Empty);
|
||||
}
|
||||
28
src/StellaOps.Policy/PolicyIssue.cs
Normal file
@@ -0,0 +1,28 @@
using System;

namespace StellaOps.Policy;

/// <summary>
/// Represents a validation or normalization issue discovered while processing a policy document.
/// </summary>
public sealed record PolicyIssue(string Code, string Message, PolicyIssueSeverity Severity, string Path)
{
    public static PolicyIssue Error(string code, string message, string path)
        => new(code, message, PolicyIssueSeverity.Error, path);

    public static PolicyIssue Warning(string code, string message, string path)
        => new(code, message, PolicyIssueSeverity.Warning, path);

    public static PolicyIssue Info(string code, string message, string path)
        => new(code, message, PolicyIssueSeverity.Info, path);

    public PolicyIssue EnsurePath(string fallbackPath)
        => string.IsNullOrWhiteSpace(Path) ? this with { Path = fallbackPath } : this;
}

public enum PolicyIssueSeverity
{
    Error,
    Warning,
    Info,
}
18
src/StellaOps.Policy/PolicyPreviewModels.cs
Normal file
@@ -0,0 +1,18 @@
using System.Collections.Immutable;

namespace StellaOps.Policy;

public sealed record PolicyPreviewRequest(
    string ImageDigest,
    ImmutableArray<PolicyFinding> Findings,
    ImmutableArray<PolicyVerdict> BaselineVerdicts,
    PolicySnapshot? SnapshotOverride = null,
    PolicySnapshotContent? ProposedPolicy = null);

public sealed record PolicyPreviewResponse(
    bool Success,
    string PolicyDigest,
    string? RevisionId,
    ImmutableArray<PolicyIssue> Issues,
    ImmutableArray<PolicyVerdictDiff> Diffs,
    int ChangedCount);
142
src/StellaOps.Policy/PolicyPreviewService.cs
Normal file
@@ -0,0 +1,142 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
public sealed class PolicyPreviewService
|
||||
{
|
||||
private readonly PolicySnapshotStore _snapshotStore;
|
||||
private readonly ILogger<PolicyPreviewService> _logger;
|
||||
|
||||
public PolicyPreviewService(PolicySnapshotStore snapshotStore, ILogger<PolicyPreviewService> logger)
|
||||
{
|
||||
_snapshotStore = snapshotStore ?? throw new ArgumentNullException(nameof(snapshotStore));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public async Task<PolicyPreviewResponse> PreviewAsync(PolicyPreviewRequest request, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (request is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(request));
|
||||
}
|
||||
|
||||
var (snapshot, bindingIssues) = await ResolveSnapshotAsync(request, cancellationToken).ConfigureAwait(false);
|
||||
if (snapshot is null)
|
||||
{
|
||||
_logger.LogWarning("Policy preview failed: snapshot unavailable or validation errors. Issues={Count}", bindingIssues.Length);
|
||||
return new PolicyPreviewResponse(false, string.Empty, null, bindingIssues, ImmutableArray<PolicyVerdictDiff>.Empty, 0);
|
||||
}
|
||||
|
||||
var projected = Evaluate(snapshot.Document, snapshot.ScoringConfig, request.Findings);
|
||||
var baseline = BuildBaseline(request.BaselineVerdicts, projected, snapshot.ScoringConfig);
|
||||
var diffs = BuildDiffs(baseline, projected);
|
||||
var changed = diffs.Count(static diff => diff.Changed);
|
||||
|
||||
_logger.LogDebug("Policy preview computed for {ImageDigest}. Changed={Changed}", request.ImageDigest, changed);
|
||||
|
||||
return new PolicyPreviewResponse(true, snapshot.Digest, snapshot.RevisionId, bindingIssues, diffs, changed);
|
||||
}
|
||||
|
||||
private async Task<(PolicySnapshot? Snapshot, ImmutableArray<PolicyIssue> Issues)> ResolveSnapshotAsync(PolicyPreviewRequest request, CancellationToken cancellationToken)
|
||||
{
|
||||
if (request.ProposedPolicy is not null)
|
||||
{
|
||||
var binding = PolicyBinder.Bind(request.ProposedPolicy.Content, request.ProposedPolicy.Format);
|
||||
if (!binding.Success)
|
||||
{
|
||||
return (null, binding.Issues);
|
||||
}
|
||||
|
||||
var digest = PolicyDigest.Compute(binding.Document);
|
||||
var snapshot = new PolicySnapshot(
|
||||
request.SnapshotOverride?.RevisionNumber + 1 ?? 0,
|
||||
request.SnapshotOverride?.RevisionId ?? "preview",
|
||||
digest,
|
||||
DateTimeOffset.UtcNow,
|
||||
request.ProposedPolicy.Actor,
|
||||
request.ProposedPolicy.Format,
|
||||
binding.Document,
|
||||
binding.Issues,
|
||||
PolicyScoringConfig.Default);
|
||||
|
||||
return (snapshot, binding.Issues);
|
||||
}
|
||||
|
||||
if (request.SnapshotOverride is not null)
|
||||
{
|
||||
return (request.SnapshotOverride, ImmutableArray<PolicyIssue>.Empty);
|
||||
}
|
||||
|
||||
var latest = await _snapshotStore.GetLatestAsync(cancellationToken).ConfigureAwait(false);
|
||||
if (latest is not null)
|
||||
{
|
||||
return (latest, ImmutableArray<PolicyIssue>.Empty);
|
||||
}
|
||||
|
||||
return (null, ImmutableArray.Create(PolicyIssue.Error("policy.preview.snapshot_missing", "No policy snapshot is available for preview.", "$")));
|
||||
}
|
||||
|
||||
private static ImmutableArray<PolicyVerdict> Evaluate(PolicyDocument document, PolicyScoringConfig scoringConfig, ImmutableArray<PolicyFinding> findings)
|
||||
{
|
||||
if (findings.IsDefaultOrEmpty)
|
||||
{
|
||||
return ImmutableArray<PolicyVerdict>.Empty;
|
||||
}
|
||||
|
||||
var results = ImmutableArray.CreateBuilder<PolicyVerdict>(findings.Length);
|
||||
foreach (var finding in findings)
|
||||
{
|
||||
var verdict = PolicyEvaluation.EvaluateFinding(document, scoringConfig, finding);
|
||||
results.Add(verdict);
|
||||
}
|
||||
|
||||
return results.ToImmutable();
|
||||
}
|
||||
|
||||
private static ImmutableDictionary<string, PolicyVerdict> BuildBaseline(ImmutableArray<PolicyVerdict> baseline, ImmutableArray<PolicyVerdict> projected, PolicyScoringConfig scoringConfig)
|
||||
{
|
||||
var builder = ImmutableDictionary.CreateBuilder<string, PolicyVerdict>(StringComparer.Ordinal);
|
||||
if (!baseline.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var verdict in baseline)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(verdict.FindingId) && !builder.ContainsKey(verdict.FindingId))
|
||||
{
|
||||
builder.Add(verdict.FindingId, verdict);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var verdict in projected)
|
||||
{
|
||||
if (!builder.ContainsKey(verdict.FindingId))
|
||||
{
|
||||
builder.Add(verdict.FindingId, PolicyVerdict.CreateBaseline(verdict.FindingId, scoringConfig));
|
||||
}
|
||||
}
|
||||
|
||||
return builder.ToImmutable();
|
||||
}
|
||||
|
||||
private static ImmutableArray<PolicyVerdictDiff> BuildDiffs(ImmutableDictionary<string, PolicyVerdict> baseline, ImmutableArray<PolicyVerdict> projected)
|
||||
{
|
||||
var diffs = ImmutableArray.CreateBuilder<PolicyVerdictDiff>(projected.Length);
|
||||
foreach (var verdict in projected.OrderBy(static v => v.FindingId, StringComparer.Ordinal))
|
||||
{
|
||||
var baseVerdict = baseline.TryGetValue(verdict.FindingId, out var existing)
|
||||
? existing
|
||||
: new PolicyVerdict(verdict.FindingId, PolicyVerdictStatus.Pass);
|
||||
|
||||
diffs.Add(new PolicyVerdictDiff(baseVerdict, verdict));
|
||||
}
|
||||
|
||||
return diffs.ToImmutable();
|
||||
}
|
||||
}
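A sketch of driving a preview with a proposed policy. The snapshotStore and proposedYaml values are assumed to exist (a configured PolicySnapshotStore and the candidate policy text); NullLogger keeps the example self-contained:

using System;
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Policy;

// snapshotStore: a PolicySnapshotStore wired to concrete repository implementations (assumed).
var previewService = new PolicyPreviewService(snapshotStore, NullLogger<PolicyPreviewService>.Instance);

var request = new PolicyPreviewRequest(
    ImageDigest: "sha256:<image-digest>",
    Findings: ImmutableArray.Create(
        PolicyFinding.Create("finding-001", PolicySeverity.High, environment: "prod")),
    BaselineVerdicts: ImmutableArray<PolicyVerdict>.Empty,
    ProposedPolicy: new PolicySnapshotContent(proposedYaml, PolicyDocumentFormat.Yaml, "reviewer", null, "preview run"));

var response = await previewService.PreviewAsync(request);
Console.WriteLine($"digest {response.PolicyDigest}, changed verdicts {response.ChangedCount}");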
|
||||
30
src/StellaOps.Policy/PolicySchemaResource.cs
Normal file
@@ -0,0 +1,30 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Reflection;
|
||||
using System.Text;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
public static class PolicySchemaResource
|
||||
{
|
||||
private const string SchemaResourceName = "StellaOps.Policy.Schemas.policy-schema@1.json";
|
||||
|
||||
public static Stream OpenSchemaStream()
|
||||
{
|
||||
var assembly = Assembly.GetExecutingAssembly();
|
||||
var stream = assembly.GetManifestResourceStream(SchemaResourceName);
|
||||
if (stream is null)
|
||||
{
|
||||
throw new InvalidOperationException($"Unable to locate embedded schema resource '{SchemaResourceName}'.");
|
||||
}
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
public static string ReadSchemaJson()
|
||||
{
|
||||
using var stream = OpenSchemaStream();
|
||||
using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true);
|
||||
return reader.ReadToEnd();
|
||||
}
|
||||
}
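A trivial sketch that exports the embedded schema so out-of-process validators can consume it:

using System.IO;
using StellaOps.Policy;

// Write the embedded policy schema next to the build output for external tooling.
File.WriteAllText("policy-schema@1.json", PolicySchemaResource.ReadSchemaJson());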
|
||||
16
src/StellaOps.Policy/PolicyScoringConfig.cs
Normal file
@@ -0,0 +1,16 @@
using System.Collections.Immutable;

namespace StellaOps.Policy;

public sealed record PolicyScoringConfig(
    string Version,
    ImmutableDictionary<PolicySeverity, double> SeverityWeights,
    double QuietPenalty,
    double WarnPenalty,
    double IgnorePenalty,
    ImmutableDictionary<string, double> TrustOverrides)
{
    public static string BaselineVersion => "1.0";

    public static PolicyScoringConfig Default { get; } = PolicyScoringConfigBinder.LoadDefault();
}
266
src/StellaOps.Policy/PolicyScoringConfigBinder.cs
Normal file
@@ -0,0 +1,266 @@
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Reflection;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
using YamlDotNet.Serialization;
|
||||
using YamlDotNet.Serialization.NamingConventions;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
public sealed record PolicyScoringBindingResult(
|
||||
bool Success,
|
||||
PolicyScoringConfig? Config,
|
||||
ImmutableArray<PolicyIssue> Issues);
|
||||
|
||||
public static class PolicyScoringConfigBinder
|
||||
{
|
||||
private const string DefaultResourceName = "StellaOps.Policy.Schemas.policy-scoring-default.json";
|
||||
|
||||
private static readonly JsonSerializerOptions SerializerOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
ReadCommentHandling = JsonCommentHandling.Skip,
|
||||
AllowTrailingCommas = true,
|
||||
};
|
||||
|
||||
private static readonly IDeserializer YamlDeserializer = new DeserializerBuilder()
|
||||
.WithNamingConvention(CamelCaseNamingConvention.Instance)
|
||||
.IgnoreUnmatchedProperties()
|
||||
.Build();
|
||||
|
||||
public static PolicyScoringConfig LoadDefault()
|
||||
{
|
||||
var assembly = Assembly.GetExecutingAssembly();
|
||||
using var stream = assembly.GetManifestResourceStream(DefaultResourceName)
|
||||
?? throw new InvalidOperationException($"Embedded resource '{DefaultResourceName}' not found.");
|
||||
using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true);
|
||||
var json = reader.ReadToEnd();
|
||||
var binding = Bind(json, PolicyDocumentFormat.Json);
|
||||
if (!binding.Success || binding.Config is null)
|
||||
{
|
||||
throw new InvalidOperationException("Failed to load default policy scoring configuration.");
|
||||
}
|
||||
|
||||
return binding.Config;
|
||||
}
|
||||
|
||||
public static PolicyScoringBindingResult Bind(string content, PolicyDocumentFormat format)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(content))
|
||||
{
|
||||
var issue = PolicyIssue.Error("scoring.empty", "Scoring configuration content is empty.", "$");
|
||||
return new PolicyScoringBindingResult(false, null, ImmutableArray.Create(issue));
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var root = Parse(content, format);
|
||||
if (root is not JsonObject obj)
|
||||
{
|
||||
var issue = PolicyIssue.Error("scoring.invalid", "Scoring configuration must be a JSON object.", "$");
|
||||
return new PolicyScoringBindingResult(false, null, ImmutableArray.Create(issue));
|
||||
}
|
||||
|
||||
var issues = ImmutableArray.CreateBuilder<PolicyIssue>();
|
||||
var config = BuildConfig(obj, issues);
|
||||
var hasErrors = issues.Any(issue => issue.Severity == PolicyIssueSeverity.Error);
|
||||
return new PolicyScoringBindingResult(!hasErrors, config, issues.ToImmutable());
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
var issue = PolicyIssue.Error("scoring.parse.json", $"Failed to parse scoring JSON: {ex.Message}", "$");
|
||||
return new PolicyScoringBindingResult(false, null, ImmutableArray.Create(issue));
|
||||
}
|
||||
catch (YamlDotNet.Core.YamlException ex)
|
||||
{
|
||||
var issue = PolicyIssue.Error("scoring.parse.yaml", $"Failed to parse scoring YAML: {ex.Message}", "$");
|
||||
return new PolicyScoringBindingResult(false, null, ImmutableArray.Create(issue));
|
||||
}
|
||||
}
|
||||
|
||||
private static JsonNode? Parse(string content, PolicyDocumentFormat format)
|
||||
{
|
||||
return format switch
|
||||
{
|
||||
PolicyDocumentFormat.Json => JsonNode.Parse(content, new JsonNodeOptions { PropertyNameCaseInsensitive = true }),
|
||||
PolicyDocumentFormat.Yaml => ConvertYamlToJsonNode(content),
|
||||
_ => throw new ArgumentOutOfRangeException(nameof(format), format, "Unsupported scoring configuration format."),
|
||||
};
|
||||
}
|
||||
|
||||
private static JsonNode? ConvertYamlToJsonNode(string content)
|
||||
{
|
||||
var yamlObject = YamlDeserializer.Deserialize<object?>(content);
|
||||
return PolicyBinderUtilities.ConvertYamlObject(yamlObject);
|
||||
}
|
||||
|
||||
private static PolicyScoringConfig BuildConfig(JsonObject obj, ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
var version = ReadString(obj, "version", issues, required: true) ?? PolicyScoringConfig.BaselineVersion;
|
||||
|
||||
var severityWeights = ReadSeverityWeights(obj, issues);
|
||||
var quietPenalty = ReadDouble(obj, "quietPenalty", issues, defaultValue: 45);
|
||||
var warnPenalty = ReadDouble(obj, "warnPenalty", issues, defaultValue: 15);
|
||||
var ignorePenalty = ReadDouble(obj, "ignorePenalty", issues, defaultValue: 35);
|
||||
var trustOverrides = ReadTrustOverrides(obj, issues);
|
||||
|
||||
return new PolicyScoringConfig(
|
||||
version,
|
||||
severityWeights,
|
||||
quietPenalty,
|
||||
warnPenalty,
|
||||
ignorePenalty,
|
||||
trustOverrides);
|
||||
}
|
||||
|
||||
private static ImmutableDictionary<PolicySeverity, double> ReadSeverityWeights(JsonObject obj, ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
if (!obj.TryGetPropertyValue("severityWeights", out var node) || node is not JsonObject severityObj)
|
||||
{
|
||||
issues.Add(PolicyIssue.Error("scoring.severityWeights.missing", "severityWeights section is required.", "$.severityWeights"));
|
||||
return ImmutableDictionary<PolicySeverity, double>.Empty;
|
||||
}
|
||||
|
||||
var builder = ImmutableDictionary.CreateBuilder<PolicySeverity, double>();
|
||||
foreach (var severity in Enum.GetValues<PolicySeverity>())
|
||||
{
|
||||
var key = severity.ToString();
|
||||
if (!severityObj.TryGetPropertyValue(key, out var valueNode))
|
||||
{
|
||||
issues.Add(PolicyIssue.Warning("scoring.severityWeights.default", $"Severity '{key}' not specified; defaulting to 0.", $"$.severityWeights.{key}"));
|
||||
builder[severity] = 0;
|
||||
continue;
|
||||
}
|
||||
|
||||
var value = ExtractDouble(valueNode, issues, $"$.severityWeights.{key}");
|
||||
builder[severity] = value;
|
||||
}
|
||||
|
||||
return builder.ToImmutable();
|
||||
}
|
||||
|
||||
private static double ReadDouble(JsonObject obj, string property, ImmutableArray<PolicyIssue>.Builder issues, double defaultValue)
|
||||
{
|
||||
if (!obj.TryGetPropertyValue(property, out var node))
|
||||
{
|
||||
issues.Add(PolicyIssue.Warning("scoring.numeric.default", $"{property} not specified; defaulting to {defaultValue:0.##}.", $"$.{property}"));
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
return ExtractDouble(node, issues, $"$.{property}");
|
||||
}
|
||||
|
||||
private static double ExtractDouble(JsonNode? node, ImmutableArray<PolicyIssue>.Builder issues, string path)
|
||||
{
|
||||
if (node is null)
|
||||
{
|
||||
issues.Add(PolicyIssue.Warning("scoring.numeric.null", $"Value at {path} missing; defaulting to 0.", path));
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (node is JsonValue value)
|
||||
{
|
||||
if (value.TryGetValue(out double number))
|
||||
{
|
||||
return number;
|
||||
}
|
||||
|
||||
if (value.TryGetValue(out string? text) && double.TryParse(text, NumberStyles.Float, CultureInfo.InvariantCulture, out number))
|
||||
{
|
||||
return number;
|
||||
}
|
||||
}
|
||||
|
||||
issues.Add(PolicyIssue.Error("scoring.numeric.invalid", $"Value at {path} is not numeric.", path));
|
||||
return 0;
|
||||
}
|
||||
|
||||
private static ImmutableDictionary<string, double> ReadTrustOverrides(JsonObject obj, ImmutableArray<PolicyIssue>.Builder issues)
|
||||
{
|
||||
if (!obj.TryGetPropertyValue("trustOverrides", out var node) || node is not JsonObject trustObj)
|
||||
{
|
||||
return ImmutableDictionary<string, double>.Empty;
|
||||
}
|
||||
|
||||
var builder = ImmutableDictionary.CreateBuilder<string, double>(StringComparer.OrdinalIgnoreCase);
|
||||
foreach (var pair in trustObj)
|
||||
{
|
||||
var value = ExtractDouble(pair.Value, issues, $"$.trustOverrides.{pair.Key}");
|
||||
builder[pair.Key] = value;
|
||||
}
|
||||
|
||||
return builder.ToImmutable();
|
||||
}
|
||||
|
||||
private static string? ReadString(JsonObject obj, string property, ImmutableArray<PolicyIssue>.Builder issues, bool required)
|
||||
{
|
||||
if (!obj.TryGetPropertyValue(property, out var node) || node is null)
|
||||
{
|
||||
if (required)
|
||||
{
|
||||
issues.Add(PolicyIssue.Error("scoring.string.missing", $"{property} is required.", $"$.{property}"));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
if (node is JsonValue value && value.TryGetValue(out string? text))
|
||||
{
|
||||
return text?.Trim();
|
||||
}
|
||||
|
||||
issues.Add(PolicyIssue.Error("scoring.string.invalid", $"{property} must be a string.", $"$.{property}"));
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
internal static class PolicyBinderUtilities
|
||||
{
|
||||
public static JsonNode? ConvertYamlObject(object? value)
|
||||
{
|
||||
switch (value)
|
||||
{
|
||||
case null:
|
||||
return null;
|
||||
case string s:
|
||||
return JsonValue.Create(s);
|
||||
case bool b:
|
||||
return JsonValue.Create(b);
|
||||
case sbyte or byte or short or ushort or int or uint or long or ulong or float or double or decimal:
|
||||
return JsonValue.Create(Convert.ToDouble(value, CultureInfo.InvariantCulture));
|
||||
case IDictionary dictionary:
|
||||
{
|
||||
var obj = new JsonObject();
|
||||
foreach (DictionaryEntry entry in dictionary)
|
||||
{
|
||||
if (entry.Key is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
obj[entry.Key.ToString()!] = ConvertYamlObject(entry.Value);
|
||||
}
|
||||
|
||||
return obj;
|
||||
}
|
||||
case IEnumerable enumerable:
|
||||
{
|
||||
var array = new JsonArray();
|
||||
foreach (var item in enumerable)
|
||||
{
|
||||
array.Add(ConvertYamlObject(item));
|
||||
}
|
||||
|
||||
return array;
|
||||
}
|
||||
default:
|
||||
return JsonValue.Create(value.ToString());
|
||||
}
|
||||
}
|
||||
}
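A sketch binding an illustrative scoring override. The key names (version, severityWeights, quietPenalty, warnPenalty, ignorePenalty, trustOverrides) come from the binder above; any severity omitted from severityWeights falls back to 0 with a warning:

using System;
using StellaOps.Policy;

var scoringYaml = """
version: "1.0"
severityWeights:
  Critical: 90
  High: 70
  Medium: 40
  Low: 10
  Informational: 0
  None: 0
  Unknown: 50
quietPenalty: 45
warnPenalty: 15
ignorePenalty: 35
""";

var binding = PolicyScoringConfigBinder.Bind(scoringYaml, PolicyDocumentFormat.Yaml);
if (binding.Success && binding.Config is { } config)
{
    Console.WriteLine($"scoring {config.Version}: quiet {config.QuietPenalty}, warn {config.WarnPenalty}, ignore {config.IgnorePenalty}");
}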
|
||||
29
src/StellaOps.Policy/PolicySnapshot.cs
Normal file
@@ -0,0 +1,29 @@
using System;
using System.Collections.Immutable;

namespace StellaOps.Policy;

public sealed record PolicySnapshot(
    long RevisionNumber,
    string RevisionId,
    string Digest,
    DateTimeOffset CreatedAt,
    string? CreatedBy,
    PolicyDocumentFormat Format,
    PolicyDocument Document,
    ImmutableArray<PolicyIssue> Issues,
    PolicyScoringConfig ScoringConfig);

public sealed record PolicySnapshotContent(
    string Content,
    PolicyDocumentFormat Format,
    string? Actor,
    string? Source,
    string? Description);

public sealed record PolicySnapshotSaveResult(
    bool Success,
    bool Created,
    string Digest,
    PolicySnapshot? Snapshot,
    PolicyBindingResult BindingResult);
101
src/StellaOps.Policy/PolicySnapshotStore.cs
Normal file
@@ -0,0 +1,101 @@
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
public sealed class PolicySnapshotStore
|
||||
{
|
||||
private readonly IPolicySnapshotRepository _snapshotRepository;
|
||||
private readonly IPolicyAuditRepository _auditRepository;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly ILogger<PolicySnapshotStore> _logger;
|
||||
private readonly SemaphoreSlim _mutex = new(1, 1);
|
||||
|
||||
public PolicySnapshotStore(
|
||||
IPolicySnapshotRepository snapshotRepository,
|
||||
IPolicyAuditRepository auditRepository,
|
||||
TimeProvider? timeProvider,
|
||||
ILogger<PolicySnapshotStore> logger)
|
||||
{
|
||||
_snapshotRepository = snapshotRepository ?? throw new ArgumentNullException(nameof(snapshotRepository));
|
||||
_auditRepository = auditRepository ?? throw new ArgumentNullException(nameof(auditRepository));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public async Task<PolicySnapshotSaveResult> SaveAsync(PolicySnapshotContent content, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (content is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(content));
|
||||
}
|
||||
|
||||
var bindingResult = PolicyBinder.Bind(content.Content, content.Format);
|
||||
if (!bindingResult.Success)
|
||||
{
|
||||
_logger.LogWarning("Policy snapshot rejected due to validation errors (Format: {Format})", content.Format);
|
||||
return new PolicySnapshotSaveResult(false, false, string.Empty, null, bindingResult);
|
||||
}
|
||||
|
||||
var digest = PolicyDigest.Compute(bindingResult.Document);
|
||||
|
||||
await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
|
||||
try
|
||||
{
|
||||
var latest = await _snapshotRepository.GetLatestAsync(cancellationToken).ConfigureAwait(false);
|
||||
if (latest is not null && string.Equals(latest.Digest, digest, StringComparison.Ordinal))
|
||||
{
|
||||
_logger.LogInformation("Policy snapshot unchanged; digest {Digest} matches revision {RevisionId}", digest, latest.RevisionId);
|
||||
return new PolicySnapshotSaveResult(true, false, digest, latest, bindingResult);
|
||||
}
|
||||
|
||||
var revisionNumber = (latest?.RevisionNumber ?? 0) + 1;
|
||||
var revisionId = $"rev-{revisionNumber}";
|
||||
var createdAt = _timeProvider.GetUtcNow();
|
||||
|
||||
var scoringConfig = PolicyScoringConfig.Default;
|
||||
|
||||
var snapshot = new PolicySnapshot(
|
||||
revisionNumber,
|
||||
revisionId,
|
||||
digest,
|
||||
createdAt,
|
||||
content.Actor,
|
||||
content.Format,
|
||||
bindingResult.Document,
|
||||
bindingResult.Issues,
|
||||
scoringConfig);
|
||||
|
||||
await _snapshotRepository.AddAsync(snapshot, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var auditMessage = content.Description ?? "Policy snapshot created";
|
||||
var auditEntry = new PolicyAuditEntry(
|
||||
Guid.NewGuid(),
|
||||
createdAt,
|
||||
"snapshot.created",
|
||||
revisionId,
|
||||
digest,
|
||||
content.Actor,
|
||||
auditMessage);
|
||||
|
||||
await _auditRepository.AddAsync(auditEntry, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Policy snapshot saved. Revision {RevisionId}, digest {Digest}, issues {IssueCount}",
|
||||
revisionId,
|
||||
digest,
|
||||
bindingResult.Issues.Length);
|
||||
|
||||
return new PolicySnapshotSaveResult(true, true, digest, snapshot, bindingResult);
|
||||
}
|
||||
finally
|
||||
{
|
||||
_mutex.Release();
|
||||
}
|
||||
}
|
||||
|
||||
public Task<PolicySnapshot?> GetLatestAsync(CancellationToken cancellationToken = default)
|
||||
=> _snapshotRepository.GetLatestAsync(cancellationToken);
|
||||
}
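A sketch of the save flow, assuming store is a PolicySnapshotStore wired to concrete repository implementations and policyYaml is the policy text. Saving identical content twice short-circuits on the canonical digest:

using System;
using StellaOps.Policy;

// store and policyYaml are assumed to exist in the surrounding application code.
var content = new PolicySnapshotContent(policyYaml, PolicyDocumentFormat.Yaml, "alice", "git", "initial import");

var first = await store.SaveAsync(content);
var second = await store.SaveAsync(content);

// The second call finds the same canonical digest and returns the existing revision.
Console.WriteLine($"first:  created={first.Created}, revision={first.Snapshot?.RevisionId}");
Console.WriteLine($"second: created={second.Created}, digest={second.Digest}");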
|
||||
241
src/StellaOps.Policy/PolicyValidationCli.cs
Normal file
@@ -0,0 +1,241 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
public sealed record PolicyValidationCliOptions
|
||||
{
|
||||
public IReadOnlyList<string> Inputs { get; init; } = Array.Empty<string>();
|
||||
|
||||
/// <summary>
|
||||
/// Writes machine-readable JSON instead of human-formatted text.
|
||||
/// </summary>
|
||||
public bool OutputJson { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When enabled, warnings cause a non-zero exit code.
|
||||
/// </summary>
|
||||
public bool Strict { get; init; }
|
||||
}
|
||||
|
||||
public sealed record PolicyValidationFileResult(
|
||||
string Path,
|
||||
PolicyBindingResult BindingResult,
|
||||
PolicyDiagnosticsReport Diagnostics);
|
||||
|
||||
public sealed class PolicyValidationCli
|
||||
{
|
||||
private readonly TextWriter _output;
|
||||
private readonly TextWriter _error;
|
||||
|
||||
public PolicyValidationCli(TextWriter? output = null, TextWriter? error = null)
|
||||
{
|
||||
_output = output ?? Console.Out;
|
||||
_error = error ?? Console.Error;
|
||||
}
|
||||
|
||||
public async Task<int> RunAsync(PolicyValidationCliOptions options, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (options is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(options));
|
||||
}
|
||||
|
||||
if (options.Inputs.Count == 0)
|
||||
{
|
||||
await _error.WriteLineAsync("No input files provided. Supply one or more policy file paths.");
|
||||
return 64; // EX_USAGE
|
||||
}
|
||||
|
||||
var results = new List<PolicyValidationFileResult>();
|
||||
foreach (var input in options.Inputs)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
var resolvedPaths = ResolveInput(input);
|
||||
if (resolvedPaths.Count == 0)
|
||||
{
|
||||
await _error.WriteLineAsync($"No files matched '{input}'.");
|
||||
continue;
|
||||
}
|
||||
|
||||
foreach (var path in resolvedPaths)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
var format = PolicySchema.DetectFormat(path);
|
||||
var content = await File.ReadAllTextAsync(path, cancellationToken);
|
||||
var bindingResult = PolicyBinder.Bind(content, format);
|
||||
var diagnostics = PolicyDiagnostics.Create(bindingResult);
|
||||
|
||||
results.Add(new PolicyValidationFileResult(path, bindingResult, diagnostics));
|
||||
}
|
||||
}
|
||||
|
||||
if (results.Count == 0)
|
||||
{
|
||||
await _error.WriteLineAsync("No files were processed.");
|
||||
return 65; // EX_DATAERR
|
||||
}
|
||||
|
||||
if (options.OutputJson)
|
||||
{
|
||||
WriteJson(results);
|
||||
}
|
||||
else
|
||||
{
|
||||
await WriteTextAsync(results, cancellationToken);
|
||||
}
|
||||
|
||||
var hasErrors = results.Any(static result => !result.BindingResult.Success);
|
||||
var hasWarnings = results.Any(static result => result.BindingResult.Issues.Any(static issue => issue.Severity == PolicyIssueSeverity.Warning));
|
||||
|
||||
if (hasErrors)
|
||||
{
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (options.Strict && hasWarnings)
|
||||
{
|
||||
return 2;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
private async Task WriteTextAsync(IReadOnlyList<PolicyValidationFileResult> results, CancellationToken cancellationToken)
|
||||
{
|
||||
foreach (var result in results)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
var relativePath = MakeRelative(result.Path);
|
||||
await _output.WriteLineAsync($"{relativePath} [{result.BindingResult.Format}]");
|
||||
|
||||
if (result.BindingResult.Issues.Length == 0)
|
||||
{
|
||||
await _output.WriteLineAsync(" OK");
|
||||
continue;
|
||||
}
|
||||
|
||||
foreach (var issue in result.BindingResult.Issues)
|
||||
{
|
||||
var severity = issue.Severity.ToString().ToUpperInvariant().PadRight(7);
|
||||
await _output.WriteLineAsync($" {severity} {issue.Path} :: {issue.Message} ({issue.Code})");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void WriteJson(IReadOnlyList<PolicyValidationFileResult> results)
|
||||
{
|
||||
var payload = results.Select(static result => new
|
||||
{
|
||||
path = result.Path,
|
||||
format = result.BindingResult.Format.ToString().ToLowerInvariant(),
|
||||
success = result.BindingResult.Success,
|
||||
issues = result.BindingResult.Issues.Select(static issue => new
|
||||
{
|
||||
code = issue.Code,
|
||||
message = issue.Message,
|
||||
severity = issue.Severity.ToString().ToLowerInvariant(),
|
||||
path = issue.Path,
|
||||
}),
|
||||
diagnostics = new
|
||||
{
|
||||
version = result.Diagnostics.Version,
|
||||
ruleCount = result.Diagnostics.RuleCount,
|
||||
errorCount = result.Diagnostics.ErrorCount,
|
||||
warningCount = result.Diagnostics.WarningCount,
|
||||
generatedAt = result.Diagnostics.GeneratedAt,
|
||||
recommendations = result.Diagnostics.Recommendations,
|
||||
},
|
||||
})
|
||||
.ToArray();
|
||||
|
||||
var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions
|
||||
{
|
||||
WriteIndented = true,
|
||||
});
|
||||
_output.WriteLine(json);
|
||||
}
|
||||
|
||||
private static IReadOnlyList<string> ResolveInput(string input)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(input))
|
||||
{
|
||||
return Array.Empty<string>();
|
||||
}
|
||||
|
||||
var expanded = Environment.ExpandEnvironmentVariables(input.Trim());
|
||||
if (File.Exists(expanded))
|
||||
{
|
||||
return new[] { Path.GetFullPath(expanded) };
|
||||
}
|
||||
|
||||
if (Directory.Exists(expanded))
|
||||
{
|
||||
return Directory.EnumerateFiles(expanded, "*.*", SearchOption.TopDirectoryOnly)
|
||||
.Where(static path => MatchesPolicyExtension(path))
|
||||
.OrderBy(static path => path, StringComparer.OrdinalIgnoreCase)
|
||||
.Select(Path.GetFullPath)
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
var directory = Path.GetDirectoryName(expanded);
|
||||
var searchPattern = Path.GetFileName(expanded);
|
||||
|
||||
if (string.IsNullOrEmpty(searchPattern))
|
||||
{
|
||||
return Array.Empty<string>();
|
||||
}
|
||||
|
||||
if (string.IsNullOrEmpty(directory))
|
||||
{
|
||||
directory = ".";
|
||||
}
|
||||
|
||||
if (!Directory.Exists(directory))
|
||||
{
|
||||
return Array.Empty<string>();
|
||||
}
|
||||
|
||||
return Directory.EnumerateFiles(directory, searchPattern, SearchOption.TopDirectoryOnly)
|
||||
.Where(static path => MatchesPolicyExtension(path))
|
||||
.OrderBy(static path => path, StringComparer.OrdinalIgnoreCase)
|
||||
.Select(Path.GetFullPath)
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
private static bool MatchesPolicyExtension(string path)
|
||||
{
|
||||
var extension = Path.GetExtension(path);
|
||||
return extension.Equals(".yaml", StringComparison.OrdinalIgnoreCase)
|
||||
|| extension.Equals(".yml", StringComparison.OrdinalIgnoreCase)
|
||||
|| extension.Equals(".json", StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
private static string MakeRelative(string path)
|
||||
{
|
||||
try
|
||||
{
|
||||
var fullPath = Path.GetFullPath(path);
|
||||
var current = Directory.GetCurrentDirectory();
|
||||
if (fullPath.StartsWith(current, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return fullPath[current.Length..].TrimStart(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
|
||||
}
|
||||
|
||||
return fullPath;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return path;
|
||||
}
|
||||
}
|
||||
}
|
||||
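A minimal sketch of driving the `PolicyValidationCli` class above from a host entry point. The option values and the glob-style input path are illustrative only; the exit codes mirror what `RunAsync` returns in the code above.

```csharp
using StellaOps.Policy;

var options = new PolicyValidationCliOptions
{
    Inputs = new[] { "policies/" },  // file, directory, or pattern such as policies/*.yaml
    OutputJson = false,
    Strict = true,                   // warnings also fail the run (exit code 2)
};

var cli = new PolicyValidationCli();
var exitCode = await cli.RunAsync(options);
return exitCode; // 0 = clean, 1 = errors, 2 = strict warnings, 64/65 = usage/data problems
```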
src/StellaOps.Policy/PolicyVerdict.cs (new file, 80 lines)
@@ -0,0 +1,80 @@
using System;
using System.Collections.Immutable;

namespace StellaOps.Policy;

public enum PolicyVerdictStatus
{
    Pass,
    Blocked,
    Ignored,
    Warned,
    Deferred,
    Escalated,
    RequiresVex,
}

public sealed record PolicyVerdict(
    string FindingId,
    PolicyVerdictStatus Status,
    string? RuleName = null,
    string? RuleAction = null,
    string? Notes = null,
    double Score = 0,
    string ConfigVersion = "1.0",
    ImmutableDictionary<string, double>? Inputs = null,
    string? QuietedBy = null,
    bool Quiet = false)
{
    public static PolicyVerdict CreateBaseline(string findingId, PolicyScoringConfig scoringConfig)
    {
        var inputs = ImmutableDictionary<string, double>.Empty;
        return new PolicyVerdict(
            findingId,
            PolicyVerdictStatus.Pass,
            RuleName: null,
            RuleAction: null,
            Notes: null,
            Score: 0,
            ConfigVersion: scoringConfig.Version,
            Inputs: inputs,
            QuietedBy: null,
            Quiet: false);
    }

    public ImmutableDictionary<string, double> GetInputs()
        => Inputs ?? ImmutableDictionary<string, double>.Empty;
}

public sealed record PolicyVerdictDiff(
    PolicyVerdict Baseline,
    PolicyVerdict Projected)
{
    public bool Changed
    {
        get
        {
            if (Baseline.Status != Projected.Status)
            {
                return true;
            }

            if (!string.Equals(Baseline.RuleName, Projected.RuleName, StringComparison.Ordinal))
            {
                return true;
            }

            if (Math.Abs(Baseline.Score - Projected.Score) > 0.0001)
            {
                return true;
            }

            if (!string.Equals(Baseline.QuietedBy, Projected.QuietedBy, StringComparison.Ordinal))
            {
                return true;
            }

            return false;
        }
    }
}
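A short sketch of how a preview-style caller (POLICY-CORE-09-003) might compare a stored baseline against a projected verdict using the records above. The `scoringConfig` instance and the projected values are hypothetical, not taken from the commit.

```csharp
// scoringConfig is an existing PolicyScoringConfig instance (assumption).
var baseline = PolicyVerdict.CreateBaseline("CVE-2025-0001@pkg:npm/lodash", scoringConfig);

var projected = baseline with
{
    Status = PolicyVerdictStatus.Warned,
    RuleName = "warn-medium-severity",
    Score = 50.0,
};

var diff = new PolicyVerdictDiff(baseline, projected);
Console.WriteLine(diff.Changed); // true: status, rule name, and score all moved
```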
src/StellaOps.Policy/Schemas/policy-schema@1.json (new file, 176 lines)
@@ -0,0 +1,176 @@
|
||||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://schemas.stella-ops.org/policy/policy-schema@1.json",
|
||||
"title": "StellaOps Policy Schema v1",
|
||||
"type": "object",
|
||||
"required": ["version", "rules"],
|
||||
"properties": {
|
||||
"version": {
|
||||
"type": ["string", "number"],
|
||||
"enum": ["1", "1.0", 1, 1.0]
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"metadata": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": ["string", "number", "boolean"]
|
||||
}
|
||||
},
|
||||
"rules": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": {
|
||||
"$ref": "#/$defs/rule"
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": true,
|
||||
"$defs": {
|
||||
"identifier": {
|
||||
"type": "string",
|
||||
"minLength": 1
|
||||
},
|
||||
"severity": {
|
||||
"type": "string",
|
||||
"enum": ["Critical", "High", "Medium", "Low", "Informational", "None", "Unknown"]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"minLength": 1
|
||||
},
|
||||
"uniqueItems": true
|
||||
},
|
||||
"rule": {
|
||||
"type": "object",
|
||||
"required": ["name", "action"],
|
||||
"properties": {
|
||||
"id": {
|
||||
"$ref": "#/$defs/identifier"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"minLength": 1
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"severity": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/$defs/severity"
|
||||
},
|
||||
"uniqueItems": true
|
||||
},
|
||||
"sources": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"vendors": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"licenses": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"tags": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"environments": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"images": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"repositories": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"packages": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"purls": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"cves": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"paths": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"layerDigests": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"usedByEntrypoint": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"justification": {
|
||||
"type": "string"
|
||||
},
|
||||
"quiet": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"action": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"enum": ["block", "fail", "deny", "ignore", "warn", "defer", "escalate", "requireVex"]
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"required": ["type"],
|
||||
"properties": {
|
||||
"type": {
|
||||
"type": "string"
|
||||
},
|
||||
"quiet": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"until": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"justification": {
|
||||
"type": "string"
|
||||
},
|
||||
"severity": {
|
||||
"$ref": "#/$defs/severity"
|
||||
},
|
||||
"vendors": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"justifications": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
},
|
||||
"epss": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"maximum": 1
|
||||
},
|
||||
"kev": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"additionalProperties": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"expires": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"until": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"metadata": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": ["string", "number", "boolean"]
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": true
|
||||
}
|
||||
}
|
||||
}
|
||||
src/StellaOps.Policy/Schemas/policy-scoring-default.json (new file, 21 lines)
@@ -0,0 +1,21 @@
{
  "version": "1.0",
  "severityWeights": {
    "Critical": 90.0,
    "High": 75.0,
    "Medium": 50.0,
    "Low": 25.0,
    "Informational": 10.0,
    "None": 0.0,
    "Unknown": 60.0
  },
  "quietPenalty": 45.0,
  "warnPenalty": 15.0,
  "ignorePenalty": 35.0,
  "trustOverrides": {
    "vendor": 1.0,
    "distro": 0.85,
    "platform": 0.75,
    "community": 0.65
  }
}
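For illustration only: one plausible way the defaults above could combine. The scoring/quiet engine is still open (POLICY-CORE-09-005), so both the `PolicyScoringDefaults` type name and the formula below are assumptions, not the shipped behaviour.

```csharp
// Hypothetical: weight scaled by source trust, minus the quiet penalty when a rule quiets the finding.
double Score(string severity, string sourceKind, bool quieted, PolicyScoringDefaults cfg)
{
    var weight = cfg.SeverityWeights[severity];     // e.g. "High"  -> 75.0
    var trust = cfg.TrustOverrides[sourceKind];     // e.g. "distro" -> 0.85
    var penalty = quieted ? cfg.QuietPenalty : 0.0; // e.g. 45.0 when quieted
    return Math.Max(0, weight * trust - penalty);   // quieted distro "High": 75 * 0.85 - 45 = 18.75
}
```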
@@ -3,5 +3,18 @@
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="System.CommandLine" Version="2.0.0-beta5.25306.1" />
    <PackageReference Include="YamlDotNet" Version="13.7.1" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
  </ItemGroup>

  <ItemGroup>
    <EmbeddedResource Include="Schemas\policy-schema@1.json" />
    <EmbeddedResource Include="Schemas\policy-scoring-default.json" />
  </ItemGroup>
</Project>
src/StellaOps.Policy/Storage/IPolicySnapshotRepository.cs (new file, 14 lines)
@@ -0,0 +1,14 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

namespace StellaOps.Policy;

public interface IPolicySnapshotRepository
{
    Task<PolicySnapshot?> GetLatestAsync(CancellationToken cancellationToken = default);

    Task<IReadOnlyList<PolicySnapshot>> ListAsync(int limit, CancellationToken cancellationToken = default);

    Task AddAsync(PolicySnapshot snapshot, CancellationToken cancellationToken = default);
}
@@ -0,0 +1,65 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace StellaOps.Policy;
|
||||
|
||||
public sealed class InMemoryPolicySnapshotRepository : IPolicySnapshotRepository
|
||||
{
|
||||
private readonly List<PolicySnapshot> _snapshots = new();
|
||||
private readonly SemaphoreSlim _mutex = new(1, 1);
|
||||
|
||||
public async Task AddAsync(PolicySnapshot snapshot, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (snapshot is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(snapshot));
|
||||
}
|
||||
|
||||
await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
|
||||
try
|
||||
{
|
||||
_snapshots.Add(snapshot);
|
||||
_snapshots.Sort(static (left, right) => left.RevisionNumber.CompareTo(right.RevisionNumber));
|
||||
}
|
||||
finally
|
||||
{
|
||||
_mutex.Release();
|
||||
}
|
||||
}
|
||||
|
||||
public async Task<PolicySnapshot?> GetLatestAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
|
||||
try
|
||||
{
|
||||
return _snapshots.Count == 0 ? null : _snapshots[^1];
|
||||
}
|
||||
finally
|
||||
{
|
||||
_mutex.Release();
|
||||
}
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<PolicySnapshot>> ListAsync(int limit, CancellationToken cancellationToken = default)
|
||||
{
|
||||
await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
|
||||
try
|
||||
{
|
||||
IEnumerable<PolicySnapshot> query = _snapshots;
|
||||
if (limit > 0)
|
||||
{
|
||||
query = query.TakeLast(limit);
|
||||
}
|
||||
|
||||
return query.ToImmutableArray();
|
||||
}
|
||||
finally
|
||||
{
|
||||
_mutex.Release();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,12 +2,17 @@

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| POLICY-CORE-09-001 | TODO | Policy Guild | SCANNER-WEB-09-101 | Define YAML schema/binder, diagnostics, CLI validation for policy files. | Schema doc published; binder loads sample policy; validation errors actionable. |
| POLICY-CORE-09-002 | TODO | Policy Guild | POLICY-CORE-09-001 | Implement policy snapshot store + revision digests + audit logging. | Snapshots persisted with digest; tests compare revisions; audit entries created. |
| POLICY-CORE-09-003 | TODO | Policy Guild | POLICY-CORE-09-002 | `/policy/preview` API (image digest → projected verdict delta). | Preview returns diff JSON; integration tests with mocked report; docs updated. |
| POLICY-CORE-09-001 | DONE | Policy Guild | SCANNER-WEB-09-101 | Define YAML schema/binder, diagnostics, CLI validation for policy files. | Schema doc published; binder loads sample policy; validation errors actionable. |
| POLICY-CORE-09-002 | DONE | Policy Guild | POLICY-CORE-09-001 | Implement policy snapshot store + revision digests + audit logging. | Snapshots persisted with digest; tests compare revisions; audit entries created. |
| POLICY-CORE-09-003 | DONE | Policy Guild | POLICY-CORE-09-002 | `/policy/preview` API (image digest → projected verdict delta). | Preview returns diff JSON; integration tests with mocked report; docs updated. |
| POLICY-CORE-09-004 | TODO | Policy Guild | POLICY-CORE-09-001 | Versioned scoring config with schema validation, trust table, and golden fixtures. | Scoring config documented; fixtures stored; validation CLI passes. |
| POLICY-CORE-09-005 | TODO | Policy Guild | POLICY-CORE-09-004 | Scoring/quiet engine – compute score, enforce VEX-only quiet rules, emit inputs and provenance. | Engine unit tests cover severity weighting; outputs include provenance data. |
| POLICY-CORE-09-006 | TODO | Policy Guild | POLICY-CORE-09-005 | Unknown state & confidence decay – deterministic bands surfaced in policy outputs. | Confidence decay tests pass; docs updated; preview endpoint displays banding. |
| POLICY-CORE-09-004 | TODO | Policy Guild | POLICY-CORE-09-001 | Versioned scoring config (weights, trust table, reachability buckets) with schema validation, binder, and golden fixtures. | Config serialized with semantic version, binder loads defaults, fixtures assert deterministic hash. |
| POLICY-CORE-09-005 | TODO | Policy Guild | POLICY-CORE-09-004, POLICY-CORE-09-002 | Implement scoring/quiet engine: compute score from config, enforce VEX-only quiet rules, emit inputs + `quietedBy` metadata in policy verdicts. | `/reports` policy result includes score, inputs, configVersion, quiet provenance; unit/integration tests prove reproducibility. |
| POLICY-CORE-09-006 | TODO | Policy Guild | POLICY-CORE-09-005, FEEDCORE-ENGINE-07-003 | Track unknown states with deterministic confidence bands that decay over time; expose state in policy outputs and docs. | Unknown flags + confidence band persisted, decay job deterministic, preview/report APIs show state with tests covering decay math. |

## Notes
- 2025-10-18: POLICY-CORE-09-001 completed. Binder + diagnostics + CLI scaffolding landed with tests; schema embedded at `src/StellaOps.Policy/Schemas/policy-schema@1.json` and referenced by docs/11_DATA_SCHEMAS.md.
- 2025-10-18: POLICY-CORE-09-002 completed. Snapshot store + audit trail implemented with deterministic digest hashing and tests covering revision increments and dedupe.
- 2025-10-18: POLICY-CORE-09-003 delivered. Preview service evaluates policy projections vs. baseline, returns verdict diffs, and ships with unit coverage.
src/StellaOps.Scanner.Core.Tests/Contracts/ScanJobTests.cs (new file, 81 lines)
@@ -0,0 +1,81 @@
|
||||
using System.Text.Json;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Core.Serialization;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Tests.Contracts;
|
||||
|
||||
public sealed class ScanJobTests
|
||||
{
|
||||
[Fact]
|
||||
public void SerializeAndDeserialize_RoundTripsDeterministically()
|
||||
{
|
||||
var createdAt = new DateTimeOffset(2025, 10, 18, 14, 30, 15, TimeSpan.Zero);
|
||||
var jobId = ScannerIdentifiers.CreateJobId("registry.example.com/stellaops/scanner:1.2.3", "sha256:ABCDEF", "tenant-a", "request-1");
|
||||
var correlationId = ScannerIdentifiers.CreateCorrelationId(jobId, "enqueue");
|
||||
var error = new ScannerError(
|
||||
ScannerErrorCode.AnalyzerFailure,
|
||||
ScannerErrorSeverity.Error,
|
||||
"Analyzer crashed for layer sha256:abc",
|
||||
createdAt,
|
||||
retryable: false,
|
||||
details: new Dictionary<string, string>
|
||||
{
|
||||
["stage"] = "analyze-os",
|
||||
["layer"] = "sha256:abc"
|
||||
});
|
||||
|
||||
var job = new ScanJob(
|
||||
jobId,
|
||||
ScanJobStatus.Running,
|
||||
"registry.example.com/stellaops/scanner:1.2.3",
|
||||
"SHA256:ABCDEF",
|
||||
createdAt,
|
||||
createdAt,
|
||||
correlationId,
|
||||
"tenant-a",
|
||||
new Dictionary<string, string>
|
||||
{
|
||||
["requestId"] = "request-1"
|
||||
},
|
||||
error);
|
||||
|
||||
var json = JsonSerializer.Serialize(job, ScannerJsonOptions.CreateDefault());
|
||||
var deserialized = JsonSerializer.Deserialize<ScanJob>(json, ScannerJsonOptions.CreateDefault());
|
||||
|
||||
Assert.NotNull(deserialized);
|
||||
Assert.Equal(job.Id, deserialized!.Id);
|
||||
Assert.Equal(job.ImageDigest, deserialized.ImageDigest);
|
||||
Assert.Equal(job.CorrelationId, deserialized.CorrelationId);
|
||||
Assert.Equal(job.Metadata["requestId"], deserialized.Metadata["requestId"]);
|
||||
|
||||
var secondJson = JsonSerializer.Serialize(deserialized, ScannerJsonOptions.CreateDefault());
|
||||
Assert.Equal(json, secondJson);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void WithStatus_UpdatesTimestampDeterministically()
|
||||
{
|
||||
var createdAt = new DateTimeOffset(2025, 10, 18, 14, 30, 15, 123, TimeSpan.Zero);
|
||||
var jobId = ScannerIdentifiers.CreateJobId("example/scanner:latest", "sha256:def", null, null);
|
||||
var correlationId = ScannerIdentifiers.CreateCorrelationId(jobId, "enqueue");
|
||||
|
||||
var job = new ScanJob(
|
||||
jobId,
|
||||
ScanJobStatus.Pending,
|
||||
"example/scanner:latest",
|
||||
"sha256:def",
|
||||
createdAt,
|
||||
null,
|
||||
correlationId,
|
||||
null,
|
||||
null,
|
||||
null);
|
||||
|
||||
var updated = job.WithStatus(ScanJobStatus.Running, createdAt.AddSeconds(5));
|
||||
|
||||
Assert.Equal(ScanJobStatus.Running, updated.Status);
|
||||
Assert.Equal(ScannerTimestamps.Normalize(createdAt.AddSeconds(5)), updated.UpdatedAt);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,39 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Core.Observability;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Tests.Observability;
|
||||
|
||||
public sealed class ScannerLogExtensionsTests
|
||||
{
|
||||
[Fact]
|
||||
public void BeginScanScope_PopulatesCorrelationContext()
|
||||
{
|
||||
using var factory = LoggerFactory.Create(builder => builder.AddFilter(_ => true));
|
||||
var logger = factory.CreateLogger("test");
|
||||
|
||||
var jobId = ScannerIdentifiers.CreateJobId("example/scanner:1.0", "sha256:abc", null, null);
|
||||
var correlationId = ScannerIdentifiers.CreateCorrelationId(jobId, "enqueue");
|
||||
var job = new ScanJob(
|
||||
jobId,
|
||||
ScanJobStatus.Pending,
|
||||
"example/scanner:1.0",
|
||||
"sha256:abc",
|
||||
DateTimeOffset.UtcNow,
|
||||
null,
|
||||
correlationId,
|
||||
null,
|
||||
null,
|
||||
null);
|
||||
|
||||
using (logger.BeginScanScope(job, "enqueue"))
|
||||
{
|
||||
Assert.True(ScannerCorrelationContextAccessor.TryGetCorrelationId(out var current));
|
||||
Assert.Equal(correlationId, current);
|
||||
}
|
||||
|
||||
Assert.False(ScannerCorrelationContextAccessor.TryGetCorrelationId(out _));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,89 @@
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using Microsoft.IdentityModel.Tokens;
|
||||
using StellaOps.Auth.Client;
|
||||
using StellaOps.Scanner.Core.Security;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Tests.Security;
|
||||
|
||||
public sealed class AuthorityTokenSourceTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task GetAsync_ReusesCachedTokenUntilRefreshSkew()
|
||||
{
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
|
||||
var client = new FakeTokenClient(timeProvider);
|
||||
var source = new AuthorityTokenSource(client, TimeSpan.FromSeconds(30), timeProvider, NullLogger<AuthorityTokenSource>.Instance);
|
||||
|
||||
var token1 = await source.GetAsync("scanner", new[] { "scanner.read" });
|
||||
Assert.Equal(1, client.RequestCount);
|
||||
|
||||
var token2 = await source.GetAsync("scanner", new[] { "scanner.read" });
|
||||
Assert.Equal(1, client.RequestCount);
|
||||
Assert.Equal(token1.AccessToken, token2.AccessToken);
|
||||
|
||||
timeProvider.Advance(TimeSpan.FromMinutes(3));
|
||||
var token3 = await source.GetAsync("scanner", new[] { "scanner.read" });
|
||||
Assert.Equal(2, client.RequestCount);
|
||||
Assert.NotEqual(token1.AccessToken, token3.AccessToken);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task InvalidateAsync_RemovesCachedToken()
|
||||
{
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
|
||||
var client = new FakeTokenClient(timeProvider);
|
||||
var source = new AuthorityTokenSource(client, TimeSpan.FromSeconds(30), timeProvider, NullLogger<AuthorityTokenSource>.Instance);
|
||||
|
||||
_ = await source.GetAsync("scanner", new[] { "scanner.read" });
|
||||
Assert.Equal(1, client.RequestCount);
|
||||
|
||||
await source.InvalidateAsync("scanner", new[] { "scanner.read" });
|
||||
_ = await source.GetAsync("scanner", new[] { "scanner.read" });
|
||||
|
||||
Assert.Equal(2, client.RequestCount);
|
||||
}
|
||||
|
||||
private sealed class FakeTokenClient : IStellaOpsTokenClient
|
||||
{
|
||||
private readonly FakeTimeProvider timeProvider;
|
||||
private int counter;
|
||||
|
||||
public FakeTokenClient(FakeTimeProvider timeProvider)
|
||||
{
|
||||
this.timeProvider = timeProvider;
|
||||
}
|
||||
|
||||
public int RequestCount => counter;
|
||||
|
||||
public Task<StellaOpsTokenResult> RequestClientCredentialsTokenAsync(string? scope = null, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var access = $"token-{Interlocked.Increment(ref counter)}";
|
||||
var expires = timeProvider.GetUtcNow().AddMinutes(2);
|
||||
var scopes = scope is null
|
||||
? Array.Empty<string>()
|
||||
: scope.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
|
||||
|
||||
return Task.FromResult(new StellaOpsTokenResult(access, "Bearer", expires, scopes));
|
||||
}
|
||||
|
||||
public Task<StellaOpsTokenResult> RequestPasswordTokenAsync(string username, string password, string? scope = null, CancellationToken cancellationToken = default)
|
||||
=> throw new NotSupportedException();
|
||||
|
||||
public Task<JsonWebKeySet> GetJsonWebKeySetAsync(CancellationToken cancellationToken = default)
|
||||
=> throw new NotSupportedException();
|
||||
|
||||
public ValueTask<StellaOpsTokenCacheEntry?> GetCachedTokenAsync(string key, CancellationToken cancellationToken = default)
|
||||
=> ValueTask.FromResult<StellaOpsTokenCacheEntry?>(null);
|
||||
|
||||
public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default)
|
||||
=> ValueTask.CompletedTask;
|
||||
|
||||
public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default)
|
||||
=> ValueTask.CompletedTask;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,117 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IdentityModel.Tokens.Jwt;
|
||||
using System.Security.Cryptography;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Microsoft.IdentityModel.Tokens;
|
||||
using StellaOps.Scanner.Core.Security;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Tests.Security;
|
||||
|
||||
public sealed class DpopProofValidatorTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task ValidateAsync_ReturnsSuccess_ForValidProof()
|
||||
{
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
|
||||
var validator = new DpopProofValidator(Options.Create(new DpopValidationOptions()), new InMemoryDpopReplayCache(timeProvider), timeProvider);
|
||||
using var key = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var securityKey = new ECDsaSecurityKey(key) { KeyId = Guid.NewGuid().ToString("N") };
|
||||
|
||||
var proof = CreateProof(timeProvider, securityKey, "GET", new Uri("https://scanner.example.com/api/v1/scans"));
|
||||
var result = await validator.ValidateAsync(proof, "GET", new Uri("https://scanner.example.com/api/v1/scans"));
|
||||
|
||||
Assert.True(result.IsValid);
|
||||
Assert.NotNull(result.PublicKey);
|
||||
Assert.NotNull(result.JwtId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateAsync_Fails_OnNonceMismatch()
|
||||
{
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
|
||||
var validator = new DpopProofValidator(Options.Create(new DpopValidationOptions()), new InMemoryDpopReplayCache(timeProvider), timeProvider);
|
||||
using var key = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var securityKey = new ECDsaSecurityKey(key) { KeyId = Guid.NewGuid().ToString("N") };
|
||||
|
||||
var proof = CreateProof(timeProvider, securityKey, "POST", new Uri("https://scanner.example.com/api/v1/scans"), nonce: "expected");
|
||||
var result = await validator.ValidateAsync(proof, "POST", new Uri("https://scanner.example.com/api/v1/scans"), nonce: "different");
|
||||
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Equal("invalid_token", result.ErrorCode);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateAsync_Fails_OnReplay()
|
||||
{
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
|
||||
var cache = new InMemoryDpopReplayCache(timeProvider);
|
||||
var validator = new DpopProofValidator(Options.Create(new DpopValidationOptions()), cache, timeProvider);
|
||||
using var key = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var securityKey = new ECDsaSecurityKey(key) { KeyId = Guid.NewGuid().ToString("N") };
|
||||
var jti = Guid.NewGuid().ToString();
|
||||
|
||||
var proof = CreateProof(timeProvider, securityKey, "GET", new Uri("https://scanner.example.com/api/v1/scans"), jti: jti);
|
||||
|
||||
var first = await validator.ValidateAsync(proof, "GET", new Uri("https://scanner.example.com/api/v1/scans"));
|
||||
Assert.True(first.IsValid);
|
||||
|
||||
var second = await validator.ValidateAsync(proof, "GET", new Uri("https://scanner.example.com/api/v1/scans"));
|
||||
Assert.False(second.IsValid);
|
||||
Assert.Equal("replay", second.ErrorCode);
|
||||
}
|
||||
|
||||
private static string CreateProof(FakeTimeProvider timeProvider, ECDsaSecurityKey key, string method, Uri uri, string? nonce = null, string? jti = null)
|
||||
{
|
||||
var handler = new JwtSecurityTokenHandler();
|
||||
var signingCredentials = new SigningCredentials(key, SecurityAlgorithms.EcdsaSha256);
|
||||
var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(key);
|
||||
|
||||
var header = new JwtHeader(signingCredentials)
|
||||
{
|
||||
["typ"] = "dpop+jwt",
|
||||
["jwk"] = new Dictionary<string, object?>
|
||||
{
|
||||
["kty"] = jwk.Kty,
|
||||
["crv"] = jwk.Crv,
|
||||
["x"] = jwk.X,
|
||||
["y"] = jwk.Y
|
||||
}
|
||||
};
|
||||
|
||||
var payload = new JwtPayload
|
||||
{
|
||||
["htm"] = method.ToUpperInvariant(),
|
||||
["htu"] = Normalize(uri),
|
||||
["iat"] = timeProvider.GetUtcNow().ToUnixTimeSeconds(),
|
||||
["jti"] = jti ?? Guid.NewGuid().ToString()
|
||||
};
|
||||
|
||||
if (nonce is not null)
|
||||
{
|
||||
payload["nonce"] = nonce;
|
||||
}
|
||||
|
||||
var token = new JwtSecurityToken(header, payload);
|
||||
return handler.WriteToken(token);
|
||||
}
|
||||
|
||||
private static string Normalize(Uri uri)
|
||||
{
|
||||
var builder = new UriBuilder(uri)
|
||||
{
|
||||
Fragment = string.Empty
|
||||
};
|
||||
|
||||
builder.Host = builder.Host.ToLowerInvariant();
|
||||
builder.Scheme = builder.Scheme.ToLowerInvariant();
|
||||
|
||||
if ((builder.Scheme == "http" && builder.Port == 80) || (builder.Scheme == "https" && builder.Port == 443))
|
||||
{
|
||||
builder.Port = -1;
|
||||
}
|
||||
|
||||
return builder.Uri.GetComponents(UriComponents.SchemeAndServer | UriComponents.PathAndQuery, UriFormat.UriEscaped);
|
||||
}
|
||||
}
|
||||
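A sketch of validating an incoming DPoP proof the same way the tests above wire the validator. The `dpopHeaderValue` and `expectedNonce` parameters are placeholders taken from the caller's HTTP request, and passing `TimeProvider.System` assumes the constructors accept any `TimeProvider` (the tests only show `FakeTimeProvider`).

```csharp
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Core.Security;

async Task<bool> IsProofValidAsync(string dpopHeaderValue, string? expectedNonce)
{
    var timeProvider = TimeProvider.System;
    var validator = new DpopProofValidator(
        Options.Create(new DpopValidationOptions()),
        new InMemoryDpopReplayCache(timeProvider),
        timeProvider);

    var result = await validator.ValidateAsync(
        dpopHeaderValue,
        "POST",
        new Uri("https://scanner.example.com/api/v1/scans"),
        nonce: expectedNonce);

    // On failure, result.ErrorCode surfaces values such as "invalid_token" or "replay".
    return result.IsValid;
}
```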
@@ -0,0 +1,26 @@
using System;
using StellaOps.Scanner.Core.Security;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Security;

public sealed class RestartOnlyPluginGuardTests
{
    [Fact]
    public void EnsureRegistrationAllowed_AllowsNewPluginsBeforeSeal()
    {
        var guard = new RestartOnlyPluginGuard();
        guard.EnsureRegistrationAllowed("./plugins/analyzer.dll");

        Assert.Contains(guard.KnownPlugins, path => path.EndsWith("analyzer.dll", StringComparison.OrdinalIgnoreCase));
    }

    [Fact]
    public void EnsureRegistrationAllowed_ThrowsAfterSeal()
    {
        var guard = new RestartOnlyPluginGuard(new[] { "./plugins/a.dll" });
        guard.Seal();

        Assert.Throws<InvalidOperationException>(() => guard.EnsureRegistrationAllowed("./plugins/new.dll"));
    }
}
@@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="../StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />
    <ProjectReference Include="../StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" />
    <ProjectReference Include="../StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" />
  </ItemGroup>
</Project>
@@ -0,0 +1,33 @@
using StellaOps.Scanner.Core.Utility;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Utility;

public sealed class ScannerIdentifiersTests
{
    [Fact]
    public void CreateJobId_IsDeterministicAndCaseInsensitive()
    {
        var first = ScannerIdentifiers.CreateJobId("registry.example.com/repo:latest", "SHA256:ABC", "Tenant-A", "salt");
        var second = ScannerIdentifiers.CreateJobId("REGISTRY.EXAMPLE.COM/REPO:latest", "sha256:abc", "tenant-a", "salt");

        Assert.Equal(first, second);
    }

    [Fact]
    public void CreateDeterministicHash_ProducesLowercaseHex()
    {
        var hash = ScannerIdentifiers.CreateDeterministicHash("scan", "abc", "123");

        Assert.Matches("^[0-9a-f]{64}$", hash);
        Assert.Equal(hash, hash.ToLowerInvariant());
    }

    [Fact]
    public void NormalizeImageReference_LowercasesRegistryAndRepository()
    {
        var normalized = ScannerIdentifiers.NormalizeImageReference("Registry.Example.com/StellaOps/Scanner:1.0");

        Assert.Equal("registry.example.com/stellaops/scanner:1.0", normalized);
    }
}
@@ -0,0 +1,26 @@
using StellaOps.Scanner.Core.Utility;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Utility;

public sealed class ScannerTimestampsTests
{
    [Fact]
    public void Normalize_TrimsToMicroseconds()
    {
        var value = new DateTimeOffset(2025, 10, 18, 14, 30, 15, TimeSpan.Zero).AddTicks(7);
        var normalized = ScannerTimestamps.Normalize(value);

        var expectedTicks = value.UtcTicks - (value.UtcTicks % 10);
        Assert.Equal(expectedTicks, normalized.UtcTicks);
    }

    [Fact]
    public void ToIso8601_ProducesUtcString()
    {
        var value = new DateTimeOffset(2025, 10, 18, 14, 30, 15, TimeSpan.FromHours(-4));
        var iso = ScannerTimestamps.ToIso8601(value);

        Assert.Equal("2025-10-18T18:30:15.000000Z", iso);
    }
}
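A small sketch pulling the deterministic helpers together the way the two test fixtures above exercise them; the image reference, tenant, and salt values are illustrative.

```csharp
using StellaOps.Scanner.Core.Utility;

var jobId = ScannerIdentifiers.CreateJobId(
    "registry.example.com/stellaops/scanner:1.2.3",
    "sha256:abcdef",
    "tenant-a",
    "request-1");

var correlationId = ScannerIdentifiers.CreateCorrelationId(jobId, "enqueue");
var normalizedRef = ScannerIdentifiers.NormalizeImageReference("Registry.Example.com/StellaOps/Scanner:1.2.3");
var startedAt = ScannerTimestamps.ToIso8601(DateTimeOffset.UtcNow); // e.g. 2025-10-18T18:30:15.000000Z

Console.WriteLine($"{normalizedRef} {jobId} {correlationId} {startedAt}");
```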
src/StellaOps.Scanner.Core/AGENTS.md (new file, 29 lines)
@@ -0,0 +1,29 @@
# AGENTS
## Role
Provide shared scanner contracts, observability primitives, and security utilities consumed by the WebService, Worker, analyzers, and downstream tooling.
## Scope
- Canonical DTOs for scan jobs, progress, outcomes, and error taxonomy shared across scanner services.
- Deterministic ID and timestamp helpers to guarantee reproducible job identifiers and ISO-8601 rendering.
- Observability helpers (logging scopes, correlation IDs, metric naming, activity sources) with negligible overhead.
- Authority/OpTok integrations, DPoP validation helpers, and restart-time plug-in guardrails for scanner components.
## Participants
- Scanner.WebService and Scanner.Worker depend on these primitives for request handling, queue interactions, and diagnostics.
- Policy/Signer integrations rely on deterministic identifiers and timestamps emitted here.
- DevOps/Offline kits bundle plug-in manifests validated via the guardrails defined in this module.
## Interfaces & contracts
- DTOs must round-trip via System.Text.Json with `JsonSerializerDefaults.Web` and preserve ordering.
- Deterministic helpers must not depend on ambient time/randomness; they derive IDs from explicit inputs and normalize timestamps to microsecond precision in UTC.
- Observability scopes expose `scanId`, `jobId`, `correlationId`, and `imageDigest` fields with `stellaops.scanner` metric prefixing.
- Security helpers expose `IAuthorityTokenSource`, `IDPoPProofValidator`, and `IPluginCatalogGuard` abstractions with DI-friendly implementations.
## In/Out of scope
In: shared contracts, telemetry primitives, security utilities, plug-in manifest checks.
Out: queue implementations, analyzer logic, storage adapters, HTTP endpoints, UI wiring.
## Observability & security expectations
- No network calls except via registered Authority clients.
- Avoid allocations in hot paths; prefer struct enumerables/`ValueTask`.
- All logs structured, correlation IDs propagated, no secrets persisted.
- DPoP validation enforces the algorithm allowlist (ES256/ES384) and wires replay-cache checks.
## Tests
- `../StellaOps.Scanner.Core.Tests` owns unit coverage with deterministic fixtures.
- Golden JSON for DTO round-trips stored under `Fixtures/`.
- Security and observability helpers must include tests proving deterministic outputs and rejecting malformed proofs.
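A sketch of the logging-scope contract described above, mirroring the `ScannerLogExtensionsTests` fixture later in this commit. `logger` is an existing `Microsoft.Extensions.Logging.ILogger` instance supplied by the host; the job values are illustrative.

```csharp
var jobId = ScannerIdentifiers.CreateJobId("example/scanner:1.0", "sha256:abc", null, null);
var job = new ScanJob(
    jobId,
    ScanJobStatus.Pending,
    "example/scanner:1.0",
    "sha256:abc",
    DateTimeOffset.UtcNow,
    null,
    ScannerIdentifiers.CreateCorrelationId(jobId, "enqueue"),
    null,
    null,
    null);

using (logger.BeginScanScope(job, "enqueue"))
{
    // scanId / correlationId / stage are attached to every log record inside the scope,
    // and ScannerCorrelationContextAccessor.TryGetCorrelationId(out _) returns true here.
    logger.LogInformation("Scan queued for {ImageReference}", job.ImageReference);
}
```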
src/StellaOps.Scanner.Core/Contracts/ScanJob.cs (new file, 173 lines)
@@ -0,0 +1,173 @@
|
||||
using System.Collections.ObjectModel;
|
||||
using System.Globalization;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Contracts;
|
||||
|
||||
[JsonConverter(typeof(ScanJobIdJsonConverter))]
|
||||
public readonly record struct ScanJobId(Guid Value)
|
||||
{
|
||||
public static readonly ScanJobId Empty = new(Guid.Empty);
|
||||
|
||||
public override string ToString()
|
||||
=> Value.ToString("n", CultureInfo.InvariantCulture);
|
||||
|
||||
public static ScanJobId From(Guid value)
|
||||
=> new(value);
|
||||
|
||||
public static bool TryParse(string? text, out ScanJobId id)
|
||||
{
|
||||
if (Guid.TryParse(text, out var guid))
|
||||
{
|
||||
id = new ScanJobId(guid);
|
||||
return true;
|
||||
}
|
||||
|
||||
id = Empty;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
[JsonConverter(typeof(JsonStringEnumConverter<ScanJobStatus>))]
|
||||
public enum ScanJobStatus
|
||||
{
|
||||
Unknown = 0,
|
||||
Pending,
|
||||
Queued,
|
||||
Running,
|
||||
Succeeded,
|
||||
Failed,
|
||||
Cancelled
|
||||
}
|
||||
|
||||
public sealed class ScanJob
|
||||
{
|
||||
private static readonly IReadOnlyDictionary<string, string> EmptyMetadata =
|
||||
new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(0, StringComparer.Ordinal));
|
||||
|
||||
[JsonConstructor]
|
||||
public ScanJob(
|
||||
ScanJobId id,
|
||||
ScanJobStatus status,
|
||||
string imageReference,
|
||||
string? imageDigest,
|
||||
DateTimeOffset createdAt,
|
||||
DateTimeOffset? updatedAt,
|
||||
string correlationId,
|
||||
string? tenantId,
|
||||
IReadOnlyDictionary<string, string>? metadata = null,
|
||||
ScannerError? failure = null)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(imageReference))
|
||||
{
|
||||
throw new ArgumentException("Image reference cannot be null or whitespace.", nameof(imageReference));
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(correlationId))
|
||||
{
|
||||
throw new ArgumentException("Correlation identifier cannot be null or whitespace.", nameof(correlationId));
|
||||
}
|
||||
|
||||
Id = id;
|
||||
Status = status;
|
||||
ImageReference = imageReference.Trim();
|
||||
ImageDigest = NormalizeDigest(imageDigest);
|
||||
CreatedAt = ScannerTimestamps.Normalize(createdAt);
|
||||
UpdatedAt = updatedAt is null ? null : ScannerTimestamps.Normalize(updatedAt.Value);
|
||||
CorrelationId = correlationId;
|
||||
TenantId = string.IsNullOrWhiteSpace(tenantId) ? null : tenantId.Trim();
|
||||
Metadata = metadata is null or { Count: 0 }
|
||||
? EmptyMetadata
|
||||
: new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(metadata, StringComparer.Ordinal));
|
||||
Failure = failure;
|
||||
}
|
||||
|
||||
[JsonPropertyName("id")]
|
||||
[JsonPropertyOrder(0)]
|
||||
public ScanJobId Id { get; }
|
||||
|
||||
[JsonPropertyName("status")]
|
||||
[JsonPropertyOrder(1)]
|
||||
public ScanJobStatus Status { get; init; }
|
||||
|
||||
[JsonPropertyName("imageReference")]
|
||||
[JsonPropertyOrder(2)]
|
||||
public string ImageReference { get; }
|
||||
|
||||
[JsonPropertyName("imageDigest")]
|
||||
[JsonPropertyOrder(3)]
|
||||
public string? ImageDigest { get; }
|
||||
|
||||
[JsonPropertyName("createdAt")]
|
||||
[JsonPropertyOrder(4)]
|
||||
public DateTimeOffset CreatedAt { get; }
|
||||
|
||||
[JsonPropertyName("updatedAt")]
|
||||
[JsonPropertyOrder(5)]
|
||||
public DateTimeOffset? UpdatedAt { get; init; }
|
||||
|
||||
[JsonPropertyName("correlationId")]
|
||||
[JsonPropertyOrder(6)]
|
||||
public string CorrelationId { get; }
|
||||
|
||||
[JsonPropertyName("tenantId")]
|
||||
[JsonPropertyOrder(7)]
|
||||
public string? TenantId { get; }
|
||||
|
||||
[JsonPropertyName("metadata")]
|
||||
[JsonPropertyOrder(8)]
|
||||
public IReadOnlyDictionary<string, string> Metadata { get; }
|
||||
|
||||
[JsonPropertyName("failure")]
|
||||
[JsonPropertyOrder(9)]
|
||||
public ScannerError? Failure { get; init; }
|
||||
|
||||
public ScanJob WithStatus(ScanJobStatus status, DateTimeOffset? updatedAt = null)
|
||||
=> new(
|
||||
Id,
|
||||
status,
|
||||
ImageReference,
|
||||
ImageDigest,
|
||||
CreatedAt,
|
||||
updatedAt ?? UpdatedAt ?? CreatedAt,
|
||||
CorrelationId,
|
||||
TenantId,
|
||||
Metadata,
|
||||
Failure);
|
||||
|
||||
public ScanJob WithFailure(ScannerError failure, DateTimeOffset? updatedAt = null, TimeProvider? timeProvider = null)
|
||||
=> new(
|
||||
Id,
|
||||
ScanJobStatus.Failed,
|
||||
ImageReference,
|
||||
ImageDigest,
|
||||
CreatedAt,
|
||||
updatedAt ?? ScannerTimestamps.UtcNow(timeProvider),
|
||||
CorrelationId,
|
||||
TenantId,
|
||||
Metadata,
|
||||
failure);
|
||||
|
||||
private static string? NormalizeDigest(string? digest)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(digest))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var trimmed = digest.Trim();
|
||||
if (!trimmed.StartsWith("sha", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return trimmed;
|
||||
}
|
||||
|
||||
var parts = trimmed.Split(':', 2, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
|
||||
if (parts.Length != 2)
|
||||
{
|
||||
return trimmed.ToLowerInvariant();
|
||||
}
|
||||
|
||||
return $"{parts[0].ToLowerInvariant()}:{parts[1].ToLowerInvariant()}";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,26 @@
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Scanner.Core.Contracts;

internal sealed class ScanJobIdJsonConverter : JsonConverter<ScanJobId>
{
    public override ScanJobId Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        if (reader.TokenType != JsonTokenType.String)
        {
            throw new JsonException("Expected scan job identifier to be a string.");
        }

        var value = reader.GetString();
        if (!ScanJobId.TryParse(value, out var id))
        {
            throw new JsonException("Invalid scan job identifier.");
        }

        return id;
    }

    public override void Write(Utf8JsonWriter writer, ScanJobId value, JsonSerializerOptions options)
        => writer.WriteStringValue(value.ToString());
}
src/StellaOps.Scanner.Core/Contracts/ScanProgressEvent.cs (new file, 121 lines)
@@ -0,0 +1,121 @@
|
||||
using System.Collections.ObjectModel;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Contracts;
|
||||
|
||||
[JsonConverter(typeof(JsonStringEnumConverter<ScanStage>))]
|
||||
public enum ScanStage
|
||||
{
|
||||
Unknown = 0,
|
||||
ResolveImage,
|
||||
FetchLayers,
|
||||
MountLayers,
|
||||
AnalyzeOperatingSystem,
|
||||
AnalyzeLanguageEcosystems,
|
||||
AnalyzeNativeArtifacts,
|
||||
ComposeSbom,
|
||||
BuildDiffs,
|
||||
EmitArtifacts,
|
||||
SignArtifacts,
|
||||
Complete
|
||||
}
|
||||
|
||||
[JsonConverter(typeof(JsonStringEnumConverter<ScanProgressEventKind>))]
|
||||
public enum ScanProgressEventKind
|
||||
{
|
||||
Progress = 0,
|
||||
StageStarted,
|
||||
StageCompleted,
|
||||
Warning,
|
||||
Error
|
||||
}
|
||||
|
||||
public sealed class ScanProgressEvent
|
||||
{
|
||||
private static readonly IReadOnlyDictionary<string, string> EmptyAttributes =
|
||||
new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(0, StringComparer.Ordinal));
|
||||
|
||||
[JsonConstructor]
|
||||
public ScanProgressEvent(
|
||||
ScanJobId jobId,
|
||||
ScanStage stage,
|
||||
ScanProgressEventKind kind,
|
||||
int sequence,
|
||||
DateTimeOffset timestamp,
|
||||
double? percentComplete = null,
|
||||
string? message = null,
|
||||
IReadOnlyDictionary<string, string>? attributes = null,
|
||||
ScannerError? error = null)
|
||||
{
|
||||
if (sequence < 0)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(nameof(sequence), sequence, "Sequence cannot be negative.");
|
||||
}
|
||||
|
||||
JobId = jobId;
|
||||
Stage = stage;
|
||||
Kind = kind;
|
||||
Sequence = sequence;
|
||||
Timestamp = ScannerTimestamps.Normalize(timestamp);
|
||||
PercentComplete = percentComplete is < 0 or > 100 ? null : percentComplete;
|
||||
Message = message is { Length: > 0 } ? message.Trim() : null;
|
||||
Attributes = attributes is null or { Count: 0 }
|
||||
? EmptyAttributes
|
||||
: new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(attributes, StringComparer.Ordinal));
|
||||
Error = error;
|
||||
}
|
||||
|
||||
[JsonPropertyName("jobId")]
|
||||
[JsonPropertyOrder(0)]
|
||||
public ScanJobId JobId { get; }
|
||||
|
||||
[JsonPropertyName("stage")]
|
||||
[JsonPropertyOrder(1)]
|
||||
public ScanStage Stage { get; }
|
||||
|
||||
[JsonPropertyName("kind")]
|
||||
[JsonPropertyOrder(2)]
|
||||
public ScanProgressEventKind Kind { get; }
|
||||
|
||||
[JsonPropertyName("sequence")]
|
||||
[JsonPropertyOrder(3)]
|
||||
public int Sequence { get; }
|
||||
|
||||
[JsonPropertyName("timestamp")]
|
||||
[JsonPropertyOrder(4)]
|
||||
public DateTimeOffset Timestamp { get; }
|
||||
|
||||
[JsonPropertyName("percentComplete")]
|
||||
[JsonPropertyOrder(5)]
|
||||
public double? PercentComplete { get; }
|
||||
|
||||
[JsonPropertyName("message")]
|
||||
[JsonPropertyOrder(6)]
|
||||
public string? Message { get; }
|
||||
|
||||
[JsonPropertyName("attributes")]
|
||||
[JsonPropertyOrder(7)]
|
||||
public IReadOnlyDictionary<string, string> Attributes { get; }
|
||||
|
||||
[JsonPropertyName("error")]
|
||||
[JsonPropertyOrder(8)]
|
||||
public ScannerError? Error { get; }
|
||||
|
||||
public ScanProgressEvent With(
|
||||
ScanProgressEventKind? kind = null,
|
||||
double? percentComplete = null,
|
||||
string? message = null,
|
||||
IReadOnlyDictionary<string, string>? attributes = null,
|
||||
ScannerError? error = null)
|
||||
=> new(
|
||||
JobId,
|
||||
Stage,
|
||||
kind ?? Kind,
|
||||
Sequence,
|
||||
Timestamp,
|
||||
percentComplete ?? PercentComplete,
|
||||
message ?? Message,
|
||||
attributes ?? Attributes,
|
||||
error ?? Error);
|
||||
}
|
||||
src/StellaOps.Scanner.Core/Contracts/ScannerError.cs (new file, 110 lines)
@@ -0,0 +1,110 @@
|
||||
using System.Collections.ObjectModel;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Contracts;
|
||||
|
||||
[JsonConverter(typeof(JsonStringEnumConverter<ScannerErrorCode>))]
|
||||
public enum ScannerErrorCode
|
||||
{
|
||||
Unknown = 0,
|
||||
InvalidImageReference,
|
||||
ImageNotFound,
|
||||
AuthorizationFailed,
|
||||
QueueUnavailable,
|
||||
StorageUnavailable,
|
||||
AnalyzerFailure,
|
||||
ExportFailure,
|
||||
SigningFailure,
|
||||
RuntimeFailure,
|
||||
Timeout,
|
||||
Cancelled,
|
||||
PluginViolation
|
||||
}
|
||||
|
||||
[JsonConverter(typeof(JsonStringEnumConverter<ScannerErrorSeverity>))]
|
||||
public enum ScannerErrorSeverity
|
||||
{
|
||||
Warning = 0,
|
||||
Error,
|
||||
Fatal
|
||||
}
|
||||
|
||||
public sealed class ScannerError
|
||||
{
|
||||
private static readonly IReadOnlyDictionary<string, string> EmptyDetails =
|
||||
new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(0, StringComparer.Ordinal));
|
||||
|
||||
[JsonConstructor]
|
||||
public ScannerError(
|
||||
ScannerErrorCode code,
|
||||
ScannerErrorSeverity severity,
|
||||
string message,
|
||||
DateTimeOffset timestamp,
|
||||
bool retryable,
|
||||
IReadOnlyDictionary<string, string>? details = null,
|
||||
string? stage = null,
|
||||
string? component = null)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(message))
|
||||
{
|
||||
throw new ArgumentException("Error message cannot be null or whitespace.", nameof(message));
|
||||
}
|
||||
|
||||
Code = code;
|
||||
Severity = severity;
|
||||
Message = message.Trim();
|
||||
Timestamp = ScannerTimestamps.Normalize(timestamp);
|
||||
Retryable = retryable;
|
||||
Stage = stage;
|
||||
Component = component;
|
||||
Details = details is null or { Count: 0 }
|
||||
? EmptyDetails
|
||||
: new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(details, StringComparer.Ordinal));
|
||||
}
|
||||
|
||||
[JsonPropertyName("code")]
|
||||
[JsonPropertyOrder(0)]
|
||||
public ScannerErrorCode Code { get; }
|
||||
|
||||
[JsonPropertyName("severity")]
|
||||
[JsonPropertyOrder(1)]
|
||||
public ScannerErrorSeverity Severity { get; }
|
||||
|
||||
[JsonPropertyName("message")]
|
||||
[JsonPropertyOrder(2)]
|
||||
public string Message { get; }
|
||||
|
||||
[JsonPropertyName("timestamp")]
|
||||
[JsonPropertyOrder(3)]
|
||||
public DateTimeOffset Timestamp { get; }
|
||||
|
||||
[JsonPropertyName("retryable")]
|
||||
[JsonPropertyOrder(4)]
|
||||
public bool Retryable { get; }
|
||||
|
||||
[JsonPropertyName("stage")]
|
||||
[JsonPropertyOrder(5)]
|
||||
public string? Stage { get; }
|
||||
|
||||
[JsonPropertyName("component")]
|
||||
[JsonPropertyOrder(6)]
|
||||
public string? Component { get; }
|
||||
|
||||
[JsonPropertyName("details")]
|
||||
[JsonPropertyOrder(7)]
|
||||
public IReadOnlyDictionary<string, string> Details { get; }
|
||||
|
||||
public ScannerError WithDetail(string key, string value)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(key);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(value);
|
||||
|
||||
var mutable = new Dictionary<string, string>(Details, StringComparer.Ordinal)
|
||||
{
|
||||
[key] = value
|
||||
};
|
||||
|
||||
return new ScannerError(Code, Severity, Message, Timestamp, Retryable, mutable, Stage, Component);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,80 @@
|
||||
using System.Diagnostics.CodeAnalysis;
|
||||
using System.Threading;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Observability;
|
||||
|
||||
public readonly record struct ScannerCorrelationContext(
|
||||
ScanJobId JobId,
|
||||
string CorrelationId,
|
||||
string? Stage,
|
||||
string? Component,
|
||||
string? Audience = null)
|
||||
{
|
||||
public static ScannerCorrelationContext Create(
|
||||
ScanJobId jobId,
|
||||
string? stage = null,
|
||||
string? component = null,
|
||||
string? audience = null)
|
||||
{
|
||||
var correlationId = ScannerIdentifiers.CreateCorrelationId(jobId, stage, component);
|
||||
return new ScannerCorrelationContext(jobId, correlationId, stage, component, audience);
|
||||
}
|
||||
|
||||
public string DeterministicHash()
|
||||
=> ScannerIdentifiers.CreateDeterministicHash(
|
||||
JobId.ToString(),
|
||||
Stage ?? string.Empty,
|
||||
Component ?? string.Empty,
|
||||
Audience ?? string.Empty);
|
||||
}
|
||||
|
||||
public static class ScannerCorrelationContextAccessor
|
||||
{
|
||||
private static readonly AsyncLocal<ScannerCorrelationContext?> CurrentContext = new();
|
||||
|
||||
public static ScannerCorrelationContext? Current => CurrentContext.Value;
|
||||
|
||||
public static IDisposable Push(in ScannerCorrelationContext context)
|
||||
{
|
||||
var previous = CurrentContext.Value;
|
||||
CurrentContext.Value = context;
|
||||
return new DisposableScope(() => CurrentContext.Value = previous);
|
||||
}
|
||||
|
||||
public static bool TryGetCorrelationId([NotNullWhen(true)] out string? correlationId)
|
||||
{
|
||||
var context = CurrentContext.Value;
|
||||
if (context.HasValue)
|
||||
{
|
||||
correlationId = context.Value.CorrelationId;
|
||||
return true;
|
||||
}
|
||||
|
||||
correlationId = null;
|
||||
return false;
|
||||
}
|
||||
|
||||
private sealed class DisposableScope : IDisposable
|
||||
{
|
||||
private readonly Action release;
|
||||
private bool disposed;
|
||||
|
||||
public DisposableScope(Action release)
|
||||
{
|
||||
this.release = release ?? throw new ArgumentNullException(nameof(release));
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (disposed)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
disposed = true;
|
||||
release();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,55 @@
|
||||
using System.Diagnostics;
|
||||
using System.Diagnostics.Metrics;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Observability;
|
||||
|
||||
public static class ScannerDiagnostics
|
||||
{
|
||||
public const string ActivitySourceName = "StellaOps.Scanner";
|
||||
public const string ActivityVersion = "1.0.0";
|
||||
public const string MeterName = "stellaops.scanner";
|
||||
public const string MeterVersion = "1.0.0";
|
||||
|
||||
public static ActivitySource ActivitySource { get; } = new(ActivitySourceName, ActivityVersion);
|
||||
public static Meter Meter { get; } = new(MeterName, MeterVersion);
|
||||
|
||||
public static Activity? StartActivity(
|
||||
string name,
|
||||
ScanJobId jobId,
|
||||
string? stage = null,
|
||||
string? component = null,
|
||||
ActivityKind kind = ActivityKind.Internal,
|
||||
IEnumerable<KeyValuePair<string, object?>>? tags = null)
|
||||
{
|
||||
var activity = ActivitySource.StartActivity(name, kind);
|
||||
if (activity is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
activity.SetTag("stellaops.scanner.job_id", jobId.ToString());
|
||||
activity.SetTag("stellaops.scanner.correlation_id", ScannerIdentifiers.CreateCorrelationId(jobId, stage, component));
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(stage))
|
||||
{
|
||||
activity.SetTag("stellaops.scanner.stage", stage);
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(component))
|
||||
{
|
||||
activity.SetTag("stellaops.scanner.component", component);
|
||||
}
|
||||
|
||||
if (tags is not null)
|
||||
{
|
||||
foreach (var tag in tags)
|
||||
{
|
||||
activity?.SetTag(tag.Key, tag.Value);
|
||||
}
|
||||
}
|
||||
|
||||
return activity;
|
||||
}
|
||||
}
|
||||
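A sketch of tagging one pipeline step with the activity source above, using the `StartActivity` signature shown; `jobId` and the tag values are illustrative.

```csharp
using var activity = ScannerDiagnostics.StartActivity(
    "scanner.analyze_os",
    jobId,
    stage: "analyze-os",
    component: "worker",
    tags: new[]
    {
        new KeyValuePair<string, object?>("stellaops.scanner.layer", "sha256:abc"),
    });

// activity is null when no listener is attached; the null-conditional call keeps that safe.
activity?.SetTag("stellaops.scanner.os", "alpine");
```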
src/StellaOps.Scanner.Core/Observability/ScannerLogExtensions.cs (new file, 115 lines)
@@ -0,0 +1,115 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
|
||||
namespace StellaOps.Scanner.Core.Observability;
|
||||
|
||||
public static class ScannerLogExtensions
|
||||
{
|
||||
private sealed class NoopScope : IDisposable
|
||||
{
|
||||
public static NoopScope Instance { get; } = new();
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class CompositeScope : IDisposable
|
||||
{
|
||||
private readonly IDisposable first;
|
||||
private readonly IDisposable second;
|
||||
private bool disposed;
|
||||
|
||||
public CompositeScope(IDisposable first, IDisposable second)
|
||||
{
|
||||
this.first = first;
|
||||
this.second = second;
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (disposed)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
disposed = true;
|
||||
second.Dispose();
|
||||
first.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
public static IDisposable BeginScanScope(this ILogger? logger, ScanJob job, string? stage = null, string? component = null)
|
||||
{
|
||||
var correlation = ScannerCorrelationContext.Create(job.Id, stage, component);
|
||||
var logScope = logger is null
|
||||
? NoopScope.Instance
|
||||
: logger.BeginScope(CreateScopeState(
|
||||
job.Id,
|
||||
job.CorrelationId,
|
||||
stage,
|
||||
component,
|
||||
job.TenantId,
|
||||
job.ImageDigest)) ?? NoopScope.Instance;
|
||||
|
||||
var correlationScope = ScannerCorrelationContextAccessor.Push(correlation);
|
||||
return new CompositeScope(logScope, correlationScope);
|
||||
}
|
||||
|
||||
public static IDisposable BeginProgressScope(this ILogger? logger, ScanProgressEvent progress, string? component = null)
|
||||
{
|
||||
var correlationId = ScannerIdentifiers.CreateCorrelationId(progress.JobId, progress.Stage.ToString(), component);
|
||||
var correlation = new ScannerCorrelationContext(progress.JobId, correlationId, progress.Stage.ToString(), component);
|
||||
|
||||
var logScope = logger is null
|
||||
? NoopScope.Instance
|
||||
: logger.BeginScope(new Dictionary<string, object?>(6, StringComparer.Ordinal)
|
||||
{
|
||||
["scanId"] = progress.JobId.ToString(),
|
||||
["stage"] = progress.Stage.ToString(),
|
||||
["sequence"] = progress.Sequence,
|
||||
["kind"] = progress.Kind.ToString(),
|
||||
["correlationId"] = correlationId,
|
||||
["component"] = component ?? string.Empty
|
||||
}) ?? NoopScope.Instance;
|
||||
|
||||
var correlationScope = ScannerCorrelationContextAccessor.Push(correlation);
|
||||
return new CompositeScope(logScope, correlationScope);
|
||||
}
|
||||
|
||||
public static IDisposable BeginCorrelationScope(this ILogger? logger, ScannerCorrelationContext context)
|
||||
{
|
||||
var scope = logger is null
|
||||
? NoopScope.Instance
|
||||
: logger.BeginScope(CreateScopeState(context.JobId, context.CorrelationId, context.Stage, context.Component, null, null)) ?? NoopScope.Instance;
|
||||
|
||||
var correlationScope = ScannerCorrelationContextAccessor.Push(context);
|
||||
return new CompositeScope(scope, correlationScope);
|
||||
}
|
||||
|
||||
private static Dictionary<string, object?> CreateScopeState(
|
||||
ScanJobId jobId,
|
||||
string correlationId,
|
||||
string? stage,
|
||||
string? component,
|
||||
string? tenantId,
|
||||
string? imageDigest)
|
||||
{
|
||||
var state = new Dictionary<string, object?>(6, StringComparer.Ordinal)
|
||||
{
|
||||
["scanId"] = jobId.ToString(),
|
||||
["correlationId"] = correlationId,
|
||||
["stage"] = stage ?? string.Empty,
|
||||
["component"] = component ?? string.Empty,
|
||||
["tenantId"] = tenantId ?? string.Empty
|
||||
};
|
||||
|
||||
if (!string.IsNullOrEmpty(imageDigest))
|
||||
{
|
||||
state["imageDigest"] = imageDigest;
|
||||
}
|
||||
|
||||
return state;
|
||||
}
|
||||
}
|
||||
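A brief usage sketch (illustrative, not part of the commit) for the scope helpers above; `job` is an assumed `ScanJob` instance and the stage/component strings are placeholders:

using (logger.BeginScanScope(job, stage: "emit", component: "sbom-writer"))
{
    // Log lines inside the scope carry scanId, correlationId, stage, component,
    // tenantId and (when set) imageDigest; the correlation context is also pushed
    // for code that reads it via ScannerCorrelationContextAccessor.
    logger.LogInformation("Emitting SBOM for {ImageDigest}.", job.ImageDigest);
}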
src/StellaOps.Scanner.Core/Observability/ScannerMetricNames.cs (new file, 55 lines)
@@ -0,0 +1,55 @@
using System.Collections.Frozen;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Core.Utility;

namespace StellaOps.Scanner.Core.Observability;

public static class ScannerMetricNames
{
    public const string Prefix = "stellaops.scanner";
    public const string QueueLatency = $"{Prefix}.queue.latency";
    public const string QueueDepth = $"{Prefix}.queue.depth";
    public const string StageDuration = $"{Prefix}.stage.duration";
    public const string StageProgress = $"{Prefix}.stage.progress";
    public const string JobCount = $"{Prefix}.jobs.count";
    public const string JobFailures = $"{Prefix}.jobs.failures";
    public const string ArtifactBytes = $"{Prefix}.artifacts.bytes";

    public static FrozenDictionary<string, object?> BuildJobTags(ScanJob job, string? stage = null, string? component = null)
    {
        ArgumentNullException.ThrowIfNull(job);

        var builder = new Dictionary<string, object?>(6, StringComparer.Ordinal)
        {
            ["jobId"] = job.Id.ToString(),
            ["stage"] = stage ?? string.Empty,
            ["component"] = component ?? string.Empty,
            ["tenantId"] = job.TenantId ?? string.Empty,
            ["correlationId"] = job.CorrelationId,
            ["status"] = job.Status.ToString()
        };

        if (!string.IsNullOrEmpty(job.ImageDigest))
        {
            builder["imageDigest"] = job.ImageDigest;
        }

        return builder.ToFrozenDictionary(StringComparer.Ordinal);
    }

    public static FrozenDictionary<string, object?> BuildEventTags(ScanProgressEvent progress)
    {
        ArgumentNullException.ThrowIfNull(progress);

        var builder = new Dictionary<string, object?>(5, StringComparer.Ordinal)
        {
            ["jobId"] = progress.JobId.ToString(),
            ["stage"] = progress.Stage.ToString(),
            ["kind"] = progress.Kind.ToString(),
            ["sequence"] = progress.Sequence,
            ["correlationId"] = ScannerIdentifiers.CreateCorrelationId(progress.JobId, progress.Stage.ToString())
        };

        return builder.ToFrozenDictionary(StringComparer.Ordinal);
    }
}
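A hedged sketch (not in the commit) of how the metric names and tag builders might be combined with `ScannerDiagnostics.Meter`; the histogram instrument and the `job`/`elapsed` variables are assumptions:

// Created once per process in real code; shown inline here for brevity.
var stageDuration = ScannerDiagnostics.Meter.CreateHistogram<double>(ScannerMetricNames.StageDuration, unit: "ms");

var tags = ScannerMetricNames.BuildJobTags(job, stage: "analyze", component: "os-packages");
stageDuration.Record(elapsed.TotalMilliseconds, tags.ToArray());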
src/StellaOps.Scanner.Core/Security/AuthorityTokenSource.cs (new file, 128 lines)
@@ -0,0 +1,128 @@
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Auth.Client;
using StellaOps.Scanner.Core.Utility;

namespace StellaOps.Scanner.Core.Security;

public sealed class AuthorityTokenSource : IAuthorityTokenSource
{
    private readonly IStellaOpsTokenClient tokenClient;
    private readonly TimeProvider timeProvider;
    private readonly TimeSpan refreshSkew;
    private readonly ILogger<AuthorityTokenSource>? logger;
    private readonly ConcurrentDictionary<string, CacheEntry> cache = new(StringComparer.Ordinal);
    private readonly ConcurrentDictionary<string, SemaphoreSlim> locks = new(StringComparer.Ordinal);

    public AuthorityTokenSource(
        IStellaOpsTokenClient tokenClient,
        TimeSpan? refreshSkew = null,
        TimeProvider? timeProvider = null,
        ILogger<AuthorityTokenSource>? logger = null)
    {
        this.tokenClient = tokenClient ?? throw new ArgumentNullException(nameof(tokenClient));
        this.timeProvider = timeProvider ?? TimeProvider.System;
        this.logger = logger;
        this.refreshSkew = refreshSkew is { } value && value > TimeSpan.Zero ? value : TimeSpan.FromSeconds(30);
    }

    public async ValueTask<ScannerOperationalToken> GetAsync(string audience, IEnumerable<string> scopes, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(audience);

        var normalizedAudience = NormalizeAudience(audience);
        var normalizedScopes = NormalizeScopes(scopes, normalizedAudience);
        var cacheKey = BuildCacheKey(normalizedAudience, normalizedScopes);

        if (cache.TryGetValue(cacheKey, out var cached) && !cached.Token.IsExpired(timeProvider, refreshSkew))
        {
            return cached.Token;
        }

        var mutex = locks.GetOrAdd(cacheKey, static _ => new SemaphoreSlim(1, 1));
        await mutex.WaitAsync(cancellationToken).ConfigureAwait(false);

        try
        {
            if (cache.TryGetValue(cacheKey, out cached) && !cached.Token.IsExpired(timeProvider, refreshSkew))
            {
                return cached.Token;
            }

            var scopeString = string.Join(' ', normalizedScopes);
            var tokenResult = await tokenClient.RequestClientCredentialsTokenAsync(scopeString, cancellationToken).ConfigureAwait(false);

            var token = ScannerOperationalToken.FromResult(
                tokenResult.AccessToken,
                tokenResult.TokenType,
                tokenResult.ExpiresAtUtc,
                tokenResult.Scopes);

            cache[cacheKey] = new CacheEntry(token);
            logger?.LogDebug(
                "Issued new scanner OpTok for audience {Audience} with scopes {Scopes}; expires at {ExpiresAt}.",
                normalizedAudience,
                scopeString,
                token.ExpiresAt);

            return token;
        }
        finally
        {
            mutex.Release();
        }
    }

    public ValueTask InvalidateAsync(string audience, IEnumerable<string> scopes, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(audience);

        var normalizedAudience = NormalizeAudience(audience);
        var normalizedScopes = NormalizeScopes(scopes, normalizedAudience);
        var cacheKey = BuildCacheKey(normalizedAudience, normalizedScopes);

        cache.TryRemove(cacheKey, out _);
        if (locks.TryRemove(cacheKey, out var mutex))
        {
            mutex.Dispose();
        }

        logger?.LogDebug("Invalidated cached OpTok for {Audience} ({CacheKey}).", normalizedAudience, cacheKey);
        return ValueTask.CompletedTask;
    }

    private static string NormalizeAudience(string audience)
        => audience.Trim().ToLowerInvariant();

    private static IReadOnlyList<string> NormalizeScopes(IEnumerable<string> scopes, string audience)
    {
        var set = new SortedSet<string>(StringComparer.Ordinal)
        {
            $"aud:{audience}"
        };

        if (scopes is not null)
        {
            foreach (var scope in scopes)
            {
                if (string.IsNullOrWhiteSpace(scope))
                {
                    continue;
                }

                set.Add(scope.Trim());
            }
        }

        return set.ToArray();
    }

    private static string BuildCacheKey(string audience, IReadOnlyList<string> scopes)
        => ScannerIdentifiers.CreateDeterministicHash(audience, string.Join(' ', scopes));

    private readonly record struct CacheEntry(ScannerOperationalToken Token);
}
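A usage sketch (illustrative only) for the token source above; the audience and scope strings, the `tokenClient` wiring, and the `AccessToken`/`TokenType` property names on `ScannerOperationalToken` are assumptions, since that type is not shown in this diff:

IAuthorityTokenSource tokens = new AuthorityTokenSource(tokenClient, refreshSkew: TimeSpan.FromSeconds(45));

// Cached per audience + scope set; a second call within the token lifetime reuses the entry.
var opTok = await tokens.GetAsync("signer", new[] { "signer.sign" }, cancellationToken);
httpRequest.Headers.Authorization = new AuthenticationHeaderValue(opTok.TokenType, opTok.AccessToken);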
src/StellaOps.Scanner.Core/Security/DpopProofValidator.cs (new file, 248 lines)
@@ -0,0 +1,248 @@
using System.Linq;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Microsoft.IdentityModel.Tokens;
using System.IdentityModel.Tokens.Jwt;

namespace StellaOps.Scanner.Core.Security;

public sealed class DpopProofValidator : IDpopProofValidator
{
    private static readonly string ProofType = "dpop+jwt";
    private readonly DpopValidationOptions options;
    private readonly IDpopReplayCache replayCache;
    private readonly TimeProvider timeProvider;
    private readonly ILogger<DpopProofValidator>? logger;
    private readonly JwtSecurityTokenHandler tokenHandler = new();

    public DpopProofValidator(
        IOptions<DpopValidationOptions> options,
        IDpopReplayCache? replayCache = null,
        TimeProvider? timeProvider = null,
        ILogger<DpopProofValidator>? logger = null)
    {
        if (options is null)
        {
            throw new ArgumentNullException(nameof(options));
        }

        var cloned = options.Value ?? throw new InvalidOperationException("DPoP options must be provided.");
        cloned.Validate();

        this.options = cloned;
        this.replayCache = replayCache ?? NullReplayCache.Instance;
        this.timeProvider = timeProvider ?? TimeProvider.System;
        this.logger = logger;
    }

    public async ValueTask<DpopValidationResult> ValidateAsync(string proof, string httpMethod, Uri httpUri, string? nonce = null, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proof);
        ArgumentException.ThrowIfNullOrWhiteSpace(httpMethod);
        ArgumentNullException.ThrowIfNull(httpUri);

        var now = timeProvider.GetUtcNow();

        if (!TryDecodeSegment(proof, segmentIndex: 0, out var headerElement, out var headerError))
        {
            logger?.LogWarning("DPoP header decode failure: {Error}", headerError);
            return DpopValidationResult.Failure("invalid_header", headerError ?? "Unable to decode header.");
        }

        if (!headerElement.TryGetProperty("typ", out var typElement) || !string.Equals(typElement.GetString(), ProofType, StringComparison.OrdinalIgnoreCase))
        {
            return DpopValidationResult.Failure("invalid_header", "DPoP proof missing typ=dpop+jwt header.");
        }

        if (!headerElement.TryGetProperty("alg", out var algElement))
        {
            return DpopValidationResult.Failure("invalid_header", "DPoP proof missing alg header.");
        }

        var algorithm = algElement.GetString()?.Trim().ToUpperInvariant();
        if (string.IsNullOrEmpty(algorithm) || !options.NormalizedAlgorithms.Contains(algorithm))
        {
            return DpopValidationResult.Failure("invalid_header", "Unsupported DPoP algorithm.");
        }

        if (!headerElement.TryGetProperty("jwk", out var jwkElement))
        {
            return DpopValidationResult.Failure("invalid_header", "DPoP proof missing jwk header.");
        }

        JsonWebKey jwk;
        try
        {
            jwk = new JsonWebKey(jwkElement.GetRawText());
        }
        catch (Exception ex)
        {
            logger?.LogWarning(ex, "Failed to parse DPoP jwk header.");
            return DpopValidationResult.Failure("invalid_header", "DPoP proof jwk header is invalid.");
        }

        if (!TryDecodeSegment(proof, segmentIndex: 1, out var payloadElement, out var payloadError))
        {
            logger?.LogWarning("DPoP payload decode failure: {Error}", payloadError);
            return DpopValidationResult.Failure("invalid_payload", payloadError ?? "Unable to decode payload.");
        }

        if (!payloadElement.TryGetProperty("htm", out var htmElement))
        {
            return DpopValidationResult.Failure("invalid_payload", "DPoP proof missing htm claim.");
        }

        var method = httpMethod.Trim().ToUpperInvariant();
        if (!string.Equals(htmElement.GetString(), method, StringComparison.Ordinal))
        {
            return DpopValidationResult.Failure("invalid_payload", "DPoP htm does not match request method.");
        }

        if (!payloadElement.TryGetProperty("htu", out var htuElement))
        {
            return DpopValidationResult.Failure("invalid_payload", "DPoP proof missing htu claim.");
        }

        var normalizedHtu = NormalizeHtu(httpUri);
        if (!string.Equals(htuElement.GetString(), normalizedHtu, StringComparison.Ordinal))
        {
            return DpopValidationResult.Failure("invalid_payload", "DPoP htu does not match request URI.");
        }

        if (!payloadElement.TryGetProperty("iat", out var iatElement) || iatElement.ValueKind is not JsonValueKind.Number)
        {
            return DpopValidationResult.Failure("invalid_payload", "DPoP proof missing iat claim.");
        }

        if (!payloadElement.TryGetProperty("jti", out var jtiElement) || jtiElement.ValueKind != JsonValueKind.String)
        {
            return DpopValidationResult.Failure("invalid_payload", "DPoP proof missing jti claim.");
        }

        long iatSeconds;
        try
        {
            iatSeconds = iatElement.GetInt64();
        }
        catch (Exception)
        {
            return DpopValidationResult.Failure("invalid_payload", "DPoP proof iat claim is not a valid number.");
        }

        var issuedAt = DateTimeOffset.FromUnixTimeSeconds(iatSeconds).ToUniversalTime();
        if (issuedAt - options.AllowedClockSkew > now)
        {
            return DpopValidationResult.Failure("invalid_token", "DPoP proof issued in the future.");
        }

        if (now - issuedAt > options.ProofLifetime + options.AllowedClockSkew)
        {
            return DpopValidationResult.Failure("invalid_token", "DPoP proof expired.");
        }

        if (nonce is not null)
        {
            if (!payloadElement.TryGetProperty("nonce", out var nonceElement) || nonceElement.ValueKind != JsonValueKind.String)
            {
                return DpopValidationResult.Failure("invalid_token", "DPoP proof missing nonce claim.");
            }

            if (!string.Equals(nonceElement.GetString(), nonce, StringComparison.Ordinal))
            {
                return DpopValidationResult.Failure("invalid_token", "DPoP nonce mismatch.");
            }
        }

        var jwtId = jtiElement.GetString()!;

        try
        {
            var parameters = new TokenValidationParameters
            {
                ValidateAudience = false,
                ValidateIssuer = false,
                ValidateLifetime = false,
                ValidateTokenReplay = false,
                RequireSignedTokens = true,
                ValidateIssuerSigningKey = true,
                IssuerSigningKey = jwk,
                ValidAlgorithms = options.NormalizedAlgorithms.ToArray()
            };

            tokenHandler.ValidateToken(proof, parameters, out _);
        }
        catch (Exception ex)
        {
            logger?.LogWarning(ex, "DPoP proof signature validation failed.");
            return DpopValidationResult.Failure("invalid_signature", "DPoP proof signature validation failed.");
        }

        if (!await replayCache.TryStoreAsync(jwtId, issuedAt + options.ReplayWindow, cancellationToken).ConfigureAwait(false))
        {
            return DpopValidationResult.Failure("replay", "DPoP proof already used.");
        }

        return DpopValidationResult.Success(jwk, jwtId, issuedAt);
    }

    private static bool TryDecodeSegment(string token, int segmentIndex, out JsonElement element, out string? error)
    {
        element = default;
        error = null;

        var segments = token.Split('.');
        if (segments.Length != 3)
        {
            error = "Token must contain three segments.";
            return false;
        }

        if (segmentIndex < 0 || segmentIndex > 1)
        {
            error = "Segment index must be 0 or 1.";
            return false;
        }

        try
        {
            var jsonBytes = Base64UrlEncoder.DecodeBytes(segments[segmentIndex]);
            using var document = JsonDocument.Parse(jsonBytes);
            element = document.RootElement.Clone();
            return true;
        }
        catch (Exception ex)
        {
            error = ex.Message;
            return false;
        }
    }

    private static string NormalizeHtu(Uri uri)
    {
        var builder = new UriBuilder(uri)
        {
            Fragment = string.Empty
        };

        builder.Host = builder.Host.ToLowerInvariant();
        builder.Scheme = builder.Scheme.ToLowerInvariant();

        if ((builder.Scheme == "http" && builder.Port == 80) || (builder.Scheme == "https" && builder.Port == 443))
        {
            builder.Port = -1;
        }

        return builder.Uri.GetComponents(UriComponents.SchemeAndServer | UriComponents.PathAndQuery, UriFormat.UriEscaped);
    }

    private sealed class NullReplayCache : IDpopReplayCache
    {
        public static NullReplayCache Instance { get; } = new();

        public ValueTask<bool> TryStoreAsync(string jwtId, DateTimeOffset expiresAt, CancellationToken cancellationToken = default)
            => ValueTask.FromResult(true);
    }
}
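A hedged sketch (not part of the commit) of wiring the validator with its default options and checking a proof from an incoming request; the header value, URI, and logger are placeholders:

var validator = new DpopProofValidator(Options.Create(new DpopValidationOptions()));

var result = await validator.ValidateAsync(
    dpopHeaderValue,
    "POST",
    new Uri("https://scanner.example.internal/api/v1/scans"),
    nonce: null,
    cancellationToken);

if (!result.IsValid)
{
    logger.LogWarning("Rejected DPoP proof: {Code} - {Description}", result.ErrorCode, result.ErrorDescription);
}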
src/StellaOps.Scanner.Core/Security/DpopValidationOptions.cs (new file, 58 lines)
@@ -0,0 +1,58 @@
using System.Collections.Immutable;
using System.Linq;

namespace StellaOps.Scanner.Core.Security;

public sealed class DpopValidationOptions
{
    private readonly HashSet<string> allowedAlgorithms = new(StringComparer.Ordinal);

    public DpopValidationOptions()
    {
        allowedAlgorithms.Add("ES256");
        allowedAlgorithms.Add("ES384");
    }

    public TimeSpan ProofLifetime { get; set; } = TimeSpan.FromMinutes(2);

    public TimeSpan AllowedClockSkew { get; set; } = TimeSpan.FromSeconds(30);

    public TimeSpan ReplayWindow { get; set; } = TimeSpan.FromMinutes(5);

    public ISet<string> AllowedAlgorithms => allowedAlgorithms;

    public IReadOnlySet<string> NormalizedAlgorithms { get; private set; } = ImmutableHashSet<string>.Empty;

    public void Validate()
    {
        if (ProofLifetime <= TimeSpan.Zero)
        {
            throw new InvalidOperationException("DPoP proof lifetime must be greater than zero.");
        }

        if (AllowedClockSkew < TimeSpan.Zero || AllowedClockSkew > TimeSpan.FromMinutes(5))
        {
            throw new InvalidOperationException("DPoP allowed clock skew must be between 0 seconds and 5 minutes.");
        }

        if (ReplayWindow < TimeSpan.Zero)
        {
            throw new InvalidOperationException("DPoP replay window must be greater than or equal to zero.");
        }

        if (allowedAlgorithms.Count == 0)
        {
            throw new InvalidOperationException("At least one allowed DPoP algorithm must be configured.");
        }

        NormalizedAlgorithms = allowedAlgorithms
            .Select(static algorithm => algorithm.Trim().ToUpperInvariant())
            .Where(static algorithm => algorithm.Length > 0)
            .ToImmutableHashSet(StringComparer.Ordinal);

        if (NormalizedAlgorithms.Count == 0)
        {
            throw new InvalidOperationException("Allowed DPoP algorithms cannot be empty after normalization.");
        }
    }
}
src/StellaOps.Scanner.Core/Security/DpopValidationResult.cs (new file, 34 lines)
@@ -0,0 +1,34 @@
using Microsoft.IdentityModel.Tokens;

namespace StellaOps.Scanner.Core.Security;

public sealed class DpopValidationResult
{
    private DpopValidationResult(bool success, string? errorCode, string? errorDescription, SecurityKey? key, string? jwtId, DateTimeOffset? issuedAt)
    {
        IsValid = success;
        ErrorCode = errorCode;
        ErrorDescription = errorDescription;
        PublicKey = key;
        JwtId = jwtId;
        IssuedAt = issuedAt;
    }

    public bool IsValid { get; }

    public string? ErrorCode { get; }

    public string? ErrorDescription { get; }

    public SecurityKey? PublicKey { get; }

    public string? JwtId { get; }

    public DateTimeOffset? IssuedAt { get; }

    public static DpopValidationResult Success(SecurityKey key, string jwtId, DateTimeOffset issuedAt)
        => new(true, null, null, key, jwtId, issuedAt);

    public static DpopValidationResult Failure(string code, string description)
        => new(false, code, description, null, null, null);
}
src/StellaOps.Scanner.Core/Security/IAuthorityTokenSource.cs (new file, 11 lines)
@@ -0,0 +1,11 @@
using System.Threading;
using System.Threading.Tasks;

namespace StellaOps.Scanner.Core.Security;

public interface IAuthorityTokenSource
{
    ValueTask<ScannerOperationalToken> GetAsync(string audience, IEnumerable<string> scopes, CancellationToken cancellationToken = default);

    ValueTask InvalidateAsync(string audience, IEnumerable<string> scopes, CancellationToken cancellationToken = default);
}
src/StellaOps.Scanner.Core/Security/IDpopProofValidator.cs (new file, 9 lines)
@@ -0,0 +1,9 @@
using System.Threading;
using System.Threading.Tasks;

namespace StellaOps.Scanner.Core.Security;

public interface IDpopProofValidator
{
    ValueTask<DpopValidationResult> ValidateAsync(string proof, string httpMethod, Uri httpUri, string? nonce = null, CancellationToken cancellationToken = default);
}
src/StellaOps.Scanner.Core/Security/IDpopReplayCache.cs (new file, 9 lines)
@@ -0,0 +1,9 @@
using System.Threading;
using System.Threading.Tasks;

namespace StellaOps.Scanner.Core.Security;

public interface IDpopReplayCache
{
    ValueTask<bool> TryStoreAsync(string jwtId, DateTimeOffset expiresAt, CancellationToken cancellationToken = default);
}
src/StellaOps.Scanner.Core/Security/IPluginCatalogGuard.cs (new file, 12 lines)
@@ -0,0 +1,12 @@
namespace StellaOps.Scanner.Core.Security;

public interface IPluginCatalogGuard
{
    IReadOnlyCollection<string> KnownPlugins { get; }

    bool IsSealed { get; }

    void EnsureRegistrationAllowed(string pluginPath);

    void Seal();
}
Some files were not shown because too many files have changed in this diff.