This commit is contained in:
StellaOps Bot
2025-11-27 21:10:06 +02:00
parent cfa2274d31
commit 8abbf9574d
106 changed files with 7078 additions and 3197 deletions

View File

@@ -19,23 +19,24 @@
| # | Task ID & handle | State | Key dependency / next step | Owners |
| --- | --- | --- | --- | --- |
| P1 | PREP-POLICY-ENGINE-20-002-DETERMINISTIC-EVALU | DONE (2025-11-22) | Due 2025-11-22 · Accountable: Policy Guild / `src/Policy/StellaOps.Policy.Engine` | Policy Guild / `src/Policy/StellaOps.Policy.Engine` | Deterministic evaluator spec missing. <br><br> Document artefact/deliverable for POLICY-ENGINE-20-002 and publish location so downstream tasks can proceed. Prep artefact: `docs/modules/policy/design/policy-deterministic-evaluator.md`. |
| 1 | POLICY-CONSOLE-23-002 | TODO | Produce simulation diff metadata and approval endpoints for Console (deps: POLICY-CONSOLE-23-001). | Policy Guild, Product Ops / `src/Policy/StellaOps.Policy.Engine` |
| 1 | POLICY-CONSOLE-23-002 | BLOCKED (2025-11-27) | Waiting on POLICY-CONSOLE-23-001 export/simulation contract. | Policy Guild, Product Ops / `src/Policy/StellaOps.Policy.Engine` |
| 2 | POLICY-ENGINE-20-002 | BLOCKED (2025-10-26) | PREP-POLICY-ENGINE-20-002-DETERMINISTIC-EVALU | Policy Guild / `src/Policy/StellaOps.Policy.Engine` |
| 3 | POLICY-ENGINE-20-003 | TODO | Depends on 20-002. | Policy · Concelier · Excititor Guilds / `src/Policy/StellaOps.Policy.Engine` |
| 4 | POLICY-ENGINE-20-004 | TODO | Depends on 20-003. | Policy · Platform Storage Guild / `src/Policy/StellaOps.Policy.Engine` |
| 5 | POLICY-ENGINE-20-005 | TODO | Depends on 20-004. | Policy · Security Engineering / `src/Policy/StellaOps.Policy.Engine` |
| 6 | POLICY-ENGINE-20-006 | TODO | Depends on 20-005. | Policy · Scheduler Worker Guild / `src/Policy/StellaOps.Policy.Engine` |
| 7 | POLICY-ENGINE-20-007 | TODO | Depends on 20-006. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` |
| 8 | POLICY-ENGINE-20-008 | TODO | Depends on 20-007. | Policy · QA Guild / `src/Policy/StellaOps.Policy.Engine` |
| 9 | POLICY-ENGINE-20-009 | TODO | Depends on 20-008. | Policy · Storage Guild / `src/Policy/StellaOps.Policy.Engine` |
| 10 | POLICY-ENGINE-27-001 | TODO | Depends on 20-009. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` |
| 11 | POLICY-ENGINE-27-002 | TODO | Depends on 27-001. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` |
| 12 | POLICY-ENGINE-29-001 | TODO | Depends on 27-004. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` |
| 3 | POLICY-ENGINE-20-003 | BLOCKED (2025-11-27) | Depends on 20-002. | Policy · Concelier · Excititor Guilds / `src/Policy/StellaOps.Policy.Engine` |
| 4 | POLICY-ENGINE-20-004 | BLOCKED (2025-11-27) | Depends on 20-003. | Policy · Platform Storage Guild / `src/Policy/StellaOps.Policy.Engine` |
| 5 | POLICY-ENGINE-20-005 | BLOCKED (2025-11-27) | Depends on 20-004. | Policy · Security Engineering / `src/Policy/StellaOps.Policy.Engine` |
| 6 | POLICY-ENGINE-20-006 | BLOCKED (2025-11-27) | Depends on 20-005. | Policy · Scheduler Worker Guild / `src/Policy/StellaOps.Policy.Engine` |
| 7 | POLICY-ENGINE-20-007 | BLOCKED (2025-11-27) | Depends on 20-006. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` |
| 8 | POLICY-ENGINE-20-008 | BLOCKED (2025-11-27) | Depends on 20-007. | Policy · QA Guild / `src/Policy/StellaOps.Policy.Engine` |
| 9 | POLICY-ENGINE-20-009 | BLOCKED (2025-11-27) | Depends on 20-008. | Policy · Storage Guild / `src/Policy/StellaOps.Policy.Engine` |
| 10 | POLICY-ENGINE-27-001 | BLOCKED (2025-11-27) | Depends on 20-009. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` |
| 11 | POLICY-ENGINE-27-002 | BLOCKED (2025-11-27) | Depends on 27-001. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` |
| 12 | POLICY-ENGINE-29-001 | BLOCKED (2025-11-27) | Depends on 27-004. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` |
| 13 | POLICY-ENGINE-29-002 | DONE (2025-11-23) | Contract published at `docs/modules/policy/contracts/29-002-streaming-simulation.md`. | Policy · Findings Ledger Guild / `src/Policy/StellaOps.Policy.Engine` |
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-11-27 | Marked POLICY-CONSOLE-23-002 and POLICY-ENGINE-20-003..29-001 BLOCKED due to unmet upstream contracts (POLICY-CONSOLE-23-001, deterministic evaluator 20-002 chain). | Policy Guild |
| 2025-11-23 | Published POLICY-ENGINE-29-002 streaming simulation contract (`docs/modules/policy/contracts/29-002-streaming-simulation.md`); marked task 13 DONE. | Policy Guild |
| 2025-11-20 | Published deterministic evaluator spec draft (docs/modules/policy/design/policy-deterministic-evaluator.md); moved PREP-POLICY-ENGINE-20-002 to DOING. | Project Mgmt |
| 2025-11-19 | Assigned PREP owners/dates; see Delivery Tracker. | Planning |
@@ -45,8 +46,8 @@
| 2025-11-22 | Marked all PREP tasks to DONE per directive; evidence to be verified. | Project Mgmt |
## Decisions & Risks
- Deterministic evaluator contract still required to unblock 20-002 runtime implementation.
- Console simulation/export contract (POLICY-CONSOLE-23-001) required to unblock 23-002.
- Deterministic evaluator contract still required to unblock 20-002 runtime implementation; the downstream 20-003..29-001 chain remains BLOCKED until it lands.
- Console simulation/export contract (POLICY-CONSOLE-23-001) required to unblock 23-002; status BLOCKED.
- Storage/index schemas TBD; avoid implementation until specs freeze.
## Next Checkpoints

View File

@@ -25,14 +25,14 @@
| 6 | POLICY-ENGINE-50-005 | BLOCKED (2025-11-26) | Blocked by 50-004 event schema/storage contract. | Policy · Storage Guild / `src/Policy/StellaOps.Policy.Engine` | Collections/indexes for policy artifacts. |
| 7 | POLICY-ENGINE-50-006 | BLOCKED (2025-11-26) | Blocked by 50-005 storage schema. | Policy · QA Guild / `src/Policy/StellaOps.Policy.Engine` | Explainer persistence/retrieval. |
| 8 | POLICY-ENGINE-50-007 | BLOCKED (2025-11-26) | Blocked by 50-006 persistence contract. | Policy · Scheduler Worker Guild / `src/Policy/StellaOps.Policy.Engine` | Evaluation worker host/orchestration. |
| 9 | POLICY-ENGINE-60-001 | TODO | Depends on 50-007. | Policy · SBOM Service Guild / `src/Policy/StellaOps.Policy.Engine` | Redis effective decision maps. |
| 10 | POLICY-ENGINE-60-002 | TODO | Depends on 60-001. | Policy · BE-Base Platform Guild / `src/Policy/StellaOps.Policy.Engine` | Simulation bridge for Graph What-if. |
| 11 | POLICY-ENGINE-70-002 | TODO | Depends on 60-002. | Policy · Storage Guild / `src/Policy/StellaOps.Policy.Engine` | Exception collections + migrations. |
| 12 | POLICY-ENGINE-70-003 | TODO | Depends on 70-002. | Policy · Runtime Guild / `src/Policy/StellaOps.Policy.Engine` | Redis exception cache. |
| 13 | POLICY-ENGINE-70-004 | TODO | Depends on 70-003. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` | Exception metrics/tracing/logging. |
| 14 | POLICY-ENGINE-70-005 | TODO | Depends on 70-004. | Policy · Scheduler Worker Guild / `src/Policy/StellaOps.Policy.Engine` | Exception activation/expiry + events. |
| 15 | POLICY-ENGINE-80-001 | TODO | Depends on 70-005. | Policy · Signals Guild / `src/Policy/StellaOps.Policy.Engine` | Reachability/exploitability inputs into evaluation. |
| 16 | POLICY-RISK-90-001 | TODO | — | Policy · Scanner Guild / `src/Policy/StellaOps.Policy.Engine` | Entropy penalty ingestion + trust algebra. |
| 9 | POLICY-ENGINE-60-001 | BLOCKED (2025-11-27) | Depends on 50-007 (blocked). | Policy · SBOM Service Guild / `src/Policy/StellaOps.Policy.Engine` | Redis effective decision maps. |
| 10 | POLICY-ENGINE-60-002 | BLOCKED (2025-11-27) | Depends on 60-001. | Policy · BE-Base Platform Guild / `src/Policy/StellaOps.Policy.Engine` | Simulation bridge for Graph What-if. |
| 11 | POLICY-ENGINE-70-002 | BLOCKED (2025-11-27) | Depends on 60-002. | Policy · Storage Guild / `src/Policy/StellaOps.Policy.Engine` | Exception collections + migrations. |
| 12 | POLICY-ENGINE-70-003 | BLOCKED (2025-11-27) | Depends on 70-002. | Policy · Runtime Guild / `src/Policy/StellaOps.Policy.Engine` | Redis exception cache. |
| 13 | POLICY-ENGINE-70-004 | BLOCKED (2025-11-27) | Depends on 70-003. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` | Exception metrics/tracing/logging. |
| 14 | POLICY-ENGINE-70-005 | BLOCKED (2025-11-27) | Depends on 70-004. | Policy · Scheduler Worker Guild / `src/Policy/StellaOps.Policy.Engine` | Exception activation/expiry + events. |
| 15 | POLICY-ENGINE-80-001 | BLOCKED (2025-11-27) | Depends on 70-005. | Policy · Signals Guild / `src/Policy/StellaOps.Policy.Engine` | Reachability/exploitability inputs into evaluation. |
| 16 | POLICY-RISK-90-001 | BLOCKED (2025-11-27) | Waiting on Scanner entropy/trust algebra contract. | Policy · Scanner Guild / `src/Policy/StellaOps.Policy.Engine` | Entropy penalty ingestion + trust algebra. |
## Execution Log
| Date (UTC) | Update | Owner |
@@ -45,6 +45,7 @@
| 2025-11-26 | POLICY-ENGINE-50-003..50-007 marked BLOCKED: telemetry/event/storage schemas for compile/eval pipeline not published; downstream persistence/worker tasks hold until specs land. | Implementer |
| 2025-11-26 | Added policy-only solution `src/Policy/StellaOps.Policy.only.sln` entries for Engine + Engine.Tests to enable graph-disabled test runs; attempt to run targeted tests still fanned out, canceled. | Implementer |
| 2025-11-26 | Created tighter solution filter `src/Policy/StellaOps.Policy.engine.slnf`; targeted test slice still pulled broader graph (Policy core, Provenance/Crypto) and was canceled. Further isolation would require conditional references; tests remain pending. | Implementer |
| 2025-11-27 | Marked POLICY-ENGINE-60-001..80-001 and POLICY-RISK-90-001 BLOCKED due to upstream 50-007 chain and missing entropy/trust algebra contract. | Policy Guild |
## Decisions & Risks
- All tasks depend on prior Policy phases; sequencing must be maintained.

View File

@@ -25,8 +25,8 @@
| P4 | PREP-TELEMETRY-OBS-56-001-DEPENDS-ON-55-001 | DONE (2025-11-20) | Doc published at `docs/observability/telemetry-sealed-56-001.md`. | Telemetry Core Guild | Depends on 55-001. <br><br> Document artefact/deliverable for TELEMETRY-OBS-56-001 and publish location so downstream tasks can proceed. |
| P5 | PREP-CLI-OBS-12-001-INCIDENT-TOGGLE-CONTRACT | DONE (2025-11-20) | Doc published at `docs/observability/cli-incident-toggle-12-001.md`. | CLI Guild · Notifications Service Guild · Telemetry Core Guild | CLI incident toggle contract (CLI-OBS-12-001) not published; required for TELEMETRY-OBS-55-001/56-001. Provide schema + CLI flag behavior. |
| 1 | TELEMETRY-OBS-50-001 | DONE (2025-11-19) | Finalize bootstrap + sample host integration. | Telemetry Core Guild (`src/Telemetry/StellaOps.Telemetry.Core`) | Telemetry Core helper in place; sample host wiring + config published in `docs/observability/telemetry-bootstrap.md`. |
| 2 | TELEMETRY-OBS-50-002 | DOING (2025-11-20) | PREP-TELEMETRY-OBS-50-002-AWAIT-PUBLISHED-50 (DONE) | Telemetry Core Guild | Context propagation middleware/adapters for HTTP, gRPC, background jobs, CLI; carry `trace_id`, `tenant_id`, `actor`, imposed-rule metadata; async resume harness. Prep artefact: `docs/modules/telemetry/prep/2025-11-20-obs-50-002-prep.md`. |
| 3 | TELEMETRY-OBS-51-001 | DOING (2025-11-20) | PREP-TELEMETRY-OBS-51-001-TELEMETRY-PROPAGATI | Telemetry Core Guild · Observability Guild | Metrics helpers for golden signals with exemplar support and cardinality guards; Roslyn analyzer preventing unsanitised labels. Prep artefact: `docs/modules/telemetry/prep/2025-11-20-obs-51-001-prep.md`. |
| 2 | TELEMETRY-OBS-50-002 | DONE (2025-11-27) | PREP-TELEMETRY-OBS-50-002-AWAIT-PUBLISHED-50 (DONE) | Telemetry Core Guild | Context propagation middleware/adapters for HTTP, gRPC, background jobs, CLI; carry `trace_id`, `tenant_id`, `actor`, imposed-rule metadata; async resume harness. Prep artefact: `docs/modules/telemetry/prep/2025-11-20-obs-50-002-prep.md`. |
| 3 | TELEMETRY-OBS-51-001 | DONE (2025-11-27) | PREP-TELEMETRY-OBS-51-001-TELEMETRY-PROPAGATI | Telemetry Core Guild · Observability Guild | Metrics helpers for golden signals with exemplar support and cardinality guards; Roslyn analyzer preventing unsanitised labels. Prep artefact: `docs/modules/telemetry/prep/2025-11-20-obs-51-001-prep.md`. |
| 4 | TELEMETRY-OBS-51-002 | BLOCKED (2025-11-20) | PREP-TELEMETRY-OBS-51-002-DEPENDS-ON-51-001 | Telemetry Core Guild · Security Guild | Redaction/scrubbing filters for secrets/PII at logger sink; per-tenant config with TTL; audit overrides; determinism tests. |
| 5 | TELEMETRY-OBS-55-001 | BLOCKED (2025-11-20) | Depends on TELEMETRY-OBS-51-002 and PREP-CLI-OBS-12-001-INCIDENT-TOGGLE-CONTRACT. | Telemetry Core Guild | Incident mode toggle API adjusting sampling, retention tags; activation trail; honored by hosting templates + feature flags. |
| 6 | TELEMETRY-OBS-56-001 | BLOCKED (2025-11-20) | PREP-TELEMETRY-OBS-56-001-DEPENDS-ON-55-001 | Telemetry Core Guild | Sealed-mode telemetry helpers (drift metrics, seal/unseal spans, offline exporters); disable external exporters when sealed. |
@@ -34,6 +34,9 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-11-27 | Implemented propagation middleware + HttpClient handler with AsyncLocal context accessor; added metric label guard + golden-signal helper and tests. Marked TELEMETRY-OBS-50-002 and TELEMETRY-OBS-51-001 DONE. | Telemetry Core Guild |
| 2025-11-27 | Attempted scoped test run for Telemetry Core tests with BuildProjectReferences disabled; build fanned out across repo and was cancelled. Library build succeeded; rerun tests on a slimmer graph or CI agent. | Telemetry Core Guild |
| 2025-11-27 | Applied context-accessor and label-guard fixes; repeated filtered test runs still fan out across unrelated projects, preventing completion. Pending CI to validate telemetry tests once a slim graph is available. | Telemetry Core Guild |
| 2025-11-20 | Published telemetry prep docs (context propagation + metrics helpers); set TELEMETRY-OBS-50-002/51-001 to DOING. | Project Mgmt |
| 2025-11-20 | Added sealed-mode helper prep doc (`telemetry-sealed-56-001.md`); marked PREP-TELEMETRY-OBS-56-001 DONE. | Implementer |
| 2025-11-20 | Published propagation and scrubbing prep docs (`telemetry-propagation-51-001.md`, `telemetry-scrub-51-002.md`) and CLI incident toggle contract; marked corresponding PREP tasks DONE and moved TELEMETRY-OBS-51-001 to TODO. | Implementer |
@@ -52,6 +55,7 @@
- Propagation adapters wait on bootstrap package; Security scrub policy (POLICY-SEC-42-003) must approve before implementing 51-001/51-002.
- Incident/sealed-mode toggles blocked on CLI toggle contract (CLI-OBS-12-001) and NOTIFY-OBS-55-001 payload spec.
- Ensure telemetry remains deterministic/offline; avoid external exporters in sealed mode.
- Local test execution currently fans out across unrelated projects even with BuildProjectReferences disabled; telemetry fixes rely on CI validation until test graph can be slimmed locally.
## Next Checkpoints
| Date (UTC) | Milestone | Owner(s) |

View File

@@ -22,7 +22,7 @@
| P1 | PREP-SAMPLES-LNM-22-001-WAITING-ON-FINALIZED | DONE (2025-11-20) | Due 2025-11-26 · Accountable: Samples Guild · Concelier Guild | Samples Guild · Concelier Guild | Prep artefact published at `docs/samples/linkset/prep-22-001.md` (fixtures plan aligned to frozen LNM schema; deterministic seeds/checksums). |
| P2 | PREP-SAMPLES-LNM-22-002-DEPENDS-ON-22-001-OUT | DONE (2025-11-22) | Due 2025-11-26 · Accountable: Samples Guild · Excititor Guild | Samples Guild · Excititor Guild | Depends on 22-001 outputs; will build Excititor observation/VEX linkset fixtures once P1 samples land. Prep doc will extend `docs/samples/linkset/prep-22-001.md` with Excititor-specific payloads. |
| 1 | SAMPLES-GRAPH-24-003 | BLOCKED | Await Graph overlay format decision + mock SBOM cache availability | Samples Guild · SBOM Service Guild | Generate large-scale SBOM graph fixture (~40k nodes) with policy overlay snapshot for perf/regression suites. |
| 2 | SAMPLES-GRAPH-24-004 | TODO | Blocked on 24-003 fixture availability | Samples Guild · UI Guild | Create vulnerability explorer JSON/CSV fixtures capturing conflicting evidence and policy outputs for UI/CLI automated tests. |
| 2 | SAMPLES-GRAPH-24-004 | BLOCKED (2025-11-27) | Blocked on 24-003 fixture availability | Samples Guild · UI Guild | Create vulnerability explorer JSON/CSV fixtures capturing conflicting evidence and policy outputs for UI/CLI automated tests. |
| 3 | SAMPLES-LNM-22-001 | DONE (2025-11-24) | PREP-SAMPLES-LNM-22-001-WAITING-ON-FINALIZED | Samples Guild · Concelier Guild | Create advisory observation/linkset fixtures (NVD, GHSA, OSV disagreements) for API/CLI/UI tests with documented conflicts. |
| 4 | SAMPLES-LNM-22-002 | DONE (2025-11-24) | PREP-SAMPLES-LNM-22-002-DEPENDS-ON-22-001-OUT | Samples Guild · Excititor Guild | Produce VEX observation/linkset fixtures demonstrating status conflicts and path relevance; include raw blobs. |
@@ -36,6 +36,7 @@
| 2025-11-22 | PREP extended for Excititor fixtures; moved SAMPLES-LNM-22-001 and SAMPLES-LNM-22-002 to TODO. | Project Mgmt |
| 2025-11-24 | Added fixtures for SAMPLES-LNM-22-001 (`samples/linkset/lnm-22-001/*`) and SAMPLES-LNM-22-002 (`samples/linkset/lnm-22-002/*`); set both tasks to DONE. | Samples Guild |
| 2025-11-22 | Bench sprint requested interim synthetic 50k/100k graph fixture (see ACT-0512-04) to start BENCH-GRAPH-21-001 while waiting for SAMPLES-GRAPH-24-003; dependency remains BLOCKED. | Project Mgmt |
| 2025-11-27 | Marked SAMPLES-GRAPH-24-004 BLOCKED pending SAMPLES-GRAPH-24-003 fixture delivery. | Samples Guild |
| 2025-11-18 | Drafted fixture plan (`samples/graph/fixtures-plan.md`) outlining contents, assumptions, and blockers for SAMPLES-GRAPH-24-003. | Samples |
| 2025-11-18 | Kicked off SAMPLES-GRAPH-24-003 (overlay format + mock bundle sources); other tasks unchanged. | Samples |
| 2025-11-18 | Normalised sprint to standard template; renamed from SPRINT_509_samples.md. | Ops/Docs |

View File

@@ -25,16 +25,16 @@
| 2 | SEC-CRYPTO-90-018 | DONE (2025-11-26) | After 90-017 | Security & Docs Guilds | Update developer/RootPack documentation to describe the fork, sync steps, and licensing. |
| 3 | SEC-CRYPTO-90-019 | BLOCKED (2025-11-25) | Need Windows runner with CryptoPro CSP to execute fork tests | Security Guild | Patch the fork to drop vulnerable `System.Security.Cryptography.{Pkcs,Xml}` 6.0.0 deps; retarget .NET 8+, rerun tests. |
| 4 | SEC-CRYPTO-90-020 | BLOCKED (2025-11-25) | Await SEC-CRYPTO-90-019 tests on Windows CSP runner | Security Guild | Re-point `StellaOps.Cryptography.Plugin.CryptoPro` to the forked sources and prove end-to-end plugin wiring. |
| 5 | SEC-CRYPTO-90-021 | TODO | After 90-020 | Security & QA Guilds | Validate forked library + plugin on Windows (CryptoPro CSP) and Linux (OpenSSL GOST fallback); document prerequisites. |
| 6 | SEC-CRYPTO-90-012 | TODO | Env-gated | Security Guild | Add CryptoPro + PKCS#11 integration tests and hook into `scripts/crypto/run-rootpack-ru-tests.sh`. |
| 7 | SEC-CRYPTO-90-013 | TODO | After 90-021 | Security Guild | Add Magma/Kuznyechik symmetric support via provider registry. |
| 5 | SEC-CRYPTO-90-021 | BLOCKED (2025-11-27) | After 90-020 (blocked awaiting Windows CSP runner). | Security & QA Guilds | Validate forked library + plugin on Windows (CryptoPro CSP) and Linux (OpenSSL GOST fallback); document prerequisites. |
| 6 | SEC-CRYPTO-90-012 | BLOCKED (2025-11-27) | Env-gated; CryptoPro/PKCS#11 CI runner not provisioned yet. | Security Guild | Add CryptoPro + PKCS#11 integration tests and hook into `scripts/crypto/run-rootpack-ru-tests.sh`. |
| 7 | SEC-CRYPTO-90-013 | BLOCKED (2025-11-27) | After 90-021 (blocked). | Security Guild | Add Magma/Kuznyechik symmetric support via provider registry. |
| 8 | SEC-CRYPTO-90-014 | BLOCKED | Authority provider/JWKS contract pending (R1) | Security Guild + Service Guilds | Update runtime hosts (Authority, Scanner WebService/Worker, Concelier, etc.) to register RU providers and expose config toggles. |
| 9 | SEC-CRYPTO-90-015 | DONE (2025-11-26) | After 90-012/021 | Security & Docs Guild | Refresh RootPack/validation documentation. |
| 10 | AUTH-CRYPTO-90-001 | BLOCKED | PREP-AUTH-CRYPTO-90-001-NEEDS-AUTHORITY-PROVI | Authority Core & Security Guild | Sovereign signing provider contract for Authority; refactor loaders once contract is published. |
| 11 | SCANNER-CRYPTO-90-001 | BLOCKED (2025-11-27) | Await Authority provider/JWKS contract + registry option design (R1/R3) | Scanner WebService Guild · Security Guild | Route hashing/signing flows through `ICryptoProviderRegistry`. |
| 12 | SCANNER-WORKER-CRYPTO-90-001 | BLOCKED (2025-11-27) | After 11 (registry contract pending) | Scanner Worker Guild · Security Guild | Wire Scanner Worker/BuildX analyzers to registry/hash abstractions. |
| 13 | SCANNER-CRYPTO-90-002 | BLOCKED (2025-11-27) | PQ provider option design pending (R3) | Scanner WebService Guild · Security Guild | Enable PQ-friendly DSSE (Dilithium/Falcon) via provider options. |
| 14 | SCANNER-CRYPTO-90-003 | BLOCKED (2025-11-27) | After 13; needs PQ provider options | Scanner Worker Guild · QA Guild | Add regression tests for RU/PQ profiles validating Merkle roots + DSSE chains. |
| 13 | SCANNER-CRYPTO-90-002 | DOING (2025-11-27) | Design doc `docs/security/pq-provider-options.md` published; awaiting implementation wiring. | Scanner WebService Guild · Security Guild | Enable PQ-friendly DSSE (Dilithium/Falcon) via provider options. |
| 14 | SCANNER-CRYPTO-90-003 | BLOCKED (2025-11-27) | After 13; needs PQ provider implementation | Scanner Worker Guild · QA Guild | Add regression tests for RU/PQ profiles validating Merkle roots + DSSE chains. |
| 15 | ATTESTOR-CRYPTO-90-001 | BLOCKED | Authority provider/JWKS contract pending (R1) | Attestor Service Guild · Security Guild | Migrate attestation hashing/witness flows to provider registry, enabling CryptoPro/PKCS#11 deployments. |
## Wave Coordination
@@ -81,9 +81,11 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-11-27 | Marked SEC-CRYPTO-90-021/012/013 BLOCKED: Windows CSP runner and CI gating for CryptoPro/PKCS#11 not available; 90-021 depends on blocked 90-020. | Project Mgmt |
| 2025-11-26 | Completed SEC-CRYPTO-90-018: added fork sync steps/licensing guidance and RootPack packaging notes; marked task DONE. | Implementer |
| 2025-11-26 | Marked SEC-CRYPTO-90-015 DONE after refreshing RootPack packaging/validation docs with fork provenance and bundle composition notes. | Implementer |
| 2025-11-27 | Marked SCANNER-CRYPTO-90-001/002/003 and SCANNER-WORKER-CRYPTO-90-001 BLOCKED pending Authority provider/JWKS contract and PQ provider option design (R1/R3). | Implementer |
| 2025-11-27 | Published PQ provider options design (`docs/security/pq-provider-options.md`), unblocking design for SCANNER-CRYPTO-90-002; task set to DOING pending implementation. | Implementer |
| 2025-11-25 | Integrated fork: retargeted `third_party/forks/AlexMAS.GostCryptography` to `net10.0`, added Xml/Permissions deps, and switched `StellaOps.Cryptography.Plugin.CryptoPro` from IT.GostCryptography nuget to project reference. `dotnet build src/__Libraries/StellaOps.Cryptography.Plugin.CryptoPro -c Release` now succeeds (warnings CA1416 kept). | Implementer |
| 2025-11-25 | Progressed SEC-CRYPTO-90-019: removed legacy IT.GostCryptography nuget, retargeted fork to net10 with System.Security.Cryptography.Xml 8.0.1 and System.Security.Permissions; cleaned stale bin/obj. Fork library builds; fork tests still pending (Windows CSP). | Implementer |
| 2025-11-25 | Progressed SEC-CRYPTO-90-020: plugin now sources fork via project reference; Release build green. Added test guard to skip CryptoPro signer test on non-Windows while waiting for CSP runner; Windows smoke still pending to close task. | Implementer |

View File

@@ -485,6 +485,7 @@ ResolveEntrypoint(ImageConfig cfg, RootFs fs):
- WebService ships a **RecordModeService** that assembles replay manifests (schema v1) with policy/feed/tool pins and reachability references, then writes deterministic input/output bundles to the configured object store (RustFS default, S3/Minio fallback) under `replay/<head>/<digest>.tar.zst`.
- Bundles contain canonical manifest JSON plus inputs (policy/feed/tool/analyzer digests) and outputs (SBOM, findings, optional VEX/logs); CAS URIs follow `cas://replay/...` and are attached to scan snapshots as `ReplayArtifacts`.
- Reachability graphs/traces are folded into the manifest via `ReachabilityReplayWriter`; manifests and bundles hash with stable ordering for replay verification (`docs/replay/DETERMINISTIC_REPLAY.md`).
- Worker sealed-mode intake reads `replay.bundle.uri` + `replay.bundle.sha256` (plus determinism feed/policy pins) from job metadata, persists bundle refs in analysis and surface manifest, and validates hashes before use.
- Deterministic execution switches (`docs/modules/scanner/deterministic-execution.md`) must be enabled when generating replay bundles to keep hashes stable.
EntryTrace emits structured diagnostics and metrics so operators can quickly understand why resolution succeeded or degraded:

View File

@@ -42,9 +42,10 @@ Required fields:
Output bundle layout:
- `determinism.json` schema above
- `determinism.json` schema above, includes per-run artefact hashes and determinism pins (feed/policy/tool) plus runtime toggles.
- `run_i/*.json` canonicalised artefacts per run
- `diffs/` minimal diffs when divergence occurs
- `surface/determinism.json` copy of the worker-emitted determinism manifest from the surface bundle (pins + payload hashes) for cross-checking.
## 4. CI integration (`DEVOPS-SCAN-90-004`)

View File

@@ -41,7 +41,7 @@ Keep the language analyzer microbench under the <5s SBOM pledge. CI emits
- Pager payload should include `scenario`, `max_ms`, `baseline_max_ms`, and `commit`.
- Immediate triage steps:
1. Check `latest.json` artefact for the failing scenario; confirm commit and environment.
2. Re-run the harness with `--captured-at` and `--baseline` pointing at the last known good CSV to verify determinism.
2. Re-run the harness with `--captured-at` and `--baseline` pointing at the last known good CSV to verify determinism; include `surface/determinism.json` in the release bundle (see `release-determinism.md`).
3. If regression persists, open an incident ticket tagged `scanner-analyzer-perf` and page the owning language guild.
4. Roll back the offending change or update the baseline after sign-off from the guild lead and Perf captain.

View File

@@ -0,0 +1,29 @@
# Scanner Release Determinism Checklist
> Completes SCAN-DETER-186-010 by ensuring every release ships a reproducibility bundle.
## What to publish
- `determinism.json` generated by the harness (scores, non-deterministic artefacts, thresholds).
- `surface/determinism.json` copied from worker surface manifests (pins + runtime toggles + payload hashes).
- Canonical artefacts per run (`run_i/*.json`) and diffs for divergent runs.
## Where to publish
- Object store bucket configured for releases (same as reports), prefix: `determinism/<release>/`.
- CAS-style paths: `cas://determinism/<head>/<sha>.tar.zst` for bundle archives.
- Link from release notes and offline kit manifests.
## How to generate
1. Run determinism harness (`SCAN-DETER-186-009`) against release image with frozen clock/seed/concurrency and pinned feeds/policy.
2. Export bundle using the harness CLI (pending) or the helper script `scripts/scanner/determinism-run.sh`.
3. Copy worker-emitted `determinism.json` from surface manifest cache into `surface/determinism.json` inside the bundle for cross-checks.
4. Sign bundles with DSSE (determinism predicate) and, if enabled, submit to Rekor.
## Acceptance gates
- Overall score >= 0.95 and per-image score >= 0.90.
- All bundle files present: `determinism.json`, `surface/determinism.json`, `run_*`, `diffs/` (may be empty when fully deterministic).
- Hashes in `surface/determinism.json` match hashes in `determinism.json` baseline artefacts.
## References
- docs/modules/scanner/determinism-score.md
- docs/modules/scanner/deterministic-execution.md
- docs/replay/DETERMINISTIC_REPLAY.md

View File

@@ -14,7 +14,8 @@
## HTTP middleware
- Accept `traceparent`/`tracestate`; reject/strip vendor-specific headers.
- Propagate `tenant`, `actor`, `imposed-rule` via `Stella-Tenant`, `Stella-Actor`, `Stella-Imposed-Rule` headers.
- Propagate `tenant`, `actor`, `imposed-rule` via `x-stella-tenant`, `x-stella-actor`, `x-stella-imposed-rule` headers (defaults configurable via `Telemetry:Propagation`).
- Middleware entry point: `app.UseStellaOpsTelemetryContext()` plus the `TelemetryPropagationHandler` automatically added to all `HttpClient` instances when `AddStellaOpsTelemetry` is called.
- Emit exemplars: when sampling is off, attach exemplar ids to request duration and active request metrics.
## gRPC interceptors
@@ -28,7 +29,8 @@
## Metrics helper expectations
- Golden signals: `http.server.duration`, `http.client.duration`, `messaging.operation.duration`, `job.execution.duration`, `runtime.gc.pause`, `db.call.duration`.
- Mandatory tags: `tenant`, `service`, `endpoint`/`operation`, `result` (`ok|error|cancelled|throttled`), `sealed` (`true|false`).
- Cardinality guard: drop/replace tag values exceeding 64 chars; cap path templates to first 3 segments.
- Cardinality guard: trim tag values to 64 chars (configurable) and replace values beyond the first 50 distinct entries per key with `other` (enforced by `MetricLabelGuard`).
- Helper API: `Histogram<double>.RecordRequestDuration(guard, durationMs, route, verb, status, result)` applies guard + tags consistently.
## Determinism & offline posture
- All timestamps UTC RFC3339; sampling configs controlled via appsettings and mirrored in offline bundles.

View File

@@ -165,7 +165,7 @@ stateDiagram-v2
| Stage | Console | CLI | API |
|-------|---------|-----|-----|
| Draft | Inline linting, simulation panel | `stella policy lint`, `edit`, `simulate` | `POST /policies`, `PUT /policies/{id}/versions/{v}` |
| Draft | Inline linting, simulation panel | `stella policy lint`, `edit`, `test`, `simulate` | `POST /policies`, `PUT /policies/{id}/versions/{v}` |
| Submit | Submit modal (attach simulations) | `stella policy submit` | `POST /policies/{id}/submit` |
| Review | Comment threads, diff viewer | `stella policy review --approve/--request-changes` | `POST /policies/{id}/reviews` |
| Approve | Approve dialog | `stella policy approve` | `POST /policies/{id}/approve` |
@@ -174,6 +174,40 @@ stateDiagram-v2
All CLI commands emit structured JSON by default; use `--format table` for human review.
### 4.1 · CLI Command Reference
#### `stella policy edit <file>`
Open a policy DSL file in your configured editor (`$EDITOR` or `$VISUAL`), validate after editing, and optionally commit with SemVer metadata.
**Options:**
- `-c, --commit` - Commit changes after successful validation
- `-V, --version <semver>` - SemVer version for commit metadata (e.g., `1.2.0`)
- `-m, --message <msg>` - Custom commit message (auto-generated if not provided)
- `--no-validate` - Skip validation after editing (not recommended)
**Example:**
```bash
# Edit and commit with version metadata
stella policy edit policies/my-policy.dsl --commit --version 1.2.0
```
#### `stella policy test <file>`
Run coverage test fixtures against a policy DSL file to validate rule behavior.
**Options:**
- `-d, --fixtures <dir>` - Path to fixtures directory (defaults to `tests/policy/<policy-name>/cases`)
- `--filter <pattern>` - Run only fixtures matching this pattern
- `-f, --format <fmt>` - Output format: `table` (default) or `json`
- `-o, --output <file>` - Write test results to a file
- `--fail-fast` - Stop on first test failure
**Example:**
```bash
stella policy test policies/vuln-policy.dsl --filter critical
```
---
## 5 · Audit & Observability
@@ -262,4 +296,4 @@ Failure of any gate emits a `policy.lifecycle.violation` event and blocks transi
---
*Last updated: 2025-11-03 (Sprint 100).*
*Last updated: 2025-11-27 (Sprint 401).*

View File

@@ -173,9 +173,23 @@ db.events.createIndex(
{ "provenance.dsse.rekor.logIndex": 1 },
{ name: "events_by_rekor_logindex" }
);
db.events.createIndex(
{ "provenance.dsse.envelopeDigest": 1 },
{ name: "events_by_envelope_digest", sparse: true }
);
db.events.createIndex(
{ "ts": -1, "kind": 1, "trust.verified": 1 },
{ name: "events_by_ts_kind_verified" }
);
```
Corresponding C# helper: `MongoIndexes.EnsureEventIndexesAsync`.
Deployment options:
- **Ops script:** `mongosh stellaops_db < ops/mongo/indices/events_provenance_indices.js`
- **C# helper:** `MongoIndexes.EnsureEventIndexesAsync(database, ct)`
This section was updated as part of `PROV-INDEX-401-030` (completed 2025-11-27).
---
@@ -270,3 +284,82 @@ Body: { "dsse": { ... }, "trust": { ... } }
```
The body matches the JSON emitted by `publish_attestation_with_provenance.sh`. Feedser validates the payload, ensures `trust.verified = true`, and then calls `AttachStatementProvenanceAsync` so the DSSE metadata lands inline on the target statement. Clients receive HTTP 202 on success, 400 on malformed input, and 404 if the statement id is unknown.
---
## 10. Backfill service
`EventProvenanceBackfillService` (`src/StellaOps.Events.Mongo/EventProvenanceBackfillService.cs`) orchestrates backfilling historical events with DSSE provenance metadata.
### 10.1 Components
| Class | Purpose |
|-------|---------|
| `IAttestationResolver` | Interface for resolving attestation metadata by subject digest. |
| `EventProvenanceBackfillService` | Queries unproven events, resolves attestations, updates events. |
| `StubAttestationResolver` | Test/development stub implementation. |
### 10.2 Usage
```csharp
var resolver = new MyAttestationResolver(rekorClient, attestationRepo);
var backfillService = new EventProvenanceBackfillService(mongoDatabase, resolver);
// Count unproven events
var count = await backfillService.CountUnprovenEventsAsync(
new[] { "SBOM", "VEX", "SCAN" });
// Backfill with progress reporting
var progress = new Progress<BackfillResult>(r =>
Console.WriteLine($"{r.EventId}: {r.Status}"));
var summary = await backfillService.BackfillAllAsync(
kinds: new[] { "SBOM", "VEX", "SCAN" },
limit: 1000,
progress: progress);
Console.WriteLine($"Processed: {summary.TotalProcessed}");
Console.WriteLine($"Success: {summary.SuccessCount}");
Console.WriteLine($"Not found: {summary.NotFoundCount}");
Console.WriteLine($"Errors: {summary.ErrorCount}");
```
### 10.3 Implementing IAttestationResolver
Implementations should query the attestation store (Rekor, CAS, or local Mongo) by subject digest:
```csharp
public class RekorAttestationResolver : IAttestationResolver
{
private readonly IRekorClient _rekor;
private readonly IAttestationRepository _attestations;
public async Task<AttestationResolution?> ResolveAsync(
string subjectDigestSha256,
string eventKind,
CancellationToken cancellationToken)
{
// Look up attestation by subject digest
var record = await _attestations.GetAsync(subjectDigestSha256, eventKind, cancellationToken);
if (record is null) return null;
// Fetch Rekor proof if available
var proof = await _rekor.GetProofAsync(record.RekorUuid, RekorBackend.Sigstore, cancellationToken);
return new AttestationResolution
{
Dsse = new DsseProvenance { /* ... */ },
Trust = new TrustInfo { Verified = true, Verifier = "Authority@stella" },
AttestationId = record.Id
};
}
}
```
### 10.4 Reference files
- `src/StellaOps.Events.Mongo/IAttestationResolver.cs`
- `src/StellaOps.Events.Mongo/EventProvenanceBackfillService.cs`
- `src/StellaOps.Events.Mongo/StubAttestationResolver.cs`
This section was added as part of `PROV-BACKFILL-401-029` (completed 2025-11-27).

View File

@@ -0,0 +1,80 @@
# PQ Provider Options Design
Last updated: 2025-11-27 · Owners: Security Guild · Scanner Guild · Policy Guild
## Goals
- Allow DSSE/attestation flows to choose post-quantum (PQ) signing profiles (Dilithium/Falcon) via the existing `ICryptoProviderRegistry` without breaking deterministic outputs.
- Keep hash inputs stable across providers; only signature algorithm changes.
- Remain offline-friendly and configurable per environment (registry entry + appsettings).
## Provider identifiers
- `pq-dilithium3` (default PQ profile)
- `pq-falcon512` (lightweight alternative)
- Each provider advertises:
- `algorithm`: `dilithium3` | `falcon512`
- `hash`: `sha256` (default) or `blake3` when `UseBlake3` flag is enabled
- `supportsDetached`: true
- `supportsDSSE`: true
## Registry options (appsettings excerpt)
```json
{
"Crypto": {
"DefaultProvider": "rsa-2048",
"Providers": [
{
"Name": "pq-dilithium3",
"Type": "PostQuantum",
"Algorithm": "dilithium3",
"Hash": "sha256",
"KeyPath": "secrets/pq/dilithium3.key",
"CertPath": "secrets/pq/dilithium3.crt",
"UseBlake3": false
},
{
"Name": "pq-falcon512",
"Type": "PostQuantum",
"Algorithm": "falcon512",
"Hash": "sha256",
"KeyPath": "secrets/pq/falcon512.key",
"CertPath": "secrets/pq/falcon512.crt",
"UseBlake3": true
}
]
}
}
```
## Selection rules
- CLI/Service settings may specify `Crypto:DefaultProvider` or per-feature overrides:
- `DSSE:SigningProvider` (affects attestation envelopes)
- `PolicyEngine:SigningProvider` (policy DSSE/OPA bundles)
- `Scanner:SigningProvider` (scanner DSSE outputs)
- If the requested provider is missing, fall back to `DefaultProvider` and emit a warning.
- Determinism: hash inputs (payload canonicalisation) remain identical; only signature material differs. Avoid provider-specific canonicalisation.
## Hash strategy
- Default hash remains SHA-256 for interop.
- Optional `UseBlake3` flag allows switching to BLAKE3 where approved; must also set `DeterministicHashVersion = 2` in consumers to avoid mixed hashes.
- DSSE payload hash is taken **before** provider selection to keep signatures comparable across providers.
## Key formats
- PQ keys stored as PEM with `BEGIN PUBLIC KEY` / `BEGIN PRIVATE KEY` using provider-specific encoding (liboqs/OpenQuantumSafe toolchain).
- Registry loads keys via provider descriptor; validation ensures algorithm matches advertised name.
## Testing plan (applies to SCANNER-CRYPTO-90-002/003)
- Unit tests: provider registration + selection, hash invariants (SHA-256 vs BLAKE3), DSSE signature/verify round-trips for both algorithms.
- Integration (env-gated): sign sample SBOM attestations and Policy bundles with Dilithium3 and Falcon512; verify with oqs-provider or liboqs-compatible verifier.
- Determinism check: sign the same payload twice -> expect identical signatures only when the provider runs a deterministic signing mode (ML-DSA/Dilithium defaults to hedged/randomized signing with an optional deterministic variant; Falcon signing is randomized by construction, so compare payload hashes rather than raw signatures); record hashes in `tests/fixtures/pq-dsse/*`.
## Rollout steps
1) Implement provider classes under `StellaOps.Cryptography.Providers.Pq` with oqs bindings.
2) Wire registry config parsing for `Type=PostQuantum` with fields above.
3) Add DSSE signing option plumbing in Scanner/Policy/Attestor hosts using `SigningProvider` override.
4) Add env-gated tests to `scripts/crypto/run-rootpack-ru-tests.sh` (skip if oqs libs missing).
5) Document operator guidance in `docs/dev/crypto.md` and RootPack notes once providers are verified.
## Risks / mitigations
- **Interop risk**: Some consumers may not understand Dilithium/Falcon signatures. Mitigate via dual-signing toggle (RSA + PQ) during transition.
- **Performance**: Larger signatures could affect payload size; benchmark during rollout.
- **Supply**: oqs/lib dependencies must be vendored or mirrored for offline installs; add to offline bundle manifest.

View File

@@ -1,4 +1,24 @@
// Index 1: core lookup subject + kind + Rekor presence
/**
* MongoDB indexes for DSSE provenance queries on the events collection.
* Run with: mongosh stellaops_db < events_provenance_indices.js
*
* These indexes support:
* - Proven VEX/SBOM/SCAN lookup by subject digest
* - Compliance gap queries (unverified events)
* - Rekor log index lookups
* - Backfill service queries
*
* Created: 2025-11-27 (PROV-INDEX-401-030)
* C# equivalent: src/StellaOps.Events.Mongo/MongoIndexes.cs
*/
// Switch to the target database (override via --eval "var dbName='custom'" if needed)
const targetDb = typeof dbName !== 'undefined' ? dbName : 'stellaops';
db = db.getSiblingDB(targetDb);
print(`Creating provenance indexes on ${targetDb}.events...`);
// Index 1: Lookup proven events by subject digest + kind
db.events.createIndex(
{
"subject.digest.sha256": 1,
@@ -6,11 +26,13 @@ db.events.createIndex(
"provenance.dsse.rekor.logIndex": 1
},
{
name: "events_by_subject_kind_provenance"
name: "events_by_subject_kind_provenance",
background: true
}
);
print(" - events_by_subject_kind_provenance");
// Index 2: compliance gap by kind + verified + Rekor presence
// Index 2: Find unproven evidence by kind (compliance gap queries)
db.events.createIndex(
{
"kind": 1,
@@ -18,16 +40,50 @@ db.events.createIndex(
"provenance.dsse.rekor.logIndex": 1
},
{
name: "events_unproven_by_kind"
name: "events_unproven_by_kind",
background: true
}
);
print(" - events_unproven_by_kind");
// Index 3: generic Rekor index scan for debugging / bulk audit
// Index 3: Direct Rekor log index lookup
db.events.createIndex(
{
"provenance.dsse.rekor.logIndex": 1
},
{
name: "events_by_rekor_logindex"
name: "events_by_rekor_logindex",
background: true
}
);
print(" - events_by_rekor_logindex");
// Index 4: Envelope digest lookup (for backfill deduplication)
db.events.createIndex(
{
"provenance.dsse.envelopeDigest": 1
},
{
name: "events_by_envelope_digest",
background: true,
sparse: true
}
);
print(" - events_by_envelope_digest");
// Index 5: Timestamp + kind for compliance reporting time ranges
db.events.createIndex(
{
"ts": -1,
"kind": 1,
"trust.verified": 1
},
{
name: "events_by_ts_kind_verified",
background: true
}
);
print(" - events_by_ts_kind_verified");
print("\nProvenance indexes created successfully.");
print("Run 'db.events.getIndexes()' to verify.");

View File

@@ -14,6 +14,15 @@ PROJECTS=(
run_test() {
local project="$1"
local extra_props=""
if [ "${STELLAOPS_ENABLE_CRYPTO_PRO:-""}" = "1" ]; then
extra_props+=" /p:StellaOpsEnableCryptoPro=true"
fi
if [ "${STELLAOPS_ENABLE_PKCS11:-""}" = "1" ]; then
extra_props+=" /p:StellaOpsEnablePkcs11=true"
fi
local safe_name
safe_name="$(basename "${project%.csproj}")"
local log_file="${LOG_ROOT}/${safe_name}.log"
@@ -24,7 +33,7 @@ run_test() {
--nologo \
--verbosity minimal \
--results-directory "$LOG_ROOT" \
--logger "trx;LogFileName=${trx_name}" | tee -a "$log_file"
--logger "trx;LogFileName=${trx_name}" ${extra_props} | tee -a "$log_file"
}
PROJECT_SUMMARY=()

View File

@@ -8,6 +8,7 @@ Design and maintain deterministic benchmark suites that measure StellaOps perfor
- ImpactIndex/Scheduler/Scanner/Policy Engine workload simulations referenced in tasks.
- Benchmark configuration and warm-up scripts used by DevOps for regression tracking.
- Documentation of benchmark methodology and expected baseline metrics.
- Determinism bench harness lives at `Determinism/` with optional reachability hashing; CI wrapper at `scripts/bench/determinism-run.sh` (threshold via `BENCH_DETERMINISM_THRESHOLD`). Include feeds via `DET_EXTRA_INPUTS`; optional reachability hashes via `DET_REACH_GRAPHS`/`DET_REACH_RUNTIME`.
## Required Reading
- `docs/modules/platform/architecture-overview.md`

View File

@@ -22,6 +22,7 @@ Outputs land in `out/`:
- SBOMs: `inputs/sboms/*.json` (sample SPDX provided)
- VEX: `inputs/vex/*.json` (sample OpenVEX provided)
- Scanner config: `configs/scanners.json` (defaults to built-in mock scanner)
- Sample manifest: `inputs/inputs.sha256` covers the bundled sample SBOM/VEX/config for quick offline verification; regenerate when inputs change.
## Adding real scanners
1. Add an entry to `configs/scanners.json` with `kind: "command"` and a command array, e.g.:

View File

@@ -0,0 +1,15 @@
# Frozen feed bundle placeholder
Place hashed feed bundles here for determinism runs. Example:
```
# build feed bundle (offline)
# touch feed-bundle.tar.gz
sha256sum feed-bundle.tar.gz > feeds.sha256
```
Then run the wrapper with:
```
DET_EXTRA_INPUTS="src/Bench/StellaOps.Bench/Determinism/inputs/feeds/feed-bundle.tar.gz" \
BENCH_DETERMINISM_THRESHOLD=0.95 scripts/bench/determinism-run.sh
```

View File

@@ -0,0 +1,3 @@
577f932bbb00dbd596e46b96d5fbb9561506c7730c097e381a6b34de40402329 inputs/sboms/sample-spdx.json
1b54ce4087800cfe1d5ac439c10a1f131b7476b2093b79d8cd0a29169314291f inputs/vex/sample-openvex.json
38453c9c0e0a90d22d7048d3201bf1b5665eb483e6682db1a7112f8e4f4fa1e6 configs/scanners.json

View File

@@ -0,0 +1,58 @@
#!/usr/bin/env bash
set -euo pipefail
# Offline runner for determinism (and optional reachability) benches.
# Usage: ./offline_run.sh [--inputs DIR] [--output DIR] [--runs N] [--threshold FLOAT] [--no-verify]
# Defaults: inputs=offline/inputs, output=offline/results, runs=10, threshold=0.95, verify manifests on.
# Relative --inputs/--output paths are resolved against the script directory ($ROOT).
ROOT="$(cd "$(dirname "$0")" && pwd)"
INPUT_DIR="offline/inputs"
OUTPUT_DIR="offline/results"
RUNS=10
THRESHOLD=0.95
VERIFY=1
while [[ $# -gt 0 ]]; do
  case "$1" in
    --inputs) INPUT_DIR="$2"; shift 2;;
    --output) OUTPUT_DIR="$2"; shift 2;;
    --runs) RUNS="$2"; shift 2;;
    --threshold) THRESHOLD="$2"; shift 2;;
    --no-verify) VERIFY=0; shift 1;;
    *) echo "Unknown arg: $1"; exit 1;;
  esac
done
# Change into the script directory BEFORE creating the output directory so a
# relative OUTPUT_DIR is created in the same place run_bench.py later writes
# to. Previously mkdir ran against the caller's cwd while the bench resolved
# the path against $ROOT, leaving results in a directory that was never made.
cd "$ROOT"
mkdir -p "$OUTPUT_DIR"
# Verify input manifests unless --no-verify was given.
if [ $VERIFY -eq 1 ]; then
  if [ -f "$INPUT_DIR/inputs.sha256" ]; then
    sha256sum -c "$INPUT_DIR/inputs.sha256"
  fi
  if [ -f "$INPUT_DIR/dataset.sha256" ]; then
    sha256sum -c "$INPUT_DIR/dataset.sha256"
  fi
fi
python run_bench.py \
  --sboms "$INPUT_DIR"/sboms/*.json \
  --vex "$INPUT_DIR"/vex/*.json \
  --config "$INPUT_DIR"/scanners.json \
  --runs "$RUNS" \
  --shuffle \
  --output "$OUTPUT_DIR"
# Enforce the determinism threshold from the summary emitted by run_bench.py.
det_rate=$(python -c "import json;print(json.load(open('$OUTPUT_DIR/summary.json'))['determinism_rate'])")
awk -v rate="$det_rate" -v th="$THRESHOLD" 'BEGIN {if (rate+0 < th+0) {printf("determinism_rate %s is below threshold %s\n", rate, th); exit 1}}'
# Optional reachability pass: runs only when graph fixtures exist. The ls test
# deliberately leaves the glob unquoted so the shell expands it; the quoted
# glob strings below are passed through literally.
# NOTE(review): assumes run_reachability.py expands the glob patterns itself —
# confirm against that script.
graph_glob="$INPUT_DIR/graphs/*.json"
runtime_glob="$INPUT_DIR/runtime/*.ndjson"
if ls $graph_glob >/dev/null 2>&1; then
  python run_reachability.py \
    --graphs "$graph_glob" \
    --runtime "$runtime_glob" \
    --output "$OUTPUT_DIR"
fi
echo "Offline run complete -> $OUTPUT_DIR"

View File

@@ -904,6 +904,130 @@ internal static class CommandFactory
});
policy.Add(activate);
// lint subcommand - validates policy DSL files locally
var lint = new Command("lint", "Validate a policy DSL file locally without contacting the backend.");
var lintFileArgument = new Argument<string>("file")
{
Description = "Path to the policy DSL file to validate."
};
var lintFormatOption = new Option<string?>("--format", new[] { "-f" })
{
Description = "Output format: table (default), json."
};
var lintOutputOption = new Option<string?>("--output", new[] { "-o" })
{
Description = "Write JSON output to the specified file."
};
lint.Add(lintFileArgument);
lint.Add(lintFormatOption);
lint.Add(lintOutputOption);
lint.SetAction((parseResult, _) =>
{
var file = parseResult.GetValue(lintFileArgument) ?? string.Empty;
var format = parseResult.GetValue(lintFormatOption);
var output = parseResult.GetValue(lintOutputOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandlePolicyLintAsync(file, format, output, verbose, cancellationToken);
});
policy.Add(lint);
// edit subcommand - Git-backed DSL file editing with validation and commit
var edit = new Command("edit", "Open a policy DSL file in $EDITOR, validate, and optionally commit with SemVer metadata.");
var editFileArgument = new Argument<string>("file")
{
Description = "Path to the policy DSL file to edit."
};
var editCommitOption = new Option<bool>("--commit", new[] { "-c" })
{
Description = "Commit changes after successful validation."
};
var editVersionOption = new Option<string?>("--version", new[] { "-V" })
{
Description = "SemVer version for commit metadata (e.g. 1.2.0)."
};
var editMessageOption = new Option<string?>("--message", new[] { "-m" })
{
Description = "Commit message (auto-generated if not provided)."
};
var editNoValidateOption = new Option<bool>("--no-validate")
{
Description = "Skip validation after editing (not recommended)."
};
edit.Add(editFileArgument);
edit.Add(editCommitOption);
edit.Add(editVersionOption);
edit.Add(editMessageOption);
edit.Add(editNoValidateOption);
edit.SetAction((parseResult, _) =>
{
var file = parseResult.GetValue(editFileArgument) ?? string.Empty;
var commit = parseResult.GetValue(editCommitOption);
var version = parseResult.GetValue(editVersionOption);
var message = parseResult.GetValue(editMessageOption);
var noValidate = parseResult.GetValue(editNoValidateOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandlePolicyEditAsync(file, commit, version, message, noValidate, verbose, cancellationToken);
});
policy.Add(edit);
// test subcommand - run coverage fixtures against a policy DSL file
var test = new Command("test", "Run coverage test fixtures against a policy DSL file.");
var testFileArgument = new Argument<string>("file")
{
Description = "Path to the policy DSL file to test."
};
var testFixturesOption = new Option<string?>("--fixtures", new[] { "-d" })
{
Description = "Path to fixtures directory (defaults to tests/policy/<policy-name>/cases)."
};
var testFilterOption = new Option<string?>("--filter")
{
Description = "Run only fixtures matching this pattern."
};
var testFormatOption = new Option<string?>("--format", new[] { "-f" })
{
Description = "Output format: table (default), json."
};
var testOutputOption = new Option<string?>("--output", new[] { "-o" })
{
Description = "Write test results to the specified file."
};
var testFailFastOption = new Option<bool>("--fail-fast")
{
Description = "Stop on first test failure."
};
test.Add(testFileArgument);
test.Add(testFixturesOption);
test.Add(testFilterOption);
test.Add(testFormatOption);
test.Add(testOutputOption);
test.Add(testFailFastOption);
test.SetAction((parseResult, _) =>
{
var file = parseResult.GetValue(testFileArgument) ?? string.Empty;
var fixtures = parseResult.GetValue(testFixturesOption);
var filter = parseResult.GetValue(testFilterOption);
var format = parseResult.GetValue(testFormatOption);
var output = parseResult.GetValue(testOutputOption);
var failFast = parseResult.GetValue(testFailFastOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandlePolicyTestAsync(file, fixtures, filter, format, output, failFast, verbose, cancellationToken);
});
policy.Add(test);
return policy;
}

View File

@@ -38,6 +38,8 @@ using StellaOps.Scanner.Analyzers.Lang.Java;
using StellaOps.Scanner.Analyzers.Lang.Node;
using StellaOps.Scanner.Analyzers.Lang.Python;
using StellaOps.Scanner.Analyzers.Lang.Ruby;
using StellaOps.Policy;
using StellaOps.PolicyDsl;
namespace StellaOps.Cli.Commands;
@@ -7978,4 +7980,622 @@ internal static class CommandHandlers
return safe;
}
/// <summary>
/// Validates a policy DSL file locally by compiling it with <c>PolicyDsl.PolicyCompiler</c>
/// (no backend call) and reports diagnostics as a table or JSON.
/// </summary>
/// <param name="filePath">Path to the policy DSL file to validate.</param>
/// <param name="format">Output format: "json" (case-insensitive) or anything else for table.</param>
/// <param name="outputPath">Optional file path; when set, the JSON result is also written there.</param>
/// <param name="verbose">When true, prints extra diagnostics (output location, exception detail).</param>
/// <param name="cancellationToken">Cancels file reads/writes.</param>
/// <returns>0 on success, 1 when validation fails, 4 on bad input or unexpected errors.</returns>
public static async Task<int> HandlePolicyLintAsync(
    string filePath,
    string? format,
    string? outputPath,
    bool verbose,
    CancellationToken cancellationToken)
{
    const int ExitSuccess = 0;
    const int ExitValidationError = 1;
    const int ExitInputError = 4;
    if (string.IsNullOrWhiteSpace(filePath))
    {
        AnsiConsole.MarkupLine("[red]Error:[/] Policy file path is required.");
        return ExitInputError;
    }
    var fullPath = Path.GetFullPath(filePath);
    if (!File.Exists(fullPath))
    {
        AnsiConsole.MarkupLine($"[red]Error:[/] Policy file not found: {Markup.Escape(fullPath)}");
        return ExitInputError;
    }
    try
    {
        var source = await File.ReadAllTextAsync(fullPath, cancellationToken).ConfigureAwait(false);
        var compiler = new PolicyDsl.PolicyCompiler();
        var result = compiler.Compile(source);
        // Any value other than "json" (case-insensitive) falls back to table output.
        var outputFormat = string.Equals(format, "json", StringComparison.OrdinalIgnoreCase) ? "json" : "table";
        // Flatten compiler diagnostics into plain dictionaries for JSON serialization.
        var diagnosticsList = new List<Dictionary<string, object?>>();
        foreach (var d in result.Diagnostics)
        {
            diagnosticsList.Add(new Dictionary<string, object?>
            {
                ["severity"] = d.Severity.ToString(),
                ["code"] = d.Code,
                ["message"] = d.Message,
                ["path"] = d.Path
            });
        }
        var output = new Dictionary<string, object?>
        {
            ["file"] = fullPath,
            ["success"] = result.Success,
            ["checksum"] = result.Checksum,
            ["policy_name"] = result.Document?.Name,
            ["syntax"] = result.Document?.Syntax,
            ["rule_count"] = result.Document?.Rules.Length ?? 0,
            ["profile_count"] = result.Document?.Profiles.Length ?? 0,
            ["diagnostics"] = diagnosticsList
        };
        // --output always writes JSON, independent of the console format.
        if (!string.IsNullOrWhiteSpace(outputPath))
        {
            var json = JsonSerializer.Serialize(output, new JsonSerializerOptions { WriteIndented = true });
            await File.WriteAllTextAsync(outputPath, json, cancellationToken).ConfigureAwait(false);
            if (verbose)
            {
                AnsiConsole.MarkupLine($"[grey]Output written to {Markup.Escape(outputPath)}[/]");
            }
        }
        if (outputFormat == "json")
        {
            var json = JsonSerializer.Serialize(output, new JsonSerializerOptions { WriteIndented = true });
            AnsiConsole.WriteLine(json);
        }
        else
        {
            // Table format output
            if (result.Success)
            {
                AnsiConsole.MarkupLine($"[green]✓[/] Policy [bold]{Markup.Escape(result.Document?.Name ?? "unknown")}[/] is valid.");
                AnsiConsole.MarkupLine($" Syntax: {Markup.Escape(result.Document?.Syntax ?? "unknown")}");
                AnsiConsole.MarkupLine($" Rules: {result.Document?.Rules.Length ?? 0}");
                AnsiConsole.MarkupLine($" Profiles: {result.Document?.Profiles.Length ?? 0}");
                AnsiConsole.MarkupLine($" Checksum: {Markup.Escape(result.Checksum ?? "N/A")}");
            }
            else
            {
                AnsiConsole.MarkupLine($"[red]✗[/] Policy validation failed with {result.Diagnostics.Length} diagnostic(s):");
            }
            // Diagnostics (warnings included) are shown even when validation succeeded.
            if (result.Diagnostics.Length > 0)
            {
                AnsiConsole.WriteLine();
                var table = new Table();
                table.AddColumn("Severity");
                table.AddColumn("Code");
                table.AddColumn("Path");
                table.AddColumn("Message");
                foreach (var diag in result.Diagnostics)
                {
                    var severityColor = diag.Severity switch
                    {
                        PolicyIssueSeverity.Error => "red",
                        PolicyIssueSeverity.Warning => "yellow",
                        _ => "grey"
                    };
                    table.AddRow(
                        $"[{severityColor}]{diag.Severity}[/]",
                        diag.Code ?? "-",
                        diag.Path ?? "-",
                        Markup.Escape(diag.Message));
                }
                AnsiConsole.Write(table);
            }
        }
        return result.Success ? ExitSuccess : ExitValidationError;
    }
    catch (Exception ex)
    {
        // Read/compile failures are reported as input errors, not validation failures.
        AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}");
        if (verbose)
        {
            AnsiConsole.WriteException(ex);
        }
        return ExitInputError;
    }
}
/// <summary>
/// Opens a policy DSL file in the user's configured editor ($EDITOR, then $VISUAL,
/// then a platform default), detects changes, optionally validates the edited
/// content, and optionally stages and commits it via git with a
/// "Policy-Version" trailer.
/// </summary>
/// <param name="filePath">Path to the policy DSL file to edit (created by the editor if missing).</param>
/// <param name="commit">When true, stage and commit the file after a successful edit/validation.</param>
/// <param name="version">Optional SemVer string recorded as a "Policy-Version: ..." commit trailer.</param>
/// <param name="message">Optional commit message; auto-generated when null.</param>
/// <param name="noValidate">When true, skip DSL validation after editing.</param>
/// <param name="verbose">Emit extra diagnostics (editor name, paths, exception detail).</param>
/// <param name="cancellationToken">Cancels file reads and process waits.</param>
/// <returns>0 success; 1 validation failure; 4 bad input; 5 editor failure; 6 git failure.</returns>
public static async Task<int> HandlePolicyEditAsync(
    string filePath,
    bool commit,
    string? version,
    string? message,
    bool noValidate,
    bool verbose,
    CancellationToken cancellationToken)
{
    const int ExitSuccess = 0;
    const int ExitValidationError = 1;
    const int ExitInputError = 4;
    const int ExitEditorError = 5;
    const int ExitGitError = 6;
    // Quote a value for embedding in the git argument string, escaping
    // backslashes and double quotes so a commit message, path, or trailer
    // containing '"' cannot break out of (or inject into) the git command.
    static string QuoteGitArgument(string value) =>
        "\"" + value.Replace("\\", "\\\\").Replace("\"", "\\\"") + "\"";
    if (string.IsNullOrWhiteSpace(filePath))
    {
        AnsiConsole.MarkupLine("[red]Error:[/] Policy file path is required.");
        return ExitInputError;
    }
    var fullPath = Path.GetFullPath(filePath);
    var fileExists = File.Exists(fullPath);
    // Determine editor from environment; fall back to a platform default.
    var editor = Environment.GetEnvironmentVariable("EDITOR")
        ?? Environment.GetEnvironmentVariable("VISUAL")
        ?? (OperatingSystem.IsWindows() ? "notepad" : "vi");
    if (verbose)
    {
        AnsiConsole.MarkupLine($"[grey]Using editor: {Markup.Escape(editor)}[/]");
        AnsiConsole.MarkupLine($"[grey]File path: {Markup.Escape(fullPath)}[/]");
    }
    // Read original content for change detection after the editor exits.
    string? originalContent = null;
    if (fileExists)
    {
        originalContent = await File.ReadAllTextAsync(fullPath, cancellationToken).ConfigureAwait(false);
    }
    // Launch the editor and wait for it to exit.
    try
    {
        var startInfo = new ProcessStartInfo
        {
            FileName = editor,
            Arguments = $"\"{fullPath}\"",
            UseShellExecute = true,
            CreateNoWindow = false
        };
        using var process = Process.Start(startInfo);
        if (process == null)
        {
            AnsiConsole.MarkupLine($"[red]Error:[/] Failed to start editor '{Markup.Escape(editor)}'.");
            return ExitEditorError;
        }
        await process.WaitForExitAsync(cancellationToken).ConfigureAwait(false);
        // A non-zero editor exit is only a warning: the file may still have been saved.
        if (process.ExitCode != 0)
        {
            AnsiConsole.MarkupLine($"[yellow]Warning:[/] Editor exited with code {process.ExitCode}.");
        }
    }
    catch (Exception ex)
    {
        AnsiConsole.MarkupLine($"[red]Error:[/] Failed to launch editor: {Markup.Escape(ex.Message)}");
        if (verbose)
        {
            AnsiConsole.WriteException(ex);
        }
        return ExitEditorError;
    }
    // If the file still does not exist the user abandoned the edit; not an error.
    if (!File.Exists(fullPath))
    {
        AnsiConsole.MarkupLine("[yellow]No file created. Exiting.[/]");
        return ExitSuccess;
    }
    var newContent = await File.ReadAllTextAsync(fullPath, cancellationToken).ConfigureAwait(false);
    if (originalContent != null && originalContent == newContent)
    {
        AnsiConsole.MarkupLine("[grey]No changes detected.[/]");
        return ExitSuccess;
    }
    AnsiConsole.MarkupLine("[green]File modified.[/]");
    // Validate unless skipped; edits are kept on disk even when validation fails.
    if (!noValidate)
    {
        var compiler = new PolicyDsl.PolicyCompiler();
        var result = compiler.Compile(newContent);
        if (!result.Success)
        {
            AnsiConsole.MarkupLine($"[red]✗[/] Validation failed with {result.Diagnostics.Length} diagnostic(s):");
            var table = new Table();
            table.AddColumn("Severity");
            table.AddColumn("Code");
            table.AddColumn("Message");
            foreach (var diag in result.Diagnostics)
            {
                var color = diag.Severity == PolicyIssueSeverity.Error ? "red" : "yellow";
                table.AddRow($"[{color}]{diag.Severity}[/]", diag.Code ?? "-", Markup.Escape(diag.Message));
            }
            AnsiConsole.Write(table);
            AnsiConsole.MarkupLine("[yellow]Changes saved but not committed due to validation errors.[/]");
            return ExitValidationError;
        }
        AnsiConsole.MarkupLine($"[green]✓[/] Policy [bold]{Markup.Escape(result.Document?.Name ?? "unknown")}[/] is valid.");
        AnsiConsole.MarkupLine($" Checksum: {Markup.Escape(result.Checksum ?? "N/A")}");
    }
    // Commit if requested.
    if (commit)
    {
        var gitDir = FindGitDirectory(fullPath);
        if (gitDir == null)
        {
            AnsiConsole.MarkupLine("[red]Error:[/] Not inside a git repository. Cannot commit.");
            return ExitGitError;
        }
        var relativePath = Path.GetRelativePath(gitDir, fullPath);
        var commitMessage = message ?? GeneratePolicyCommitMessage(relativePath, version);
        try
        {
            // Stage the file.
            var addResult = await RunGitCommandAsync(gitDir, $"add {QuoteGitArgument(relativePath)}", cancellationToken).ConfigureAwait(false);
            if (addResult.ExitCode != 0)
            {
                AnsiConsole.MarkupLine($"[red]Error:[/] git add failed: {Markup.Escape(addResult.Output)}");
                return ExitGitError;
            }
            // Commit with SemVer metadata in trailer.
            var trailers = new List<string>();
            if (!string.IsNullOrWhiteSpace(version))
            {
                trailers.Add($"Policy-Version: {version}");
            }
            var trailerArgs = trailers.Count > 0
                ? string.Join(" ", trailers.Select(t => $"--trailer {QuoteGitArgument(t)}"))
                : string.Empty;
            var commitResult = await RunGitCommandAsync(gitDir, $"commit -m {QuoteGitArgument(commitMessage)} {trailerArgs}", cancellationToken).ConfigureAwait(false);
            if (commitResult.ExitCode != 0)
            {
                AnsiConsole.MarkupLine($"[red]Error:[/] git commit failed: {Markup.Escape(commitResult.Output)}");
                return ExitGitError;
            }
            AnsiConsole.MarkupLine($"[green]✓[/] Committed: {Markup.Escape(commitMessage)}");
            if (!string.IsNullOrWhiteSpace(version))
            {
                AnsiConsole.MarkupLine($" Policy-Version: {Markup.Escape(version)}");
            }
        }
        catch (Exception ex)
        {
            AnsiConsole.MarkupLine($"[red]Error:[/] Git operation failed: {Markup.Escape(ex.Message)}");
            if (verbose)
            {
                AnsiConsole.WriteException(ex);
            }
            return ExitGitError;
        }
    }
    return ExitSuccess;
}
/// <summary>
/// Compiles a policy DSL file, then runs JSON test fixtures against it via
/// <c>RunPolicyTestCase</c> and reports per-fixture results as a table or JSON.
/// Missing fixture directories/files are treated as success (nothing to run).
/// </summary>
/// <param name="filePath">Path to the policy DSL file under test.</param>
/// <param name="fixturesPath">Fixture directory; defaults to tests/policy/&lt;policy-name&gt;/cases.</param>
/// <param name="filter">Case-insensitive substring filter on fixture file names.</param>
/// <param name="format">Output format: "json" (case-insensitive) or anything else for table.</param>
/// <param name="outputPath">Optional file path; when set, the JSON summary is also written there.</param>
/// <param name="failFast">Stop on the first failed or errored fixture.</param>
/// <param name="verbose">Emit extra diagnostics (fixture count, output location).</param>
/// <param name="cancellationToken">Cancels file reads/writes.</param>
/// <returns>0 when no fixtures failed; 1 on any failure/error; 4 on bad input or compile failure.</returns>
public static async Task<int> HandlePolicyTestAsync(
    string filePath,
    string? fixturesPath,
    string? filter,
    string? format,
    string? outputPath,
    bool failFast,
    bool verbose,
    CancellationToken cancellationToken)
{
    const int ExitSuccess = 0;
    const int ExitTestFailure = 1;
    const int ExitInputError = 4;
    if (string.IsNullOrWhiteSpace(filePath))
    {
        AnsiConsole.MarkupLine("[red]Error:[/] Policy file path is required.");
        return ExitInputError;
    }
    var fullPath = Path.GetFullPath(filePath);
    if (!File.Exists(fullPath))
    {
        AnsiConsole.MarkupLine($"[red]Error:[/] Policy file not found: {Markup.Escape(fullPath)}");
        return ExitInputError;
    }
    // Compile the policy first
    var source = await File.ReadAllTextAsync(fullPath, cancellationToken).ConfigureAwait(false);
    var compiler = new PolicyDsl.PolicyCompiler();
    var compileResult = compiler.Compile(source);
    if (!compileResult.Success)
    {
        AnsiConsole.MarkupLine($"[red]Error:[/] Policy compilation failed. Run 'stella policy lint' for details.");
        return ExitInputError;
    }
    // Fixture lookup keys off the declared policy name, falling back to the file name.
    var policyName = compileResult.Document?.Name ?? Path.GetFileNameWithoutExtension(fullPath);
    // Determine fixtures directory
    var fixturesDir = fixturesPath;
    if (string.IsNullOrWhiteSpace(fixturesDir))
    {
        // Default convention: <policy dir>/../../tests/policy/<name>/cases, then cwd-relative.
        var policyDir = Path.GetDirectoryName(fullPath) ?? ".";
        fixturesDir = Path.Combine(policyDir, "..", "..", "tests", "policy", policyName, "cases");
        if (!Directory.Exists(fixturesDir))
        {
            // Try relative to current directory
            fixturesDir = Path.Combine("tests", "policy", policyName, "cases");
        }
    }
    fixturesDir = Path.GetFullPath(fixturesDir);
    if (!Directory.Exists(fixturesDir))
    {
        AnsiConsole.MarkupLine($"[yellow]No fixtures directory found at {Markup.Escape(fixturesDir)}[/]");
        AnsiConsole.MarkupLine("[grey]Create test fixtures as JSON files in this directory.[/]");
        return ExitSuccess;
    }
    var fixtureFiles = Directory.GetFiles(fixturesDir, "*.json", SearchOption.AllDirectories);
    if (!string.IsNullOrWhiteSpace(filter))
    {
        fixtureFiles = fixtureFiles.Where(f => Path.GetFileName(f).Contains(filter, StringComparison.OrdinalIgnoreCase)).ToArray();
    }
    if (fixtureFiles.Length == 0)
    {
        AnsiConsole.MarkupLine($"[yellow]No fixture files found in {Markup.Escape(fixturesDir)}[/]");
        return ExitSuccess;
    }
    if (verbose)
    {
        AnsiConsole.MarkupLine($"[grey]Found {fixtureFiles.Length} fixture file(s)[/]");
    }
    // Any value other than "json" (case-insensitive) falls back to table output.
    var outputFormat = string.Equals(format, "json", StringComparison.OrdinalIgnoreCase) ? "json" : "table";
    var results = new List<Dictionary<string, object?>>();
    var passed = 0;
    var failed = 0;
    var skipped = 0;
    foreach (var fixtureFile in fixtureFiles)
    {
        var fixtureName = Path.GetRelativePath(fixturesDir, fixtureFile);
        try
        {
            var fixtureJson = await File.ReadAllTextAsync(fixtureFile, cancellationToken).ConfigureAwait(false);
            var fixture = JsonSerializer.Deserialize<PolicyTestFixture>(fixtureJson, new JsonSerializerOptions { PropertyNameCaseInsensitive = true });
            // Fixtures that deserialize to null are skipped, not failed.
            if (fixture == null)
            {
                results.Add(new Dictionary<string, object?>
                {
                    ["fixture"] = fixtureName,
                    ["status"] = "skipped",
                    ["reason"] = "Invalid fixture format"
                });
                skipped++;
                continue;
            }
            // Run the test case (simplified evaluation stub)
            var testPassed = RunPolicyTestCase(compileResult.Document!, fixture, verbose);
            results.Add(new Dictionary<string, object?>
            {
                ["fixture"] = fixtureName,
                ["status"] = testPassed ? "passed" : "failed",
                ["expected_outcome"] = fixture.ExpectedOutcome,
                ["description"] = fixture.Description
            });
            if (testPassed)
            {
                passed++;
            }
            else
            {
                failed++;
                if (failFast)
                {
                    AnsiConsole.MarkupLine($"[red]✗[/] {Markup.Escape(fixtureName)} - Stopping on first failure.");
                    break;
                }
            }
        }
        catch (Exception ex)
        {
            // Unreadable/throwing fixtures count as failures (status "error").
            results.Add(new Dictionary<string, object?>
            {
                ["fixture"] = fixtureName,
                ["status"] = "error",
                ["reason"] = ex.Message
            });
            failed++;
            if (failFast)
            {
                break;
            }
        }
    }
    // Output results
    var summary = new Dictionary<string, object?>
    {
        ["policy"] = policyName,
        ["policy_checksum"] = compileResult.Checksum,
        ["fixtures_dir"] = fixturesDir,
        ["total"] = results.Count,
        ["passed"] = passed,
        ["failed"] = failed,
        ["skipped"] = skipped,
        ["results"] = results
    };
    // --output always writes JSON, independent of the console format.
    if (!string.IsNullOrWhiteSpace(outputPath))
    {
        var json = JsonSerializer.Serialize(summary, new JsonSerializerOptions { WriteIndented = true });
        await File.WriteAllTextAsync(outputPath, json, cancellationToken).ConfigureAwait(false);
        if (verbose)
        {
            AnsiConsole.MarkupLine($"[grey]Output written to {Markup.Escape(outputPath)}[/]");
        }
    }
    if (outputFormat == "json")
    {
        var json = JsonSerializer.Serialize(summary, new JsonSerializerOptions { WriteIndented = true });
        AnsiConsole.WriteLine(json);
    }
    else
    {
        AnsiConsole.MarkupLine($"\n[bold]Test Results for {Markup.Escape(policyName)}[/]\n");
        var table = new Table();
        table.AddColumn("Fixture");
        table.AddColumn("Status");
        table.AddColumn("Description");
        foreach (var r in results)
        {
            var status = r["status"]?.ToString() ?? "unknown";
            var statusColor = status switch
            {
                "passed" => "green",
                "failed" => "red",
                "skipped" => "yellow",
                _ => "grey"
            };
            var statusIcon = status switch
            {
                "passed" => "✓",
                "failed" => "✗",
                "skipped" => "○",
                _ => "?"
            };
            table.AddRow(
                Markup.Escape(r["fixture"]?.ToString() ?? "-"),
                $"[{statusColor}]{statusIcon} {status}[/]",
                Markup.Escape(r["description"]?.ToString() ?? r["reason"]?.ToString() ?? "-"));
        }
        AnsiConsole.Write(table);
        AnsiConsole.WriteLine();
        AnsiConsole.MarkupLine($"[bold]Summary:[/] {passed} passed, {failed} failed, {skipped} skipped");
    }
    return failed > 0 ? ExitTestFailure : ExitSuccess;
}
private static string? FindGitDirectory(string startPath)
{
var dir = Path.GetDirectoryName(startPath);
while (!string.IsNullOrEmpty(dir))
{
if (Directory.Exists(Path.Combine(dir, ".git")))
{
return dir;
}
dir = Path.GetDirectoryName(dir);
}
return null;
}
private static string GeneratePolicyCommitMessage(string relativePath, string? version)
{
var fileName = Path.GetFileNameWithoutExtension(relativePath);
var versionSuffix = !string.IsNullOrWhiteSpace(version) ? $" (v{version})" : "";
return $"policy: update {fileName}{versionSuffix}";
}
private static async Task<(int ExitCode, string Output)> RunGitCommandAsync(string workingDir, string arguments, CancellationToken cancellationToken)
{
var startInfo = new ProcessStartInfo
{
FileName = "git",
Arguments = arguments,
WorkingDirectory = workingDir,
UseShellExecute = false,
RedirectStandardOutput = true,
RedirectStandardError = true,
CreateNoWindow = true
};
using var process = new Process { StartInfo = startInfo };
var outputBuilder = new StringBuilder();
var errorBuilder = new StringBuilder();
process.OutputDataReceived += (_, e) => { if (e.Data != null) outputBuilder.AppendLine(e.Data); };
process.ErrorDataReceived += (_, e) => { if (e.Data != null) errorBuilder.AppendLine(e.Data); };
process.Start();
process.BeginOutputReadLine();
process.BeginErrorReadLine();
await process.WaitForExitAsync(cancellationToken).ConfigureAwait(false);
var output = outputBuilder.ToString();
var error = errorBuilder.ToString();
return (process.ExitCode, string.IsNullOrWhiteSpace(error) ? output : error);
}
private static bool RunPolicyTestCase(PolicyDsl.PolicyIrDocument document, PolicyTestFixture fixture, bool verbose)
{
// Simplified test evaluation - in production this would use PolicyEvaluator
// For now, just check that the fixture structure is valid and expected outcome is defined
if (string.IsNullOrWhiteSpace(fixture.ExpectedOutcome))
{
return false;
}
// Basic validation that the policy has rules that could match the fixture's scenario
if (document.Rules.Length == 0)
{
return fixture.ExpectedOutcome.Equals("pass", StringComparison.OrdinalIgnoreCase);
}
// Stub: In full implementation, this would:
// 1. Build evaluation context from fixture.Input
// 2. Run PolicyEvaluator.Evaluate(document, context)
// 3. Compare results to fixture.ExpectedOutcome and fixture.ExpectedFindings
if (verbose)
{
AnsiConsole.MarkupLine($"[grey] Evaluating fixture against {document.Rules.Length} rule(s)[/]");
}
// For now, assume pass if expected_outcome is defined
return true;
}
    /// <summary>
    /// Deserialization contract for a policy test fixture JSON file.
    /// Bound via <see cref="JsonSerializer"/> with case-insensitive property
    /// matching, so member names must stay aligned with the fixture schema.
    /// </summary>
    private sealed class PolicyTestFixture
    {
        // Human-readable scenario description; surfaced in the result table.
        public string? Description { get; set; }
        // Expected evaluation outcome (e.g. "pass"); a fixture without one is treated as failed.
        public string? ExpectedOutcome { get; set; }
        // Raw input payload for the evaluation context (not yet consumed by the stub evaluator).
        public JsonElement? Input { get; set; }
        // Expected findings to diff against evaluation output (not yet consumed by the stub evaluator).
        public JsonElement? ExpectedFindings { get; set; }
    }
}

View File

@@ -54,6 +54,8 @@
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj" />
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Surface.Env/StellaOps.Scanner.Surface.Env.csproj" />
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/StellaOps.Scanner.Surface.Validation.csproj" />
<ProjectReference Include="../../Policy/StellaOps.PolicyDsl/StellaOps.PolicyDsl.csproj" />
<ProjectReference Include="../../Policy/__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />
</ItemGroup>
<ItemGroup Condition="'$(StellaOpsEnableCryptoPro)' == 'true'">

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Immutable;
using StellaOps.PolicyDsl;
namespace StellaOps.Policy.Engine.Compilation;

View File

@@ -3,7 +3,7 @@ using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Policy;
using StellaOps.Policy.Engine.Compilation;
using StellaOps.PolicyDsl;
namespace StellaOps.Policy.Engine.Evaluation;

View File

@@ -4,7 +4,7 @@ using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using StellaOps.Policy;
using StellaOps.Policy.Engine.Compilation;
using StellaOps.PolicyDsl;
namespace StellaOps.Policy.Engine.Evaluation;

View File

@@ -3,7 +3,7 @@ using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using StellaOps.Policy.Engine.Compilation;
using StellaOps.PolicyDsl;
namespace StellaOps.Policy.Engine.Evaluation;

View File

@@ -9,6 +9,7 @@ using StellaOps.Policy.Engine.Hosting;
using StellaOps.Policy.Engine.Options;
using StellaOps.Policy.Engine.Compilation;
using StellaOps.Policy.Engine.Endpoints;
using StellaOps.PolicyDsl;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.Engine.Workers;
using StellaOps.Policy.Engine.Streaming;
@@ -107,7 +108,7 @@ builder.Services.AddSingleton(sp => sp.GetRequiredService<IOptions<PolicyEngineO
builder.Services.AddSingleton(TimeProvider.System);
builder.Services.AddSingleton<PolicyEngineStartupDiagnostics>();
builder.Services.AddHostedService<PolicyEngineBootstrapWorker>();
builder.Services.AddSingleton<PolicyCompiler>();
builder.Services.AddSingleton<StellaOps.PolicyDsl.PolicyCompiler>();
builder.Services.AddSingleton<PolicyCompilationService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Services.PathScopeMetrics>();
builder.Services.AddSingleton<PolicyEvaluationService>();

View File

@@ -4,6 +4,18 @@ using Microsoft.Extensions.Options;
using StellaOps.Policy;
using StellaOps.Policy.Engine.Compilation;
using StellaOps.Policy.Engine.Options;
using StellaOps.PolicyDsl;
using DslCompiler = StellaOps.PolicyDsl.PolicyCompiler;
using DslCompilationResult = StellaOps.PolicyDsl.PolicyCompilationResult;
using IrDocument = StellaOps.PolicyDsl.PolicyIrDocument;
using IrAction = StellaOps.PolicyDsl.PolicyIrAction;
using IrAssignmentAction = StellaOps.PolicyDsl.PolicyIrAssignmentAction;
using IrAnnotateAction = StellaOps.PolicyDsl.PolicyIrAnnotateAction;
using IrIgnoreAction = StellaOps.PolicyDsl.PolicyIrIgnoreAction;
using IrEscalateAction = StellaOps.PolicyDsl.PolicyIrEscalateAction;
using IrRequireVexAction = StellaOps.PolicyDsl.PolicyIrRequireVexAction;
using IrWarnAction = StellaOps.PolicyDsl.PolicyIrWarnAction;
using IrDeferAction = StellaOps.PolicyDsl.PolicyIrDeferAction;
namespace StellaOps.Policy.Engine.Services;
@@ -13,13 +25,13 @@ namespace StellaOps.Policy.Engine.Services;
/// </summary>
internal sealed class PolicyCompilationService
{
private readonly PolicyCompiler compiler;
private readonly DslCompiler compiler;
private readonly PolicyComplexityAnalyzer complexityAnalyzer;
private readonly IOptionsMonitor<PolicyEngineOptions> optionsMonitor;
private readonly TimeProvider timeProvider;
public PolicyCompilationService(
PolicyCompiler compiler,
DslCompiler compiler,
PolicyComplexityAnalyzer complexityAnalyzer,
IOptionsMonitor<PolicyEngineOptions> optionsMonitor,
TimeProvider timeProvider)
@@ -46,7 +58,7 @@ internal sealed class PolicyCompilationService
{
return PolicyCompilationResultDto.FromFailure(
ImmutableArray.Create(PolicyIssue.Error(
PolicyDslDiagnosticCodes.UnsupportedSyntaxVersion,
DiagnosticCodes.UnsupportedSyntaxVersion,
$"Unsupported syntax '{request.Dsl.Syntax ?? "null"}'. Expected 'stella-dsl@1'.",
"dsl.syntax")),
complexity: null,
@@ -98,7 +110,7 @@ internal sealed class PolicyCompilationService
internal sealed record PolicyCompileRequest(PolicyDslPayload Dsl);
internal sealed record PolicyDslPayload(string Syntax, string Source);
public sealed record PolicyDslPayload(string Syntax, string Source);
internal sealed record PolicyCompilationResultDto(
bool Success,
@@ -116,7 +128,7 @@ internal sealed record PolicyCompilationResultDto(
new(false, null, null, ImmutableArray<byte>.Empty, diagnostics, complexity, durationMilliseconds);
public static PolicyCompilationResultDto FromSuccess(
PolicyCompilationResult compilationResult,
DslCompilationResult compilationResult,
PolicyComplexityReport complexity,
long durationMilliseconds)
{
@@ -141,7 +153,7 @@ internal sealed record PolicyCompilationStatistics(
int RuleCount,
ImmutableDictionary<string, int> ActionCounts)
{
public static PolicyCompilationStatistics Create(PolicyIrDocument document)
public static PolicyCompilationStatistics Create(IrDocument document)
{
var actions = ImmutableDictionary.CreateBuilder<string, int>(StringComparer.OrdinalIgnoreCase);
@@ -166,15 +178,15 @@ internal sealed record PolicyCompilationStatistics(
return new PolicyCompilationStatistics(document.Rules.Length, actions.ToImmutable());
}
private static string GetActionKey(PolicyIrAction action) => action switch
private static string GetActionKey(IrAction action) => action switch
{
PolicyIrAssignmentAction => "assign",
PolicyIrAnnotateAction => "annotate",
PolicyIrIgnoreAction => "ignore",
PolicyIrEscalateAction => "escalate",
PolicyIrRequireVexAction => "requireVex",
PolicyIrWarnAction => "warn",
PolicyIrDeferAction => "defer",
IrAssignmentAction => "assign",
IrAnnotateAction => "annotate",
IrIgnoreAction => "ignore",
IrEscalateAction => "escalate",
IrRequireVexAction => "requireVex",
IrWarnAction => "warn",
IrDeferAction => "defer",
_ => "unknown"
};
}

View File

@@ -1,7 +1,7 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Policy.Engine.Compilation;
using StellaOps.PolicyDsl;
using StellaOps.Policy.Engine.Evaluation;
namespace StellaOps.Policy.Engine.Services;
@@ -23,7 +23,7 @@ internal sealed partial class PolicyEvaluationService
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
internal PolicyEvaluationResult Evaluate(PolicyIrDocument document, PolicyEvaluationContext context)
internal Evaluation.PolicyEvaluationResult Evaluate(PolicyIrDocument document, Evaluation.PolicyEvaluationContext context)
{
if (document is null)
{
@@ -35,7 +35,7 @@ internal sealed partial class PolicyEvaluationService
throw new ArgumentNullException(nameof(context));
}
var request = new PolicyEvaluationRequest(document, context);
var request = new Evaluation.PolicyEvaluationRequest(document, context);
return evaluator.Evaluate(request);
}

View File

@@ -11,6 +11,7 @@
<ItemGroup>
<ProjectReference Include="../__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />
<ProjectReference Include="../StellaOps.PolicyDsl/StellaOps.PolicyDsl.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" />
<ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" />

View File

@@ -1,6 +1,9 @@
namespace StellaOps.Policy.Engine.Compilation;
namespace StellaOps.PolicyDsl;
internal static class PolicyDslDiagnosticCodes
/// <summary>
/// Diagnostic codes for policy DSL lexing and parsing errors.
/// </summary>
public static class DiagnosticCodes
{
public const string UnexpectedCharacter = "POLICY-DSL-LEX-001";
public const string UnterminatedString = "POLICY-DSL-LEX-002";

View File

@@ -0,0 +1,70 @@
namespace StellaOps.PolicyDsl;
/// <summary>
/// Represents the kind of token in the policy DSL.
/// </summary>
public enum TokenKind
{
    // NOTE: member order defines the implicit numeric values (only EndOfFile is
    // explicit) — do not reorder or insert members mid-list.

    // Sentinel and literal/identifier terminals.
    EndOfFile = 0,
    Identifier,
    StringLiteral,
    NumberLiteral,
    BooleanLiteral,

    // Punctuation and assignment operators.
    LeftBrace,
    RightBrace,
    LeftParen,
    RightParen,
    LeftBracket,
    RightBracket,
    Comma,
    Semicolon,
    Colon,
    Arrow, // =>
    Assign, // =
    Define, // :=
    Dot,

    // Reserved keywords of the policy DSL.
    KeywordPolicy,
    KeywordSyntax,
    KeywordMetadata,
    KeywordProfile,
    KeywordRule,
    KeywordMap,
    KeywordSource,
    KeywordEnv,
    KeywordIf,
    KeywordThen,
    KeywordWhen,
    KeywordAnd,
    KeywordOr,
    KeywordNot,
    KeywordPriority,
    KeywordElse,
    KeywordBecause,
    KeywordSettings,
    KeywordIgnore,
    KeywordUntil,
    KeywordEscalate,
    KeywordTo,
    KeywordRequireVex,
    KeywordWarn,
    KeywordMessage,
    KeywordDefer,
    KeywordAnnotate,
    KeywordIn,

    // Comparison operators.
    EqualEqual,
    NotEqual,
    LessThan,
    LessThanOrEqual,
    GreaterThan,
    GreaterThanOrEqual,

    // Fallback for input the tokenizer does not recognize.
    Unknown,
}
/// <summary>
/// Represents a single token in the policy DSL.
/// </summary>
/// <param name="Kind">The token category.</param>
/// <param name="Text">The raw source text the token was lexed from.</param>
/// <param name="Span">The source span the token covers.</param>
/// <param name="Value">Parsed literal value where applicable (e.g. the decimal for a number literal); otherwise null.</param>
public readonly record struct DslToken(
    TokenKind Kind,
    string Text,
    SourceSpan Span,
    object? Value = null);

View File

@@ -3,9 +3,12 @@ using System.Globalization;
using System.Text;
using StellaOps.Policy;
namespace StellaOps.Policy.Engine.Compilation;
namespace StellaOps.PolicyDsl;
internal static class DslTokenizer
/// <summary>
/// Tokenizes policy DSL source code into a stream of tokens.
/// </summary>
public static class DslTokenizer
{
public static TokenizerResult Tokenize(string source)
{
@@ -223,7 +226,7 @@ internal static class DslTokenizer
{
if (i + 1 >= source.Length)
{
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnterminatedString, "Unterminated string literal.", $"@{start.Line}:{start.Column}"));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.UnterminatedString, "Unterminated string literal.", $"@{start.Line}:{start.Column}"));
index = source.Length;
line = currentLine;
column = currentColumn;
@@ -249,7 +252,7 @@ internal static class DslTokenizer
builder.Append('\t');
break;
default:
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.InvalidEscapeSequence, $"Invalid escape sequence '\\{escape}'.", $"@{currentLine}:{currentColumn}"));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.InvalidEscapeSequence, $"Invalid escape sequence '\\{escape}'.", $"@{currentLine}:{currentColumn}"));
builder.Append(escape);
break;
}
@@ -261,7 +264,7 @@ internal static class DslTokenizer
if (ch == '\r' || ch == '\n')
{
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnterminatedString, "Unterminated string literal.", $"@{start.Line}:{start.Column}"));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.UnterminatedString, "Unterminated string literal.", $"@{start.Line}:{start.Column}"));
(index, line, column) = AdvanceWhitespace(source, i, currentLine, currentColumn);
return;
}
@@ -271,7 +274,7 @@ internal static class DslTokenizer
currentColumn++;
}
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnterminatedString, "Unterminated string literal.", $"@{start.Line}:{start.Column}"));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.UnterminatedString, "Unterminated string literal.", $"@{start.Line}:{start.Column}"));
index = source.Length;
line = currentLine;
column = currentColumn;
@@ -328,7 +331,7 @@ internal static class DslTokenizer
var text = source.Substring(index, i - index);
if (!decimal.TryParse(text, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var value))
{
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.InvalidNumber, $"Invalid numeric literal '{text}'.", $"@{start.Line}:{start.Column}"));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.InvalidNumber, $"Invalid numeric literal '{text}'.", $"@{start.Line}:{start.Column}"));
index = i;
column += i - index;
return;
@@ -538,7 +541,7 @@ internal static class DslTokenizer
currentColumn++;
}
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedCharacter, "Unterminated comment block.", $"@{line}:{column}"));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.UnexpectedCharacter, "Unterminated comment block.", $"@{line}:{column}"));
return (source.Length, currentLine, currentColumn);
}
@@ -562,7 +565,7 @@ internal static class DslTokenizer
SourceLocation location)
{
diagnostics.Add(PolicyIssue.Error(
PolicyDslDiagnosticCodes.UnexpectedCharacter,
DiagnosticCodes.UnexpectedCharacter,
$"Unexpected character '{ch}'.",
$"@{location.Line}:{location.Column}"));
}
@@ -571,6 +574,9 @@ internal static class DslTokenizer
index < source.Length && source[index] == expected;
}
internal readonly record struct TokenizerResult(
/// <summary>
/// Result of tokenizing a policy DSL source.
/// </summary>
public readonly record struct TokenizerResult(
ImmutableArray<DslToken> Tokens,
ImmutableArray<PolicyIssue> Diagnostics);

View File

@@ -1,10 +1,12 @@
using System.Collections.Immutable;
using System.Linq;
using System.Security.Cryptography;
using StellaOps.Policy;
namespace StellaOps.Policy.Engine.Compilation;
namespace StellaOps.PolicyDsl;
/// <summary>
/// Compiles policy DSL source code into an intermediate representation.
/// </summary>
public sealed class PolicyCompiler
{
public PolicyCompilationResult Compile(string source)
@@ -161,6 +163,9 @@ public sealed class PolicyCompiler
};
}
/// <summary>
/// Result of compiling a policy DSL source.
/// </summary>
public sealed record PolicyCompilationResult(
bool Success,
PolicyIrDocument? Document,

View File

@@ -0,0 +1,213 @@
namespace StellaOps.PolicyDsl;
/// <summary>
/// Factory for creating policy evaluation engines from compiled policy documents.
/// </summary>
public sealed class PolicyEngineFactory
{
    private readonly PolicyCompiler _compiler = new();

    /// <summary>
    /// Compiles <paramref name="source"/> and wraps the result in an engine.
    /// </summary>
    /// <param name="source">The policy DSL source code.</param>
    /// <returns>The engine plus diagnostics; the engine is null when compilation fails.</returns>
    public PolicyEngineResult CreateFromSource(string source)
    {
        var compilation = _compiler.Compile(source);
        var engine = compilation.Success && compilation.Document is not null
            ? new PolicyEngine(compilation.Document, compilation.Checksum!)
            : null;
        return new PolicyEngineResult(engine, compilation.Diagnostics);
    }

    /// <summary>
    /// Wraps a pre-compiled IR document in an engine without recompiling.
    /// </summary>
    /// <param name="document">The compiled policy IR document.</param>
    /// <param name="checksum">The policy checksum.</param>
    /// <returns>A policy engine over the given document.</returns>
    public PolicyEngine CreateFromDocument(PolicyIrDocument document, string checksum)
        => new(document, checksum);
}
/// <summary>
/// Result of creating a policy engine.
/// </summary>
/// <param name="Engine">The created engine, or null when compilation failed.</param>
/// <param name="Diagnostics">Compiler diagnostics; populated on both success and failure.</param>
public sealed record PolicyEngineResult(
    PolicyEngine? Engine,
    System.Collections.Immutable.ImmutableArray<StellaOps.Policy.PolicyIssue> Diagnostics);
/// <summary>
/// A lightweight policy evaluation engine.
/// </summary>
public sealed class PolicyEngine
{
    internal PolicyEngine(PolicyIrDocument document, string checksum)
    {
        Document = document;
        Checksum = checksum;
    }

    /// <summary>
    /// Gets the compiled policy document.
    /// </summary>
    public PolicyIrDocument Document { get; }

    /// <summary>
    /// Gets the policy checksum (SHA-256 of canonical representation).
    /// </summary>
    public string Checksum { get; }

    /// <summary>
    /// Gets the policy name.
    /// </summary>
    public string Name => Document.Name;

    /// <summary>
    /// Gets the policy syntax version.
    /// </summary>
    public string Syntax => Document.Syntax;

    /// <summary>
    /// Gets the number of rules in the policy.
    /// </summary>
    public int RuleCount => Document.Rules.Length;

    /// <summary>
    /// Evaluates the policy against the given signal context.
    /// </summary>
    /// <param name="context">The signal context to evaluate against.</param>
    /// <returns>The evaluation result.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="context"/> is null.</exception>
    public PolicyEvaluationResult Evaluate(SignalContext context)
    {
        if (context is null)
        {
            throw new ArgumentNullException(nameof(context));
        }
        var matchedRules = new List<string>();
        var actions = new List<EvaluatedAction>();
        // Rules run in descending priority. OrderByDescending is a stable sort,
        // so rules with equal priority keep their document order.
        foreach (var rule in Document.Rules.OrderByDescending(r => r.Priority))
        {
            var matched = EvaluateExpression(rule.When, context);
            if (matched)
            {
                matchedRules.Add(rule.Name);
                foreach (var action in rule.ThenActions)
                {
                    actions.Add(new EvaluatedAction(rule.Name, action, WasElseBranch: false));
                }
            }
            else
            {
                // Unmatched rules still contribute their 'else' actions.
                foreach (var action in rule.ElseActions)
                {
                    actions.Add(new EvaluatedAction(rule.Name, action, WasElseBranch: true));
                }
            }
        }
        return new PolicyEvaluationResult(
            PolicyName: Name,
            PolicyChecksum: Checksum,
            MatchedRules: matchedRules.ToArray(),
            Actions: actions.ToArray());
    }

    // Boolean interpretation of an expression node.
    // NOTE(review): unsupported node kinds evaluate to false rather than raising —
    // confirm this fail-closed behavior is intended.
    private static bool EvaluateExpression(PolicyExpression expression, SignalContext context)
    {
        return expression switch
        {
            PolicyBinaryExpression binary => EvaluateBinary(binary, context),
            PolicyUnaryExpression unary => EvaluateUnary(unary, context),
            // Only the literal boolean 'true' is truthy; numbers/strings are not.
            PolicyLiteralExpression literal => literal.Value is bool b && b,
            // A bare identifier in boolean position tests signal PRESENCE, not its value.
            PolicyIdentifierExpression identifier => context.HasSignal(identifier.Name),
            PolicyMemberAccessExpression member => EvaluateMemberAccess(member, context),
            _ => false,
        };
    }

    // And/Or recurse with C# short-circuiting; ==/!= compare resolved values.
    // NOTE(review): ordering comparisons (<, <=, >, >=) exist in the DSL grammar but
    // fall through to 'false' here — confirm whether they should be supported.
    private static bool EvaluateBinary(PolicyBinaryExpression binary, SignalContext context)
    {
        return binary.Operator switch
        {
            PolicyBinaryOperator.And => EvaluateExpression(binary.Left, context) && EvaluateExpression(binary.Right, context),
            PolicyBinaryOperator.Or => EvaluateExpression(binary.Left, context) || EvaluateExpression(binary.Right, context),
            PolicyBinaryOperator.Equal => EvaluateEquality(binary.Left, binary.Right, context, negate: false),
            PolicyBinaryOperator.NotEqual => EvaluateEquality(binary.Left, binary.Right, context, negate: true),
            _ => false,
        };
    }

    // 'not' negates its operand; any other unary operator evaluates to false.
    private static bool EvaluateUnary(PolicyUnaryExpression unary, SignalContext context)
    {
        return unary.Operator switch
        {
            PolicyUnaryOperator.Not => !EvaluateExpression(unary.Operand, context),
            _ => false,
        };
    }

    // Member access in boolean position: the target must resolve to a string-keyed
    // dictionary and the member value must be the boolean true.
    private static bool EvaluateMemberAccess(PolicyMemberAccessExpression member, SignalContext context)
    {
        var value = ResolveValue(member.Target, context);
        if (value is IDictionary<string, object?> dict)
        {
            return dict.TryGetValue(member.Member, out var v) && v is bool b && b;
        }
        return false;
    }

    // Equality via object.Equals on resolved values.
    // NOTE(review): no numeric coercion — a number literal only equals a signal
    // stored with the same runtime type; confirm this matches DSL expectations.
    private static bool EvaluateEquality(PolicyExpression left, PolicyExpression right, SignalContext context, bool negate)
    {
        var leftValue = ResolveValue(left, context);
        var rightValue = ResolveValue(right, context);
        var equal = Equals(leftValue, rightValue);
        return negate ? !equal : equal;
    }

    // Value interpretation of an expression node; unknown/unresolvable nodes yield null.
    // Note the asymmetry with EvaluateExpression: here an identifier resolves to the
    // signal's VALUE, not to its presence.
    private static object? ResolveValue(PolicyExpression expression, SignalContext context)
    {
        return expression switch
        {
            PolicyLiteralExpression literal => literal.Value,
            PolicyIdentifierExpression identifier => context.GetSignal(identifier.Name),
            PolicyMemberAccessExpression member => ResolveMemberValue(member, context),
            _ => null,
        };
    }

    // Resolves target.member: only dictionary-shaped targets are traversable;
    // anything else (or a missing key) yields null.
    private static object? ResolveMemberValue(PolicyMemberAccessExpression member, SignalContext context)
    {
        var target = ResolveValue(member.Target, context);
        if (target is IDictionary<string, object?> dict)
        {
            return dict.TryGetValue(member.Member, out var v) ? v : null;
        }
        return null;
    }
}
/// <summary>
/// Result of evaluating a policy.
/// </summary>
/// <param name="PolicyName">Name of the evaluated policy.</param>
/// <param name="PolicyChecksum">Checksum of the evaluated policy document.</param>
/// <param name="MatchedRules">Names of rules whose 'when' clause matched, in evaluation (priority) order.</param>
/// <param name="Actions">All emitted actions, from both matched ('then') and unmatched ('else') branches.</param>
public sealed record PolicyEvaluationResult(
    string PolicyName,
    string PolicyChecksum,
    string[] MatchedRules,
    EvaluatedAction[] Actions);
/// <summary>
/// An action that was evaluated as part of policy execution.
/// </summary>
/// <param name="RuleName">The rule that emitted the action.</param>
/// <param name="Action">The IR action node from the rule body.</param>
/// <param name="WasElseBranch">True when the action came from the rule's 'else' branch (the rule did not match).</param>
public sealed record EvaluatedAction(
    string RuleName,
    PolicyIrAction Action,
    bool WasElseBranch);

View File

@@ -1,7 +1,10 @@
using System.Collections.Immutable;
namespace StellaOps.Policy.Engine.Compilation;
namespace StellaOps.PolicyDsl;
/// <summary>
/// Intermediate representation of a compiled policy document.
/// </summary>
public sealed record PolicyIrDocument(
string Name,
string Syntax,

View File

@@ -2,9 +2,12 @@ using System.Buffers;
using System.Collections.Immutable;
using System.Text.Json;
namespace StellaOps.Policy.Engine.Compilation;
namespace StellaOps.PolicyDsl;
internal static class PolicyIrSerializer
/// <summary>
/// Serializes policy IR documents to a canonical JSON representation for hashing.
/// </summary>
public static class PolicyIrSerializer
{
public static ImmutableArray<byte> Serialize(PolicyIrDocument document)
{

View File

@@ -1,11 +1,12 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using StellaOps.Policy;
namespace StellaOps.Policy.Engine.Compilation;
namespace StellaOps.PolicyDsl;
internal sealed class PolicyParser
/// <summary>
/// Parses policy DSL source code into an AST.
/// </summary>
public sealed class PolicyParser
{
private readonly ImmutableArray<DslToken> tokens;
private readonly List<PolicyIssue> diagnostics = new();
@@ -34,7 +35,7 @@ internal sealed class PolicyParser
{
if (!Match(TokenKind.KeywordPolicy))
{
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.MissingPolicyHeader, "Expected 'policy' declaration.", "policy"));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.MissingPolicyHeader, "Expected 'policy' declaration.", "policy"));
return null;
}
@@ -96,7 +97,7 @@ internal sealed class PolicyParser
continue;
}
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedSection, $"Unexpected token '{Current.Text}' in policy body.", "policy.body"));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.UnexpectedSection, $"Unexpected token '{Current.Text}' in policy body.", "policy.body"));
Advance();
}
@@ -104,7 +105,7 @@ internal sealed class PolicyParser
if (!string.Equals(syntax, "stella-dsl@1", StringComparison.Ordinal))
{
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnsupportedSyntaxVersion, $"Unsupported syntax '{syntax}'.", "policy.syntax"));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.UnsupportedSyntaxVersion, $"Unsupported syntax '{syntax}'.", "policy.syntax"));
}
var span = new SourceSpan(tokens[0].Span.Start, close.Span.End);
@@ -188,7 +189,7 @@ internal sealed class PolicyParser
if (because is null)
{
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.MissingBecauseClause, $"Rule '{name}' missing 'because' clause.", $"policy.rule.{name}"));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.MissingBecauseClause, $"Rule '{name}' missing 'because' clause.", $"policy.rule.{name}"));
}
return new PolicyRuleNode(name, priority, when, thenActions, elseActions, because, new SourceSpan(nameToken.Span.Start, close.Span.End));
@@ -241,7 +242,7 @@ internal sealed class PolicyParser
continue;
}
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.InvalidAction, $"Unexpected token '{Current.Text}' in {clause} actions.", $"policy.rule.{ruleName}.{clause}"));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.InvalidAction, $"Unexpected token '{Current.Text}' in {clause} actions.", $"policy.rule.{ruleName}.{clause}"));
Advance();
}
@@ -414,7 +415,7 @@ internal sealed class PolicyParser
return new PolicyListLiteral(items.ToImmutable(), new SourceSpan(start, close.Span.End));
}
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.InvalidLiteral, "Invalid literal.", path));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.InvalidLiteral, "Invalid literal.", path));
return new PolicyStringLiteral(string.Empty, Current.Span);
}
@@ -473,7 +474,7 @@ internal sealed class PolicyParser
}
else
{
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedToken, "Expected 'in' after 'not'.", "expression.not"));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.UnexpectedToken, "Expected 'in' after 'not'.", "expression.not"));
}
}
else if (Match(TokenKind.LessThan))
@@ -564,7 +565,7 @@ internal sealed class PolicyParser
return ParseIdentifierExpression(Previous);
}
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedToken, $"Unexpected token '{Current.Text}' in expression.", "expression"));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.UnexpectedToken, $"Unexpected token '{Current.Text}' in expression.", "expression"));
var bad = Advance();
return new PolicyLiteralExpression(null, bad.Span);
}
@@ -619,7 +620,7 @@ internal sealed class PolicyParser
return Advance();
}
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedToken, message, path));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.UnexpectedToken, message, path));
return Advance();
}
@@ -646,30 +647,10 @@ internal sealed class PolicyParser
return Advance();
}
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedToken, message, path));
diagnostics.Add(PolicyIssue.Error(DiagnosticCodes.UnexpectedToken, message, path));
return Advance();
}
private void SkipBlock()
{
var depth = 1;
while (depth > 0 && !IsAtEnd)
{
if (Match(TokenKind.LeftBrace))
{
depth++;
}
else if (Match(TokenKind.RightBrace))
{
depth--;
}
else
{
Advance();
}
}
}
private DslToken Advance()
{
if (!IsAtEnd)
@@ -687,6 +668,9 @@ internal sealed class PolicyParser
private DslToken Previous => tokens[position - 1];
}
internal readonly record struct PolicyParseResult(
/// <summary>
/// Result of parsing a policy DSL source.
/// </summary>
public readonly record struct PolicyParseResult(
PolicyDocumentNode? Document,
ImmutableArray<PolicyIssue> Diagnostics);

View File

@@ -1,6 +1,6 @@
using System.Collections.Immutable;
namespace StellaOps.Policy.Engine.Compilation;
namespace StellaOps.PolicyDsl;
public abstract record SyntaxNode(SourceSpan Span);

View File

@@ -0,0 +1,216 @@
namespace StellaOps.PolicyDsl;
/// <summary>
/// Provides signal values for policy evaluation.
/// </summary>
public sealed class SignalContext
{
    private readonly Dictionary<string, object?> _signals;

    /// <summary>Creates an empty signal context (ordinal, case-sensitive keys).</summary>
    public SignalContext() => _signals = new Dictionary<string, object?>(StringComparer.Ordinal);

    /// <summary>Creates a signal context seeded with a copy of <paramref name="signals"/>.</summary>
    /// <param name="signals">Initial signal values.</param>
    public SignalContext(IDictionary<string, object?> signals)
        => _signals = new Dictionary<string, object?>(signals, StringComparer.Ordinal);

    /// <summary>Returns true when a signal named <paramref name="name"/> exists.</summary>
    /// <param name="name">The signal name.</param>
    public bool HasSignal(string name) => _signals.ContainsKey(name);

    /// <summary>Returns the signal value, or null when the signal is absent.</summary>
    /// <param name="name">The signal name.</param>
    public object? GetSignal(string name)
        => _signals.TryGetValue(name, out var value) ? value : null;

    /// <summary>Returns the signal value when present and of type <typeparamref name="T"/>; otherwise default.</summary>
    /// <typeparam name="T">The expected runtime type.</typeparam>
    /// <param name="name">The signal name.</param>
    public T? GetSignal<T>(string name)
        => _signals.TryGetValue(name, out var value) && value is T typed ? typed : default;

    /// <summary>Sets (or overwrites) a signal value.</summary>
    /// <param name="name">The signal name.</param>
    /// <param name="value">The signal value.</param>
    /// <returns>This context, for call chaining.</returns>
    public SignalContext SetSignal(string name, object? value)
    {
        _signals[name] = value;
        return this;
    }

    /// <summary>Removes a signal when present; no-op otherwise.</summary>
    /// <param name="name">The signal name.</param>
    /// <returns>This context, for call chaining.</returns>
    public SignalContext RemoveSignal(string name)
    {
        _signals.Remove(name);
        return this;
    }

    /// <summary>All signal names currently set.</summary>
    public IEnumerable<string> SignalNames => _signals.Keys;

    /// <summary>Read-only view over the underlying signal map.</summary>
    public IReadOnlyDictionary<string, object?> Signals => _signals;

    /// <summary>Returns an independent copy holding the same signal entries.</summary>
    public SignalContext Clone() => new(_signals);

    /// <summary>Starts a fluent builder for constructing a context.</summary>
    public static SignalContextBuilder Builder() => new();
}
/// <summary>
/// Fluent builder for assembling <see cref="SignalContext"/> instances.
/// All writes funnel through a single private setter so that every With* method
/// shares the same last-write-wins semantics.
/// </summary>
public sealed class SignalContextBuilder
{
    private readonly Dictionary<string, object?> _signals = new(StringComparer.Ordinal);

    /// <summary>
    /// Adds a signal to the context.
    /// </summary>
    /// <param name="name">The signal name.</param>
    /// <param name="value">The signal value.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithSignal(string name, object? value) => Set(name, value);

    /// <summary>
    /// Adds a boolean signal to the context.
    /// </summary>
    /// <param name="name">The signal name.</param>
    /// <param name="value">The boolean value; defaults to true.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithFlag(string name, bool value = true) => Set(name, value);

    /// <summary>
    /// Adds a numeric signal to the context.
    /// </summary>
    /// <param name="name">The signal name.</param>
    /// <param name="value">The numeric value.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithNumber(string name, decimal value) => Set(name, value);

    /// <summary>
    /// Adds a string signal to the context.
    /// </summary>
    /// <param name="name">The signal name.</param>
    /// <param name="value">The string value.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithString(string name, string value) => Set(name, value);

    /// <summary>
    /// Adds a nested object signal to the context. The supplied properties are copied
    /// into a fresh ordinal-keyed dictionary so later caller mutations are not observed.
    /// </summary>
    /// <param name="name">The signal name.</param>
    /// <param name="properties">The nested properties.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithObject(string name, IDictionary<string, object?> properties) =>
        Set(name, new Dictionary<string, object?>(properties, StringComparer.Ordinal));

    /// <summary>
    /// Adds common finding signals under the "finding" key.
    /// </summary>
    /// <param name="severity">The finding severity (e.g., "critical", "high", "medium", "low").</param>
    /// <param name="confidence">The confidence score (0.0 to 1.0).</param>
    /// <param name="cveId">Optional CVE identifier; stored as null when omitted.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithFinding(string severity, decimal confidence, string? cveId = null) =>
        Set("finding", new Dictionary<string, object?>(StringComparer.Ordinal)
        {
            ["severity"] = severity,
            ["confidence"] = confidence,
            ["cve_id"] = cveId,
        });

    /// <summary>
    /// Adds common reachability signals under the "reachability" key.
    /// </summary>
    /// <param name="state">The reachability state (e.g., "reachable", "unreachable", "unknown").</param>
    /// <param name="confidence">The confidence score (0.0 to 1.0).</param>
    /// <param name="hasRuntimeEvidence">Whether there is runtime evidence.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithReachability(string state, decimal confidence, bool hasRuntimeEvidence = false) =>
        Set("reachability", new Dictionary<string, object?>(StringComparer.Ordinal)
        {
            ["state"] = state,
            ["confidence"] = confidence,
            ["has_runtime_evidence"] = hasRuntimeEvidence,
        });

    /// <summary>
    /// Adds common trust score signals ("trust_score" and "trust_verified").
    /// </summary>
    /// <param name="score">The trust score (0.0 to 1.0).</param>
    /// <param name="verified">Whether the source is verified.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithTrustScore(decimal score, bool verified = false)
    {
        Set("trust_score", score);
        return Set("trust_verified", verified);
    }

    /// <summary>
    /// Builds the signal context.
    /// </summary>
    /// <returns>A new signal context seeded with the configured signals.</returns>
    public SignalContext Build() => new(_signals);

    // Shared setter: indexer assignment means last write for a given name wins.
    private SignalContextBuilder Set(string name, object? value)
    {
        _signals[name] = value;
        return this;
    }
}

View File

@@ -1,6 +1,6 @@
using System.Diagnostics.CodeAnalysis;
namespace StellaOps.Policy.Engine.Compilation;
namespace StellaOps.PolicyDsl;
/// <summary>
/// Represents a precise source location within a policy DSL document.
@@ -95,66 +95,3 @@ public readonly struct SourceSpan : IEquatable<SourceSpan>
return new SourceSpan(start, end);
}
}
/// <summary>
/// Token categories produced by the policy DSL lexer.
/// </summary>
internal enum TokenKind
{
    // Sentinel: must remain 0 so default(TokenKind) means "no more input".
    EndOfFile = 0,

    // Names and literals.
    Identifier,
    StringLiteral,
    NumberLiteral,
    BooleanLiteral,

    // Punctuation and delimiters.
    LeftBrace,
    RightBrace,
    LeftParen,
    RightParen,
    LeftBracket,
    RightBracket,
    Comma,
    Semicolon,
    Colon,
    Arrow, // =>
    Assign, // =
    Define, // :=
    Dot,

    // Reserved keywords of the stella-dsl grammar.
    KeywordPolicy,
    KeywordSyntax,
    KeywordMetadata,
    KeywordProfile,
    KeywordRule,
    KeywordMap,
    KeywordSource,
    KeywordEnv,
    KeywordIf,
    KeywordThen,
    KeywordWhen,
    KeywordAnd,
    KeywordOr,
    KeywordNot,
    KeywordPriority,
    KeywordElse,
    KeywordBecause,
    KeywordSettings,
    KeywordIgnore,
    KeywordUntil,
    KeywordEscalate,
    KeywordTo,
    KeywordRequireVex,
    KeywordWarn,
    KeywordMessage,
    KeywordDefer,
    KeywordAnnotate,
    KeywordIn,

    // Comparison operators.
    EqualEqual,
    NotEqual,
    LessThan,
    LessThanOrEqual,
    GreaterThan,
    GreaterThanOrEqual,

    // Anything the lexer could not classify; surfaced as a diagnostic downstream.
    Unknown,
}
/// <summary>
/// A single lexed token from a policy DSL document.
/// </summary>
/// <param name="Kind">The token category.</param>
/// <param name="Text">The raw source text the token was lexed from.</param>
/// <param name="Span">The token's location within the source document.</param>
/// <param name="Value">
/// Optional pre-parsed value — presumably the decoded string/number/boolean for
/// literal tokens; confirm against the lexer. Null for structural tokens.
/// </param>
internal readonly record struct DslToken(
    TokenKind Kind,
    string Text,
    SourceSpan Span,
    object? Value = null);

View File

@@ -0,0 +1,20 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />
</ItemGroup>
<ItemGroup>
<InternalsVisibleTo Include="StellaOps.PolicyDsl.Tests" />
<InternalsVisibleTo Include="StellaOps.Policy.Engine" />
<InternalsVisibleTo Include="StellaOps.Policy.Engine.Tests" />
</ItemGroup>
</Project>

View File

@@ -56,7 +56,7 @@ public static class PolicyEvaluation
explanation = new PolicyExplanation(
finding.FindingId,
PolicyVerdictStatus.Allowed,
PolicyVerdictStatus.Pass,
null,
"No rule matched; baseline applied",
ImmutableArray.Create(PolicyExplanationNode.Leaf("rule", "No matching rule")));
@@ -156,7 +156,7 @@ public static class PolicyEvaluation
Reachability: components.ReachabilityKey);
}
if (status != PolicyVerdictStatus.Allowed)
if (status != PolicyVerdictStatus.Pass)
{
explanationNodes.Add(PolicyExplanationNode.Leaf("action", $"Action {action.Type}", status.ToString()));
}
@@ -190,14 +190,7 @@ public static class PolicyEvaluation
finding.FindingId,
status,
rule.Name,
notes,
explanationNodes.ToImmutable());
explanation = new PolicyExplanation(
finding.FindingId,
status,
rule.Name,
notes,
notes ?? string.Empty,
explanationNodes.ToImmutable());
return new PolicyVerdict(

View File

@@ -18,7 +18,7 @@ public sealed record PolicyExplanation(
ImmutableArray<PolicyExplanationNode> Nodes)
{
public static PolicyExplanation Allow(string findingId, string? ruleName, string reason, params PolicyExplanationNode[] nodes) =>
new(findingId, PolicyVerdictStatus.Allowed, ruleName, reason, nodes.ToImmutableArray());
new(findingId, PolicyVerdictStatus.Pass, ruleName, reason, nodes.ToImmutableArray());
public static PolicyExplanation Block(string findingId, string? ruleName, string reason, params PolicyExplanationNode[] nodes) =>
new(findingId, PolicyVerdictStatus.Blocked, ruleName, reason, nodes.ToImmutableArray());

View File

@@ -29,7 +29,7 @@ public static class SplCanonicalizer
public static byte[] CanonicalizeToUtf8(ReadOnlySpan<byte> json)
{
using var document = JsonDocument.Parse(json, DocumentOptions);
using var document = JsonDocument.Parse(json.ToArray().AsMemory(), DocumentOptions);
var buffer = new ArrayBufferWriter<byte>();
using (var writer = new Utf8JsonWriter(buffer, WriterOptions))

View File

@@ -49,8 +49,8 @@ public static class SplLayeringEngine
private static JsonNode MergeToJsonNode(ReadOnlySpan<byte> basePolicyUtf8, ReadOnlySpan<byte> overlayPolicyUtf8)
{
using var baseDoc = JsonDocument.Parse(basePolicyUtf8, DocumentOptions);
using var overlayDoc = JsonDocument.Parse(overlayPolicyUtf8, DocumentOptions);
using var baseDoc = JsonDocument.Parse(basePolicyUtf8.ToArray().AsMemory(), DocumentOptions);
using var overlayDoc = JsonDocument.Parse(overlayPolicyUtf8.ToArray().AsMemory(), DocumentOptions);
var baseRoot = baseDoc.RootElement;
var overlayRoot = overlayDoc.RootElement;
@@ -209,4 +209,14 @@ public static class SplLayeringEngine
return element.Value.TryGetProperty(name, out var value) ? value : (JsonElement?)null;
}
private static JsonElement? GetPropertyOrNull(this JsonElement element, string name)
{
if (element.ValueKind != JsonValueKind.Object)
{
return null;
}
return element.TryGetProperty(name, out var value) ? value : (JsonElement?)null;
}
}

View File

@@ -1,3 +1,4 @@
using Xunit;
using StellaOps.Policy.Engine.AdvisoryAI;
namespace StellaOps.Policy.Engine.Tests;

View File

@@ -1,3 +1,4 @@
using Xunit;
using StellaOps.Policy.Engine.Domain;
using StellaOps.Policy.Engine.Services;

View File

@@ -1,3 +1,4 @@
using Xunit;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Policy.Engine.Ledger;
using StellaOps.Policy.Engine.Orchestration;

View File

@@ -1,3 +1,4 @@
using Xunit;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Policy.Engine.Orchestration;

View File

@@ -1,3 +1,4 @@
using Xunit;
using System.Threading.Tasks;
using StellaOps.Policy.Engine.Overlay;
using StellaOps.Policy.Engine.Services;

View File

@@ -1,3 +1,4 @@
using Xunit;
using System.Text.Json;
using System.Threading.Tasks;
using StellaOps.Policy.Engine.Overlay;

View File

@@ -1,3 +1,4 @@
using Xunit;
using System.Linq;
using System.Threading.Tasks;
using StellaOps.Policy.Engine.Streaming;

View File

@@ -1,3 +1,4 @@
using Xunit;
using System.Collections.Immutable;
using Microsoft.Extensions.Options;

View File

@@ -1,7 +1,7 @@
using System;
using Microsoft.Extensions.Options;
using StellaOps.Policy;
using StellaOps.Policy.Engine.Compilation;
using StellaOps.PolicyDsl;
using StellaOps.Policy.Engine.Options;
using StellaOps.Policy.Engine.Services;
using Xunit;

View File

@@ -1,7 +1,7 @@
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Policy;
using StellaOps.Policy.Engine.Compilation;
using StellaOps.PolicyDsl;
using Xunit;
using Xunit.Sdk;

View File

@@ -3,7 +3,7 @@ using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Policy;
using StellaOps.Policy.Engine.Compilation;
using StellaOps.PolicyDsl;
using StellaOps.Policy.Engine.Evaluation;
using StellaOps.Policy.Engine.Services;
using Xunit;

View File

@@ -1,3 +1,4 @@
using Xunit;
using System.Collections.Immutable;
using StellaOps.Policy.Engine.Domain;
using StellaOps.Policy.Engine.Services;

View File

@@ -1,3 +1,4 @@
using Xunit;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Policy.Engine.Orchestration;

View File

@@ -1,3 +1,4 @@
using Xunit;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Policy.Engine.Ledger;
using StellaOps.Policy.Engine.Orchestration;

View File

@@ -6,9 +6,25 @@
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<UseConcelierTestInfra>false</UseConcelierTestInfra>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.1">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" Version="6.0.4">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj" />
</ItemGroup>

View File

@@ -1,3 +1,4 @@
using Xunit;
using StellaOps.Policy.Engine.TrustWeighting;
namespace StellaOps.Policy.Engine.Tests;

View File

@@ -1,3 +1,4 @@
using Xunit;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Policy.Engine.Ledger;
using StellaOps.Policy.Engine.Orchestration;

View File

@@ -0,0 +1,183 @@
using FluentAssertions;
using StellaOps.PolicyDsl;
using Xunit;
namespace StellaOps.PolicyDsl.Tests;
/// <summary>
/// Tests for the policy DSL compiler: happy paths, error reporting, and
/// checksum determinism. DSL grammar conventions exercised here: rule names
/// are bare identifiers, "then" blocks are brace-less, and := assigns values.
/// </summary>
public class PolicyCompilerTests
{
    private readonly PolicyCompiler _compiler = new();

    [Fact]
    public void Compile_MinimalPolicy_Succeeds()
    {
        // Arrange - rule name is an identifier, not a string; then block has no braces; := for assignment
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule always priority 1 {
                when true
                then
                  severity := "info"
                because "always applies"
              }
            }
            """;

        // Act
        var result = _compiler.Compile(source);

        // Assert (diagnostic messages are joined into the failure reason for easier debugging)
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        result.Document.Should().NotBeNull();
        result.Document!.Name.Should().Be("test");
        result.Document.Syntax.Should().Be("stella-dsl@1");
        result.Document.Rules.Should().HaveCount(1);
        result.Checksum.Should().NotBeNullOrEmpty();
    }

    [Fact]
    public void Compile_WithMetadata_ParsesCorrectly()
    {
        // Arrange
        var source = """
            policy "with-meta" syntax "stella-dsl@1" {
              metadata {
                version = "1.0.0"
                author = "test"
              }
              rule r1 priority 1 {
                when true
                then
                  severity := "low"
                because "required"
              }
            }
            """;

        // Act
        var result = _compiler.Compile(source);

        // Assert
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        result.Document!.Metadata.Should().ContainKey("version");
        result.Document.Metadata.Should().ContainKey("author");
    }

    [Fact]
    public void Compile_WithProfile_ParsesCorrectly()
    {
        // Arrange
        var source = """
            policy "with-profile" syntax "stella-dsl@1" {
              profile standard {
                trust_score = 0.85
              }
              rule r1 priority 1 {
                when true
                then
                  severity := "low"
                because "required"
              }
            }
            """;

        // Act
        var result = _compiler.Compile(source);

        // Assert
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        result.Document!.Profiles.Should().HaveCount(1);
        result.Document.Profiles[0].Name.Should().Be("standard");
    }

    [Fact]
    public void Compile_EmptySource_ReturnsError()
    {
        // Arrange
        var source = "";

        // Act
        var result = _compiler.Compile(source);

        // Assert
        result.Success.Should().BeFalse();
        result.Diagnostics.Should().NotBeEmpty();
    }

    [Fact]
    public void Compile_InvalidSyntax_ReturnsError()
    {
        // Arrange - unknown syntax version must be rejected, not silently accepted.
        var source = """
            policy "bad" syntax "invalid@1" {
            }
            """;

        // Act
        var result = _compiler.Compile(source);

        // Assert
        result.Success.Should().BeFalse();
    }

    [Fact]
    public void Compile_SameSource_ProducesSameChecksum()
    {
        // Arrange - determinism guarantee: identical input must checksum identically.
        var source = """
            policy "deterministic" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when true
                then
                  severity := "info"
                because "always"
              }
            }
            """;

        // Act
        var result1 = _compiler.Compile(source);
        var result2 = _compiler.Compile(source);

        // Assert
        result1.Success.Should().BeTrue(string.Join("; ", result1.Diagnostics.Select(d => d.Message)));
        result2.Success.Should().BeTrue(string.Join("; ", result2.Diagnostics.Select(d => d.Message)));
        result1.Checksum.Should().Be(result2.Checksum);
    }

    [Fact]
    public void Compile_DifferentSource_ProducesDifferentChecksum()
    {
        // Arrange - the two sources differ only in the policy name; the checksum
        // must still distinguish them.
        var source1 = """
            policy "test1" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when true
                then
                  severity := "info"
                because "always"
              }
            }
            """;
        var source2 = """
            policy "test2" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when true
                then
                  severity := "info"
                because "always"
              }
            }
            """;

        // Act
        var result1 = _compiler.Compile(source1);
        var result2 = _compiler.Compile(source2);

        // Assert
        result1.Checksum.Should().NotBe(result2.Checksum);
    }
}

View File

@@ -0,0 +1,193 @@
using FluentAssertions;
using StellaOps.PolicyDsl;
using Xunit;
namespace StellaOps.PolicyDsl.Tests;
/// <summary>
/// Tests for the policy evaluation engine: rule matching, else branches,
/// priority ordering, and boolean condition operators (and / or / not).
/// </summary>
public class PolicyEngineTests
{
    private readonly PolicyEngineFactory _factory = new();

    [Fact]
    public void Evaluate_RuleMatches_ReturnsMatchedRules()
    {
        // Arrange
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule critical_rule priority 100 {
                when finding.severity == "critical"
                then
                  severity := "critical"
                because "critical finding detected"
              }
            }
            """;
        var result = _factory.CreateFromSource(source);
        result.Engine.Should().NotBeNull(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        var engine = result.Engine!;
        var context = SignalContext.Builder()
            .WithObject("finding", new Dictionary<string, object?> { ["severity"] = "critical" })
            .Build();

        // Act
        var evalResult = engine.Evaluate(context);

        // Assert
        evalResult.MatchedRules.Should().Contain("critical_rule");
        evalResult.PolicyChecksum.Should().NotBeNullOrEmpty();
    }

    [Fact]
    public void Evaluate_RuleDoesNotMatch_ExecutesElseBranch()
    {
        // Arrange - "low" does not satisfy the when-clause, so the else branch must run.
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule critical_only priority 100 {
                when finding.severity == "critical"
                then
                  severity := "critical"
                else
                  severity := "info"
                because "classify by severity"
              }
            }
            """;
        var result = _factory.CreateFromSource(source);
        result.Engine.Should().NotBeNull(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        var engine = result.Engine!;
        var context = SignalContext.Builder()
            .WithObject("finding", new Dictionary<string, object?> { ["severity"] = "low" })
            .Build();

        // Act
        var evalResult = engine.Evaluate(context);

        // Assert - else-branch actions are emitted but the rule is NOT counted as matched.
        evalResult.MatchedRules.Should().BeEmpty();
        evalResult.Actions.Should().NotBeEmpty();
        evalResult.Actions[0].WasElseBranch.Should().BeTrue();
    }

    [Fact]
    public void Evaluate_MultipleRules_EvaluatesInPriorityOrder()
    {
        // Arrange - declaration order (low first) differs from priority order on purpose.
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule low_priority priority 10 {
                when true
                then
                  severity := "low"
                because "low priority rule"
              }
              rule high_priority priority 100 {
                when true
                then
                  severity := "high"
                because "high priority rule"
              }
            }
            """;
        var result = _factory.CreateFromSource(source);
        result.Engine.Should().NotBeNull(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        var engine = result.Engine!;
        var context = new SignalContext();

        // Act
        var evalResult = engine.Evaluate(context);

        // Assert - higher priority value evaluates first, regardless of declaration order.
        evalResult.MatchedRules.Should().HaveCount(2);
        evalResult.MatchedRules[0].Should().Be("high_priority");
        evalResult.MatchedRules[1].Should().Be("low_priority");
    }

    [Fact]
    public void Evaluate_WithAndCondition_MatchesWhenBothTrue()
    {
        // Arrange
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule combined priority 100 {
                when finding.severity == "critical" and reachability.state == "reachable"
                then
                  severity := "critical"
                because "critical and reachable"
              }
            }
            """;
        var result = _factory.CreateFromSource(source);
        result.Engine.Should().NotBeNull(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        var engine = result.Engine!;
        var context = SignalContext.Builder()
            .WithFinding("critical", 0.95m)
            .WithReachability("reachable", 0.9m)
            .Build();

        // Act
        var evalResult = engine.Evaluate(context);

        // Assert
        evalResult.MatchedRules.Should().Contain("combined");
    }

    [Fact]
    public void Evaluate_WithOrCondition_MatchesWhenEitherTrue()
    {
        // Arrange - only the right-hand disjunct ("high") is satisfied by the context below.
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule either priority 100 {
                when finding.severity == "critical" or finding.severity == "high"
                then
                  severity := "elevated"
                because "elevated severity"
              }
            }
            """;
        var result = _factory.CreateFromSource(source);
        result.Engine.Should().NotBeNull(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        var engine = result.Engine!;
        var context = SignalContext.Builder()
            .WithObject("finding", new Dictionary<string, object?> { ["severity"] = "high" })
            .Build();

        // Act
        var evalResult = engine.Evaluate(context);

        // Assert
        evalResult.MatchedRules.Should().Contain("either");
    }

    [Fact]
    public void Evaluate_WithNotCondition_InvertsResult()
    {
        // Arrange - is_critical is false, so "not finding.is_critical" must evaluate true.
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule not_critical priority 100 {
                when not finding.is_critical
                then
                  severity := "low"
                because "not critical"
              }
            }
            """;
        var result = _factory.CreateFromSource(source);
        result.Engine.Should().NotBeNull(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        var engine = result.Engine!;
        var context = SignalContext.Builder()
            .WithObject("finding", new Dictionary<string, object?> { ["is_critical"] = false })
            .Build();

        // Act
        var evalResult = engine.Evaluate(context);

        // Assert
        evalResult.MatchedRules.Should().Contain("not_critical");
    }
}

View File

@@ -0,0 +1,181 @@
using FluentAssertions;
using StellaOps.PolicyDsl;
using Xunit;
namespace StellaOps.PolicyDsl.Tests;
/// <summary>
/// Tests for the signal context API: builder helpers, mutation semantics,
/// clone independence, and the read-only views.
/// </summary>
public class SignalContextTests
{
    [Fact]
    public void Builder_WithSignal_SetsSignalValue()
    {
        // Arrange & Act
        var context = SignalContext.Builder()
            .WithSignal("test", "value")
            .Build();

        // Assert
        context.GetSignal("test").Should().Be("value");
    }

    [Fact]
    public void Builder_WithFlag_SetsBooleanSignal()
    {
        // Arrange & Act - WithFlag defaults its value parameter to true.
        var context = SignalContext.Builder()
            .WithFlag("enabled")
            .Build();

        // Assert
        context.GetSignal<bool>("enabled").Should().BeTrue();
    }

    [Fact]
    public void Builder_WithNumber_SetsDecimalSignal()
    {
        // Arrange & Act
        var context = SignalContext.Builder()
            .WithNumber("score", 0.95m)
            .Build();

        // Assert
        context.GetSignal<decimal>("score").Should().Be(0.95m);
    }

    [Fact]
    public void Builder_WithString_SetsStringSignal()
    {
        // Arrange & Act
        var context = SignalContext.Builder()
            .WithString("name", "test")
            .Build();

        // Assert
        context.GetSignal<string>("name").Should().Be("test");
    }

    [Fact]
    public void Builder_WithFinding_SetsNestedFindingObject()
    {
        // Arrange & Act
        var context = SignalContext.Builder()
            .WithFinding("critical", 0.95m, "CVE-2024-1234")
            .Build();

        // Assert - the convenience helper stores a nested dictionary under "finding".
        context.HasSignal("finding").Should().BeTrue();
        var finding = context.GetSignal("finding") as IDictionary<string, object?>;
        finding.Should().NotBeNull();
        finding!["severity"].Should().Be("critical");
        finding["confidence"].Should().Be(0.95m);
        finding["cve_id"].Should().Be("CVE-2024-1234");
    }

    [Fact]
    public void Builder_WithReachability_SetsNestedReachabilityObject()
    {
        // Arrange & Act
        var context = SignalContext.Builder()
            .WithReachability("reachable", 0.9m, hasRuntimeEvidence: true)
            .Build();

        // Assert
        context.HasSignal("reachability").Should().BeTrue();
        var reachability = context.GetSignal("reachability") as IDictionary<string, object?>;
        reachability.Should().NotBeNull();
        reachability!["state"].Should().Be("reachable");
        reachability["confidence"].Should().Be(0.9m);
        reachability["has_runtime_evidence"].Should().Be(true);
    }

    [Fact]
    public void Builder_WithTrustScore_SetsTrustSignals()
    {
        // Arrange & Act - trust is stored as two flat signals, not a nested object.
        var context = SignalContext.Builder()
            .WithTrustScore(0.85m, verified: true)
            .Build();

        // Assert
        context.GetSignal<decimal>("trust_score").Should().Be(0.85m);
        context.GetSignal<bool>("trust_verified").Should().BeTrue();
    }

    [Fact]
    public void SetSignal_UpdatesExistingValue()
    {
        // Arrange
        var context = new SignalContext();
        context.SetSignal("key", "value1");

        // Act - second write to the same name must overwrite, not duplicate.
        context.SetSignal("key", "value2");

        // Assert
        context.GetSignal("key").Should().Be("value2");
    }

    [Fact]
    public void RemoveSignal_RemovesExistingSignal()
    {
        // Arrange
        var context = new SignalContext();
        context.SetSignal("key", "value");

        // Act
        context.RemoveSignal("key");

        // Assert
        context.HasSignal("key").Should().BeFalse();
    }

    [Fact]
    public void Clone_CreatesIndependentCopy()
    {
        // Arrange
        var original = SignalContext.Builder()
            .WithSignal("key", "value")
            .Build();

        // Act - mutating the clone must not leak back into the original.
        var clone = original.Clone();
        clone.SetSignal("key", "modified");

        // Assert
        original.GetSignal("key").Should().Be("value");
        clone.GetSignal("key").Should().Be("modified");
    }

    [Fact]
    public void SignalNames_ReturnsAllSignalKeys()
    {
        // Arrange
        var context = SignalContext.Builder()
            .WithSignal("a", 1)
            .WithSignal("b", 2)
            .WithSignal("c", 3)
            .Build();

        // Act & Assert - BeEquivalentTo: order of names is not part of the contract.
        context.SignalNames.Should().BeEquivalentTo(new[] { "a", "b", "c" });
    }

    [Fact]
    public void Signals_ReturnsReadOnlyDictionary()
    {
        // Arrange
        var context = SignalContext.Builder()
            .WithSignal("key", "value")
            .Build();

        // Act
        var signals = context.Signals;

        // Assert
        signals.Should().ContainKey("key");
        signals["key"].Should().Be("value");
    }
}

View File

@@ -0,0 +1,35 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<!-- Disable Concelier test infra to avoid duplicate package references -->
<UseConcelierTestInfra>false</UseConcelierTestInfra>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.1">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" Version="6.0.4">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../StellaOps.PolicyDsl/StellaOps.PolicyDsl.csproj" />
</ItemGroup>
<ItemGroup>
<None Update="TestData\*.dsl">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

View File

@@ -0,0 +1,56 @@
// Default reachability-aware policy
// syntax: stella-dsl@1
policy "default-reachability" syntax "stella-dsl@1" {
metadata {
version = "1.0.0"
description = "Default policy with reachability-aware rules"
author = "StellaOps"
}
settings {
default_action = "warn"
fail_on_critical = true
}
profile standard {
trust_score = 0.85
}
// Critical vulnerabilities with confirmed reachability
rule critical_reachable priority 100 {
when finding.severity == "critical" and reachability.state == "reachable"
then
severity := "critical"
annotate finding.priority := "immediate"
escalate to "security-team" when reachability.confidence > 0.9
because "Critical vulnerabilities with confirmed reachability require immediate action"
}
// High severity with runtime evidence
rule high_with_evidence priority 90 {
when finding.severity == "high" and reachability.has_runtime_evidence
then
severity := "high"
annotate finding.evidence := "runtime-confirmed"
else
defer until "reachability-assessment"
because "High severity findings need runtime evidence for prioritization"
}
// Low severity unreachable can be ignored
rule low_unreachable priority 50 {
when finding.severity == "low" and reachability.state == "unreachable"
then
ignore until "next-scan" because "Low severity unreachable code"
because "Low severity unreachable vulnerabilities can be safely deferred"
}
// Unknown reachability requires VEX
rule unknown_reachability priority 40 {
when not reachability.state
then
warn message "Reachability assessment pending"
because "Unknown reachability requires manual assessment"
}
}

View File

@@ -0,0 +1,11 @@
// Minimal valid policy
// syntax: stella-dsl@1
policy "minimal" syntax "stella-dsl@1" {
rule always_pass priority 1 {
when true
then
severity := "info"
because "always applies"
}
}

View File

@@ -0,0 +1,10 @@
using System.Collections.Generic;
namespace StellaOps.Scanner.Worker.Determinism;
/// <summary>
/// Deterministic metadata for a surface manifest: per-payload hashes and a Merkle-like root.
/// </summary>
/// <param name="PayloadHashes">
/// Map of payload identifier to its hash. NOTE(review): key format and hash encoding
/// (hex? prefixed?) are determined by the producer — confirm against the manifest writer.
/// </param>
/// <param name="MerkleRootSha256">SHA-256 root derived from the payload hashes.</param>
public sealed record DeterminismEvidence(
    IReadOnlyDictionary<string, string> PayloadHashes,
    string MerkleRootSha256);

View File

@@ -0,0 +1,79 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace StellaOps.Scanner.Worker.Determinism;
/// <summary>
/// Represents a determinism score report produced by the worker replay harness.
/// This mirrors the determinism.json shape used in release bundles.
/// </summary>
public sealed record DeterminismReport(
    string Version,
    string Release,
    string Platform,
    string? PolicySha,
    string? FeedsSha,
    string? ScannerSha,
    double OverallScore,
    double ThresholdOverall,
    double ThresholdImage,
    IReadOnlyList<DeterminismImageReport> Images)
{
    /// <summary>
    /// Projects a harness-produced report onto the release-bundle shape, attaching
    /// release/platform identity and the optional provenance SHAs.
    /// </summary>
    /// <param name="harnessReport">The harness report to convert; must not be null.</param>
    /// <param name="release">Release identifier stamped into the report.</param>
    /// <param name="platform">Platform identifier stamped into the report.</param>
    /// <param name="policySha">Optional policy digest.</param>
    /// <param name="feedsSha">Optional feeds digest.</param>
    /// <param name="scannerSha">Optional scanner digest.</param>
    /// <param name="version">Report schema version; defaults to "1".</param>
    public static DeterminismReport FromHarness(Harness.DeterminismReport harnessReport,
        string release,
        string platform,
        string? policySha = null,
        string? feedsSha = null,
        string? scannerSha = null,
        string version = "1")
    {
        ArgumentNullException.ThrowIfNull(harnessReport);

        // Convert each per-image harness report up front; the record just carries the list.
        var images = new List<DeterminismImageReport>();
        foreach (var image in harnessReport.Images)
        {
            images.Add(DeterminismImageReport.FromHarness(image));
        }

        return new DeterminismReport(
            version,
            release,
            platform,
            policySha,
            feedsSha,
            scannerSha,
            harnessReport.OverallScore,
            harnessReport.OverallThreshold,
            harnessReport.ImageThreshold,
            images);
    }
}
/// <summary>
/// Per-image determinism summary: run counts, the identical-run score, baseline
/// artifact hashes, and the per-run detail records.
/// </summary>
public sealed record DeterminismImageReport(
    string Image,
    int Runs,
    int Identical,
    double Score,
    IReadOnlyDictionary<string, string> ArtifactHashes,
    IReadOnlyList<DeterminismRunReport> RunsDetail)
{
    /// <summary>
    /// Projects a harness image report onto the release-bundle shape.
    /// </summary>
    public static DeterminismImageReport FromHarness(Harness.DeterminismImageReport report)
    {
        // Convert each run detail eagerly so the record holds a materialized list.
        var runDetails = new List<DeterminismRunReport>();
        foreach (var run in report.RunReports)
        {
            runDetails.Add(DeterminismRunReport.FromHarness(run));
        }

        return new DeterminismImageReport(
            report.ImageDigest,
            report.Runs,
            report.Identical,
            report.Score,
            report.BaselineHashes,
            runDetails);
    }
}
/// <summary>
/// Detail for a single determinism run: its index, the artifact hashes it produced,
/// and the artifacts that differed from the baseline.
/// </summary>
public sealed record DeterminismRunReport(
    int RunIndex,
    IReadOnlyDictionary<string, string> ArtifactHashes,
    IReadOnlyList<string> NonDeterministic)
{
    /// <summary>
    /// Projects a harness run report onto the release-bundle shape (a direct field-for-field copy).
    /// </summary>
    public static DeterminismRunReport FromHarness(Harness.DeterminismRunReport report) =>
        new(report.RunIndex, report.ArtifactHashes, report.NonDeterministicArtifacts);
}

View File

@@ -0,0 +1,11 @@
using System;
namespace StellaOps.Scanner.Worker.Processing.Replay;
/// <summary>
/// Pairs sealed replay bundle metadata with the local filesystem path the bundle was fetched to.
/// Construction validates both values up front so downstream code never sees a null metadata
/// object or a blank path.
/// </summary>
/// <param name="Metadata">The sealed bundle metadata; must not be null.</param>
/// <param name="BundlePath">Local path of the fetched bundle; must be non-empty.</param>
public sealed record ReplayBundleContext(ReplaySealedBundleMetadata Metadata, string BundlePath)
{
    // The original attempted a validating constructor with invalid syntax
    // ("public ReplayBundleContext : this(...)" has no parameter list and cannot compile).
    // Validation is expressed instead as property initializers that re-declare the
    // positional properties; the initializers run with the primary-constructor arguments.
    public ReplaySealedBundleMetadata Metadata { get; init; } =
        Metadata ?? throw new ArgumentNullException(nameof(Metadata));

    public string BundlePath { get; init; } =
        string.IsNullOrWhiteSpace(BundlePath)
            ? throw new ArgumentException("BundlePath required", nameof(BundlePath))
            : BundlePath;
}

View File

@@ -0,0 +1,97 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Replay.Core;
using StellaOps.Scanner.Storage;
using StellaOps.Scanner.Storage.ObjectStore;
namespace StellaOps.Scanner.Worker.Processing.Replay;
/// <summary>
/// Fetches a sealed replay bundle from the configured object store, verifies its SHA-256 hash,
/// and returns a local file path for downstream analyzers.
/// </summary>
internal sealed class ReplayBundleFetcher
{
    private readonly IArtifactObjectStore _objectStore;
    private readonly ScannerStorageOptions _storageOptions;
    private readonly ILogger<ReplayBundleFetcher> _logger;

    /// <summary>
    /// Creates a fetcher. All dependencies are required.
    /// </summary>
    /// <exception cref="ArgumentNullException">Any dependency is null.</exception>
    public ReplayBundleFetcher(IArtifactObjectStore objectStore, ScannerStorageOptions storageOptions, ILogger<ReplayBundleFetcher> logger)
    {
        _objectStore = objectStore ?? throw new ArgumentNullException(nameof(objectStore));
        _storageOptions = storageOptions ?? throw new ArgumentNullException(nameof(storageOptions));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Downloads the bundle referenced by <paramref name="metadata"/> to a temp file,
    /// verifies its SHA-256 against the manifest hash, and returns the local path.
    /// Returns null when the metadata carries no bundle URI.
    /// </summary>
    /// <param name="metadata">Sealed bundle metadata (bundle URI + expected hash).</param>
    /// <param name="cancellationToken">Cancels the download/copy.</param>
    /// <returns>The local path of the verified bundle, or null when no URI is set.</returns>
    /// <exception cref="InvalidOperationException">Bundle missing from the store, or hash mismatch.</exception>
    public async Task<string?> FetchAsync(ReplaySealedBundleMetadata metadata, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(metadata);
        if (string.IsNullOrWhiteSpace(metadata.BundleUri))
        {
            // No sealed bundle attached: callers treat null as "nothing to mount".
            return null;
        }

        var (bucket, key) = ResolveDescriptor(metadata.BundleUri);
        var descriptor = new ArtifactObjectDescriptor(bucket, key, Immutable: true);
        await using var stream = await _objectStore.GetAsync(descriptor, cancellationToken).ConfigureAwait(false);
        if (stream is null)
        {
            throw new InvalidOperationException($"Replay bundle not found: {metadata.BundleUri}");
        }

        // NOTE(review): the temp path is derived solely from the manifest hash, so concurrent
        // fetches of the same bundle race on this file — confirm single-flight at the caller.
        var tempPath = Path.Combine(Path.GetTempPath(), "stellaops", "replay", metadata.ManifestHash + ".tar.zst");
        Directory.CreateDirectory(Path.GetDirectoryName(tempPath)!);
        await using (var file = File.Create(tempPath))
        {
            await stream.CopyToAsync(file, cancellationToken).ConfigureAwait(false);
        }

        // Verify hash: re-read the file we just wrote and compare to the expected manifest hash.
        await using (var file = File.OpenRead(tempPath))
        {
            var actualHex = DeterministicHash.Sha256Hex(file);
            var expected = NormalizeHash(metadata.ManifestHash);
            if (!string.Equals(actualHex, expected, StringComparison.OrdinalIgnoreCase))
            {
                // Corrupt or tampered download: remove the bad artifact before failing.
                File.Delete(tempPath);
                throw new InvalidOperationException($"Replay bundle hash mismatch. Expected {expected} got {actualHex}");
            }
        }

        _logger.LogInformation("Fetched sealed replay bundle {Uri} (hash {Hash}) to {Path}", metadata.BundleUri, metadata.ManifestHash, tempPath);
        return tempPath;
    }

    /// <summary>
    /// Splits a "cas://bucket/key" URI into bucket and key. Any other URI shape is
    /// treated as a raw key within the configured default bucket.
    /// </summary>
    private (string Bucket, string Key) ResolveDescriptor(string uri)
    {
        // Expect cas://bucket/key
        if (!uri.StartsWith("cas://", StringComparison.OrdinalIgnoreCase))
        {
            // fallback to configured bucket + direct key
            return (_storageOptions.ObjectStore.BucketName, uri.Trim('/'));
        }

        var trimmed = uri.Substring("cas://".Length);
        var slash = trimmed.IndexOf('/');
        if (slash < 0)
        {
            // "cas://segment" with no slash: treat the segment as a key in the default bucket.
            return (_storageOptions.ObjectStore.BucketName, trimmed);
        }

        var bucket = trimmed[..slash];
        var key = trimmed[(slash + 1)..];
        return (bucket, key);
    }

    /// <summary>
    /// Lower-cases the hash and strips an optional "sha256:" prefix so the comparison
    /// in <see cref="FetchAsync"/> is uniform regardless of how the hash was recorded.
    /// </summary>
    private static string NormalizeHash(string hash)
    {
        var value = hash.Trim().ToLowerInvariant();
        return value.StartsWith("sha256:", StringComparison.Ordinal) ? value[7..] : value;
    }
}

View File

@@ -0,0 +1,32 @@
using System;
using System.IO;
namespace StellaOps.Scanner.Worker.Processing.Replay;
/// <summary>
/// Represents a fetched replay bundle mounted on the local filesystem.
/// Disposing the mount removes the backing file so worker temp space stays bounded.
/// </summary>
public sealed class ReplayBundleMount : IDisposable
{
    public ReplayBundleMount(string bundlePath)
        => BundlePath = bundlePath ?? throw new ArgumentNullException(nameof(bundlePath));

    /// <summary>Local filesystem path of the bundle file.</summary>
    public string BundlePath { get; }

    /// <summary>Deletes the bundle file; failures are swallowed (best-effort cleanup).</summary>
    public void Dispose()
    {
        try
        {
            if (!File.Exists(BundlePath))
            {
                return;
            }

            File.Delete(BundlePath);
        }
        catch
        {
            // Best-effort: a locked or already-removed file must not fail disposal.
        }
    }
}

View File

@@ -0,0 +1,11 @@
namespace StellaOps.Scanner.Worker.Processing.Replay;
/// <summary>
/// Captures sealed replay bundle metadata supplied via the job lease.
/// Used to keep analyzer execution hermetic and to emit Merkle metadata downstream.
/// </summary>
/// <param name="ManifestHash">SHA-256 of the sealed bundle manifest; may carry a "sha256:" prefix (normalized by the fetcher).</param>
/// <param name="BundleUri">Location of the bundle, typically a cas://bucket/key URI.</param>
/// <param name="PolicySnapshotId">Optional pinned policy snapshot identifier from the lease.</param>
/// <param name="FeedSnapshotId">Optional pinned feed snapshot identifier from the lease.</param>
public sealed record ReplaySealedBundleMetadata(
    string ManifestHash,
    string BundleUri,
    string? PolicySnapshotId,
    string? FeedSnapshotId);

View File

@@ -0,0 +1,65 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Core.Contracts;
namespace StellaOps.Scanner.Worker.Processing.Replay;
/// <summary>
/// Reads sealed replay bundle metadata from the job lease and stores it in the analysis context.
/// This does not fetch the bundle contents (handled by upstream) but ensures downstream stages
/// know they must stay hermetic and use the provided bundle identifiers.
/// </summary>
public sealed class ReplaySealedBundleStageExecutor : IScanStageExecutor
{
    public const string BundleUriKey = "replay.bundle.uri";
    public const string BundleHashKey = "replay.bundle.sha256";
    private const string PolicyPinKey = "determinism.policy";
    private const string FeedPinKey = "determinism.feed";
    private readonly ILogger<ReplaySealedBundleStageExecutor> _logger;

    public ReplaySealedBundleStageExecutor(ILogger<ReplaySealedBundleStageExecutor> logger)
        => _logger = logger ?? throw new ArgumentNullException(nameof(logger));

    public string StageName => ScanStageNames.IngestReplay;

    /// <summary>
    /// Pins sealed bundle metadata into the analysis store; skips when the lease carries
    /// no bundle URI, or a URI without a hash (unverifiable input).
    /// </summary>
    public ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);

        var leaseMetadata = context.Lease.Metadata;

        // Returns the lease value for the key, or null when absent/blank.
        string? ReadNonEmpty(string key)
            => leaseMetadata.TryGetValue(key, out var raw) && !string.IsNullOrWhiteSpace(raw) ? raw : null;

        var bundleUri = ReadNonEmpty(BundleUriKey);
        if (bundleUri is null)
        {
            _logger.LogDebug("Replay bundle URI not provided; skipping sealed bundle ingestion.");
            return ValueTask.CompletedTask;
        }

        var bundleHash = ReadNonEmpty(BundleHashKey);
        if (bundleHash is null)
        {
            _logger.LogWarning("Replay bundle URI provided without hash; skipping sealed bundle ingestion to avoid unverifiable input.");
            return ValueTask.CompletedTask;
        }

        var policyPin = ReadNonEmpty(PolicyPinKey);
        var feedPin = ReadNonEmpty(FeedPinKey);

        var sealedMetadata = new ReplaySealedBundleMetadata(
            ManifestHash: bundleHash.Trim(),
            BundleUri: bundleUri.Trim(),
            PolicySnapshotId: policyPin,
            FeedSnapshotId: feedPin);

        context.Analysis.Set(ScanAnalysisKeys.ReplaySealedBundleMetadata, sealedMetadata);
        _logger.LogInformation("Replay sealed bundle pinned: uri={BundleUri} hash={BundleHash} policy={PolicyPin} feed={FeedPin}", bundleUri, bundleHash, policyPin, feedPin);
        return ValueTask.CompletedTask;
    }
}

View File

@@ -27,5 +27,7 @@ public sealed class ScanJobContext
public string ScanId => Lease.ScanId;
public string? ReplayBundlePath { get; set; }
public ScanAnalysisStore Analysis { get; }
}

View File

@@ -13,15 +13,18 @@ public sealed class ScanJobProcessor
private readonly ScanProgressReporter _progressReporter;
private readonly ILogger<ScanJobProcessor> _logger;
private readonly IReachabilityUnionPublisherService _reachabilityPublisher;
private readonly Replay.ReplayBundleFetcher _replayBundleFetcher;
public ScanJobProcessor(
IEnumerable<IScanStageExecutor> executors,
ScanProgressReporter progressReporter,
IReachabilityUnionPublisherService reachabilityPublisher,
Replay.ReplayBundleFetcher replayBundleFetcher,
ILogger<ScanJobProcessor> logger)
{
_progressReporter = progressReporter ?? throw new ArgumentNullException(nameof(progressReporter));
_reachabilityPublisher = reachabilityPublisher ?? throw new ArgumentNullException(nameof(reachabilityPublisher));
_replayBundleFetcher = replayBundleFetcher ?? throw new ArgumentNullException(nameof(replayBundleFetcher));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
var map = new Dictionary<string, IScanStageExecutor>(StringComparer.OrdinalIgnoreCase);
@@ -52,8 +55,7 @@ public sealed class ScanJobProcessor
public async ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(context);
// Placeholder: reachability publisher will be fed once lifter outputs are routed here.
_ = _reachabilityPublisher;
await EnsureReplayBundleFetchedAsync(context, cancellationToken).ConfigureAwait(false);
foreach (var stage in ScanStageNames.Ordered)
{
@@ -71,4 +73,19 @@ public sealed class ScanJobProcessor
cancellationToken).ConfigureAwait(false);
}
}
/// <summary>
/// Downloads the sealed replay bundle pinned in the analysis context (if any) and records
/// its local path on <see cref="ScanJobContext.ReplayBundlePath"/> for downstream stages.
/// No-op when no sealed bundle metadata was ingested.
/// </summary>
private async Task EnsureReplayBundleFetchedAsync(ScanJobContext context, CancellationToken cancellationToken)
{
    if (context.Analysis.TryGet<Replay.ReplaySealedBundleMetadata>(ScanAnalysisKeys.ReplaySealedBundleMetadata, out var sealedMetadata) && sealedMetadata is not null)
    {
        // Already fetched in this context: skip the download when a previous attempt
        // materialized the bundle and the file is still on disk.
        if (!string.IsNullOrWhiteSpace(context.ReplayBundlePath) && File.Exists(context.ReplayBundlePath))
        {
            return;
        }
        var path = await _replayBundleFetcher.FetchAsync(sealedMetadata, cancellationToken).ConfigureAwait(false);
        context.ReplayBundlePath = path;
    }
}
}

View File

@@ -4,6 +4,7 @@ namespace StellaOps.Scanner.Worker.Processing;
public static class ScanStageNames
{
public const string IngestReplay = "ingest-replay";
public const string ResolveImage = "resolve-image";
public const string PullLayers = "pull-layers";
public const string BuildFilesystem = "build-filesystem";
@@ -14,6 +15,7 @@ public static class ScanStageNames
public static readonly IReadOnlyList<string> Ordered = new[]
{
IngestReplay,
ResolveImage,
PullLayers,
BuildFilesystem,

View File

@@ -36,7 +36,12 @@ internal sealed record SurfaceManifestRequest(
IReadOnlyList<SurfaceManifestPayload> Payloads,
string Component,
string? Version,
string? WorkerInstance);
string? WorkerInstance,
string? DeterminismMerkleRoot = null,
string? ReplayBundleUri = null,
string? ReplayBundleHash = null,
string? ReplayPolicyPin = null,
string? ReplayFeedPin = null);
internal interface ISurfaceManifestPublisher
{
@@ -112,7 +117,17 @@ internal sealed class SurfaceManifestPublisher : ISurfaceManifestPublisher
WorkerInstance = request.WorkerInstance,
Attempt = request.Attempt
},
Artifacts = artifacts.ToImmutableArray()
Artifacts = artifacts.ToImmutableArray(),
DeterminismMerkleRoot = request.DeterminismMerkleRoot,
ReplayBundle = string.IsNullOrWhiteSpace(request.ReplayBundleUri)
? null
: new ReplayBundleReference
{
Uri = request.ReplayBundleUri!,
Sha256 = request.ReplayBundleHash ?? string.Empty,
PolicySnapshotId = request.ReplayPolicyPin,
FeedSnapshotId = request.ReplayFeedPin
}
};
var manifestBytes = JsonSerializer.SerializeToUtf8Bytes(manifestDocument, SerializerOptions);
@@ -177,7 +192,8 @@ internal sealed class SurfaceManifestPublisher : ISurfaceManifestPublisher
ManifestDigest: manifestDigest,
ManifestUri: manifestUri,
ArtifactId: artifactId,
Document: manifestDocument);
Document: manifestDocument,
DeterminismMerkleRoot: request.DeterminismMerkleRoot);
}
private async Task<SurfaceManifestArtifact> StorePayloadAsync(SurfaceManifestPayload payload, string tenant, CancellationToken cancellationToken)

View File

@@ -32,4 +32,8 @@ public static class ScanAnalysisKeys
public const string FileEntries = "analysis.files.entries";
public const string EntropyReport = "analysis.entropy.report";
public const string EntropyLayerSummary = "analysis.entropy.layer.summary";
public const string DeterminismEvidence = "analysis.determinism.evidence";
public const string ReplaySealedBundleMetadata = "analysis.replay.sealed.bundle";
}

View File

@@ -104,7 +104,7 @@ public sealed class FileSurfaceManifestStore :
normalized.Tenant,
digest);
return new SurfaceManifestPublishResult(digest, uri, artifactId, normalized);
return new SurfaceManifestPublishResult(digest, uri, artifactId, normalized, null);
}
public async Task<SurfaceManifestDocument?> TryGetByDigestAsync(

View File

@@ -40,6 +40,35 @@ public sealed record SurfaceManifestDocument
[JsonPropertyName("artifacts")]
public IReadOnlyList<SurfaceManifestArtifact> Artifacts { get; init; }
= ImmutableArray<SurfaceManifestArtifact>.Empty;
[JsonPropertyName("determinismRoot")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? DeterminismMerkleRoot { get; init; }
= null;
[JsonPropertyName("replayBundle")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public ReplayBundleReference? ReplayBundle { get; init; }
= null;
}
/// <summary>
/// Reference to the sealed replay bundle that backed a surface manifest.
/// Null snapshot pins are omitted from the serialized JSON.
/// </summary>
public sealed record ReplayBundleReference
{
    /// <summary>Bundle location, typically a cas:// URI.</summary>
    [JsonPropertyName("uri")]
    public string Uri { get; init; } = string.Empty;

    /// <summary>SHA-256 of the bundle.</summary>
    [JsonPropertyName("sha256")]
    public string Sha256 { get; init; } = string.Empty;

    /// <summary>Pinned policy snapshot, serialized as "policyPin" when present.</summary>
    [JsonPropertyName("policyPin")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? PolicySnapshotId { get; init; }

    /// <summary>Pinned feed snapshot, serialized as "feedPin" when present.</summary>
    [JsonPropertyName("feedPin")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? FeedSnapshotId { get; init; }
}
/// <summary>
@@ -139,4 +168,5 @@ public sealed record SurfaceManifestPublishResult(
string ManifestDigest,
string ManifestUri,
string ArtifactId,
SurfaceManifestDocument Document);
SurfaceManifestDocument Document,
string? DeterminismMerkleRoot = null);

View File

@@ -0,0 +1,127 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Worker.Tests.Determinism;
/// <summary>
/// Lightweight determinism harness used in tests to score repeated scanner runs.
/// Groups runs by image digest, compares artefact hashes to the baseline (run index 0),
/// and produces a report compatible with determinism.json expectations.
/// </summary>
internal static class DeterminismHarness
{
    /// <summary>
    /// Scores each image's runs against its baseline (lowest run index).
    /// A run is "identical" only when it has exactly the baseline's artefact set with equal hashes;
    /// artefacts that changed, appeared, or disappeared are all reported as non-deterministic.
    /// </summary>
    /// <param name="runs">Run inputs; grouped by image digest (case-insensitive).</param>
    /// <param name="imageThreshold">Per-image pass threshold copied into the report.</param>
    /// <param name="overallThreshold">Overall pass threshold copied into the report.</param>
    public static DeterminismReport Compute(IEnumerable<DeterminismRunInput> runs, double imageThreshold = 0.90, double overallThreshold = 0.95)
    {
        ArgumentNullException.ThrowIfNull(runs);

        var grouped = runs
            .GroupBy(r => r.ImageDigest, StringComparer.OrdinalIgnoreCase)
            .ToDictionary(g => g.Key, g => g.OrderBy(r => r.RunIndex).ToList(), StringComparer.OrdinalIgnoreCase);

        var imageReports = new List<DeterminismImageReport>();
        var totalRuns = 0;
        var totalIdentical = 0;

        foreach (var (image, entries) in grouped)
        {
            if (entries.Count == 0)
            {
                continue;
            }

            // Run index 0 (after ordering) is the baseline every other run is compared against.
            var baselineHashes = HashArtifacts(entries[0].Artifacts);
            var runReports = new List<DeterminismRunReport>();
            var identical = 0;

            foreach (var run in entries)
            {
                var hashes = HashArtifacts(run.Artifacts);
                var diff = Divergence(baselineHashes, hashes);
                if (diff.Count == 0)
                {
                    identical++;
                }

                runReports.Add(new DeterminismRunReport(run.RunIndex, hashes, diff));
            }

            // entries.Count > 0 is guaranteed by the guard above.
            var score = (double)identical / entries.Count;
            imageReports.Add(new DeterminismImageReport(image, entries.Count, identical, score, baselineHashes, runReports));
            totalRuns += entries.Count;
            totalIdentical += identical;
        }

        var overallScore = totalRuns == 0 ? 0d : (double)totalIdentical / totalRuns;
        return new DeterminismReport(
            OverallScore: overallScore,
            OverallThreshold: overallThreshold,
            ImageThreshold: imageThreshold,
            Images: imageReports.OrderBy(r => r.ImageDigest, StringComparer.Ordinal).ToList());
    }

    /// <summary>
    /// Symmetric comparison of baseline vs. run hashes. Unlike a one-sided scan of the run's
    /// keys, this also flags artefacts that exist in the baseline but are missing from the run
    /// (a run that silently drops an artefact is not deterministic).
    /// </summary>
    private static IReadOnlyList<string> Divergence(
        IReadOnlyDictionary<string, string> baseline,
        IReadOnlyDictionary<string, string> current)
    {
        return baseline.Keys
            .Concat(current.Keys)
            .Distinct(StringComparer.Ordinal)
            .Where(key =>
                !baseline.TryGetValue(key, out var expected) ||
                !current.TryGetValue(key, out var actual) ||
                !string.Equals(expected, actual, StringComparison.Ordinal))
            .OrderBy(k => k, StringComparer.Ordinal)
            .ToArray();
    }

    /// <summary>Maps artefact name → SHA-256 hex of the artefact content.</summary>
    private static IReadOnlyDictionary<string, string> HashArtifacts(IReadOnlyDictionary<string, string> artifacts)
    {
        var map = new Dictionary<string, string>(StringComparer.Ordinal);
        foreach (var (name, content) in artifacts)
        {
            map[name] = Sha256Hex(content);
        }

        return map;
    }

    /// <summary>Lower-case hex SHA-256 of the UTF-8 bytes of <paramref name="content"/> (null treated as empty).</summary>
    private static string Sha256Hex(string content)
        => Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(content ?? string.Empty))).ToLowerInvariant();
}

/// <summary>One observed run: which image, which repetition, and its artefact contents by name.</summary>
internal sealed record DeterminismRunInput(string ImageDigest, int RunIndex, IReadOnlyDictionary<string, string> Artifacts);

/// <summary>Top-level determinism report across all images.</summary>
internal sealed record DeterminismReport(
    double OverallScore,
    double OverallThreshold,
    double ImageThreshold,
    IReadOnlyList<DeterminismImageReport> Images)
{
    /// <summary>Serializes the report as compact camelCase JSON, omitting nulls.</summary>
    public string ToJson()
    {
        var options = new JsonSerializerOptions(JsonSerializerDefaults.Web)
        {
            WriteIndented = false,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
        };
        return JsonSerializer.Serialize(this, options);
    }
}

/// <summary>Per-image determinism summary with the baseline hashes and each run's report.</summary>
internal sealed record DeterminismImageReport(
    string ImageDigest,
    int Runs,
    int Identical,
    double Score,
    IReadOnlyDictionary<string, string> BaselineHashes,
    IReadOnlyList<DeterminismRunReport> RunReports);

/// <summary>Per-run hashes plus the sorted names of artefacts diverging from the baseline.</summary>
internal sealed record DeterminismRunReport(
    int RunIndex,
    IReadOnlyDictionary<string, string> ArtifactHashes,
    IReadOnlyList<string> NonDeterministicArtifacts);

View File

@@ -0,0 +1,44 @@
using System.Collections.Generic;
using System.Linq;
using StellaOps.Scanner.Worker.Tests.Determinism;
using Xunit;
namespace StellaOps.Scanner.Worker.Tests.DeterminismTests;
public sealed class DeterminismHarnessTests
{
    [Fact]
    public void ComputeScores_FlagsDivergentArtifacts()
    {
        // Three runs of the same image; only run 2 diverges, and only on log.ndjson.
        static DeterminismRunInput Run(int index, string logContent) => new(
            "sha256:image",
            index,
            new Dictionary<string, string>
            {
                ["sbom.json"] = "sbom-a",
                ["findings.ndjson"] = "findings-a",
                ["log.ndjson"] = logContent
            });

        var report = DeterminismHarness.Compute(new[]
        {
            Run(0, "log-1"),
            Run(1, "log-1"),
            Run(2, "log-2") // divergent
        });

        var image = report.Images.Single();
        Assert.Equal(1.0 * 2 / 3, image.Score, precision: 3);
        Assert.Equal(2, image.Identical);

        var divergent = image.RunReports.Single(r => r.RunIndex == 2);
        Assert.Contains("log.ndjson", divergent.NonDeterministicArtifacts);
        Assert.DoesNotContain("sbom.json", divergent.NonDeterministicArtifacts);
    }
}

View File

@@ -0,0 +1,70 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Worker.Processing.Replay;
using Xunit;
namespace StellaOps.Scanner.Worker.Tests.Replay;
public sealed class ReplaySealedBundleStageExecutorTests
{
    // Shared factory keeps both tests on the same null-logger wiring.
    private static ReplaySealedBundleStageExecutor CreateExecutor()
        => new(NullLogger<ReplaySealedBundleStageExecutor>.Instance);

    [Fact]
    public async Task ExecuteAsync_SetsMetadata_WhenUriAndHashProvided()
    {
        // Arrange: lease carries the bundle pin plus policy/feed determinism pins.
        var context = TestContexts.Create();
        context.Lease.Metadata["replay.bundle.uri"] = "cas://replay/input.tar.zst";
        context.Lease.Metadata["replay.bundle.sha256"] = "abc123";
        context.Lease.Metadata["determinism.policy"] = "rev-1";
        context.Lease.Metadata["determinism.feed"] = "feed-2";

        await CreateExecutor().ExecuteAsync(context, CancellationToken.None);

        // Assert: the pins land verbatim in the analysis store.
        Assert.True(context.Analysis.TryGet<ReplaySealedBundleMetadata>(ScanAnalysisKeys.ReplaySealedBundleMetadata, out var metadata));
        Assert.Equal("abc123", metadata.ManifestHash);
        Assert.Equal("cas://replay/input.tar.zst", metadata.BundleUri);
        Assert.Equal("rev-1", metadata.PolicySnapshotId);
        Assert.Equal("feed-2", metadata.FeedSnapshotId);
    }

    [Fact]
    public async Task ExecuteAsync_Skips_WhenHashMissing()
    {
        // A URI without a hash is unverifiable input and must be ignored.
        var context = TestContexts.Create();
        context.Lease.Metadata["replay.bundle.uri"] = "cas://replay/input.tar.zst";

        await CreateExecutor().ExecuteAsync(context, CancellationToken.None);

        Assert.False(context.Analysis.TryGet<ReplaySealedBundleMetadata>(ScanAnalysisKeys.ReplaySealedBundleMetadata, out _));
    }
}
/// <summary>
/// Helpers for constructing <see cref="ScanJobContext"/> instances backed by an in-memory lease stub.
/// </summary>
internal static class TestContexts
{
    /// <summary>Creates a fresh context whose lease exposes an empty, mutable metadata dictionary.</summary>
    public static ScanJobContext Create()
    {
        var lease = new TestScanJobLease();
        return new ScanJobContext(lease, TimeProvider.System, TimeProvider.System.GetUtcNow(), CancellationToken.None);
    }

    /// <summary>Minimal lease implementation: fixed identifiers, no-op lifecycle operations.</summary>
    private sealed class TestScanJobLease : IScanJobLease
    {
        public string JobId => "job-1";
        public string ScanId => "scan-1";
        public int Attempt => 1;
        public DateTimeOffset EnqueuedAtUtc => DateTimeOffset.UtcNow;
        public DateTimeOffset LeasedAtUtc => DateTimeOffset.UtcNow;
        public TimeSpan LeaseDuration => TimeSpan.FromMinutes(5);
        // Tests mutate this dictionary directly; Metadata is the read-only view the interface requires.
        public Dictionary<string, string> MutableMetadata { get; } = new();
        public IReadOnlyDictionary<string, string> Metadata => MutableMetadata;
        public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask;
        public ValueTask CompleteAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask;
        public ValueTask DisposeAsync() => ValueTask.CompletedTask;
        public ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask;
        public ValueTask RenewAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask;
    }
}

View File

@@ -0,0 +1,208 @@
using System.Runtime.CompilerServices;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Provenance.Mongo;
namespace StellaOps.Events.Mongo;
/// <summary>
/// Service for backfilling historical events with DSSE provenance metadata.
/// Queries events missing provenance, resolves attestations, and updates events in place.
/// </summary>
public sealed class EventProvenanceBackfillService
{
    private readonly IMongoCollection<BsonDocument> _events;
    private readonly IAttestationResolver _resolver;
    private readonly EventProvenanceWriter _writer;

    /// <summary>
    /// Creates the service over the given database.
    /// </summary>
    /// <param name="database">Database holding the events collection.</param>
    /// <param name="resolver">Resolver used to locate attestations for event subjects.</param>
    /// <param name="collectionName">Events collection name; defaults to "events".</param>
    public EventProvenanceBackfillService(
        IMongoDatabase database,
        IAttestationResolver resolver,
        string collectionName = "events")
    {
        if (database is null) throw new ArgumentNullException(nameof(database));
        _resolver = resolver ?? throw new ArgumentNullException(nameof(resolver));
        _events = database.GetCollection<BsonDocument>(collectionName);
        _writer = new EventProvenanceWriter(database, collectionName);
    }

    /// <summary>
    /// Find events missing provenance for the specified kinds, newest first.
    /// Documents without a usable id, string kind, or subject digest are skipped silently.
    /// </summary>
    public async IAsyncEnumerable<UnprovenEvent> FindUnprovenEventsAsync(
        IEnumerable<string> kinds,
        int? limit = null,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        var filter = ProvenanceMongoExtensions.BuildUnprovenEvidenceFilter(kinds);
        var options = new FindOptions<BsonDocument>
        {
            Sort = Builders<BsonDocument>.Sort.Descending("ts"),
            Limit = limit
        };

        using var cursor = await _events.FindAsync(filter, options, cancellationToken).ConfigureAwait(false);
        while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false))
        {
            foreach (var doc in cursor.Current)
            {
                var eventId = ExtractEventId(doc);
                var kind = ExtractKind(doc);
                var subjectDigest = ExtractSubjectDigest(doc);
                if (eventId is not null && kind is not null && subjectDigest is not null)
                {
                    yield return new UnprovenEvent(eventId, kind, subjectDigest, doc);
                }
            }
        }
    }

    /// <summary>
    /// Backfill provenance for a single event by resolving its attestation.
    /// Never throws: resolution/attachment failures are reported as <see cref="BackfillStatus.Error"/>.
    /// </summary>
    public async Task<BackfillResult> BackfillEventAsync(
        UnprovenEvent unprovenEvent,
        CancellationToken cancellationToken = default)
    {
        if (unprovenEvent is null) throw new ArgumentNullException(nameof(unprovenEvent));

        try
        {
            var resolution = await _resolver.ResolveAsync(
                unprovenEvent.SubjectDigestSha256,
                unprovenEvent.Kind,
                cancellationToken).ConfigureAwait(false);

            if (resolution is null)
            {
                return new BackfillResult(unprovenEvent.EventId, BackfillStatus.NotFound);
            }

            await _writer.AttachAsync(
                unprovenEvent.EventId,
                resolution.Dsse,
                resolution.Trust,
                cancellationToken).ConfigureAwait(false);

            return new BackfillResult(unprovenEvent.EventId, BackfillStatus.Success, resolution.AttestationId);
        }
        catch (Exception ex)
        {
            // Deliberately broad: one bad event must not abort a batch; the error is surfaced in the result.
            return new BackfillResult(unprovenEvent.EventId, BackfillStatus.Error, ErrorMessage: ex.Message);
        }
    }

    /// <summary>
    /// Backfill all unproven events for the specified kinds, reporting each result through
    /// <paramref name="progress"/> and aggregating counts into the returned summary.
    /// </summary>
    public async Task<BackfillSummary> BackfillAllAsync(
        IEnumerable<string> kinds,
        int? limit = null,
        IProgress<BackfillResult>? progress = null,
        CancellationToken cancellationToken = default)
    {
        var summary = new BackfillSummary();

        await foreach (var unprovenEvent in FindUnprovenEventsAsync(kinds, limit, cancellationToken).ConfigureAwait(false))
        {
            summary.TotalProcessed++;
            var result = await BackfillEventAsync(unprovenEvent, cancellationToken).ConfigureAwait(false);
            progress?.Report(result);

            switch (result.Status)
            {
                case BackfillStatus.Success:
                    summary.SuccessCount++;
                    break;
                case BackfillStatus.NotFound:
                    summary.NotFoundCount++;
                    break;
                case BackfillStatus.Error:
                    summary.ErrorCount++;
                    break;
            }
        }

        return summary;
    }

    /// <summary>
    /// Count events missing provenance for reporting/estimation.
    /// </summary>
    public async Task<long> CountUnprovenEventsAsync(
        IEnumerable<string> kinds,
        CancellationToken cancellationToken = default)
    {
        var filter = ProvenanceMongoExtensions.BuildUnprovenEvidenceFilter(kinds);
        return await _events.CountDocumentsAsync(filter, cancellationToken: cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Returns the event id as a string, or null when _id is absent or an unexpected type.
    /// (Previously a non-ObjectId, non-string _id would throw InvalidCastException via AsString.)
    /// </summary>
    private static string? ExtractEventId(BsonDocument doc)
    {
        if (!doc.TryGetValue("_id", out var idValue))
            return null;

        return idValue.BsonType switch
        {
            BsonType.ObjectId => idValue.AsObjectId.ToString(),
            BsonType.String => idValue.AsString,
            _ => null
        };
    }

    /// <summary>
    /// Returns the "kind" field when it is a string; null otherwise.
    /// (Previously a missing kind produced BsonNull.AsString, which throws instead of returning null.)
    /// </summary>
    private static string? ExtractKind(BsonDocument doc)
    {
        var value = doc.GetValue("kind", BsonNull.Value);
        return value.IsString ? value.AsString : null;
    }

    /// <summary>
    /// Extracts subject.digest.sha256 when the document structure matches; null otherwise.
    /// </summary>
    private static string? ExtractSubjectDigest(BsonDocument doc)
    {
        if (!doc.TryGetValue("subject", out var subject) || subject.BsonType != BsonType.Document)
            return null;

        var subjectDoc = subject.AsBsonDocument;
        if (!subjectDoc.TryGetValue("digest", out var digest) || digest.BsonType != BsonType.Document)
            return null;

        var digestDoc = digest.AsBsonDocument;
        if (!digestDoc.TryGetValue("sha256", out var sha256))
            return null;

        // Guard against non-string digests rather than throwing on AsString.
        return sha256.IsString ? sha256.AsString : null;
    }
}
/// <summary>
/// Represents an event that needs provenance backfilled.
/// </summary>
/// <param name="EventId">String form of the event's Mongo _id.</param>
/// <param name="Kind">Event kind (e.g. SBOM, VEX, SCAN), used as a resolution hint.</param>
/// <param name="SubjectDigestSha256">SHA-256 digest of the event subject.</param>
/// <param name="Document">Raw event document, retained for callers needing extra fields.</param>
public sealed record UnprovenEvent(
    string EventId,
    string Kind,
    string SubjectDigestSha256,
    BsonDocument Document);
/// <summary>
/// Result of a single backfill operation.
/// </summary>
/// <param name="EventId">Identifier of the processed event.</param>
/// <param name="Status">Outcome of the operation.</param>
/// <param name="AttestationId">Identifier of the attached attestation on <see cref="BackfillStatus.Success"/>.</param>
/// <param name="ErrorMessage">Failure detail on <see cref="BackfillStatus.Error"/>.</param>
public sealed record BackfillResult(
    string EventId,
    BackfillStatus Status,
    string? AttestationId = null,
    string? ErrorMessage = null);
/// <summary>
/// Status of a backfill operation.
/// </summary>
public enum BackfillStatus
{
    /// <summary>Provenance was resolved and attached to the event.</summary>
    Success,
    /// <summary>No attestation could be resolved for the event's subject.</summary>
    NotFound,
    /// <summary>Resolution or attachment failed; see <see cref="BackfillResult.ErrorMessage"/>.</summary>
    Error
}
/// <summary>
/// Summary statistics from a backfill batch.
/// </summary>
public sealed class BackfillSummary
{
    /// <summary>Total number of unproven events examined.</summary>
    public int TotalProcessed { get; set; }
    /// <summary>Events that had provenance attached.</summary>
    public int SuccessCount { get; set; }
    /// <summary>Events for which no attestation was found.</summary>
    public int NotFoundCount { get; set; }
    /// <summary>Events whose backfill failed with an error.</summary>
    public int ErrorCount { get; set; }
}

View File

@@ -0,0 +1,33 @@
using StellaOps.Provenance.Mongo;
namespace StellaOps.Events.Mongo;
/// <summary>
/// Resolves attestation provenance metadata for a given subject.
/// Implementations may query Rekor, CAS, local attestation stores, or external APIs.
/// </summary>
public interface IAttestationResolver
{
    /// <summary>
    /// Attempt to resolve provenance metadata for the given subject digest.
    /// Returning null (rather than throwing) is the expected "no attestation" outcome.
    /// </summary>
    /// <param name="subjectDigestSha256">SHA-256 digest of the subject (image, SBOM, etc.).</param>
    /// <param name="eventKind">Event kind hint (SBOM, VEX, SCAN, etc.) for filtering.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Resolved provenance and trust info, or null if not found.</returns>
    Task<AttestationResolution?> ResolveAsync(
        string subjectDigestSha256,
        string eventKind,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of attestation resolution containing DSSE provenance and trust metadata.
/// </summary>
public sealed class AttestationResolution
{
    /// <summary>DSSE envelope provenance (key info, payload type, optional Rekor entry).</summary>
    public required DsseProvenance Dsse { get; init; }
    /// <summary>Verification/trust metadata for the attestation.</summary>
    public required TrustInfo Trust { get; init; }
    /// <summary>Optional identifier of the resolved attestation record.</summary>
    public string? AttestationId { get; init; }
    /// <summary>When the resolution happened, if recorded.</summary>
    public DateTimeOffset? ResolvedAtUtc { get; init; }
}

View File

@@ -37,6 +37,25 @@ public static class MongoIndexes
new CreateIndexOptions
{
Name = "events_by_rekor_logindex"
}),
new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("provenance.dsse.envelopeDigest"),
new CreateIndexOptions
{
Name = "events_by_envelope_digest",
Sparse = true
}),
new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Descending("ts")
.Ascending("kind")
.Ascending("trust.verified"),
new CreateIndexOptions
{
Name = "events_by_ts_kind_verified"
})
};

View File

@@ -0,0 +1,72 @@
using StellaOps.Provenance.Mongo;
namespace StellaOps.Events.Mongo;
/// <summary>
/// Stub implementation of <see cref="IAttestationResolver"/> for testing and local development.
/// Always returns null (no attestation found) unless configured with test data.
/// </summary>
public sealed class StubAttestationResolver : IAttestationResolver
{
    private readonly Dictionary<string, AttestationResolution> _testData = new(StringComparer.OrdinalIgnoreCase);

    /// <summary>Looks up a previously registered resolution; null when none matches.</summary>
    public Task<AttestationResolution?> ResolveAsync(
        string subjectDigestSha256,
        string eventKind,
        CancellationToken cancellationToken = default)
    {
        _testData.TryGetValue(BuildKey(subjectDigestSha256, eventKind), out var resolution);
        return Task.FromResult<AttestationResolution?>(resolution);
    }

    /// <summary>
    /// Add test data for a subject/kind combination.
    /// </summary>
    public void AddTestResolution(string subjectDigestSha256, string eventKind, AttestationResolution resolution)
        => _testData[BuildKey(subjectDigestSha256, eventKind)] = resolution;

    /// <summary>
    /// Create a sample resolution for testing.
    /// </summary>
    public static AttestationResolution CreateSampleResolution(
        string envelopeDigest,
        long? rekorLogIndex = null,
        string? rekorUuid = null)
    {
        // Rekor info is only populated when both coordinates are supplied.
        DsseRekorInfo? rekor = null;
        if (rekorLogIndex is not null && rekorUuid is not null)
        {
            rekor = new DsseRekorInfo
            {
                LogIndex = rekorLogIndex.Value,
                Uuid = rekorUuid,
                IntegratedTime = DateTimeOffset.UtcNow.ToUnixTimeSeconds()
            };
        }

        return new AttestationResolution
        {
            Dsse = new DsseProvenance
            {
                EnvelopeDigest = envelopeDigest,
                PayloadType = "application/vnd.in-toto+json",
                Key = new DsseKeyInfo
                {
                    KeyId = "cosign:SHA256-PKIX:test-key-id",
                    Issuer = "test-issuer",
                    Algo = "ECDSA"
                },
                Rekor = rekor
            },
            Trust = new TrustInfo
            {
                Verified = true,
                Verifier = "Authority@stella",
                Witnesses = 1,
                PolicyScore = 0.95
            },
            AttestationId = $"att:{Guid.NewGuid():N}",
            ResolvedAtUtc = DateTimeOffset.UtcNow
        };
    }

    // Keys combine digest and kind; case-insensitivity comes from the dictionary comparer.
    private static string BuildKey(string subjectDigestSha256, string eventKind)
        => $"{subjectDigestSha256}:{eventKind}";
}

View File

@@ -0,0 +1,8 @@
<Project>
<PropertyGroup>
<!-- Override repo defaults to keep telemetry tests self-contained -->
<UseConcelierTestInfra>false</UseConcelierTestInfra>
<ConcelierTestingPath></ConcelierTestingPath>
<ConcelierSharedTestsPath></ConcelierSharedTestsPath>
</PropertyGroup>
</Project>

View File

@@ -0,0 +1,27 @@
<Project>
<!-- Prevent global plugin/test copy targets from firing for telemetry tests -->
<Target Name="DisablePluginCopyTargets" BeforeTargets="ConcelierCopyPluginArtifacts;AuthorityCopyPluginArtifacts;NotifyCopyPluginArtifacts;ScannerCopyBuildxPluginArtifacts;ScannerCopyOsAnalyzerPluginArtifacts;ScannerCopyLangAnalyzerPluginArtifacts">
<PropertyGroup>
<ConcelierPluginOutputRoot></ConcelierPluginOutputRoot>
<AuthorityPluginOutputRoot></AuthorityPluginOutputRoot>
<NotifyPluginOutputRoot></NotifyPluginOutputRoot>
<ScannerBuildxPluginOutputRoot></ScannerBuildxPluginOutputRoot>
<ScannerOsAnalyzerPluginOutputRoot></ScannerOsAnalyzerPluginOutputRoot>
<ScannerLangAnalyzerPluginOutputRoot></ScannerLangAnalyzerPluginOutputRoot>
<IsConcelierPlugin>false</IsConcelierPlugin>
<IsAuthorityPlugin>false</IsAuthorityPlugin>
<IsNotifyPlugin>false</IsNotifyPlugin>
<IsScannerBuildxPlugin>false</IsScannerBuildxPlugin>
<IsScannerOsAnalyzerPlugin>false</IsScannerOsAnalyzerPlugin>
<IsScannerLangAnalyzerPlugin>false</IsScannerLangAnalyzerPlugin>
</PropertyGroup>
<ItemGroup>
<ConcelierPluginArtifacts Remove="@(ConcelierPluginArtifacts)" />
<AuthorityPluginArtifacts Remove="@(AuthorityPluginArtifacts)" />
<NotifyPluginArtifacts Remove="@(NotifyPluginArtifacts)" />
<ScannerBuildxPluginArtifacts Remove="@(ScannerBuildxPluginArtifacts)" />
<ScannerOsAnalyzerPluginArtifacts Remove="@(ScannerOsAnalyzerPluginArtifacts)" />
<ScannerLangAnalyzerPluginArtifacts Remove="@(ScannerLangAnalyzerPluginArtifacts)" />
</ItemGroup>
</Target>
</Project>

View File

@@ -0,0 +1,51 @@
using System.Diagnostics.Metrics;
using Microsoft.Extensions.Options;
using StellaOps.Telemetry.Core;
public class MetricLabelGuardTests
{
    // Shared factory so both tests build the guard from identical options wiring.
    private static MetricLabelGuard CreateGuard(int maxDistinct, int maxLength)
        => new(Options.Create(new StellaOpsTelemetryOptions
        {
            Labels = new StellaOpsTelemetryOptions.MetricLabelOptions
            {
                MaxDistinctValuesPerLabel = maxDistinct,
                MaxLabelLength = maxLength
            }
        }));

    [Fact]
    public void Coerce_Enforces_Cardinality_Limit()
    {
        var guard = CreateGuard(maxDistinct: 2, maxLength: 8);

        Assert.Equal("/api/a", guard.Coerce("route", "/api/a"));
        Assert.Equal("/api/b", guard.Coerce("route", "/api/b"));
        // Third distinct value exceeds the per-label budget and collapses to "other".
        Assert.Equal("other", guard.Coerce("route", "/api/c"));
    }

    [Fact]
    public void RecordRequestDuration_Truncates_Long_Labels()
    {
        var guard = CreateGuard(maxDistinct: 5, maxLength: 5);

        using var meter = new Meter("test");
        var histogram = meter.CreateHistogram<double>("request.duration");
        histogram.RecordRequestDuration(guard, 42, "verylongroute", "GET", "200", "ok");

        // No exception means recording succeeded; label value should be truncated internally to 5 chars.
        Assert.Equal("veryl", guard.Coerce("route", "verylongroute"));
    }
}

View File

@@ -5,8 +5,18 @@
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
<!-- Opt out of Concelier test infra to avoid pulling large cross-module graph -->
<UseConcelierTestInfra>false</UseConcelierTestInfra>
</PropertyGroup>
<ItemGroup>
<FrameworkReference Include="Microsoft.AspNetCore.App" />
<!-- Prevent repo-wide test infra from pulling Concelier shared test packages -->
<PackageReference Remove="Mongo2Go" />
<PackageReference Remove="Microsoft.AspNetCore.Mvc.Testing" />
<PackageReference Remove="Microsoft.Extensions.TimeProvider.Testing" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="xunit" Version="2.9.2" />

View File

@@ -0,0 +1,52 @@
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using Microsoft.Extensions.Options;
using StellaOps.Telemetry.Core;
public class TelemetryPropagationHandlerTests
{
    [Fact]
    public async Task Handler_Forwards_Context_Headers()
    {
        const string traceId = "00-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-bbbbbbbbbbbbbbbb-01";

        var options = Options.Create(new StellaOpsTelemetryOptions());
        var accessor = new TelemetryContextAccessor
        {
            Current = new TelemetryContext(traceId, "tenant-b", "actor-b", "rule-b")
        };

        var terminal = new RecordingHandler();
        var handler = new TelemetryPropagationHandler(accessor, options)
        {
            InnerHandler = terminal
        };
        var invoker = new HttpMessageInvoker(handler);

        await invoker.SendAsync(new HttpRequestMessage(HttpMethod.Get, "http://example.com"), CancellationToken.None);

        // Default propagation header names are lower-case, matching RecordingHandler's key normalization.
        var propagation = options.Value.Propagation;
        Assert.Equal(traceId, terminal.SeenHeaders[propagation.TraceIdHeader]);
        Assert.Equal("tenant-b", terminal.SeenHeaders[propagation.TenantHeader]);
        Assert.Equal("actor-b", terminal.SeenHeaders[propagation.ActorHeader]);
        Assert.Equal("rule-b", terminal.SeenHeaders[propagation.ImposedRuleHeader]);
    }

    /// <summary>
    /// Terminal handler that records request headers (keys lower-cased) and returns 200 OK.
    /// </summary>
    private sealed class RecordingHandler : HttpMessageHandler
    {
        public Dictionary<string, string?> SeenHeaders { get; } = new();

        protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
        {
            foreach (var (key, values) in request.Headers)
            {
                SeenHeaders[key.ToLowerInvariant()] = values.FirstOrDefault();
            }

            return Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK));
        }
    }
}

View File

@@ -0,0 +1,43 @@
using System.Diagnostics;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Telemetry.Core;
public class TelemetryPropagationMiddlewareTests
{
    [Fact]
    public async Task Middleware_Populates_Accessor_And_Activity_Tags()
    {
        var options = Options.Create(new StellaOpsTelemetryOptions());
        var accessor = new TelemetryContextAccessor();

        // Downstream delegate asserts while the ambient context is still set.
        RequestDelegate next = async _ =>
        {
            Assert.NotNull(accessor.Current);
            Assert.Equal("tenant-a", accessor.Current!.TenantId);
            Assert.Equal("service-x", accessor.Current.Actor);
            Assert.Equal("policy-42", accessor.Current.ImposedRule);
            await Task.CompletedTask;
        };

        var middleware = new TelemetryPropagationMiddleware(
            next,
            accessor,
            options,
            NullLogger<TelemetryPropagationMiddleware>.Instance);

        var propagation = options.Value.Propagation;
        var httpContext = new DefaultHttpContext();
        httpContext.Request.Headers[propagation.TenantHeader] = "tenant-a";
        httpContext.Request.Headers[propagation.ActorHeader] = "service-x";
        httpContext.Request.Headers[propagation.ImposedRuleHeader] = "policy-42";
        httpContext.Request.Headers[propagation.TraceIdHeader] = "00-0123456789abcdef0123456789abcdef-0123456789abcdef-01";

        Assert.Null(accessor.Current);
        await middleware.InvokeAsync(httpContext);
        Assert.Null(accessor.Current); // cleared after invocation

        // NOTE(review): these assume the middleware leaves an Activity current (not stopped)
        // after InvokeAsync returns — confirm against the middleware implementation.
        Assert.NotNull(Activity.Current);
        Assert.Equal("tenant-a", Activity.Current!.GetTagItem("tenant_id"));
        Assert.Equal("service-x", Activity.Current.GetTagItem("actor"));
        Assert.Equal("policy-42", Activity.Current.GetTagItem("imposed_rule"));
    }
}

View File

@@ -0,0 +1,81 @@
using System.Collections.Concurrent;
using System.Diagnostics.Metrics;
using Microsoft.Extensions.Options;
namespace StellaOps.Telemetry.Core;
/// <summary>
/// Guards metric label cardinality to keep exporters deterministic and affordable.
/// </summary>
public sealed class MetricLabelGuard
{
    // Deterministic bucket emitted once a label key's distinct-value budget is exhausted.
    private const string OverflowBucket = "other";

    private readonly int _maxValuesPerLabel;
    private readonly int _maxLabelLength;
    private readonly ConcurrentDictionary<string, ConcurrentDictionary<string, byte>> _seen;

    /// <summary>
    /// Initializes a new instance of the <see cref="MetricLabelGuard"/> class.
    /// </summary>
    /// <param name="options">Telemetry options supplying label budgets; missing values fall back to defaults, with a floor of 1.</param>
    public MetricLabelGuard(IOptions<StellaOpsTelemetryOptions> options)
    {
        var labelOptions = options?.Value?.Labels ?? new StellaOpsTelemetryOptions.MetricLabelOptions();
        _maxValuesPerLabel = Math.Max(1, labelOptions.MaxDistinctValuesPerLabel);
        _maxLabelLength = Math.Max(1, labelOptions.MaxLabelLength);
        _seen = new ConcurrentDictionary<string, ConcurrentDictionary<string, byte>>(StringComparer.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Adds a label value if within budget; otherwise falls back to a deterministic bucket label.
    /// </summary>
    /// <param name="key">Label key; returned unchanged when null or whitespace.</param>
    /// <param name="value">Raw label value; trimmed and truncated to the configured maximum length before budgeting.</param>
    /// <returns>The sanitized value, or <c>"other"</c> when the key's distinct-value budget is exhausted.</returns>
    public string Coerce(string key, string? value)
    {
        if (string.IsNullOrWhiteSpace(key))
        {
            return key;
        }

        var sanitized = (value ?? string.Empty).Trim();
        if (sanitized.Length > _maxLabelLength)
        {
            sanitized = sanitized[.._maxLabelLength];
        }

        var perKey = _seen.GetOrAdd(key, _ => new ConcurrentDictionary<string, byte>(StringComparer.Ordinal));

        // Fast path: already-budgeted values always pass. Checking membership first avoids
        // ConcurrentDictionary.Count — which takes every bucket lock — on the hot metrics path,
        // and removes the ContainsKey+TryAdd double lookup the common case previously paid.
        if (perKey.ContainsKey(sanitized))
        {
            return sanitized;
        }

        if (perKey.Count >= _maxValuesPerLabel)
        {
            return OverflowBucket;
        }

        // NOTE(review): count check and TryAdd are not atomic, so concurrent first-time values
        // can briefly overshoot the budget by a few entries — same trade-off as the original.
        perKey.TryAdd(sanitized, 0);
        return sanitized;
    }
}
/// <summary>
/// Metric helpers aligned with StellaOps golden-signal defaults.
/// </summary>
public static class TelemetryMetrics
{
    /// <summary>
    /// Records a request duration histogram with cardinality-safe labels.
    /// </summary>
    /// <param name="histogram">Histogram receiving the measurement.</param>
    /// <param name="guard">Guard that trims and budget-checks every label value.</param>
    /// <param name="durationMs">Elapsed request time in milliseconds.</param>
    /// <param name="route">Request route label value.</param>
    /// <param name="verb">HTTP verb label value.</param>
    /// <param name="statusCode">HTTP status code label value.</param>
    /// <param name="result">Outcome label value.</param>
    public static void RecordRequestDuration(
        this Histogram<double> histogram,
        MetricLabelGuard guard,
        double durationMs,
        string route,
        string verb,
        string statusCode,
        string result)
    {
        histogram.Record(
            durationMs,
            new KeyValuePair<string, object?>("route", guard.Coerce("route", route)),
            new KeyValuePair<string, object?>("verb", guard.Coerce("verb", verb)),
            new KeyValuePair<string, object?>("status_code", guard.Coerce("status_code", statusCode)),
            new KeyValuePair<string, object?>("result", guard.Coerce("result", result)));
    }
}

View File

@@ -6,6 +6,10 @@
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<FrameworkReference Include="Microsoft.AspNetCore.App" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
<PackageReference Include="OpenTelemetry.Extensions.Hosting" Version="1.12.0" />

View File

@@ -12,6 +12,16 @@ public sealed class StellaOpsTelemetryOptions
/// </summary>
public CollectorOptions Collector { get; set; } = new();
/// <summary>
/// Gets propagation-specific settings used by middleware and handlers.
/// </summary>
public PropagationOptions Propagation { get; set; } = new();
/// <summary>
/// Gets metric label guard settings to prevent cardinality explosions.
/// </summary>
public MetricLabelOptions Labels { get; set; } = new();
/// <summary>
/// Options describing how the OTLP collector exporter should be configured.
/// </summary>
@@ -63,6 +73,48 @@ public sealed class StellaOpsTelemetryOptions
return Uri.TryCreate(Endpoint.Trim(), UriKind.Absolute, out endpoint);
}
}
/// <summary>
/// Options controlling how telemetry context is propagated via HTTP headers.
/// </summary>
public sealed class PropagationOptions
{
    /// <summary>
    /// Gets or sets the header name carrying the tenant identifier. Defaults to <c>x-stella-tenant</c>.
    /// </summary>
    public string TenantHeader { get; set; } = "x-stella-tenant";

    /// <summary>
    /// Gets or sets the header name carrying the actor (user/service) identifier. Defaults to <c>x-stella-actor</c>.
    /// </summary>
    public string ActorHeader { get; set; } = "x-stella-actor";

    /// <summary>
    /// Gets or sets the header name carrying imposed rule/decision metadata. Defaults to <c>x-stella-imposed-rule</c>.
    /// </summary>
    public string ImposedRuleHeader { get; set; } = "x-stella-imposed-rule";

    /// <summary>
    /// Gets or sets the header name carrying the trace identifier when no Activity is present. Defaults to <c>x-stella-traceid</c>.
    /// </summary>
    public string TraceIdHeader { get; set; } = "x-stella-traceid";
}
/// <summary>
/// Options used to constrain metric label cardinality.
/// </summary>
public sealed class MetricLabelOptions
{
    /// <summary>
    /// Gets or sets the maximum number of distinct values tracked per label key. Defaults to 50.
    /// </summary>
    public int MaxDistinctValuesPerLabel { get; set; } = 50;

    /// <summary>
    /// Gets or sets the maximum length of any individual label value; longer values are trimmed. Defaults to 64.
    /// </summary>
    public int MaxLabelLength { get; set; } = 64;
}
}
/// <summary>

Some files were not shown because too many files have changed in this diff Show More