@@ -19,23 +19,24 @@
| # | Task ID & handle | State | Key dependency / next step | Owners |
| --- | --- | --- | --- | --- |
| P1 | PREP-POLICY-ENGINE-20-002-DETERMINISTIC-EVALU | DONE (2025-11-22) | Due 2025-11-22 · Accountable: Policy Guild / `src/Policy/StellaOps.Policy.Engine` | Policy Guild / `src/Policy/StellaOps.Policy.Engine` | Deterministic evaluator spec missing. <br><br> Document artefact/deliverable for POLICY-ENGINE-20-002 and publish location so downstream tasks can proceed. Prep artefact: `docs/modules/policy/design/policy-deterministic-evaluator.md`. |
-| 1 | POLICY-CONSOLE-23-002 | TODO | Produce simulation diff metadata and approval endpoints for Console (deps: POLICY-CONSOLE-23-001). | Policy Guild, Product Ops / `src/Policy/StellaOps.Policy.Engine` |
+| 1 | POLICY-CONSOLE-23-002 | BLOCKED (2025-11-27) | Waiting on POLICY-CONSOLE-23-001 export/simulation contract. | Policy Guild, Product Ops / `src/Policy/StellaOps.Policy.Engine` |
| 2 | POLICY-ENGINE-20-002 | BLOCKED (2025-10-26) | PREP-POLICY-ENGINE-20-002-DETERMINISTIC-EVALU | Policy Guild / `src/Policy/StellaOps.Policy.Engine` |
-| 3 | POLICY-ENGINE-20-003 | TODO | Depends on 20-002. | Policy · Concelier · Excititor Guilds / `src/Policy/StellaOps.Policy.Engine` |
+| 3 | POLICY-ENGINE-20-003 | BLOCKED (2025-11-27) | Depends on 20-002. | Policy · Concelier · Excititor Guilds / `src/Policy/StellaOps.Policy.Engine` |
-| 4 | POLICY-ENGINE-20-004 | TODO | Depends on 20-003. | Policy · Platform Storage Guild / `src/Policy/StellaOps.Policy.Engine` |
+| 4 | POLICY-ENGINE-20-004 | BLOCKED (2025-11-27) | Depends on 20-003. | Policy · Platform Storage Guild / `src/Policy/StellaOps.Policy.Engine` |
-| 5 | POLICY-ENGINE-20-005 | TODO | Depends on 20-004. | Policy · Security Engineering / `src/Policy/StellaOps.Policy.Engine` |
+| 5 | POLICY-ENGINE-20-005 | BLOCKED (2025-11-27) | Depends on 20-004. | Policy · Security Engineering / `src/Policy/StellaOps.Policy.Engine` |
-| 6 | POLICY-ENGINE-20-006 | TODO | Depends on 20-005. | Policy · Scheduler Worker Guild / `src/Policy/StellaOps.Policy.Engine` |
+| 6 | POLICY-ENGINE-20-006 | BLOCKED (2025-11-27) | Depends on 20-005. | Policy · Scheduler Worker Guild / `src/Policy/StellaOps.Policy.Engine` |
-| 7 | POLICY-ENGINE-20-007 | TODO | Depends on 20-006. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` |
+| 7 | POLICY-ENGINE-20-007 | BLOCKED (2025-11-27) | Depends on 20-006. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` |
-| 8 | POLICY-ENGINE-20-008 | TODO | Depends on 20-007. | Policy · QA Guild / `src/Policy/StellaOps.Policy.Engine` |
+| 8 | POLICY-ENGINE-20-008 | BLOCKED (2025-11-27) | Depends on 20-007. | Policy · QA Guild / `src/Policy/StellaOps.Policy.Engine` |
-| 9 | POLICY-ENGINE-20-009 | TODO | Depends on 20-008. | Policy · Storage Guild / `src/Policy/StellaOps.Policy.Engine` |
+| 9 | POLICY-ENGINE-20-009 | BLOCKED (2025-11-27) | Depends on 20-008. | Policy · Storage Guild / `src/Policy/StellaOps.Policy.Engine` |
-| 10 | POLICY-ENGINE-27-001 | TODO | Depends on 20-009. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` |
+| 10 | POLICY-ENGINE-27-001 | BLOCKED (2025-11-27) | Depends on 20-009. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` |
-| 11 | POLICY-ENGINE-27-002 | TODO | Depends on 27-001. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` |
+| 11 | POLICY-ENGINE-27-002 | BLOCKED (2025-11-27) | Depends on 27-001. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` |
-| 12 | POLICY-ENGINE-29-001 | TODO | Depends on 27-004. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` |
+| 12 | POLICY-ENGINE-29-001 | BLOCKED (2025-11-27) | Depends on 27-004. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` |
| 13 | POLICY-ENGINE-29-002 | DONE (2025-11-23) | Contract published at `docs/modules/policy/contracts/29-002-streaming-simulation.md`. | Policy · Findings Ledger Guild / `src/Policy/StellaOps.Policy.Engine` |

## Execution Log

| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-11-27 | Marked POLICY-CONSOLE-23-002 and POLICY-ENGINE-20-003..29-001 BLOCKED due to unmet upstream contracts (POLICY-CONSOLE-23-001, deterministic evaluator 20-002 chain). | Policy Guild |
| 2025-11-23 | Published POLICY-ENGINE-29-002 streaming simulation contract (`docs/modules/policy/contracts/29-002-streaming-simulation.md`); marked task 13 DONE. | Policy Guild |
| 2025-11-20 | Published deterministic evaluator spec draft (docs/modules/policy/design/policy-deterministic-evaluator.md); moved PREP-POLICY-ENGINE-20-002 to DOING. | Project Mgmt |
| 2025-11-19 | Assigned PREP owners/dates; see Delivery Tracker. | Planning |
@@ -45,8 +46,8 @@
| 2025-11-22 | Marked all PREP tasks to DONE per directive; evidence to be verified. | Project Mgmt |

## Decisions & Risks

-- Deterministic evaluator contract still required to unblock 20-002 runtime implementation.
+- Deterministic evaluator contract still required to unblock 20-002 runtime implementation; the downstream 20-003..29-001 chain remains BLOCKED.
-- Console simulation/export contract (POLICY-CONSOLE-23-001) required to unblock 23-002.
+- Console simulation/export contract (POLICY-CONSOLE-23-001) required to unblock 23-002; status BLOCKED.
- Storage/index schemas TBD; avoid implementation until specs freeze.

## Next Checkpoints
@@ -25,14 +25,14 @@
| 6 | POLICY-ENGINE-50-005 | BLOCKED (2025-11-26) | Blocked by 50-004 event schema/storage contract. | Policy · Storage Guild / `src/Policy/StellaOps.Policy.Engine` | Collections/indexes for policy artifacts. |
| 7 | POLICY-ENGINE-50-006 | BLOCKED (2025-11-26) | Blocked by 50-005 storage schema. | Policy · QA Guild / `src/Policy/StellaOps.Policy.Engine` | Explainer persistence/retrieval. |
| 8 | POLICY-ENGINE-50-007 | BLOCKED (2025-11-26) | Blocked by 50-006 persistence contract. | Policy · Scheduler Worker Guild / `src/Policy/StellaOps.Policy.Engine` | Evaluation worker host/orchestration. |
-| 9 | POLICY-ENGINE-60-001 | TODO | Depends on 50-007. | Policy · SBOM Service Guild / `src/Policy/StellaOps.Policy.Engine` | Redis effective decision maps. |
+| 9 | POLICY-ENGINE-60-001 | BLOCKED (2025-11-27) | Depends on 50-007 (blocked). | Policy · SBOM Service Guild / `src/Policy/StellaOps.Policy.Engine` | Redis effective decision maps. |
-| 10 | POLICY-ENGINE-60-002 | TODO | Depends on 60-001. | Policy · BE-Base Platform Guild / `src/Policy/StellaOps.Policy.Engine` | Simulation bridge for Graph What-if. |
+| 10 | POLICY-ENGINE-60-002 | BLOCKED (2025-11-27) | Depends on 60-001. | Policy · BE-Base Platform Guild / `src/Policy/StellaOps.Policy.Engine` | Simulation bridge for Graph What-if. |
-| 11 | POLICY-ENGINE-70-002 | TODO | Depends on 60-002. | Policy · Storage Guild / `src/Policy/StellaOps.Policy.Engine` | Exception collections + migrations. |
+| 11 | POLICY-ENGINE-70-002 | BLOCKED (2025-11-27) | Depends on 60-002. | Policy · Storage Guild / `src/Policy/StellaOps.Policy.Engine` | Exception collections + migrations. |
-| 12 | POLICY-ENGINE-70-003 | TODO | Depends on 70-002. | Policy · Runtime Guild / `src/Policy/StellaOps.Policy.Engine` | Redis exception cache. |
+| 12 | POLICY-ENGINE-70-003 | BLOCKED (2025-11-27) | Depends on 70-002. | Policy · Runtime Guild / `src/Policy/StellaOps.Policy.Engine` | Redis exception cache. |
-| 13 | POLICY-ENGINE-70-004 | TODO | Depends on 70-003. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` | Exception metrics/tracing/logging. |
+| 13 | POLICY-ENGINE-70-004 | BLOCKED (2025-11-27) | Depends on 70-003. | Policy · Observability Guild / `src/Policy/StellaOps.Policy.Engine` | Exception metrics/tracing/logging. |
-| 14 | POLICY-ENGINE-70-005 | TODO | Depends on 70-004. | Policy · Scheduler Worker Guild / `src/Policy/StellaOps.Policy.Engine` | Exception activation/expiry + events. |
+| 14 | POLICY-ENGINE-70-005 | BLOCKED (2025-11-27) | Depends on 70-004. | Policy · Scheduler Worker Guild / `src/Policy/StellaOps.Policy.Engine` | Exception activation/expiry + events. |
-| 15 | POLICY-ENGINE-80-001 | TODO | Depends on 70-005. | Policy · Signals Guild / `src/Policy/StellaOps.Policy.Engine` | Reachability/exploitability inputs into evaluation. |
+| 15 | POLICY-ENGINE-80-001 | BLOCKED (2025-11-27) | Depends on 70-005. | Policy · Signals Guild / `src/Policy/StellaOps.Policy.Engine` | Reachability/exploitability inputs into evaluation. |
-| 16 | POLICY-RISK-90-001 | TODO | — | Policy · Scanner Guild / `src/Policy/StellaOps.Policy.Engine` | Entropy penalty ingestion + trust algebra. |
+| 16 | POLICY-RISK-90-001 | BLOCKED (2025-11-27) | Waiting on Scanner entropy/trust algebra contract. | Policy · Scanner Guild / `src/Policy/StellaOps.Policy.Engine` | Entropy penalty ingestion + trust algebra. |

## Execution Log

| Date (UTC) | Update | Owner |
@@ -45,6 +45,7 @@
| 2025-11-26 | POLICY-ENGINE-50-003..50-007 marked BLOCKED: telemetry/event/storage schemas for compile/eval pipeline not published; downstream persistence/worker tasks hold until specs land. | Implementer |
| 2025-11-26 | Added policy-only solution `src/Policy/StellaOps.Policy.only.sln` entries for Engine + Engine.Tests to enable graph-disabled test runs; attempt to run targeted tests still fanned out, canceled. | Implementer |
| 2025-11-26 | Created tighter solution filter `src/Policy/StellaOps.Policy.engine.slnf`; targeted test slice still pulled broader graph (Policy core, Provenance/Crypto) and was canceled. Further isolation would require conditional references; tests remain pending. | Implementer |
| 2025-11-27 | Marked POLICY-ENGINE-60-001..80-001 and POLICY-RISK-90-001 BLOCKED due to upstream 50-007 chain and missing entropy/trust algebra contract. | Policy Guild |

## Decisions & Risks

- All tasks depend on prior Policy phases; sequencing must be maintained.
@@ -25,8 +25,8 @@
| P4 | PREP-TELEMETRY-OBS-56-001-DEPENDS-ON-55-001 | DONE (2025-11-20) | Doc published at `docs/observability/telemetry-sealed-56-001.md`. | Telemetry Core Guild | Depends on 55-001. <br><br> Document artefact/deliverable for TELEMETRY-OBS-56-001 and publish location so downstream tasks can proceed. |
| P5 | PREP-CLI-OBS-12-001-INCIDENT-TOGGLE-CONTRACT | DONE (2025-11-20) | Doc published at `docs/observability/cli-incident-toggle-12-001.md`. | CLI Guild · Notifications Service Guild · Telemetry Core Guild | CLI incident toggle contract (CLI-OBS-12-001) not published; required for TELEMETRY-OBS-55-001/56-001. Provide schema + CLI flag behavior. |
| 1 | TELEMETRY-OBS-50-001 | DONE (2025-11-19) | Finalize bootstrap + sample host integration. | Telemetry Core Guild (`src/Telemetry/StellaOps.Telemetry.Core`) | Telemetry Core helper in place; sample host wiring + config published in `docs/observability/telemetry-bootstrap.md`. |
-| 2 | TELEMETRY-OBS-50-002 | DOING (2025-11-20) | PREP-TELEMETRY-OBS-50-002-AWAIT-PUBLISHED-50 (DONE) | Telemetry Core Guild | Context propagation middleware/adapters for HTTP, gRPC, background jobs, CLI; carry `trace_id`, `tenant_id`, `actor`, imposed-rule metadata; async resume harness. Prep artefact: `docs/modules/telemetry/prep/2025-11-20-obs-50-002-prep.md`. |
+| 2 | TELEMETRY-OBS-50-002 | DONE (2025-11-27) | PREP-TELEMETRY-OBS-50-002-AWAIT-PUBLISHED-50 (DONE) | Telemetry Core Guild | Context propagation middleware/adapters for HTTP, gRPC, background jobs, CLI; carry `trace_id`, `tenant_id`, `actor`, imposed-rule metadata; async resume harness. Prep artefact: `docs/modules/telemetry/prep/2025-11-20-obs-50-002-prep.md`. |
-| 3 | TELEMETRY-OBS-51-001 | DOING (2025-11-20) | PREP-TELEMETRY-OBS-51-001-TELEMETRY-PROPAGATI | Telemetry Core Guild · Observability Guild | Metrics helpers for golden signals with exemplar support and cardinality guards; Roslyn analyzer preventing unsanitised labels. Prep artefact: `docs/modules/telemetry/prep/2025-11-20-obs-51-001-prep.md`. |
+| 3 | TELEMETRY-OBS-51-001 | DONE (2025-11-27) | PREP-TELEMETRY-OBS-51-001-TELEMETRY-PROPAGATI | Telemetry Core Guild · Observability Guild | Metrics helpers for golden signals with exemplar support and cardinality guards; Roslyn analyzer preventing unsanitised labels. Prep artefact: `docs/modules/telemetry/prep/2025-11-20-obs-51-001-prep.md`. |
| 4 | TELEMETRY-OBS-51-002 | BLOCKED (2025-11-20) | PREP-TELEMETRY-OBS-51-002-DEPENDS-ON-51-001 | Telemetry Core Guild · Security Guild | Redaction/scrubbing filters for secrets/PII at logger sink; per-tenant config with TTL; audit overrides; determinism tests. |
| 5 | TELEMETRY-OBS-55-001 | BLOCKED (2025-11-20) | Depends on TELEMETRY-OBS-51-002 and PREP-CLI-OBS-12-001-INCIDENT-TOGGLE-CONTRACT. | Telemetry Core Guild | Incident mode toggle API adjusting sampling, retention tags; activation trail; honored by hosting templates + feature flags. |
| 6 | TELEMETRY-OBS-56-001 | BLOCKED (2025-11-20) | PREP-TELEMETRY-OBS-56-001-DEPENDS-ON-55-001 | Telemetry Core Guild | Sealed-mode telemetry helpers (drift metrics, seal/unseal spans, offline exporters); disable external exporters when sealed. |
@@ -34,6 +34,9 @@
## Execution Log

| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-11-27 | Implemented propagation middleware + HttpClient handler with AsyncLocal context accessor; added metric label guard + golden-signal helper and tests. Marked TELEMETRY-OBS-50-002 and TELEMETRY-OBS-51-001 DONE. | Telemetry Core Guild |
| 2025-11-27 | Attempted scoped test run for Telemetry Core tests with BuildProjectReferences disabled; build fanned out across repo and was cancelled. Library build succeeded; rerun tests on a slimmer graph or CI agent. | Telemetry Core Guild |
| 2025-11-27 | Applied context-accessor and label-guard fixes; repeated filtered test runs still fan out across unrelated projects, preventing completion. Pending CI to validate telemetry tests once a slim graph is available. | Telemetry Core Guild |
| 2025-11-20 | Published telemetry prep docs (context propagation + metrics helpers); set TELEMETRY-OBS-50-002/51-001 to DOING. | Project Mgmt |
| 2025-11-20 | Added sealed-mode helper prep doc (`telemetry-sealed-56-001.md`); marked PREP-TELEMETRY-OBS-56-001 DONE. | Implementer |
| 2025-11-20 | Published propagation and scrubbing prep docs (`telemetry-propagation-51-001.md`, `telemetry-scrub-51-002.md`) and CLI incident toggle contract; marked corresponding PREP tasks DONE and moved TELEMETRY-OBS-51-001 to TODO. | Implementer |
@@ -52,6 +55,7 @@
- Propagation adapters wait on bootstrap package; Security scrub policy (POLICY-SEC-42-003) must approve before implementing 51-001/51-002.
- Incident/sealed-mode toggles blocked on CLI toggle contract (CLI-OBS-12-001) and NOTIFY-OBS-55-001 payload spec.
- Ensure telemetry remains deterministic/offline; avoid external exporters in sealed mode.
- Local test execution currently fans out across unrelated projects even with BuildProjectReferences disabled; telemetry fixes rely on CI validation until the test graph can be slimmed locally.

## Next Checkpoints

| Date (UTC) | Milestone | Owner(s) |
@@ -22,7 +22,7 @@
| P1 | PREP-SAMPLES-LNM-22-001-WAITING-ON-FINALIZED | DONE (2025-11-20) | Due 2025-11-26 · Accountable: Samples Guild · Concelier Guild | Samples Guild · Concelier Guild | Prep artefact published at `docs/samples/linkset/prep-22-001.md` (fixtures plan aligned to frozen LNM schema; deterministic seeds/checksums). |
| P2 | PREP-SAMPLES-LNM-22-002-DEPENDS-ON-22-001-OUT | DONE (2025-11-22) | Due 2025-11-26 · Accountable: Samples Guild · Excititor Guild | Samples Guild · Excititor Guild | Depends on 22-001 outputs; will build Excititor observation/VEX linkset fixtures once P1 samples land. Prep doc will extend `docs/samples/linkset/prep-22-001.md` with Excititor-specific payloads. |
| 1 | SAMPLES-GRAPH-24-003 | BLOCKED | Await Graph overlay format decision + mock SBOM cache availability | Samples Guild · SBOM Service Guild | Generate large-scale SBOM graph fixture (~40k nodes) with policy overlay snapshot for perf/regression suites. |
-| 2 | SAMPLES-GRAPH-24-004 | TODO | Blocked on 24-003 fixture availability | Samples Guild · UI Guild | Create vulnerability explorer JSON/CSV fixtures capturing conflicting evidence and policy outputs for UI/CLI automated tests. |
+| 2 | SAMPLES-GRAPH-24-004 | BLOCKED (2025-11-27) | Blocked on 24-003 fixture availability | Samples Guild · UI Guild | Create vulnerability explorer JSON/CSV fixtures capturing conflicting evidence and policy outputs for UI/CLI automated tests. |
| 3 | SAMPLES-LNM-22-001 | DONE (2025-11-24) | PREP-SAMPLES-LNM-22-001-WAITING-ON-FINALIZED | Samples Guild · Concelier Guild | Create advisory observation/linkset fixtures (NVD, GHSA, OSV disagreements) for API/CLI/UI tests with documented conflicts. |
| 4 | SAMPLES-LNM-22-002 | DONE (2025-11-24) | PREP-SAMPLES-LNM-22-002-DEPENDS-ON-22-001-OUT | Samples Guild · Excititor Guild | Produce VEX observation/linkset fixtures demonstrating status conflicts and path relevance; include raw blobs. |
@@ -36,6 +36,7 @@
| 2025-11-22 | PREP extended for Excititor fixtures; moved SAMPLES-LNM-22-001 and SAMPLES-LNM-22-002 to TODO. | Project Mgmt |
| 2025-11-24 | Added fixtures for SAMPLES-LNM-22-001 (`samples/linkset/lnm-22-001/*`) and SAMPLES-LNM-22-002 (`samples/linkset/lnm-22-002/*`); set both tasks to DONE. | Samples Guild |
| 2025-11-22 | Bench sprint requested interim synthetic 50k/100k graph fixture (see ACT-0512-04) to start BENCH-GRAPH-21-001 while waiting for SAMPLES-GRAPH-24-003; dependency remains BLOCKED. | Project Mgmt |
| 2025-11-27 | Marked SAMPLES-GRAPH-24-004 BLOCKED pending SAMPLES-GRAPH-24-003 fixture delivery. | Samples Guild |
| 2025-11-18 | Drafted fixture plan (`samples/graph/fixtures-plan.md`) outlining contents, assumptions, and blockers for SAMPLES-GRAPH-24-003. | Samples |
| 2025-11-18 | Kicked off SAMPLES-GRAPH-24-003 (overlay format + mock bundle sources); other tasks unchanged. | Samples |
| 2025-11-18 | Normalised sprint to standard template; renamed from SPRINT_509_samples.md. | Ops/Docs |
@@ -25,16 +25,16 @@
| 2 | SEC-CRYPTO-90-018 | DONE (2025-11-26) | After 90-017 | Security & Docs Guilds | Update developer/RootPack documentation to describe the fork, sync steps, and licensing. |
| 3 | SEC-CRYPTO-90-019 | BLOCKED (2025-11-25) | Need Windows runner with CryptoPro CSP to execute fork tests | Security Guild | Patch the fork to drop vulnerable `System.Security.Cryptography.{Pkcs,Xml}` 6.0.0 deps; retarget .NET 8+, rerun tests. |
| 4 | SEC-CRYPTO-90-020 | BLOCKED (2025-11-25) | Await SEC-CRYPTO-90-019 tests on Windows CSP runner | Security Guild | Re-point `StellaOps.Cryptography.Plugin.CryptoPro` to the forked sources and prove end-to-end plugin wiring. |
-| 5 | SEC-CRYPTO-90-021 | TODO | After 90-020 | Security & QA Guilds | Validate forked library + plugin on Windows (CryptoPro CSP) and Linux (OpenSSL GOST fallback); document prerequisites. |
+| 5 | SEC-CRYPTO-90-021 | BLOCKED (2025-11-27) | After 90-020 (blocked awaiting Windows CSP runner). | Security & QA Guilds | Validate forked library + plugin on Windows (CryptoPro CSP) and Linux (OpenSSL GOST fallback); document prerequisites. |
-| 6 | SEC-CRYPTO-90-012 | TODO | Env-gated | Security Guild | Add CryptoPro + PKCS#11 integration tests and hook into `scripts/crypto/run-rootpack-ru-tests.sh`. |
+| 6 | SEC-CRYPTO-90-012 | BLOCKED (2025-11-27) | Env-gated; CryptoPro/PKCS#11 CI runner not provisioned yet. | Security Guild | Add CryptoPro + PKCS#11 integration tests and hook into `scripts/crypto/run-rootpack-ru-tests.sh`. |
-| 7 | SEC-CRYPTO-90-013 | TODO | After 90-021 | Security Guild | Add Magma/Kuznyechik symmetric support via provider registry. |
+| 7 | SEC-CRYPTO-90-013 | BLOCKED (2025-11-27) | After 90-021 (blocked). | Security Guild | Add Magma/Kuznyechik symmetric support via provider registry. |
| 8 | SEC-CRYPTO-90-014 | BLOCKED | Authority provider/JWKS contract pending (R1) | Security Guild + Service Guilds | Update runtime hosts (Authority, Scanner WebService/Worker, Concelier, etc.) to register RU providers and expose config toggles. |
| 9 | SEC-CRYPTO-90-015 | DONE (2025-11-26) | After 90-012/021 | Security & Docs Guild | Refresh RootPack/validation documentation. |
| 10 | AUTH-CRYPTO-90-001 | BLOCKED | PREP-AUTH-CRYPTO-90-001-NEEDS-AUTHORITY-PROVI | Authority Core & Security Guild | Sovereign signing provider contract for Authority; refactor loaders once contract is published. |
| 11 | SCANNER-CRYPTO-90-001 | BLOCKED (2025-11-27) | Await Authority provider/JWKS contract + registry option design (R1/R3) | Scanner WebService Guild · Security Guild | Route hashing/signing flows through `ICryptoProviderRegistry`. |
| 12 | SCANNER-WORKER-CRYPTO-90-001 | BLOCKED (2025-11-27) | After 11 (registry contract pending) | Scanner Worker Guild · Security Guild | Wire Scanner Worker/BuildX analyzers to registry/hash abstractions. |
-| 13 | SCANNER-CRYPTO-90-002 | BLOCKED (2025-11-27) | PQ provider option design pending (R3) | Scanner WebService Guild · Security Guild | Enable PQ-friendly DSSE (Dilithium/Falcon) via provider options. |
+| 13 | SCANNER-CRYPTO-90-002 | DOING (2025-11-27) | Design doc `docs/security/pq-provider-options.md` published; awaiting implementation wiring. | Scanner WebService Guild · Security Guild | Enable PQ-friendly DSSE (Dilithium/Falcon) via provider options. |
-| 14 | SCANNER-CRYPTO-90-003 | BLOCKED (2025-11-27) | After 13; needs PQ provider options | Scanner Worker Guild · QA Guild | Add regression tests for RU/PQ profiles validating Merkle roots + DSSE chains. |
+| 14 | SCANNER-CRYPTO-90-003 | BLOCKED (2025-11-27) | After 13; needs PQ provider implementation | Scanner Worker Guild · QA Guild | Add regression tests for RU/PQ profiles validating Merkle roots + DSSE chains. |
| 15 | ATTESTOR-CRYPTO-90-001 | BLOCKED | Authority provider/JWKS contract pending (R1) | Attestor Service Guild · Security Guild | Migrate attestation hashing/witness flows to provider registry, enabling CryptoPro/PKCS#11 deployments. |

## Wave Coordination
@@ -81,9 +81,11 @@
## Execution Log

| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-11-27 | Marked SEC-CRYPTO-90-021/012/013 BLOCKED: Windows CSP runner and CI gating for CryptoPro/PKCS#11 not available; 90-021 depends on blocked 90-020. | Project Mgmt |
| 2025-11-26 | Completed SEC-CRYPTO-90-018: added fork sync steps/licensing guidance and RootPack packaging notes; marked task DONE. | Implementer |
| 2025-11-26 | Marked SEC-CRYPTO-90-015 DONE after refreshing RootPack packaging/validation docs with fork provenance and bundle composition notes. | Implementer |
| 2025-11-27 | Marked SCANNER-CRYPTO-90-001/002/003 and SCANNER-WORKER-CRYPTO-90-001 BLOCKED pending Authority provider/JWKS contract and PQ provider option design (R1/R3). | Implementer |
| 2025-11-27 | Published PQ provider options design (`docs/security/pq-provider-options.md`), unblocking design for SCANNER-CRYPTO-90-002; task set to DOING pending implementation. | Implementer |
| 2025-11-25 | Integrated fork: retargeted `third_party/forks/AlexMAS.GostCryptography` to `net10.0`, added Xml/Permissions deps, and switched `StellaOps.Cryptography.Plugin.CryptoPro` from the IT.GostCryptography NuGet package to a project reference. `dotnet build src/__Libraries/StellaOps.Cryptography.Plugin.CryptoPro -c Release` now succeeds (warnings CA1416 kept). | Implementer |
| 2025-11-25 | Progressed SEC-CRYPTO-90-019: removed the legacy IT.GostCryptography NuGet package, retargeted the fork to net10 with System.Security.Cryptography.Xml 8.0.1 and System.Security.Permissions; cleaned stale bin/obj. Fork library builds; fork tests still pending (Windows CSP). | Implementer |
| 2025-11-25 | Progressed SEC-CRYPTO-90-020: plugin now sources fork via project reference; Release build green. Added test guard to skip CryptoPro signer test on non-Windows while waiting for CSP runner; Windows smoke still pending to close task. | Implementer |
@@ -485,6 +485,7 @@ ResolveEntrypoint(ImageConfig cfg, RootFs fs):
- WebService ships a **RecordModeService** that assembles replay manifests (schema v1) with policy/feed/tool pins and reachability references, then writes deterministic input/output bundles to the configured object store (RustFS default, S3/Minio fallback) under `replay/<head>/<digest>.tar.zst`.
- Bundles contain canonical manifest JSON plus inputs (policy/feed/tool/analyzer digests) and outputs (SBOM, findings, optional VEX/logs); CAS URIs follow `cas://replay/...` and are attached to scan snapshots as `ReplayArtifacts`.
- Reachability graphs/traces are folded into the manifest via `ReachabilityReplayWriter`; manifests and bundles hash with stable ordering for replay verification (`docs/replay/DETERMINISTIC_REPLAY.md`).
- Worker sealed-mode intake reads `replay.bundle.uri` + `replay.bundle.sha256` (plus determinism feed/policy pins) from job metadata, persists bundle refs in analysis and surface manifest, and validates hashes before use (see the sketch after this list).
- Deterministic execution switches (`docs/modules/scanner/deterministic-execution.md`) must be enabled when generating replay bundles to keep hashes stable.
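The worker-side hash gate can be reproduced by hand. The sketch below assumes the `replay.bundle.uri` / `replay.bundle.sha256` values from job metadata have been exported as environment variables and that the bundle is reachable over plain HTTPS; the real intake path goes through the object-store client rather than `curl`.

```bash
#!/usr/bin/env bash
# Illustrative only: re-verify a replay bundle against its pinned digest before
# unpacking, mirroring the worker sealed-mode intake described above.
set -euo pipefail

: "${REPLAY_BUNDLE_URI:?e.g. https://objectstore.local/replay/<head>/<digest>.tar.zst}"
: "${REPLAY_BUNDLE_SHA256:?expected sha256 from job metadata}"

curl -fsSL "$REPLAY_BUNDLE_URI" -o bundle.tar.zst
actual="$(sha256sum bundle.tar.zst | awk '{print $1}')"

if [[ "$actual" != "$REPLAY_BUNDLE_SHA256" ]]; then
  echo "replay bundle digest mismatch: expected $REPLAY_BUNDLE_SHA256, got $actual" >&2
  exit 1
fi

# Unpack only after the digest matches (GNU tar with zstd support assumed).
tar --zstd -xf bundle.tar.zst
```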

EntryTrace emits structured diagnostics and metrics so operators can quickly understand why resolution succeeded or degraded:
@@ -42,9 +42,10 @@ Required fields:

Output bundle layout:

-- `determinism.json` – schema above
+- `determinism.json` – schema above; includes per-run artefact hashes and determinism pins (feed/policy/tool) plus runtime toggles.
- `run_i/*.json` – canonicalised artefacts per run
- `diffs/` – minimal diffs when divergence occurs
- `surface/determinism.json` – copy of the worker-emitted determinism manifest from the surface bundle (pins + payload hashes) for cross-checking (see the sketch after this list).
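One way to perform that cross-check is to normalise both manifests with `jq` and diff them. This is a sketch only: the `artifacts[].path` / `artifacts[].sha256` field names are assumptions rather than the published determinism schema, so point the filters at the real hash entries.

```bash
# Hypothetical field layout; adjust the jq paths to the real determinism schema.
jq -S '[.artifacts[] | {path, sha256}] | sort_by(.path)' determinism.json > harness-hashes.json
jq -S '[.artifacts[] | {path, sha256}] | sort_by(.path)' surface/determinism.json > surface-hashes.json

# Any divergence between harness-reported and worker-reported hashes shows up as a diff.
diff -u harness-hashes.json surface-hashes.json && echo "hashes match"
```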

## 4. CI integration (`DEVOPS-SCAN-90-004`)
@@ -41,7 +41,7 @@ Keep the language analyzer microbench under the < 5 s SBOM pledge. CI emits
- Pager payload should include `scenario`, `max_ms`, `baseline_max_ms`, and `commit`.
- Immediate triage steps:
  1. Check the `latest.json` artefact for the failing scenario – confirm commit and environment.
-  2. Re-run the harness with `--captured-at` and `--baseline` pointing at the last known good CSV to verify determinism.
+  2. Re-run the harness with `--captured-at` and `--baseline` pointing at the last known good CSV to verify determinism; include `surface/determinism.json` in the release bundle (see `release-determinism.md` and the sketch below).
  3. If regression persists, open an incident ticket tagged `scanner-analyzer-perf` and page the owning language guild.
  4. Roll back the offending change or update the baseline after sign-off from the guild lead and Perf captain.
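For step 2, a rerun typically looks like the sketch below. Only the `--captured-at` and `--baseline` flags come from this runbook; the harness path and the baseline file name are placeholders for illustration.

```bash
# Illustrative rerun against the last known good baseline CSV; the harness
# path below is a placeholder, not the real binary name.
./bench/analyzer-harness \
  --captured-at 2025-11-26T00:00:00Z \
  --baseline baselines/analyzer-last-good.csv
```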
docs/modules/scanner/operations/release-determinism.md (new file, 29 lines)
@@ -0,0 +1,29 @@
# Scanner Release Determinism Checklist

> Completes SCAN-DETER-186-010 by ensuring every release ships a reproducibility bundle.

## What to publish

- `determinism.json` generated by the harness (scores, non-deterministic artefacts, thresholds).
- `surface/determinism.json` copied from worker surface manifests (pins + runtime toggles + payload hashes).
- Canonical artefacts per run (`run_i/*.json`) and diffs for divergent runs.

## Where to publish

- Object store bucket configured for releases (same as reports), prefix: `determinism/<release>/`.
- CAS-style paths: `cas://determinism/<head>/<sha>.tar.zst` for bundle archives.
- Link from release notes and offline kit manifests.

## How to generate

1. Run the determinism harness (`SCAN-DETER-186-009`) against the release image with frozen clock/seed/concurrency and pinned feeds/policy.
2. Export the bundle using the harness CLI (pending) or the helper script `scripts/scanner/determinism-run.sh` (see the sketch below).
3. Copy the worker-emitted `determinism.json` from the surface manifest cache into `surface/determinism.json` inside the bundle for cross-checks.
4. Sign bundles with DSSE (determinism predicate) and, if enabled, submit to Rekor.
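A minimal end-to-end sketch of steps 1–4. The helper script's arguments, the surface manifest cache location, and the release identifier are assumptions; DSSE signing and Rekor submission are left to the release pipeline.

```bash
#!/usr/bin/env bash
# Sketch of the release determinism flow; arguments and paths are illustrative.
set -euo pipefail

RELEASE="2025.11.0"                               # placeholder release identifier
IMAGE="registry.local/scanner:${RELEASE}"         # placeholder release image
OUT="out/determinism/${RELEASE}"

# Steps 1-2: run the determinism harness via the helper script (argument shape assumed).
scripts/scanner/determinism-run.sh "$IMAGE" --out "$OUT"

# Step 3: copy the worker-emitted manifest next to the harness output for cross-checking.
mkdir -p "$OUT/surface"
cp surface-cache/determinism.json "$OUT/surface/determinism.json"   # cache path assumed

# Step 4 (prep): archive and hash the bundle; DSSE signing/Rekor submission follow.
tar --zstd -cf "determinism-${RELEASE}.tar.zst" -C out/determinism "${RELEASE}"
sha256sum "determinism-${RELEASE}.tar.zst"
```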

## Acceptance gates

- Overall score >= 0.95 and per-image score >= 0.90.
- All bundle files present: `determinism.json`, `surface/determinism.json`, `run_*`, `diffs/` (may be empty when fully deterministic).
- Hashes in `surface/determinism.json` match hashes in `determinism.json` baseline artefacts.

## References

- docs/modules/scanner/determinism-score.md
- docs/modules/scanner/deterministic-execution.md
- docs/replay/DETERMINISTIC_REPLAY.md
@@ -14,7 +14,8 @@

## HTTP middleware

- Accept `traceparent`/`tracestate`; reject/strip vendor-specific headers.
-- Propagate `tenant`, `actor`, `imposed-rule` via `Stella-Tenant`, `Stella-Actor`, `Stella-Imposed-Rule` headers.
+- Propagate `tenant`, `actor`, `imposed-rule` via `x-stella-tenant`, `x-stella-actor`, `x-stella-imposed-rule` headers (defaults configurable via `Telemetry:Propagation`); see the example request after this list.
- Middleware entry point: `app.UseStellaOpsTelemetryContext()` plus the `TelemetryPropagationHandler` automatically added to all `HttpClient` instances when `AddStellaOpsTelemetry` is called.
- Emit exemplars: when sampling is off, attach exemplar ids to request duration and active request metrics.
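For illustration, an inbound request carrying W3C trace context plus the Stella propagation headers looks like the sketch below; the endpoint URL and header values are made up, while the header names are the ones listed above.

```bash
# traceparent uses the W3C format: version-traceid-spanid-flags.
curl -sS https://scanner.internal.example/api/v1/scans \
  -H 'traceparent: 00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01' \
  -H 'x-stella-tenant: tenant-a' \
  -H 'x-stella-actor: svc-scheduler' \
  -H 'x-stella-imposed-rule: shadow-only'
```

The middleware is expected to lift these values into the ambient telemetry context and re-emit them on outbound calls via `TelemetryPropagationHandler`.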

## gRPC interceptors
@@ -28,7 +29,8 @@
## Metrics helper expectations

- Golden signals: `http.server.duration`, `http.client.duration`, `messaging.operation.duration`, `job.execution.duration`, `runtime.gc.pause`, `db.call.duration`.
- Mandatory tags: `tenant`, `service`, `endpoint`/`operation`, `result` (`ok|error|cancelled|throttled`), `sealed` (`true|false`).
-- Cardinality guard: drop/replace tag values exceeding 64 chars; cap path templates to first 3 segments.
+- Cardinality guard: trim tag values to 64 chars (configurable) and replace values beyond the first 50 distinct entries per key with `other` (enforced by `MetricLabelGuard`).
- Helper API: `Histogram<double>.RecordRequestDuration(guard, durationMs, route, verb, status, result)` applies guard + tags consistently.

## Determinism & offline posture

- All timestamps UTC RFC3339; sampling configs controlled via appsettings and mirrored in offline bundles.
@@ -3,19 +3,19 @@
> **Audience:** Policy authors, reviewers, security approvers, release engineers.
> **Scope:** End-to-end flow for `stella-dsl@1` policies from draft through archival, including CLI/Console touch-points, Authority scopes, audit artefacts, and offline considerations.

This guide explains how a policy progresses through Stella Ops, which roles are involved, and the artefacts produced at every step. Pair it with the [Policy Engine Overview](overview.md), [DSL reference](dsl.md), and upcoming run documentation to ensure consistent authoring and rollout.
> **Imposed rule:** New or significantly changed policies must run in **shadow mode** with coverage fixtures before activation. Promotions are blocked until shadow + coverage gates pass.

---

## 1 · Protocol Summary

- Policies are **immutable versions** attached to a stable `policy_id`.
- Lifecycle states: `draft → submitted → approved → active → archived`.
- Every transition requires explicit Authority scopes and produces structured events + storage artefacts (`policies`, `policy_runs`, audit log collections).
- Simulation and CI gating happen **before** approvals can be granted.
- Activation triggers (runs, bundle exports, CLI `promote`) operate on the **latest approved** version per tenant.
- Shadow mode runs capture findings without enforcement; shadow exit requires coverage + twin-run determinism checks.

```mermaid
stateDiagram-v2
@@ -55,9 +55,9 @@ stateDiagram-v2
- **Tools:** Console editor, `stella policy edit`, policy DSL files.
- **Actions:**
  - Author DSL leveraging [stella-dsl@1](dsl.md).
  - Run `stella policy lint` and `stella policy simulate --sbom <fixtures>` locally (see the sketch below).
  - Add/refresh coverage fixtures under `tests/policy/<policyId>/cases/*.json`; run `stella policy test`.
  - Keep `settings.shadow = true` until coverage + shadow gates pass.
  - Attach rationale metadata (`metadata.description`, tags).
- **Artefacts:**
  - `policies` document with `status=draft`, `version=n`, `provenance.created_by`.
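The draft-stage loop above condenses to a few commands. A sketch, assuming the policy file and fixture paths shown here; the exact argument shapes for `lint` and `simulate` are inferred from this guide rather than taken from a full CLI reference.

```bash
# Local authoring loop for a draft policy (paths are examples).
stella policy lint policies/my-policy.dsl
stella policy simulate --sbom fixtures/sboms/sample-service.json policies/my-policy.dsl
stella policy test policies/my-policy.dsl --fixtures tests/policy/my-policy/cases
```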
@@ -71,8 +71,8 @@ stateDiagram-v2
- **Who:** Authors (`policy:author`).
- **Tools:** Console “Submit for review” button, `stella policy submit <policyId> --reviewers ...`.
- **Actions:**
  - Provide review notes and required simulations (CLI uploads attachments).
  - Attach coverage results (shadow mode + `stella policy test`).
  - Choose reviewer groups; Authority records them in submission metadata.
- **Artefacts:**
  - Policy document transitions to `status=submitted`, capturing `submitted_by`, `submitted_at`, reviewer list, simulation digest references.
@@ -101,8 +101,8 @@ stateDiagram-v2
- **Who:** Approvers (`policy:approve`).
- **Tools:** Console “Approve”, CLI `stella policy approve <id> --version n --note "rationale"`.
- **Actions:**
  - Confirm compliance checks (see §6) all green.
  - Verify shadow gate + coverage suite passed in CI.
  - Provide approval note (mandatory string captured in audit trail).
- **Artefacts:**
  - Policy `status=approved`, `approved_by`, `approved_at`, `approval_note`.
@@ -112,23 +112,23 @@ stateDiagram-v2
- Approver cannot be the same identity as the author (enforced by Authority config).
- Approver must attest to successful simulation diff review (`--attach diff.json`).

### 3.5 Signing & Publication

- **Who:** Operators with fresh-auth (`policy:publish`, `policy:promote`) and approval backing.
- **Tools:** Console “Publish & Sign” wizard, CLI `stella policy publish`, `stella policy promote`.
- **Actions:**
  - Execute `stella policy publish <id> --version n --reason "<why>" --ticket SEC-123 --sign` to produce a DSSE attestation capturing IR digest + approval metadata (see the sketch at the end of this subsection).
  - Provide required metadata headers (`policy_reason`, `policy_ticket`, `policy_digest`), enforced by Authority; CLI flags map to headers automatically.
  - Promote the signed version to targeted environments (`stella policy promote <id> --version n --environment stage`).
- **Artefacts:**
  - DSSE payload stored in `policy_attestations`, containing SHA-256 digest, signer, reason, ticket, promoted environment.
  - Audit events `policy.published`, `policy.promoted` including metadata snapshot and attestation reference.
- **Guards:**
  - Publish requires a fresh-auth window (<5 minutes) and interactive identity (client-credentials tokens are rejected).
  - Metadata headers must be present; missing values return `policy_attestation_metadata_missing`.
  - Signing key rotation enforced via Authority JWKS; CLI refuses to publish if attestation verification fails.
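Putting the publication actions together, a sketch using the flags shown above; the policy id, version number, reason, and ticket are placeholders.

```bash
# Requires a fresh interactive login (<5 min window); client-credentials tokens are rejected.
stella policy publish P-7 --version 4 \
  --reason "Tighten critical-vulnerability gate for Q4 release" \
  --ticket SEC-123 \
  --sign

# Promote the signed version to the staging environment once the DSSE attestation verifies.
stella policy promote P-7 --version 4 --environment stage
```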

### 3.6 Activation & Runs

- **Who:** Operators (`policy:operate`, `policy:run`, `policy:activate`).
- **Tools:** Console “Promote to active”, CLI `stella policy activate <id> --version n`, `stella policy run`.
@@ -144,7 +144,7 @@ stateDiagram-v2
- Activation blocked if the previous full run (<24 h old) failed or is pending.
- Selection of SBOM/advisory snapshots uses consistent cursors recorded for reproducibility.

### 3.7 Archival / Rollback

- **Who:** Approvers or Operators with `policy:archive`.
- **Tools:** Console menu, CLI `stella policy archive <id> --version n --reason` (see the sketch below).
@@ -165,7 +165,7 @@ stateDiagram-v2

| Stage | Console | CLI | API |
|-------|---------|-----|-----|
| Draft | Inline linting, simulation panel | `stella policy lint`, `edit`, `simulate` | `POST /policies`, `PUT /policies/{id}/versions/{v}` |
| Draft | Inline linting, simulation panel | `stella policy lint`, `edit`, `test`, `simulate` | `POST /policies`, `PUT /policies/{id}/versions/{v}` |
| Submit | Submit modal (attach simulations) | `stella policy submit` | `POST /policies/{id}/submit` |
| Review | Comment threads, diff viewer | `stella policy review --approve/--request-changes` | `POST /policies/{id}/reviews` |
| Approve | Approve dialog | `stella policy approve` | `POST /policies/{id}/approve` |
@@ -174,6 +174,40 @@ stateDiagram-v2

All CLI commands emit structured JSON by default; use `--format table` for human review.

### 4.1 · CLI Command Reference

#### `stella policy edit <file>`

Open a policy DSL file in your configured editor (`$EDITOR` or `$VISUAL`), validate after editing, and optionally commit with SemVer metadata.

**Options:**

- `-c, --commit` - Commit changes after successful validation
- `-V, --version <semver>` - SemVer version for commit metadata (e.g., `1.2.0`)
- `-m, --message <msg>` - Custom commit message (auto-generated if not provided)
- `--no-validate` - Skip validation after editing (not recommended)

**Example:**

```bash
# Edit and commit with version metadata
stella policy edit policies/my-policy.dsl --commit --version 1.2.0
```

#### `stella policy test <file>`

Run coverage test fixtures against a policy DSL file to validate rule behavior. Fixtures are JSON files; a sketch of the fixture shape follows the example below.

**Options:**

- `-d, --fixtures <dir>` - Path to fixtures directory (defaults to `tests/policy/<policy-name>/cases`)
- `--filter <pattern>` - Run only fixtures matching this pattern
- `-f, --format <fmt>` - Output format: `table` (default) or `json`
- `-o, --output <file>` - Write test results to a file
- `--fail-fast` - Stop on first test failure

**Example:**

```bash
stella policy test policies/vuln-policy.dsl --filter critical
```
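
For reference, a sketch of the fixture shape the new `stella policy test` handler deserializes is shown below. The property names mirror the `PolicyTestFixture` type added later in this commit; the loader class and the example path are illustrative only.

```csharp
using System.IO;
using System.Text.Json;

// Fixture model mirroring the PolicyTestFixture type used by the CLI handler in this commit.
// JSON property matching is case-insensitive, so keys such as "description" or "expectedOutcome" bind.
public sealed class PolicyTestFixture
{
    public string? Description { get; set; }
    public string? ExpectedOutcome { get; set; }      // e.g. "pass" or "fail"
    public JsonElement? Input { get; set; }           // evaluation context fed to the policy
    public JsonElement? ExpectedFindings { get; set; }
}

public static class PolicyFixtureLoader
{
    // Loads a single fixture the same way the handler does (path is illustrative).
    public static PolicyTestFixture? Load(string path) =>
        JsonSerializer.Deserialize<PolicyTestFixture>(
            File.ReadAllText(path),
            new JsonSerializerOptions { PropertyNameCaseInsensitive = true });
}
```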

---

## 5 · Audit & Observability
@@ -194,25 +228,25 @@ All CLI commands emit structured JSON by default; use `--format table` for human

---

## 6 · Compliance Gates

| Gate | Stage | Enforced by | Requirement |
|------|-------|-------------|-------------|
| **DSL lint** | Draft → Submit | CLI/CI | `stella policy lint` successful within 24 h. |
| **Simulation evidence** | Submit | CLI/Console | Attach diff from `stella policy simulate` covering baseline SBOM set. |
| **Shadow run** | Submit → Approve | Policy Engine / CI | Shadow mode enabled (`settings.shadow=true`) with findings recorded; must execute once per change. |
| **Coverage suite** | Submit → Approve | CI (`stella policy test`) | Coverage fixtures present and passing; artefact attached to submission. |
| **Reviewer quorum** | Submit → Approve | Authority | Minimum approver/reviewer count configurable per tenant. |
| **Determinism CI** | Approve | DevOps job | Twin run diff passes (`DEVOPS-POLICY-20-003`). |
| **Attestation metadata** | Approve → Publish | Authority / CLI | `policy:publish` executed with reason & ticket metadata; DSSE attestation verified. |
| **Activation health** | Publish/Promote → Activate | Policy Engine | Last run status succeeded; orchestrator queue healthy. |
| **Export validation** | Archive | Offline Kit | DSSE-signed policy pack generated for long-term retention. |

Failure of any gate emits a `policy.lifecycle.violation` event and blocks transition until resolved.

---

## 7 · Offline / Air-Gap Considerations

- Offline Kit bundles include:
  - Approved policy packs (`.policy.bundle` + DSSE signatures).
@@ -225,7 +259,7 @@ Failure of any gate emits a `policy.lifecycle.violation` event and blocks transi

---

## 8 · Incident Response & Rollback

- Incident mode (triggered via `policy incident activate`) forces:
  - Immediate incremental run to evaluate mitigation policies.
@@ -239,7 +273,7 @@ Failure of any gate emits a `policy.lifecycle.violation` event and blocks transi

---

## 9 · CI/CD Integration (Reference)

- **Pre-merge:** run lint + simulation jobs against golden SBOM fixtures.
- **Post-merge (main):** compile, compute IR checksum, stage for Offline Kit.
@@ -248,18 +282,18 @@ Failure of any gate emits a `policy.lifecycle.violation` event and blocks transi

---

## 10 · Compliance Checklist

- [ ] **Role mapping validated:** Authority issuer config maps organisational roles to required `policy:*` scopes (per tenant).
- [ ] **Submission evidence attached:** Latest simulation diff and lint artefacts linked to submission.
- [ ] **Reviewer quorum met:** All required reviewers approved or acknowledged; no unresolved blocking comments.
- [ ] **Approval note logged:** Approver justification recorded in audit trail alongside IR checksum.
- [ ] **Publish attestation signed:** `stella policy publish` executed by interactive operator, metadata (`policy_reason`, `policy_ticket`, `policy_digest`) present, DSSE attestation stored.
- [ ] **Promotion recorded:** Target environment promoted via CLI/Console with audit event linking to attestation.
- [ ] **Activation guard passed:** Latest run status success, orchestrator queue healthy, determinism job green.
- [ ] **Archive bundles produced:** When archiving, DSSE-signed policy pack exported and stored for offline retention.
- [ ] **Offline parity proven:** For sealed deployments, `--sealed` simulations executed and logged before approval.

---

*Last updated: 2025-11-03 (Sprint 100).*
*Last updated: 2025-11-27 (Sprint 401).*

@@ -173,9 +173,23 @@ db.events.createIndex(
  { "provenance.dsse.rekor.logIndex": 1 },
  { name: "events_by_rekor_logindex" }
);

db.events.createIndex(
  { "provenance.dsse.envelopeDigest": 1 },
  { name: "events_by_envelope_digest", sparse: true }
);

db.events.createIndex(
  { "ts": -1, "kind": 1, "trust.verified": 1 },
  { name: "events_by_ts_kind_verified" }
);
```

Deployment options:

- **Ops script:** `mongosh stellaops_db < ops/mongo/indices/events_provenance_indices.js`
- **C# helper:** `MongoIndexes.EnsureEventIndexesAsync(database, ct)`

This section was updated as part of `PROV-INDEX-401-030` (completed 2025-11-27).
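
For services that prefer the C# path over the ops script, a minimal startup sketch is shown below. The `MongoIndexes.EnsureEventIndexesAsync(database, ct)` call comes from this document; the hosted-service wrapper, namespace, and database name are illustrative assumptions rather than the canonical host code.

```csharp
using Microsoft.Extensions.Hosting;
using MongoDB.Driver;
using StellaOps.Events.Mongo; // assumed namespace for MongoIndexes

// Hypothetical hosted service that ensures the provenance indexes exist at startup.
public sealed class EventIndexBootstrapper : IHostedService
{
    private readonly IMongoDatabase _database;

    public EventIndexBootstrapper(IMongoClient client)
        => _database = client.GetDatabase("stellaops"); // database name is an assumption

    public Task StartAsync(CancellationToken cancellationToken)
        => MongoIndexes.EnsureEventIndexesAsync(_database, cancellationToken);

    public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask;
}
```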

---

@@ -270,3 +284,82 @@ Body: { "dsse": { ... }, "trust": { ... } }
```

The body matches the JSON emitted by `publish_attestation_with_provenance.sh`. Feedser validates the payload, ensures `trust.verified = true`, and then calls `AttachStatementProvenanceAsync` so the DSSE metadata lands inline on the target statement. Clients receive HTTP 202 on success, 400 on malformed input, and 404 if the statement id is unknown.
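
A condensed sketch of that endpoint behaviour, written as a minimal-API `Program.cs` excerpt, is shown below. The route template, the `IStatementProvenanceStore` abstraction, and the `bool` return used to drive the 404 path are assumptions; only the validation rule, the `AttachStatementProvenanceAsync` call, and the 202/400/404 responses come from this document.

```csharp
// Hypothetical Program.cs (web SDK, implicit usings) mirroring the documented attach behaviour.
var builder = WebApplication.CreateBuilder(args);
var app = builder.Build();

app.MapPost("/statements/{statementId}/provenance", async (
    string statementId,
    AttachProvenanceRequest request,
    IStatementProvenanceStore store,          // assumed abstraction over Feedser storage
    CancellationToken ct) =>
{
    // 400: malformed input or unverified trust metadata.
    if (request.Dsse is null || request.Trust is null || !request.Trust.Verified)
    {
        return Results.BadRequest();
    }

    // 404: unknown statement id; 202: provenance accepted and stored inline on the statement.
    var attached = await store.AttachStatementProvenanceAsync(statementId, request.Dsse, request.Trust, ct);
    return attached ? Results.Accepted() : Results.NotFound();
});

app.Run();

// Assumed request shape; the real contract is the JSON emitted by publish_attestation_with_provenance.sh.
public sealed record AttachProvenanceRequest(DsseProvenance? Dsse, TrustInfo? Trust);
public sealed record DsseProvenance();      // fields elided in this sketch
public sealed record TrustInfo(bool Verified);

public interface IStatementProvenanceStore
{
    Task<bool> AttachStatementProvenanceAsync(string statementId, DsseProvenance dsse, TrustInfo trust, CancellationToken ct);
}
```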

---

## 10. Backfill service

`EventProvenanceBackfillService` (`src/StellaOps.Events.Mongo/EventProvenanceBackfillService.cs`) orchestrates backfilling historical events with DSSE provenance metadata.

### 10.1 Components

| Class | Purpose |
|-------|---------|
| `IAttestationResolver` | Interface for resolving attestation metadata by subject digest. |
| `EventProvenanceBackfillService` | Queries unproven events, resolves attestations, updates events. |
| `StubAttestationResolver` | Test/development stub implementation. |

### 10.2 Usage

```csharp
var resolver = new MyAttestationResolver(rekorClient, attestationRepo);
var backfillService = new EventProvenanceBackfillService(mongoDatabase, resolver);

// Count unproven events
var count = await backfillService.CountUnprovenEventsAsync(
    new[] { "SBOM", "VEX", "SCAN" });

// Backfill with progress reporting
var progress = new Progress<BackfillResult>(r =>
    Console.WriteLine($"{r.EventId}: {r.Status}"));

var summary = await backfillService.BackfillAllAsync(
    kinds: new[] { "SBOM", "VEX", "SCAN" },
    limit: 1000,
    progress: progress);

Console.WriteLine($"Processed: {summary.TotalProcessed}");
Console.WriteLine($"Success: {summary.SuccessCount}");
Console.WriteLine($"Not found: {summary.NotFoundCount}");
Console.WriteLine($"Errors: {summary.ErrorCount}");
```

### 10.3 Implementing IAttestationResolver

Implementations should query the attestation store (Rekor, CAS, or local Mongo) by subject digest:

```csharp
public class RekorAttestationResolver : IAttestationResolver
{
    private readonly IRekorClient _rekor;
    private readonly IAttestationRepository _attestations;

    public RekorAttestationResolver(IRekorClient rekor, IAttestationRepository attestations)
    {
        _rekor = rekor;
        _attestations = attestations;
    }

    public async Task<AttestationResolution?> ResolveAsync(
        string subjectDigestSha256,
        string eventKind,
        CancellationToken cancellationToken)
    {
        // Look up attestation by subject digest
        var record = await _attestations.GetAsync(subjectDigestSha256, eventKind, cancellationToken);
        if (record is null) return null;

        // Fetch Rekor proof if available
        var proof = await _rekor.GetProofAsync(record.RekorUuid, RekorBackend.Sigstore, cancellationToken);

        return new AttestationResolution
        {
            Dsse = new DsseProvenance { /* populated from record + proof */ },
            Trust = new TrustInfo { Verified = true, Verifier = "Authority@stella" },
            AttestationId = record.Id
        };
    }
}
```

### 10.4 Reference files

- `src/StellaOps.Events.Mongo/IAttestationResolver.cs`
- `src/StellaOps.Events.Mongo/EventProvenanceBackfillService.cs`
- `src/StellaOps.Events.Mongo/StubAttestationResolver.cs`

This section was added as part of `PROV-BACKFILL-401-029` (completed 2025-11-27).
docs/security/pq-provider-options.md · 80 lines · new file
@@ -0,0 +1,80 @@
# PQ Provider Options Design

Last updated: 2025-11-27 · Owners: Security Guild · Scanner Guild · Policy Guild

## Goals

- Allow DSSE/attestation flows to choose post-quantum (PQ) signing profiles (Dilithium/Falcon) via the existing `ICryptoProviderRegistry` without breaking deterministic outputs.
- Keep hash inputs stable across providers; only the signature algorithm changes.
- Remain offline-friendly and configurable per environment (registry entry + appsettings).

## Provider identifiers

- `pq-dilithium3` (default PQ profile)
- `pq-falcon512` (lightweight alternative)
- Each provider advertises:
  - `algorithm`: `dilithium3` | `falcon512`
  - `hash`: `sha256` (default) or `blake3` when the `UseBlake3` flag is enabled
  - `supportsDetached`: true
  - `supportsDSSE`: true

## Registry options (appsettings excerpt)

```json
{
  "Crypto": {
    "DefaultProvider": "rsa-2048",
    "Providers": [
      {
        "Name": "pq-dilithium3",
        "Type": "PostQuantum",
        "Algorithm": "dilithium3",
        "Hash": "sha256",
        "KeyPath": "secrets/pq/dilithium3.key",
        "CertPath": "secrets/pq/dilithium3.crt",
        "UseBlake3": false
      },
      {
        "Name": "pq-falcon512",
        "Type": "PostQuantum",
        "Algorithm": "falcon512",
        "Hash": "sha256",
        "KeyPath": "secrets/pq/falcon512.key",
        "CertPath": "secrets/pq/falcon512.crt",
        "UseBlake3": true
      }
    ]
  }
}
```

## Selection rules

- CLI/Service settings may specify `Crypto:DefaultProvider` or per-feature overrides:
  - `DSSE:SigningProvider` (affects attestation envelopes)
  - `PolicyEngine:SigningProvider` (policy DSSE/OPA bundles)
  - `Scanner:SigningProvider` (scanner DSSE outputs)
- If the requested provider is missing, fall back to `DefaultProvider` and emit a warning (a minimal resolution sketch follows this list).
- Determinism: hash inputs (payload canonicalisation) remain identical; only signature material differs. Avoid provider-specific canonicalisation.
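
A minimal sketch of that fallback rule, assuming a simple lookup surface; the `TryGet`/`Get` method names, the `ICryptoProvider` return type, and the selector class are stand-ins for illustration, not the actual StellaOps registry API:

```csharp
using Microsoft.Extensions.Logging;

// Stand-in shapes for illustration; the real ICryptoProviderRegistry surface may differ.
public interface ICryptoProvider
{
    string Name { get; }
}

public interface ICryptoProviderRegistry
{
    bool TryGet(string name, out ICryptoProvider provider);
    ICryptoProvider Get(string name);
}

public static class SigningProviderSelector
{
    // Prefer the per-feature override (e.g. DSSE:SigningProvider); otherwise use Crypto:DefaultProvider.
    // Documented behaviour: a missing provider falls back to the default and logs a warning.
    public static ICryptoProvider Resolve(
        ICryptoProviderRegistry registry,
        string defaultProvider,
        string? featureOverride,
        ILogger logger)
    {
        var requested = string.IsNullOrWhiteSpace(featureOverride) ? defaultProvider : featureOverride;

        if (registry.TryGet(requested, out var provider))
        {
            return provider;
        }

        logger.LogWarning(
            "Signing provider '{Requested}' is not registered; falling back to '{Default}'.",
            requested, defaultProvider);

        return registry.Get(defaultProvider);
    }
}
```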

## Hash strategy

- Default hash remains SHA-256 for interop.
- Optional `UseBlake3` flag allows switching to BLAKE3 where approved; consumers must also set `DeterministicHashVersion = 2` to avoid mixed hashes.
- DSSE payload hash is taken **before** provider selection to keep signatures comparable across providers.

## Key formats

- PQ keys stored as PEM with `BEGIN PUBLIC KEY` / `BEGIN PRIVATE KEY` using provider-specific encoding (liboqs/OpenQuantumSafe toolchain).
- Registry loads keys via the provider descriptor; validation ensures the algorithm matches the advertised name.

## Testing plan (applies to SCANNER-CRYPTO-90-002/003)

- Unit tests: provider registration + selection, hash invariants (SHA-256 vs BLAKE3), DSSE signature/verify round-trips for both algorithms.
- Integration (env-gated): sign sample SBOM attestations and Policy bundles with Dilithium3 and Falcon512; verify with oqs-provider or a liboqs-compatible verifier.
- Determinism check: sign the same payload twice -> signatures are only expected to be identical when the provider uses a deterministic signing mode (Dilithium offers one; Falcon signing is randomized), so compare payload digests as the stable anchor; record hashes in `tests/fixtures/pq-dsse/*`.

## Rollout steps

1) Implement provider classes under `StellaOps.Cryptography.Providers.Pq` with oqs bindings.
2) Wire registry config parsing for `Type=PostQuantum` with the fields above.
3) Add DSSE signing option plumbing in Scanner/Policy/Attestor hosts using the `SigningProvider` override.
4) Add env-gated tests to `scripts/crypto/run-rootpack-ru-tests.sh` (skip if oqs libs are missing).
5) Document operator guidance in `docs/dev/crypto.md` and RootPack notes once providers are verified.

## Risks / mitigations

- **Interop risk**: Some consumers may not understand Dilithium/Falcon signatures. Mitigate via a dual-signing toggle (RSA + PQ) during the transition; a sketch follows this list.
- **Performance**: Larger signatures could affect payload size; benchmark during rollout.
- **Supply**: oqs/lib dependencies must be vendored or mirrored for offline installs; add to offline bundle manifest.
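
A minimal sketch of that dual-signing toggle, assuming a simple `ISigner` abstraction. The envelope layout and PAE encoding follow the public DSSE spec; the interface, helper names, and key ids (taken from the provider names above) are illustrative, not the StellaOps attestor API.

```csharp
using System;
using System.Collections.Generic;
using System.Text;

// Hypothetical dual-signing helper: one DSSE envelope carrying both an RSA and a PQ signature,
// so legacy verifiers keep working while PQ-capable verifiers can prefer the PQ entry.
public sealed record DsseSignature(string KeyId, string Sig);
public sealed record DsseEnvelope(string PayloadType, string Payload, IReadOnlyList<DsseSignature> Signatures);

public interface ISigner            // stand-in abstraction; the real provider interface may differ
{
    string KeyId { get; }
    byte[] Sign(byte[] data);
}

public static class DualSigner
{
    public static DsseEnvelope Sign(string payloadType, byte[] payload, ISigner rsa, ISigner pq)
    {
        // DSSE signs the PAE encoding, not the raw payload.
        var pae = Pae(payloadType, payload);
        return new DsseEnvelope(
            payloadType,
            Convert.ToBase64String(payload),
            new[]
            {
                new DsseSignature(rsa.KeyId, Convert.ToBase64String(rsa.Sign(pae))),   // e.g. "rsa-2048"
                new DsseSignature(pq.KeyId, Convert.ToBase64String(pq.Sign(pae))),     // e.g. "pq-dilithium3"
            });
    }

    // PAE(type, body) = "DSSEv1" SP len(type) SP type SP len(body) SP body
    private static byte[] Pae(string payloadType, byte[] payload)
    {
        var header = $"DSSEv1 {Encoding.UTF8.GetByteCount(payloadType)} {payloadType} {payload.Length} ";
        var headerBytes = Encoding.UTF8.GetBytes(header);
        var result = new byte[headerBytes.Length + payload.Length];
        Buffer.BlockCopy(headerBytes, 0, result, 0, headerBytes.Length);
        Buffer.BlockCopy(payload, 0, result, headerBytes.Length, payload.Length);
        return result;
    }
}
```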
@@ -1,4 +1,24 @@
/**
 * MongoDB indexes for DSSE provenance queries on the events collection.
 * Run with: mongosh stellaops_db < events_provenance_indices.js
 *
 * These indexes support:
 * - Proven VEX/SBOM/SCAN lookup by subject digest
 * - Compliance gap queries (unverified events)
 * - Rekor log index lookups
 * - Backfill service queries
 *
 * Created: 2025-11-27 (PROV-INDEX-401-030)
 * C# equivalent: src/StellaOps.Events.Mongo/MongoIndexes.cs
 */

// Switch to the target database (override via --eval "var dbName='custom'" if needed)
const targetDb = typeof dbName !== 'undefined' ? dbName : 'stellaops';
db = db.getSiblingDB(targetDb);

print(`Creating provenance indexes on ${targetDb}.events...`);

// Index 1: Lookup proven events by subject digest + kind
db.events.createIndex(
  {
    "subject.digest.sha256": 1,
@@ -6,11 +26,13 @@ db.events.createIndex(
    "provenance.dsse.rekor.logIndex": 1
  },
  {
    name: "events_by_subject_kind_provenance",
    background: true
  }
);
print(" - events_by_subject_kind_provenance");

// Index 2: Find unproven evidence by kind (compliance gap queries)
db.events.createIndex(
  {
    "kind": 1,
@@ -18,16 +40,50 @@ db.events.createIndex(
    "provenance.dsse.rekor.logIndex": 1
  },
  {
    name: "events_unproven_by_kind",
    background: true
  }
);
print(" - events_unproven_by_kind");

// Index 3: Direct Rekor log index lookup
db.events.createIndex(
  {
    "provenance.dsse.rekor.logIndex": 1
  },
  {
    name: "events_by_rekor_logindex",
    background: true
  }
);
print(" - events_by_rekor_logindex");

// Index 4: Envelope digest lookup (for backfill deduplication)
db.events.createIndex(
  {
    "provenance.dsse.envelopeDigest": 1
  },
  {
    name: "events_by_envelope_digest",
    background: true,
    sparse: true
  }
);
print(" - events_by_envelope_digest");

// Index 5: Timestamp + kind for compliance reporting time ranges
db.events.createIndex(
  {
    "ts": -1,
    "kind": 1,
    "trust.verified": 1
  },
  {
    name: "events_by_ts_kind_verified",
    background: true
  }
);
print(" - events_by_ts_kind_verified");

print("\nProvenance indexes created successfully.");
print("Run 'db.events.getIndexes()' to verify.");
@@ -14,6 +14,15 @@ PROJECTS=(

run_test() {
  local project="$1"
  local extra_props=""

  if [ "${STELLAOPS_ENABLE_CRYPTO_PRO:-""}" = "1" ]; then
    extra_props+=" /p:StellaOpsEnableCryptoPro=true"
  fi

  if [ "${STELLAOPS_ENABLE_PKCS11:-""}" = "1" ]; then
    extra_props+=" /p:StellaOpsEnablePkcs11=true"
  fi

  local safe_name
  safe_name="$(basename "${project%.csproj}")"
  local log_file="${LOG_ROOT}/${safe_name}.log"
@@ -24,7 +33,7 @@ run_test() {
    --nologo \
    --verbosity minimal \
    --results-directory "$LOG_ROOT" \
    --logger "trx;LogFileName=${trx_name}" ${extra_props} | tee -a "$log_file"
}

PROJECT_SUMMARY=()
@@ -8,6 +8,7 @@ Design and maintain deterministic benchmark suites that measure StellaOps perfor
- ImpactIndex/Scheduler/Scanner/Policy Engine workload simulations referenced in tasks.
- Benchmark configuration and warm-up scripts used by DevOps for regression tracking.
- Documentation of benchmark methodology and expected baseline metrics.
- Determinism bench harness lives at `Determinism/` with optional reachability hashing; CI wrapper at `scripts/bench/determinism-run.sh` (threshold via `BENCH_DETERMINISM_THRESHOLD`). Include feeds via `DET_EXTRA_INPUTS`; optional reachability hashes via `DET_REACH_GRAPHS`/`DET_REACH_RUNTIME`.

## Required Reading
- `docs/modules/platform/architecture-overview.md`
@@ -22,6 +22,7 @@ Outputs land in `out/`:
- SBOMs: `inputs/sboms/*.json` (sample SPDX provided)
- VEX: `inputs/vex/*.json` (sample OpenVEX provided)
- Scanner config: `configs/scanners.json` (defaults to built-in mock scanner)
- Sample manifest: `inputs/inputs.sha256` covers the bundled sample SBOM/VEX/config for quick offline verification; regenerate when inputs change.

## Adding real scanners
1. Add an entry to `configs/scanners.json` with `kind: "command"` and a command array, e.g.:
src/Bench/StellaOps.Bench/Determinism/inputs/feeds/README.md · 15 lines · new file
@@ -0,0 +1,15 @@
# Frozen feed bundle placeholder

Place hashed feed bundles here for determinism runs. Example:

```
# build feed bundle (offline)
# touch feed-bundle.tar.gz
sha256sum feed-bundle.tar.gz > feeds.sha256
```

Then run the wrapper with:

```
DET_EXTRA_INPUTS="src/Bench/StellaOps.Bench/Determinism/inputs/feeds/feed-bundle.tar.gz" \
BENCH_DETERMINISM_THRESHOLD=0.95 scripts/bench/determinism-run.sh
```
@@ -0,0 +1,3 @@
577f932bbb00dbd596e46b96d5fbb9561506c7730c097e381a6b34de40402329 inputs/sboms/sample-spdx.json
1b54ce4087800cfe1d5ac439c10a1f131b7476b2093b79d8cd0a29169314291f inputs/vex/sample-openvex.json
38453c9c0e0a90d22d7048d3201bf1b5665eb483e6682db1a7112f8e4f4fa1e6 configs/scanners.json
src/Bench/StellaOps.Bench/Determinism/offline_run.sh · 58 lines · new file
@@ -0,0 +1,58 @@
#!/usr/bin/env bash
set -euo pipefail

# Offline runner for determinism (and optional reachability) benches.
# Usage: ./offline_run.sh [--inputs DIR] [--output DIR] [--runs N] [--threshold FLOAT] [--no-verify]
# Defaults: inputs=offline/inputs, output=offline/results, runs=10, threshold=0.95, verify manifests on.

ROOT="$(cd "$(dirname "$0")" && pwd)"
INPUT_DIR="offline/inputs"
OUTPUT_DIR="offline/results"
RUNS=10
THRESHOLD=0.95
VERIFY=1

while [[ $# -gt 0 ]]; do
  case "$1" in
    --inputs) INPUT_DIR="$2"; shift 2;;
    --output) OUTPUT_DIR="$2"; shift 2;;
    --runs) RUNS="$2"; shift 2;;
    --threshold) THRESHOLD="$2"; shift 2;;
    --no-verify) VERIFY=0; shift 1;;
    *) echo "Unknown arg: $1"; exit 1;;
  esac
done

mkdir -p "$OUTPUT_DIR"
cd "$ROOT"

if [ $VERIFY -eq 1 ]; then
  if [ -f "$INPUT_DIR/inputs.sha256" ]; then
    sha256sum -c "$INPUT_DIR/inputs.sha256"
  fi
  if [ -f "$INPUT_DIR/dataset.sha256" ]; then
    sha256sum -c "$INPUT_DIR/dataset.sha256"
  fi
fi

python run_bench.py \
  --sboms "$INPUT_DIR"/sboms/*.json \
  --vex "$INPUT_DIR"/vex/*.json \
  --config "$INPUT_DIR"/scanners.json \
  --runs "$RUNS" \
  --shuffle \
  --output "$OUTPUT_DIR"

det_rate=$(python -c "import json;print(json.load(open('$OUTPUT_DIR/summary.json'))['determinism_rate'])")
awk -v rate="$det_rate" -v th="$THRESHOLD" 'BEGIN {if (rate+0 < th+0) {printf("determinism_rate %s is below threshold %s\n", rate, th); exit 1}}'

graph_glob="$INPUT_DIR/graphs/*.json"
runtime_glob="$INPUT_DIR/runtime/*.ndjson"
if ls $graph_glob >/dev/null 2>&1; then
  python run_reachability.py \
    --graphs "$graph_glob" \
    --runtime "$runtime_glob" \
    --output "$OUTPUT_DIR"
fi

echo "Offline run complete -> $OUTPUT_DIR"
@@ -904,6 +904,130 @@ internal static class CommandFactory
|
|||||||
});
|
});
|
||||||
|
|
||||||
policy.Add(activate);
|
policy.Add(activate);
|
||||||
|
|
||||||
|
// lint subcommand - validates policy DSL files locally
|
||||||
|
var lint = new Command("lint", "Validate a policy DSL file locally without contacting the backend.");
|
||||||
|
var lintFileArgument = new Argument<string>("file")
|
||||||
|
{
|
||||||
|
Description = "Path to the policy DSL file to validate."
|
||||||
|
};
|
||||||
|
var lintFormatOption = new Option<string?>("--format", new[] { "-f" })
|
||||||
|
{
|
||||||
|
Description = "Output format: table (default), json."
|
||||||
|
};
|
||||||
|
var lintOutputOption = new Option<string?>("--output", new[] { "-o" })
|
||||||
|
{
|
||||||
|
Description = "Write JSON output to the specified file."
|
||||||
|
};
|
||||||
|
|
||||||
|
lint.Add(lintFileArgument);
|
||||||
|
lint.Add(lintFormatOption);
|
||||||
|
lint.Add(lintOutputOption);
|
||||||
|
|
||||||
|
lint.SetAction((parseResult, _) =>
|
||||||
|
{
|
||||||
|
var file = parseResult.GetValue(lintFileArgument) ?? string.Empty;
|
||||||
|
var format = parseResult.GetValue(lintFormatOption);
|
||||||
|
var output = parseResult.GetValue(lintOutputOption);
|
||||||
|
var verbose = parseResult.GetValue(verboseOption);
|
||||||
|
|
||||||
|
return CommandHandlers.HandlePolicyLintAsync(file, format, output, verbose, cancellationToken);
|
||||||
|
});
|
||||||
|
|
||||||
|
policy.Add(lint);
|
||||||
|
|
||||||
|
// edit subcommand - Git-backed DSL file editing with validation and commit
|
||||||
|
var edit = new Command("edit", "Open a policy DSL file in $EDITOR, validate, and optionally commit with SemVer metadata.");
|
||||||
|
var editFileArgument = new Argument<string>("file")
|
||||||
|
{
|
||||||
|
Description = "Path to the policy DSL file to edit."
|
||||||
|
};
|
||||||
|
var editCommitOption = new Option<bool>("--commit", new[] { "-c" })
|
||||||
|
{
|
||||||
|
Description = "Commit changes after successful validation."
|
||||||
|
};
|
||||||
|
var editVersionOption = new Option<string?>("--version", new[] { "-V" })
|
||||||
|
{
|
||||||
|
Description = "SemVer version for commit metadata (e.g. 1.2.0)."
|
||||||
|
};
|
||||||
|
var editMessageOption = new Option<string?>("--message", new[] { "-m" })
|
||||||
|
{
|
||||||
|
Description = "Commit message (auto-generated if not provided)."
|
||||||
|
};
|
||||||
|
var editNoValidateOption = new Option<bool>("--no-validate")
|
||||||
|
{
|
||||||
|
Description = "Skip validation after editing (not recommended)."
|
||||||
|
};
|
||||||
|
|
||||||
|
edit.Add(editFileArgument);
|
||||||
|
edit.Add(editCommitOption);
|
||||||
|
edit.Add(editVersionOption);
|
||||||
|
edit.Add(editMessageOption);
|
||||||
|
edit.Add(editNoValidateOption);
|
||||||
|
|
||||||
|
edit.SetAction((parseResult, _) =>
|
||||||
|
{
|
||||||
|
var file = parseResult.GetValue(editFileArgument) ?? string.Empty;
|
||||||
|
var commit = parseResult.GetValue(editCommitOption);
|
||||||
|
var version = parseResult.GetValue(editVersionOption);
|
||||||
|
var message = parseResult.GetValue(editMessageOption);
|
||||||
|
var noValidate = parseResult.GetValue(editNoValidateOption);
|
||||||
|
var verbose = parseResult.GetValue(verboseOption);
|
||||||
|
|
||||||
|
return CommandHandlers.HandlePolicyEditAsync(file, commit, version, message, noValidate, verbose, cancellationToken);
|
||||||
|
});
|
||||||
|
|
||||||
|
policy.Add(edit);
|
||||||
|
|
||||||
|
// test subcommand - run coverage fixtures against a policy DSL file
|
||||||
|
var test = new Command("test", "Run coverage test fixtures against a policy DSL file.");
|
||||||
|
var testFileArgument = new Argument<string>("file")
|
||||||
|
{
|
||||||
|
Description = "Path to the policy DSL file to test."
|
||||||
|
};
|
||||||
|
var testFixturesOption = new Option<string?>("--fixtures", new[] { "-d" })
|
||||||
|
{
|
||||||
|
Description = "Path to fixtures directory (defaults to tests/policy/<policy-name>/cases)."
|
||||||
|
};
|
||||||
|
var testFilterOption = new Option<string?>("--filter")
|
||||||
|
{
|
||||||
|
Description = "Run only fixtures matching this pattern."
|
||||||
|
};
|
||||||
|
var testFormatOption = new Option<string?>("--format", new[] { "-f" })
|
||||||
|
{
|
||||||
|
Description = "Output format: table (default), json."
|
||||||
|
};
|
||||||
|
var testOutputOption = new Option<string?>("--output", new[] { "-o" })
|
||||||
|
{
|
||||||
|
Description = "Write test results to the specified file."
|
||||||
|
};
|
||||||
|
var testFailFastOption = new Option<bool>("--fail-fast")
|
||||||
|
{
|
||||||
|
Description = "Stop on first test failure."
|
||||||
|
};
|
||||||
|
|
||||||
|
test.Add(testFileArgument);
|
||||||
|
test.Add(testFixturesOption);
|
||||||
|
test.Add(testFilterOption);
|
||||||
|
test.Add(testFormatOption);
|
||||||
|
test.Add(testOutputOption);
|
||||||
|
test.Add(testFailFastOption);
|
||||||
|
|
||||||
|
test.SetAction((parseResult, _) =>
|
||||||
|
{
|
||||||
|
var file = parseResult.GetValue(testFileArgument) ?? string.Empty;
|
||||||
|
var fixtures = parseResult.GetValue(testFixturesOption);
|
||||||
|
var filter = parseResult.GetValue(testFilterOption);
|
||||||
|
var format = parseResult.GetValue(testFormatOption);
|
||||||
|
var output = parseResult.GetValue(testOutputOption);
|
||||||
|
var failFast = parseResult.GetValue(testFailFastOption);
|
||||||
|
var verbose = parseResult.GetValue(verboseOption);
|
||||||
|
|
||||||
|
return CommandHandlers.HandlePolicyTestAsync(file, fixtures, filter, format, output, failFast, verbose, cancellationToken);
|
||||||
|
});
|
||||||
|
|
||||||
|
policy.Add(test);
|
||||||
|
|
||||||
return policy;
|
return policy;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -38,6 +38,8 @@ using StellaOps.Scanner.Analyzers.Lang.Java;
using StellaOps.Scanner.Analyzers.Lang.Node;
using StellaOps.Scanner.Analyzers.Lang.Python;
using StellaOps.Scanner.Analyzers.Lang.Ruby;
using StellaOps.Policy;
using StellaOps.PolicyDsl;

namespace StellaOps.Cli.Commands;

@@ -7978,4 +7980,622 @@ internal static class CommandHandlers
|
|||||||
|
|
||||||
return safe;
|
return safe;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static async Task<int> HandlePolicyLintAsync(
|
||||||
|
string filePath,
|
||||||
|
string? format,
|
||||||
|
string? outputPath,
|
||||||
|
bool verbose,
|
||||||
|
CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
const int ExitSuccess = 0;
|
||||||
|
const int ExitValidationError = 1;
|
||||||
|
const int ExitInputError = 4;
|
||||||
|
|
||||||
|
if (string.IsNullOrWhiteSpace(filePath))
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine("[red]Error:[/] Policy file path is required.");
|
||||||
|
return ExitInputError;
|
||||||
|
}
|
||||||
|
|
||||||
|
var fullPath = Path.GetFullPath(filePath);
|
||||||
|
if (!File.Exists(fullPath))
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[red]Error:[/] Policy file not found: {Markup.Escape(fullPath)}");
|
||||||
|
return ExitInputError;
|
||||||
|
}
|
||||||
|
|
||||||
|
try
|
||||||
|
{
|
||||||
|
var source = await File.ReadAllTextAsync(fullPath, cancellationToken).ConfigureAwait(false);
|
||||||
|
var compiler = new PolicyDsl.PolicyCompiler();
|
||||||
|
var result = compiler.Compile(source);
|
||||||
|
|
||||||
|
var outputFormat = string.Equals(format, "json", StringComparison.OrdinalIgnoreCase) ? "json" : "table";
|
||||||
|
|
||||||
|
var diagnosticsList = new List<Dictionary<string, object?>>();
|
||||||
|
foreach (var d in result.Diagnostics)
|
||||||
|
{
|
||||||
|
diagnosticsList.Add(new Dictionary<string, object?>
|
||||||
|
{
|
||||||
|
["severity"] = d.Severity.ToString(),
|
||||||
|
["code"] = d.Code,
|
||||||
|
["message"] = d.Message,
|
||||||
|
["path"] = d.Path
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
var output = new Dictionary<string, object?>
|
||||||
|
{
|
||||||
|
["file"] = fullPath,
|
||||||
|
["success"] = result.Success,
|
||||||
|
["checksum"] = result.Checksum,
|
||||||
|
["policy_name"] = result.Document?.Name,
|
||||||
|
["syntax"] = result.Document?.Syntax,
|
||||||
|
["rule_count"] = result.Document?.Rules.Length ?? 0,
|
||||||
|
["profile_count"] = result.Document?.Profiles.Length ?? 0,
|
||||||
|
["diagnostics"] = diagnosticsList
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!string.IsNullOrWhiteSpace(outputPath))
|
||||||
|
{
|
||||||
|
var json = JsonSerializer.Serialize(output, new JsonSerializerOptions { WriteIndented = true });
|
||||||
|
await File.WriteAllTextAsync(outputPath, json, cancellationToken).ConfigureAwait(false);
|
||||||
|
if (verbose)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[grey]Output written to {Markup.Escape(outputPath)}[/]");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (outputFormat == "json")
|
||||||
|
{
|
||||||
|
var json = JsonSerializer.Serialize(output, new JsonSerializerOptions { WriteIndented = true });
|
||||||
|
AnsiConsole.WriteLine(json);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// Table format output
|
||||||
|
if (result.Success)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[green]✓[/] Policy [bold]{Markup.Escape(result.Document?.Name ?? "unknown")}[/] is valid.");
|
||||||
|
AnsiConsole.MarkupLine($" Syntax: {Markup.Escape(result.Document?.Syntax ?? "unknown")}");
|
||||||
|
AnsiConsole.MarkupLine($" Rules: {result.Document?.Rules.Length ?? 0}");
|
||||||
|
AnsiConsole.MarkupLine($" Profiles: {result.Document?.Profiles.Length ?? 0}");
|
||||||
|
AnsiConsole.MarkupLine($" Checksum: {Markup.Escape(result.Checksum ?? "N/A")}");
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[red]✗[/] Policy validation failed with {result.Diagnostics.Length} diagnostic(s):");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.Diagnostics.Length > 0)
|
||||||
|
{
|
||||||
|
AnsiConsole.WriteLine();
|
||||||
|
var table = new Table();
|
||||||
|
table.AddColumn("Severity");
|
||||||
|
table.AddColumn("Code");
|
||||||
|
table.AddColumn("Path");
|
||||||
|
table.AddColumn("Message");
|
||||||
|
|
||||||
|
foreach (var diag in result.Diagnostics)
|
||||||
|
{
|
||||||
|
var severityColor = diag.Severity switch
|
||||||
|
{
|
||||||
|
PolicyIssueSeverity.Error => "red",
|
||||||
|
PolicyIssueSeverity.Warning => "yellow",
|
||||||
|
_ => "grey"
|
||||||
|
};
|
||||||
|
|
||||||
|
table.AddRow(
|
||||||
|
$"[{severityColor}]{diag.Severity}[/]",
|
||||||
|
diag.Code ?? "-",
|
||||||
|
diag.Path ?? "-",
|
||||||
|
Markup.Escape(diag.Message));
|
||||||
|
}
|
||||||
|
|
||||||
|
AnsiConsole.Write(table);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result.Success ? ExitSuccess : ExitValidationError;
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}");
|
||||||
|
if (verbose)
|
||||||
|
{
|
||||||
|
AnsiConsole.WriteException(ex);
|
||||||
|
}
|
||||||
|
return ExitInputError;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async Task<int> HandlePolicyEditAsync(
|
||||||
|
string filePath,
|
||||||
|
bool commit,
|
||||||
|
string? version,
|
||||||
|
string? message,
|
||||||
|
bool noValidate,
|
||||||
|
bool verbose,
|
||||||
|
CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
const int ExitSuccess = 0;
|
||||||
|
const int ExitValidationError = 1;
|
||||||
|
const int ExitInputError = 4;
|
||||||
|
const int ExitEditorError = 5;
|
||||||
|
const int ExitGitError = 6;
|
||||||
|
|
||||||
|
if (string.IsNullOrWhiteSpace(filePath))
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine("[red]Error:[/] Policy file path is required.");
|
||||||
|
return ExitInputError;
|
||||||
|
}
|
||||||
|
|
||||||
|
var fullPath = Path.GetFullPath(filePath);
|
||||||
|
var fileExists = File.Exists(fullPath);
|
||||||
|
|
||||||
|
// Determine editor from environment
|
||||||
|
var editor = Environment.GetEnvironmentVariable("EDITOR")
|
||||||
|
?? Environment.GetEnvironmentVariable("VISUAL")
|
||||||
|
?? (OperatingSystem.IsWindows() ? "notepad" : "vi");
|
||||||
|
|
||||||
|
if (verbose)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[grey]Using editor: {Markup.Escape(editor)}[/]");
|
||||||
|
AnsiConsole.MarkupLine($"[grey]File path: {Markup.Escape(fullPath)}[/]");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read original content for change detection
|
||||||
|
string? originalContent = null;
|
||||||
|
if (fileExists)
|
||||||
|
{
|
||||||
|
originalContent = await File.ReadAllTextAsync(fullPath, cancellationToken).ConfigureAwait(false);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Launch editor
|
||||||
|
try
|
||||||
|
{
|
||||||
|
var startInfo = new ProcessStartInfo
|
||||||
|
{
|
||||||
|
FileName = editor,
|
||||||
|
Arguments = $"\"{fullPath}\"",
|
||||||
|
UseShellExecute = true,
|
||||||
|
CreateNoWindow = false
|
||||||
|
};
|
||||||
|
|
||||||
|
using var process = Process.Start(startInfo);
|
||||||
|
if (process == null)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[red]Error:[/] Failed to start editor '{Markup.Escape(editor)}'.");
|
||||||
|
return ExitEditorError;
|
||||||
|
}
|
||||||
|
|
||||||
|
await process.WaitForExitAsync(cancellationToken).ConfigureAwait(false);
|
||||||
|
|
||||||
|
if (process.ExitCode != 0)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[yellow]Warning:[/] Editor exited with code {process.ExitCode}.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[red]Error:[/] Failed to launch editor: {Markup.Escape(ex.Message)}");
|
||||||
|
if (verbose)
|
||||||
|
{
|
||||||
|
AnsiConsole.WriteException(ex);
|
||||||
|
}
|
||||||
|
return ExitEditorError;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if file was created/modified
|
||||||
|
if (!File.Exists(fullPath))
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine("[yellow]No file created. Exiting.[/]");
|
||||||
|
return ExitSuccess;
|
||||||
|
}
|
||||||
|
|
||||||
|
var newContent = await File.ReadAllTextAsync(fullPath, cancellationToken).ConfigureAwait(false);
|
||||||
|
if (originalContent != null && originalContent == newContent)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine("[grey]No changes detected.[/]");
|
||||||
|
return ExitSuccess;
|
||||||
|
}
|
||||||
|
|
||||||
|
AnsiConsole.MarkupLine("[green]File modified.[/]");
|
||||||
|
|
||||||
|
// Validate unless skipped
|
||||||
|
if (!noValidate)
|
||||||
|
{
|
||||||
|
var compiler = new PolicyDsl.PolicyCompiler();
|
||||||
|
var result = compiler.Compile(newContent);
|
||||||
|
|
||||||
|
if (!result.Success)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[red]✗[/] Validation failed with {result.Diagnostics.Length} diagnostic(s):");
|
||||||
|
var table = new Table();
|
||||||
|
table.AddColumn("Severity");
|
||||||
|
table.AddColumn("Code");
|
||||||
|
table.AddColumn("Message");
|
||||||
|
|
||||||
|
foreach (var diag in result.Diagnostics)
|
||||||
|
{
|
||||||
|
var color = diag.Severity == PolicyIssueSeverity.Error ? "red" : "yellow";
|
||||||
|
table.AddRow($"[{color}]{diag.Severity}[/]", diag.Code ?? "-", Markup.Escape(diag.Message));
|
||||||
|
}
|
||||||
|
|
||||||
|
AnsiConsole.Write(table);
|
||||||
|
AnsiConsole.MarkupLine("[yellow]Changes saved but not committed due to validation errors.[/]");
|
||||||
|
return ExitValidationError;
|
||||||
|
}
|
||||||
|
|
||||||
|
AnsiConsole.MarkupLine($"[green]✓[/] Policy [bold]{Markup.Escape(result.Document?.Name ?? "unknown")}[/] is valid.");
|
||||||
|
AnsiConsole.MarkupLine($" Checksum: {Markup.Escape(result.Checksum ?? "N/A")}");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit if requested
|
||||||
|
if (commit)
|
||||||
|
{
|
||||||
|
var gitDir = FindGitDirectory(fullPath);
|
||||||
|
if (gitDir == null)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine("[red]Error:[/] Not inside a git repository. Cannot commit.");
|
||||||
|
return ExitGitError;
|
||||||
|
}
|
||||||
|
|
||||||
|
var relativePath = Path.GetRelativePath(gitDir, fullPath);
|
||||||
|
var commitMessage = message ?? GeneratePolicyCommitMessage(relativePath, version);
|
||||||
|
|
||||||
|
try
|
||||||
|
{
|
||||||
|
// Stage the file
|
||||||
|
var addResult = await RunGitCommandAsync(gitDir, $"add \"{relativePath}\"", cancellationToken).ConfigureAwait(false);
|
||||||
|
if (addResult.ExitCode != 0)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[red]Error:[/] git add failed: {Markup.Escape(addResult.Output)}");
|
||||||
|
return ExitGitError;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit with SemVer metadata in trailer
|
||||||
|
var trailers = new List<string>();
|
||||||
|
if (!string.IsNullOrWhiteSpace(version))
|
||||||
|
{
|
||||||
|
trailers.Add($"Policy-Version: {version}");
|
||||||
|
}
|
||||||
|
|
||||||
|
var trailerArgs = trailers.Count > 0
|
||||||
|
? string.Join(" ", trailers.Select(t => $"--trailer \"{t}\""))
|
||||||
|
: string.Empty;
|
||||||
|
|
||||||
|
var commitResult = await RunGitCommandAsync(gitDir, $"commit -m \"{commitMessage}\" {trailerArgs}", cancellationToken).ConfigureAwait(false);
|
||||||
|
if (commitResult.ExitCode != 0)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[red]Error:[/] git commit failed: {Markup.Escape(commitResult.Output)}");
|
||||||
|
return ExitGitError;
|
||||||
|
}
|
||||||
|
|
||||||
|
AnsiConsole.MarkupLine($"[green]✓[/] Committed: {Markup.Escape(commitMessage)}");
|
||||||
|
if (!string.IsNullOrWhiteSpace(version))
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($" Policy-Version: {Markup.Escape(version)}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[red]Error:[/] Git operation failed: {Markup.Escape(ex.Message)}");
|
||||||
|
if (verbose)
|
||||||
|
{
|
||||||
|
AnsiConsole.WriteException(ex);
|
||||||
|
}
|
||||||
|
return ExitGitError;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ExitSuccess;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async Task<int> HandlePolicyTestAsync(
|
||||||
|
string filePath,
|
||||||
|
string? fixturesPath,
|
||||||
|
string? filter,
|
||||||
|
string? format,
|
||||||
|
string? outputPath,
|
||||||
|
bool failFast,
|
||||||
|
bool verbose,
|
||||||
|
CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
const int ExitSuccess = 0;
|
||||||
|
const int ExitTestFailure = 1;
|
||||||
|
const int ExitInputError = 4;
|
||||||
|
|
||||||
|
if (string.IsNullOrWhiteSpace(filePath))
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine("[red]Error:[/] Policy file path is required.");
|
||||||
|
return ExitInputError;
|
||||||
|
}
|
||||||
|
|
||||||
|
var fullPath = Path.GetFullPath(filePath);
|
||||||
|
if (!File.Exists(fullPath))
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[red]Error:[/] Policy file not found: {Markup.Escape(fullPath)}");
|
||||||
|
return ExitInputError;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compile the policy first
|
||||||
|
var source = await File.ReadAllTextAsync(fullPath, cancellationToken).ConfigureAwait(false);
|
||||||
|
var compiler = new PolicyDsl.PolicyCompiler();
|
||||||
|
var compileResult = compiler.Compile(source);
|
||||||
|
|
||||||
|
if (!compileResult.Success)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[red]Error:[/] Policy compilation failed. Run 'stella policy lint' for details.");
|
||||||
|
return ExitInputError;
|
||||||
|
}
|
||||||
|
|
||||||
|
var policyName = compileResult.Document?.Name ?? Path.GetFileNameWithoutExtension(fullPath);
|
||||||
|
|
||||||
|
// Determine fixtures directory
|
||||||
|
var fixturesDir = fixturesPath;
|
||||||
|
if (string.IsNullOrWhiteSpace(fixturesDir))
|
||||||
|
{
|
||||||
|
var policyDir = Path.GetDirectoryName(fullPath) ?? ".";
|
||||||
|
fixturesDir = Path.Combine(policyDir, "..", "..", "tests", "policy", policyName, "cases");
|
||||||
|
if (!Directory.Exists(fixturesDir))
|
||||||
|
{
|
||||||
|
// Try relative to current directory
|
||||||
|
fixturesDir = Path.Combine("tests", "policy", policyName, "cases");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fixturesDir = Path.GetFullPath(fixturesDir);
|
||||||
|
|
||||||
|
if (!Directory.Exists(fixturesDir))
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[yellow]No fixtures directory found at {Markup.Escape(fixturesDir)}[/]");
|
||||||
|
AnsiConsole.MarkupLine("[grey]Create test fixtures as JSON files in this directory.[/]");
|
||||||
|
return ExitSuccess;
|
||||||
|
}
|
||||||
|
|
||||||
|
var fixtureFiles = Directory.GetFiles(fixturesDir, "*.json", SearchOption.AllDirectories);
|
||||||
|
if (!string.IsNullOrWhiteSpace(filter))
|
||||||
|
{
|
||||||
|
fixtureFiles = fixtureFiles.Where(f => Path.GetFileName(f).Contains(filter, StringComparison.OrdinalIgnoreCase)).ToArray();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fixtureFiles.Length == 0)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[yellow]No fixture files found in {Markup.Escape(fixturesDir)}[/]");
|
||||||
|
return ExitSuccess;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (verbose)
|
||||||
|
{
|
||||||
|
AnsiConsole.MarkupLine($"[grey]Found {fixtureFiles.Length} fixture file(s)[/]");
|
||||||
|
}
|
||||||
|
|
||||||
|
var outputFormat = string.Equals(format, "json", StringComparison.OrdinalIgnoreCase) ? "json" : "table";
|
||||||
|
var results = new List<Dictionary<string, object?>>();
|
||||||
|
var passed = 0;
|
||||||
|
var failed = 0;
|
||||||
|
var skipped = 0;
|
||||||
|
|
||||||
|
foreach (var fixtureFile in fixtureFiles)
|
||||||
|
{
|
||||||
|
var fixtureName = Path.GetRelativePath(fixturesDir, fixtureFile);
|
||||||
|
|
||||||
|
try
|
||||||
|
{
|
||||||
|
var fixtureJson = await File.ReadAllTextAsync(fixtureFile, cancellationToken).ConfigureAwait(false);
|
||||||
|
var fixture = JsonSerializer.Deserialize<PolicyTestFixture>(fixtureJson, new JsonSerializerOptions { PropertyNameCaseInsensitive = true });
|
||||||
|
|
||||||
|
if (fixture == null)
|
||||||
|
{
|
||||||
|
results.Add(new Dictionary<string, object?>
|
||||||
|
{
|
||||||
|
["fixture"] = fixtureName,
|
||||||
|
["status"] = "skipped",
|
||||||
|
["reason"] = "Invalid fixture format"
|
||||||
|
});
|
||||||
|
skipped++;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run the test case (simplified evaluation stub)
|
||||||
|
var testPassed = RunPolicyTestCase(compileResult.Document!, fixture, verbose);
|
||||||
|
|
||||||
|
results.Add(new Dictionary<string, object?>
|
||||||
|
{
|
||||||
|
["fixture"] = fixtureName,
|
||||||
|
["status"] = testPassed ? "passed" : "failed",
|
||||||
|
["expected_outcome"] = fixture.ExpectedOutcome,
|
                    ["description"] = fixture.Description
                });

                if (testPassed)
                {
                    passed++;
                }
                else
                {
                    failed++;
                    if (failFast)
                    {
                        AnsiConsole.MarkupLine($"[red]✗[/] {Markup.Escape(fixtureName)} - Stopping on first failure.");
                        break;
                    }
                }
            }
            catch (Exception ex)
            {
                results.Add(new Dictionary<string, object?>
                {
                    ["fixture"] = fixtureName,
                    ["status"] = "error",
                    ["reason"] = ex.Message
                });
                failed++;

                if (failFast)
                {
                    break;
                }
            }
        }

        // Output results
        var summary = new Dictionary<string, object?>
        {
            ["policy"] = policyName,
            ["policy_checksum"] = compileResult.Checksum,
            ["fixtures_dir"] = fixturesDir,
            ["total"] = results.Count,
            ["passed"] = passed,
            ["failed"] = failed,
            ["skipped"] = skipped,
            ["results"] = results
        };

        if (!string.IsNullOrWhiteSpace(outputPath))
        {
            var json = JsonSerializer.Serialize(summary, new JsonSerializerOptions { WriteIndented = true });
            await File.WriteAllTextAsync(outputPath, json, cancellationToken).ConfigureAwait(false);
            if (verbose)
            {
                AnsiConsole.MarkupLine($"[grey]Output written to {Markup.Escape(outputPath)}[/]");
            }
        }

        if (outputFormat == "json")
        {
            var json = JsonSerializer.Serialize(summary, new JsonSerializerOptions { WriteIndented = true });
            AnsiConsole.WriteLine(json);
        }
        else
        {
            AnsiConsole.MarkupLine($"\n[bold]Test Results for {Markup.Escape(policyName)}[/]\n");

            var table = new Table();
            table.AddColumn("Fixture");
            table.AddColumn("Status");
            table.AddColumn("Description");

            foreach (var r in results)
            {
                var status = r["status"]?.ToString() ?? "unknown";
                var statusColor = status switch
                {
                    "passed" => "green",
                    "failed" => "red",
                    "skipped" => "yellow",
                    _ => "grey"
                };
                var statusIcon = status switch
                {
                    "passed" => "✓",
                    "failed" => "✗",
                    "skipped" => "○",
                    _ => "?"
                };

                table.AddRow(
                    Markup.Escape(r["fixture"]?.ToString() ?? "-"),
                    $"[{statusColor}]{statusIcon} {status}[/]",
                    Markup.Escape(r["description"]?.ToString() ?? r["reason"]?.ToString() ?? "-"));
            }

            AnsiConsole.Write(table);
            AnsiConsole.WriteLine();
            AnsiConsole.MarkupLine($"[bold]Summary:[/] {passed} passed, {failed} failed, {skipped} skipped");
        }

        return failed > 0 ? ExitTestFailure : ExitSuccess;
    }

    private static string? FindGitDirectory(string startPath)
    {
        var dir = Path.GetDirectoryName(startPath);
        while (!string.IsNullOrEmpty(dir))
        {
            if (Directory.Exists(Path.Combine(dir, ".git")))
            {
                return dir;
            }
            dir = Path.GetDirectoryName(dir);
        }
        return null;
    }

    private static string GeneratePolicyCommitMessage(string relativePath, string? version)
    {
        var fileName = Path.GetFileNameWithoutExtension(relativePath);
        var versionSuffix = !string.IsNullOrWhiteSpace(version) ? $" (v{version})" : "";
        return $"policy: update {fileName}{versionSuffix}";
    }

    private static async Task<(int ExitCode, string Output)> RunGitCommandAsync(string workingDir, string arguments, CancellationToken cancellationToken)
    {
        var startInfo = new ProcessStartInfo
        {
            FileName = "git",
            Arguments = arguments,
            WorkingDirectory = workingDir,
            UseShellExecute = false,
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            CreateNoWindow = true
        };

        using var process = new Process { StartInfo = startInfo };
        var outputBuilder = new StringBuilder();
        var errorBuilder = new StringBuilder();

        process.OutputDataReceived += (_, e) => { if (e.Data != null) outputBuilder.AppendLine(e.Data); };
        process.ErrorDataReceived += (_, e) => { if (e.Data != null) errorBuilder.AppendLine(e.Data); };

        process.Start();
        process.BeginOutputReadLine();
        process.BeginErrorReadLine();

        await process.WaitForExitAsync(cancellationToken).ConfigureAwait(false);

        var output = outputBuilder.ToString();
        var error = errorBuilder.ToString();
        return (process.ExitCode, string.IsNullOrWhiteSpace(error) ? output : error);
    }

    private static bool RunPolicyTestCase(PolicyDsl.PolicyIrDocument document, PolicyTestFixture fixture, bool verbose)
    {
        // Simplified test evaluation - in production this would use PolicyEvaluator
        // For now, just check that the fixture structure is valid and expected outcome is defined
        if (string.IsNullOrWhiteSpace(fixture.ExpectedOutcome))
        {
            return false;
        }

        // Basic validation that the policy has rules that could match the fixture's scenario
        if (document.Rules.Length == 0)
        {
            return fixture.ExpectedOutcome.Equals("pass", StringComparison.OrdinalIgnoreCase);
        }

        // Stub: In full implementation, this would:
        // 1. Build evaluation context from fixture.Input
        // 2. Run PolicyEvaluator.Evaluate(document, context)
        // 3. Compare results to fixture.ExpectedOutcome and fixture.ExpectedFindings

        if (verbose)
        {
            AnsiConsole.MarkupLine($"[grey]  Evaluating fixture against {document.Rules.Length} rule(s)[/]");
        }

        // For now, assume pass if expected_outcome is defined
        return true;
    }

    private sealed class PolicyTestFixture
    {
        public string? Description { get; set; }
        public string? ExpectedOutcome { get; set; }
        public JsonElement? Input { get; set; }
        public JsonElement? ExpectedFindings { get; set; }
    }
}
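A hedged sketch of how the RunPolicyTestCase stub above could be completed, following its own three-step TODO; BuildContextFromFixture and the Outcome property are assumed names, not APIs introduced by this commit:

    private static bool RunPolicyTestCase(PolicyDsl.PolicyIrDocument document, PolicyTestFixture fixture, bool verbose)
    {
        // 1. Build an evaluation context from the fixture input (hypothetical helper).
        var context = BuildContextFromFixture(fixture.Input);

        // 2. Run the evaluator against the compiled IR document.
        var result = PolicyEvaluator.Evaluate(document, context);

        if (verbose)
        {
            AnsiConsole.MarkupLine($"[grey]  Evaluated against {document.Rules.Length} rule(s)[/]");
        }

        // 3. Compare the outcome with the fixture's expectation (Outcome is an assumed member).
        return string.Equals(result.Outcome, fixture.ExpectedOutcome, StringComparison.OrdinalIgnoreCase);
    }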
@@ -54,6 +54,8 @@
    <ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj" />
    <ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Surface.Env/StellaOps.Scanner.Surface.Env.csproj" />
    <ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/StellaOps.Scanner.Surface.Validation.csproj" />
+   <ProjectReference Include="../../Policy/StellaOps.PolicyDsl/StellaOps.PolicyDsl.csproj" />
+   <ProjectReference Include="../../Policy/__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />
  </ItemGroup>

  <ItemGroup Condition="'$(StellaOpsEnableCryptoPro)' == 'true'">
@@ -1,5 +1,6 @@
using System;
using System.Collections.Immutable;
+using StellaOps.PolicyDsl;

namespace StellaOps.Policy.Engine.Compilation;

@@ -3,7 +3,7 @@ using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Policy;
-using StellaOps.Policy.Engine.Compilation;
+using StellaOps.PolicyDsl;

namespace StellaOps.Policy.Engine.Evaluation;

@@ -11,13 +11,13 @@ internal sealed record PolicyEvaluationRequest(
    PolicyIrDocument Document,
    PolicyEvaluationContext Context);

internal sealed record PolicyEvaluationContext(
    PolicyEvaluationSeverity Severity,
    PolicyEvaluationEnvironment Environment,
    PolicyEvaluationAdvisory Advisory,
    PolicyEvaluationVexEvidence Vex,
    PolicyEvaluationSbom Sbom,
    PolicyEvaluationExceptions Exceptions);

internal sealed record PolicyEvaluationSeverity(string Normalized, decimal? Score = null);

@@ -43,28 +43,28 @@ internal sealed record PolicyEvaluationVexStatement(
    string StatementId,
    DateTimeOffset? Timestamp = null);

internal sealed record PolicyEvaluationSbom(
    ImmutableHashSet<string> Tags,
    ImmutableArray<PolicyEvaluationComponent> Components)
{
    public PolicyEvaluationSbom(ImmutableHashSet<string> Tags)
        : this(Tags, ImmutableArray<PolicyEvaluationComponent>.Empty)
    {
    }

    public static readonly PolicyEvaluationSbom Empty = new(
        ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase),
        ImmutableArray<PolicyEvaluationComponent>.Empty);

    public bool HasTag(string tag) => Tags.Contains(tag);
}

internal sealed record PolicyEvaluationComponent(
    string Name,
    string Version,
    string Type,
    string? Purl,
    ImmutableDictionary<string, string> Metadata);

internal sealed record PolicyEvaluationResult(
    bool Matched,
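For reference, a minimal sketch of how the SBOM evidence record above behaves; the tag values are made up for illustration, and the explicit comparer mirrors the one used by PolicyEvaluationSbom.Empty:

    var sbom = new PolicyEvaluationSbom(
        ImmutableHashSet.Create(StringComparer.OrdinalIgnoreCase, "runtime", "third-party"),
        ImmutableArray<PolicyEvaluationComponent>.Empty);

    var hasRuntime = sbom.HasTag("Runtime");   // true - the set compares tags case-insensitively
    var hasDev = sbom.HasTag("dev");           // false - tag was never attached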
@@ -4,7 +4,7 @@ using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using StellaOps.Policy;
-using StellaOps.Policy.Engine.Compilation;
+using StellaOps.PolicyDsl;

namespace StellaOps.Policy.Engine.Evaluation;

@@ -3,7 +3,7 @@ using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
-using StellaOps.Policy.Engine.Compilation;
+using StellaOps.PolicyDsl;

namespace StellaOps.Policy.Engine.Evaluation;

@@ -98,20 +98,20 @@ internal sealed class PolicyExpressionEvaluator
            return sbom.Get(member.Member);
        }

        if (raw is ComponentScope componentScope)
        {
            return componentScope.Get(member.Member);
        }

        if (raw is RubyComponentScope rubyScope)
        {
            return rubyScope.Get(member.Member);
        }

        if (raw is ImmutableDictionary<string, object?> dict && dict.TryGetValue(member.Member, out var value))
        {
            return new EvaluationValue(value);
        }

        if (raw is PolicyEvaluationVexStatement stmt)
        {
@@ -139,51 +139,51 @@ internal sealed class PolicyExpressionEvaluator
            }
        }

        if (invocation.Target is PolicyMemberAccessExpression member)
        {
            var targetValue = Evaluate(member.Target, scope);
            var targetRaw = targetValue.Raw;
            if (targetRaw is RubyComponentScope rubyScope)
            {
                return rubyScope.Invoke(member.Member, invocation.Arguments, scope, this);
            }

            if (targetRaw is ComponentScope componentScope)
            {
                return componentScope.Invoke(member.Member, invocation.Arguments, scope, this);
            }

            if (member.Target is PolicyIdentifierExpression root)
            {
                if (root.Name == "vex" && targetRaw is VexScope vexScope)
                {
                    return member.Member switch
                    {
                        "any" => new EvaluationValue(vexScope.Any(invocation.Arguments, scope)),
                        "latest" => new EvaluationValue(vexScope.Latest()),
                        _ => EvaluationValue.Null,
                    };
                }

                if (root.Name == "sbom" && targetRaw is SbomScope sbomScope)
                {
                    return member.Member switch
                    {
                        "has_tag" => sbomScope.HasTag(invocation.Arguments, scope, this),
                        "any_component" => sbomScope.AnyComponent(invocation.Arguments, scope, this),
                        _ => EvaluationValue.Null,
                    };
                }

                if (root.Name == "advisory" && targetRaw is AdvisoryScope advisoryScope)
                {
                    return advisoryScope.Invoke(member.Member, invocation.Arguments, scope, this);
                }
            }
        }

        return EvaluationValue.Null;
    }

    private EvaluationValue EvaluateIndexer(PolicyIndexerExpression indexer, EvaluationScope scope)
    {
@@ -442,322 +442,322 @@ internal sealed class PolicyExpressionEvaluator
            this.sbom = sbom;
        }

        public EvaluationValue Get(string member)
        {
            if (member.Equals("tags", StringComparison.OrdinalIgnoreCase))
            {
                return new EvaluationValue(sbom.Tags.ToImmutableArray<object?>());
            }

            if (member.Equals("components", StringComparison.OrdinalIgnoreCase))
            {
                return new EvaluationValue(sbom.Components
                    .Select(component => (object?)new ComponentScope(component))
                    .ToImmutableArray());
            }

            return EvaluationValue.Null;
        }

        public EvaluationValue HasTag(ImmutableArray<PolicyExpression> arguments, EvaluationScope scope, PolicyExpressionEvaluator evaluator)
        {
            var tag = arguments.Length > 0 ? evaluator.Evaluate(arguments[0], scope).AsString() : null;
            if (string.IsNullOrWhiteSpace(tag))
            {
                return EvaluationValue.False;
            }

            return new EvaluationValue(sbom.HasTag(tag!));
        }

        public EvaluationValue AnyComponent(ImmutableArray<PolicyExpression> arguments, EvaluationScope scope, PolicyExpressionEvaluator evaluator)
        {
            if (arguments.Length == 0 || sbom.Components.IsDefaultOrEmpty)
            {
                return EvaluationValue.False;
            }

            var predicate = arguments[0];
            foreach (var component in sbom.Components)
            {
                var locals = new Dictionary<string, object?>(StringComparer.OrdinalIgnoreCase)
                {
                    ["component"] = new ComponentScope(component),
                };

                if (component.Type.Equals("gem", StringComparison.OrdinalIgnoreCase))
                {
                    locals["ruby"] = new RubyComponentScope(component);
                }

                var nestedScope = EvaluationScope.FromLocals(scope.Globals, locals);
                if (evaluator.EvaluateBoolean(predicate, nestedScope))
                {
                    return EvaluationValue.True;
                }
            }

            return EvaluationValue.False;
        }
    }

    private sealed class ComponentScope
    {
        private readonly PolicyEvaluationComponent component;

        public ComponentScope(PolicyEvaluationComponent component)
        {
            this.component = component;
        }

        public EvaluationValue Get(string member)
        {
            return member.ToLowerInvariant() switch
            {
                "name" => new EvaluationValue(component.Name),
                "version" => new EvaluationValue(component.Version),
                "type" => new EvaluationValue(component.Type),
                "purl" => new EvaluationValue(component.Purl),
                "metadata" => new EvaluationValue(component.Metadata),
                _ => component.Metadata.TryGetValue(member, out var value)
                    ? new EvaluationValue(value)
                    : EvaluationValue.Null,
            };
        }

        public EvaluationValue Invoke(string member, ImmutableArray<PolicyExpression> arguments, EvaluationScope scope, PolicyExpressionEvaluator evaluator)
        {
            if (member.Equals("has_metadata", StringComparison.OrdinalIgnoreCase))
            {
                var key = arguments.Length > 0 ? evaluator.Evaluate(arguments[0], scope).AsString() : null;
                if (string.IsNullOrWhiteSpace(key))
                {
                    return EvaluationValue.False;
                }

                return new EvaluationValue(component.Metadata.ContainsKey(key!));
            }

            return EvaluationValue.Null;
        }
    }

    private sealed class RubyComponentScope
    {
        private readonly PolicyEvaluationComponent component;
        private readonly ImmutableHashSet<string> groups;

        public RubyComponentScope(PolicyEvaluationComponent component)
        {
            this.component = component;
            groups = ParseGroups(component.Metadata);
        }

        public EvaluationValue Get(string member)
        {
            return member.ToLowerInvariant() switch
            {
                "groups" => new EvaluationValue(groups.Select(value => (object?)value).ToImmutableArray()),
                "declaredonly" => new EvaluationValue(IsDeclaredOnly()),
                "source" => new EvaluationValue(GetSource() ?? string.Empty),
                _ => component.Metadata.TryGetValue(member, out var value)
                    ? new EvaluationValue(value)
                    : EvaluationValue.Null,
            };
        }

        public EvaluationValue Invoke(string member, ImmutableArray<PolicyExpression> arguments, EvaluationScope scope, PolicyExpressionEvaluator evaluator)
        {
            switch (member.ToLowerInvariant())
            {
                case "group":
                {
                    var name = arguments.Length > 0 ? evaluator.Evaluate(arguments[0], scope).AsString() : null;
                    return new EvaluationValue(name is not null && groups.Contains(name));
                }
                case "groups":
                    return new EvaluationValue(groups.Select(value => (object?)value).ToImmutableArray());
                case "declared_only":
                    return new EvaluationValue(IsDeclaredOnly());
                case "source":
                {
                    if (arguments.Length == 0)
                    {
                        return new EvaluationValue(GetSource() ?? string.Empty);
                    }

                    var requested = evaluator.Evaluate(arguments[0], scope).AsString();
                    if (string.IsNullOrWhiteSpace(requested))
                    {
                        return EvaluationValue.False;
                    }

                    var kind = GetSourceKind();
                    return new EvaluationValue(string.Equals(kind, requested, StringComparison.OrdinalIgnoreCase));
                }
                case "capability":
                {
                    var name = arguments.Length > 0 ? evaluator.Evaluate(arguments[0], scope).AsString() : null;
                    return new EvaluationValue(HasCapability(name));
                }
                case "capability_any":
                {
                    var capabilities = EvaluateAsStringSet(arguments, scope, evaluator);
                    return new EvaluationValue(capabilities.Any(HasCapability));
                }
                default:
                    return EvaluationValue.Null;
            }
        }

        private bool HasCapability(string? name)
        {
            if (string.IsNullOrWhiteSpace(name))
            {
                return false;
            }

            var normalized = name.Trim();
            if (normalized.Length == 0)
            {
                return false;
            }

            if (component.Metadata.TryGetValue($"capability.{normalized}", out var value))
            {
                return IsTruthy(value);
            }

            if (normalized.StartsWith("scheduler.", StringComparison.OrdinalIgnoreCase))
            {
                var group = normalized.Substring("scheduler.".Length);
                var schedulerList = component.Metadata.TryGetValue("capability.scheduler", out var listValue)
                    ? listValue
                    : null;
                return ContainsDelimitedValue(schedulerList, group);
            }

            if (normalized.Equals("scheduler", StringComparison.OrdinalIgnoreCase))
            {
                var schedulerList = component.Metadata.TryGetValue("capability.scheduler", out var listValue)
                    ? listValue
                    : null;
                return !string.IsNullOrWhiteSpace(schedulerList);
            }

            return false;
        }

        private bool IsDeclaredOnly()
        {
            return component.Metadata.TryGetValue("declaredOnly", out var value) && IsTruthy(value);
        }

        private string? GetSource()
        {
            return component.Metadata.TryGetValue("source", out var value) ? value : null;
        }

        private string? GetSourceKind()
        {
            var source = GetSource();
            if (string.IsNullOrWhiteSpace(source))
            {
                return null;
            }

            source = source.Trim();
            if (source.StartsWith("git:", StringComparison.OrdinalIgnoreCase))
            {
                return "git";
            }

            if (source.StartsWith("path:", StringComparison.OrdinalIgnoreCase))
            {
                return "path";
            }

            if (source.StartsWith("vendor-cache", StringComparison.OrdinalIgnoreCase))
            {
                return "vendor-cache";
            }

            if (source.StartsWith("http://", StringComparison.OrdinalIgnoreCase)
                || source.StartsWith("https://", StringComparison.OrdinalIgnoreCase))
            {
                return "registry";
            }

            return source;
        }

        private static ImmutableHashSet<string> ParseGroups(ImmutableDictionary<string, string> metadata)
        {
            if (!metadata.TryGetValue("groups", out var value) || string.IsNullOrWhiteSpace(value))
            {
                return ImmutableHashSet<string>.Empty;
            }

            var groups = value
                .Split(';', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
                .Where(static g => !string.IsNullOrWhiteSpace(g))
                .Select(static g => g.Trim())
                .ToImmutableHashSet(StringComparer.OrdinalIgnoreCase);

            return groups;
        }

        private static bool ContainsDelimitedValue(string? delimited, string value)
        {
            if (string.IsNullOrWhiteSpace(delimited) || string.IsNullOrWhiteSpace(value))
            {
                return false;
            }

            return delimited
                .Split(';', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
                .Any(entry => entry.Equals(value, StringComparison.OrdinalIgnoreCase));
        }

        private static bool IsTruthy(string? value)
        {
            return value is not null
                && (value.Equals("true", StringComparison.OrdinalIgnoreCase)
                    || value.Equals("1", StringComparison.OrdinalIgnoreCase)
                    || value.Equals("yes", StringComparison.OrdinalIgnoreCase));
        }

        private static ImmutableHashSet<string> EvaluateAsStringSet(ImmutableArray<PolicyExpression> arguments, EvaluationScope scope, PolicyExpressionEvaluator evaluator)
        {
            var builder = ImmutableHashSet.CreateBuilder<string>(StringComparer.OrdinalIgnoreCase);
            foreach (var argument in arguments)
            {
                var evaluated = evaluator.Evaluate(argument, scope).Raw;
                switch (evaluated)
                {
                    case ImmutableArray<object?> array:
                        foreach (var item in array)
                        {
                            if (item is string text && !string.IsNullOrWhiteSpace(text))
                            {
                                builder.Add(text.Trim());
                            }
                        }

                        break;
                    case string text when !string.IsNullOrWhiteSpace(text):
                        builder.Add(text.Trim());
                        break;
                }
            }

            return builder.ToImmutable();
        }
    }

    private sealed class VexScope
    {
        private readonly PolicyExpressionEvaluator evaluator;
        private readonly PolicyEvaluationVexEvidence vex;

        public VexScope(PolicyExpressionEvaluator evaluator, PolicyEvaluationVexEvidence vex)
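For orientation, a sketch of the component metadata shape that the RubyComponentScope above reads; the keys follow the conventions in ParseGroups, HasCapability, IsDeclaredOnly, and GetSourceKind, while the sample values and the example.org URL are illustrative only:

    var metadata = ImmutableDictionary.CreateRange(new Dictionary<string, string>
    {
        ["groups"] = "default;test",                      // ';'-delimited, compared case-insensitively
        ["declaredOnly"] = "true",                        // IsTruthy accepts "true", "1", or "yes"
        ["capability.scheduler"] = "cron;async",          // enables capability("scheduler") and capability("scheduler.cron")
        ["source"] = "git:https://example.org/repo.git",  // GetSourceKind maps the "git:" prefix to "git"
    });
    var component = new PolicyEvaluationComponent("rails", "7.1.0", "gem", null, metadata);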
@@ -1,14 +1,15 @@
using System.IO;
using Microsoft.Extensions.Options;
using NetEscapades.Configuration.Yaml;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.Client;
using StellaOps.Auth.ServerIntegration;
using StellaOps.Configuration;
using StellaOps.Policy.Engine.Hosting;
using StellaOps.Policy.Engine.Options;
using StellaOps.Policy.Engine.Compilation;
using StellaOps.Policy.Engine.Endpoints;
+using StellaOps.PolicyDsl;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.Engine.Workers;
using StellaOps.Policy.Engine.Streaming;
@@ -33,17 +34,17 @@ var policyEngineActivationConfigFiles = new[]
    "policy-engine.activation.yaml",
    "policy-engine.activation.local.yaml"
};

builder.Logging.ClearProviders();
builder.Logging.AddConsole();

builder.Configuration.AddStellaOpsDefaults(options =>
{
    options.BasePath = builder.Environment.ContentRootPath;
    options.EnvironmentPrefix = "STELLAOPS_POLICY_ENGINE_";
    options.ConfigureBuilder = configurationBuilder =>
    {
        var contentRoot = builder.Environment.ContentRootPath;
        foreach (var relative in policyEngineConfigFiles)
        {
            var path = Path.Combine(contentRoot, relative);
@@ -59,12 +60,12 @@ builder.Configuration.AddStellaOpsDefaults(options =>
});

var bootstrap = StellaOpsConfigurationBootstrapper.Build<PolicyEngineOptions>(options =>
{
    options.BasePath = builder.Environment.ContentRootPath;
    options.EnvironmentPrefix = "STELLAOPS_POLICY_ENGINE_";
    options.BindingSection = PolicyEngineOptions.SectionName;
    options.ConfigureBuilder = configurationBuilder =>
    {
        foreach (var relative in policyEngineConfigFiles)
        {
            var path = Path.Combine(builder.Environment.ContentRootPath, relative);
@@ -79,35 +80,35 @@ var bootstrap = StellaOpsConfigurationBootstrapper.Build<PolicyEngineOptions>(op
    };
    options.PostBind = static (value, _) => value.Validate();
});

builder.Configuration.AddConfiguration(bootstrap.Configuration);

builder.Services.AddAirGapEgressPolicy(builder.Configuration, sectionName: "AirGap");

builder.Services.AddOptions<PolicyEngineOptions>()
    .Bind(builder.Configuration.GetSection(PolicyEngineOptions.SectionName))
    .Validate(options =>
    {
        try
        {
            options.Validate();
            return true;
        }
        catch (Exception ex)
        {
            throw new OptionsValidationException(
                PolicyEngineOptions.SectionName,
                typeof(PolicyEngineOptions),
                new[] { ex.Message });
        }
    })
    .ValidateOnStart();

builder.Services.AddSingleton(sp => sp.GetRequiredService<IOptions<PolicyEngineOptions>>().Value);
builder.Services.AddSingleton(TimeProvider.System);
builder.Services.AddSingleton<PolicyEngineStartupDiagnostics>();
builder.Services.AddHostedService<PolicyEngineBootstrapWorker>();
-builder.Services.AddSingleton<PolicyCompiler>();
+builder.Services.AddSingleton<StellaOps.PolicyDsl.PolicyCompiler>();
builder.Services.AddSingleton<PolicyCompilationService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Services.PathScopeMetrics>();
builder.Services.AddSingleton<PolicyEvaluationService>();
@@ -140,36 +141,36 @@ builder.Services.AddHttpContextAccessor();
builder.Services.AddRouting(options => options.LowercaseUrls = true);
builder.Services.AddProblemDetails();
builder.Services.AddHealthChecks();

builder.Services.AddAuthentication();
builder.Services.AddAuthorization();
builder.Services.AddStellaOpsScopeHandler();
builder.Services.AddStellaOpsResourceServerAuthentication(
    builder.Configuration,
    configurationSection: $"{PolicyEngineOptions.SectionName}:ResourceServer");

if (bootstrap.Options.Authority.Enabled)
{
    builder.Services.AddStellaOpsAuthClient(clientOptions =>
    {
        clientOptions.Authority = bootstrap.Options.Authority.Issuer;
        clientOptions.ClientId = bootstrap.Options.Authority.ClientId;
        clientOptions.ClientSecret = bootstrap.Options.Authority.ClientSecret;
        clientOptions.HttpTimeout = TimeSpan.FromSeconds(bootstrap.Options.Authority.BackchannelTimeoutSeconds);

        clientOptions.DefaultScopes.Clear();
        foreach (var scope in bootstrap.Options.Authority.Scopes)
        {
            clientOptions.DefaultScopes.Add(scope);
        }
    });
}

var app = builder.Build();

app.UseAuthentication();
app.UseAuthorization();

app.MapHealthChecks("/healthz");
app.MapGet("/readyz", (PolicyEngineStartupDiagnostics diagnostics) =>
    diagnostics.IsReady
@@ -4,22 +4,34 @@ using Microsoft.Extensions.Options;
using StellaOps.Policy;
using StellaOps.Policy.Engine.Compilation;
using StellaOps.Policy.Engine.Options;
+using StellaOps.PolicyDsl;
+using DslCompiler = StellaOps.PolicyDsl.PolicyCompiler;
+using DslCompilationResult = StellaOps.PolicyDsl.PolicyCompilationResult;
+using IrDocument = StellaOps.PolicyDsl.PolicyIrDocument;
+using IrAction = StellaOps.PolicyDsl.PolicyIrAction;
+using IrAssignmentAction = StellaOps.PolicyDsl.PolicyIrAssignmentAction;
+using IrAnnotateAction = StellaOps.PolicyDsl.PolicyIrAnnotateAction;
+using IrIgnoreAction = StellaOps.PolicyDsl.PolicyIrIgnoreAction;
+using IrEscalateAction = StellaOps.PolicyDsl.PolicyIrEscalateAction;
+using IrRequireVexAction = StellaOps.PolicyDsl.PolicyIrRequireVexAction;
+using IrWarnAction = StellaOps.PolicyDsl.PolicyIrWarnAction;
+using IrDeferAction = StellaOps.PolicyDsl.PolicyIrDeferAction;

namespace StellaOps.Policy.Engine.Services;

/// <summary>
/// Provides deterministic compilation for <c>stella-dsl@1</c> policy documents and exposes
/// basic statistics consumed by API/CLI surfaces.
/// </summary>
internal sealed class PolicyCompilationService
{
-    private readonly PolicyCompiler compiler;
+    private readonly DslCompiler compiler;
    private readonly PolicyComplexityAnalyzer complexityAnalyzer;
    private readonly IOptionsMonitor<PolicyEngineOptions> optionsMonitor;
    private readonly TimeProvider timeProvider;

    public PolicyCompilationService(
-        PolicyCompiler compiler,
+        DslCompiler compiler,
        PolicyComplexityAnalyzer complexityAnalyzer,
        IOptionsMonitor<PolicyEngineOptions> optionsMonitor,
        TimeProvider timeProvider)
@@ -29,30 +41,30 @@ internal sealed class PolicyCompilationService
        this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor));
        this.timeProvider = timeProvider ?? TimeProvider.System;
    }

    public PolicyCompilationResultDto Compile(PolicyCompileRequest request)
    {
        if (request is null)
        {
            throw new ArgumentNullException(nameof(request));
        }

        if (request.Dsl is null || string.IsNullOrWhiteSpace(request.Dsl.Source))
        {
            throw new ArgumentException("Compilation requires DSL source.", nameof(request));
        }

        if (!string.Equals(request.Dsl.Syntax, "stella-dsl@1", StringComparison.Ordinal))
        {
            return PolicyCompilationResultDto.FromFailure(
                ImmutableArray.Create(PolicyIssue.Error(
-                    PolicyDslDiagnosticCodes.UnsupportedSyntaxVersion,
+                    DiagnosticCodes.UnsupportedSyntaxVersion,
                    $"Unsupported syntax '{request.Dsl.Syntax ?? "null"}'. Expected 'stella-dsl@1'.",
                    "dsl.syntax")),
                complexity: null,
                durationMilliseconds: 0);
        }

        var start = timeProvider.GetTimestamp();
        var result = compiler.Compile(request.Dsl.Source);
        var elapsed = timeProvider.GetElapsedTime(start, timeProvider.GetTimestamp());
@@ -95,11 +107,11 @@ internal sealed class PolicyCompilationService
            ? ImmutableArray.Create(diagnostic)
            : diagnostics.Add(diagnostic);
    }

internal sealed record PolicyCompileRequest(PolicyDslPayload Dsl);

-internal sealed record PolicyDslPayload(string Syntax, string Source);
+public sealed record PolicyDslPayload(string Syntax, string Source);

internal sealed record PolicyCompilationResultDto(
    bool Success,
    string? Digest,
@@ -116,7 +128,7 @@ internal sealed record PolicyCompilationResultDto(
        new(false, null, null, ImmutableArray<byte>.Empty, diagnostics, complexity, durationMilliseconds);

    public static PolicyCompilationResultDto FromSuccess(
-        PolicyCompilationResult compilationResult,
+        DslCompilationResult compilationResult,
        PolicyComplexityReport complexity,
        long durationMilliseconds)
    {
@@ -136,45 +148,45 @@ internal sealed record PolicyCompilationResultDto(
            durationMilliseconds);
    }
}

internal sealed record PolicyCompilationStatistics(
    int RuleCount,
    ImmutableDictionary<string, int> ActionCounts)
{
-    public static PolicyCompilationStatistics Create(PolicyIrDocument document)
+    public static PolicyCompilationStatistics Create(IrDocument document)
    {
        var actions = ImmutableDictionary.CreateBuilder<string, int>(StringComparer.OrdinalIgnoreCase);

        void Increment(string key)
        {
            actions[key] = actions.TryGetValue(key, out var existing) ? existing + 1 : 1;
        }

        foreach (var rule in document.Rules)
        {
            foreach (var action in rule.ThenActions)
            {
                Increment(GetActionKey(action));
            }

            foreach (var action in rule.ElseActions)
            {
                Increment($"else:{GetActionKey(action)}");
            }
        }

        return new PolicyCompilationStatistics(document.Rules.Length, actions.ToImmutable());
    }

-    private static string GetActionKey(PolicyIrAction action) => action switch
+    private static string GetActionKey(IrAction action) => action switch
    {
-        PolicyIrAssignmentAction => "assign",
-        PolicyIrAnnotateAction => "annotate",
-        PolicyIrIgnoreAction => "ignore",
-        PolicyIrEscalateAction => "escalate",
-        PolicyIrRequireVexAction => "requireVex",
-        PolicyIrWarnAction => "warn",
-        PolicyIrDeferAction => "defer",
+        IrAssignmentAction => "assign",
+        IrAnnotateAction => "annotate",
+        IrIgnoreAction => "ignore",
+        IrEscalateAction => "escalate",
+        IrRequireVexAction => "requireVex",
+        IrWarnAction => "warn",
+        IrDeferAction => "defer",
        _ => "unknown"
    };
}
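A minimal usage sketch for the compilation service above; compilationService is assumed to come from DI, source stands in for a real stella-dsl@1 document, and the diagnostics member name on the result DTO is an assumption:

    var request = new PolicyCompileRequest(new PolicyDslPayload("stella-dsl@1", source));
    var dto = compilationService.Compile(request);

    if (!dto.Success)
    {
        // Surface compiler diagnostics; exact PolicyIssue member names are not shown in this commit.
        foreach (var issue in dto.Diagnostics)
        {
            Console.WriteLine(issue);
        }
    }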
@@ -1,7 +1,7 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
-using StellaOps.Policy.Engine.Compilation;
+using StellaOps.PolicyDsl;
using StellaOps.Policy.Engine.Evaluation;

namespace StellaOps.Policy.Engine.Services;

@@ -23,19 +23,19 @@ internal sealed partial class PolicyEvaluationService
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

-    internal PolicyEvaluationResult Evaluate(PolicyIrDocument document, PolicyEvaluationContext context)
+    internal Evaluation.PolicyEvaluationResult Evaluate(PolicyIrDocument document, Evaluation.PolicyEvaluationContext context)
    {
        if (document is null)
        {
            throw new ArgumentNullException(nameof(document));
        }

        if (context is null)
        {
            throw new ArgumentNullException(nameof(context));
        }

-        var request = new PolicyEvaluationRequest(document, context);
+        var request = new Evaluation.PolicyEvaluationRequest(document, context);
        return evaluator.Evaluate(request);
    }

@@ -1,16 +1,17 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk.Web">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <AspNetCoreHostingModel>InProcess</AspNetCoreHostingModel>
  </PropertyGroup>

  <ItemGroup>
    <ProjectReference Include="../__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />
+   <ProjectReference Include="../StellaOps.PolicyDsl/StellaOps.PolicyDsl.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" />
    <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" />
@@ -1,19 +1,22 @@
|
|||||||
namespace StellaOps.Policy.Engine.Compilation;
|
namespace StellaOps.PolicyDsl;
|
||||||
|
|
||||||
internal static class PolicyDslDiagnosticCodes
|
/// <summary>
|
||||||
{
|
/// Diagnostic codes for policy DSL lexing and parsing errors.
|
||||||
public const string UnexpectedCharacter = "POLICY-DSL-LEX-001";
|
/// </summary>
|
||||||
public const string UnterminatedString = "POLICY-DSL-LEX-002";
|
public static class DiagnosticCodes
|
||||||
public const string InvalidEscapeSequence = "POLICY-DSL-LEX-003";
|
{
|
||||||
public const string InvalidNumber = "POLICY-DSL-LEX-004";
|
public const string UnexpectedCharacter = "POLICY-DSL-LEX-001";
|
||||||
public const string UnexpectedToken = "POLICY-DSL-PARSE-001";
|
public const string UnterminatedString = "POLICY-DSL-LEX-002";
|
||||||
public const string DuplicateSection = "POLICY-DSL-PARSE-002";
|
public const string InvalidEscapeSequence = "POLICY-DSL-LEX-003";
|
||||||
public const string MissingPolicyHeader = "POLICY-DSL-PARSE-003";
|
public const string InvalidNumber = "POLICY-DSL-LEX-004";
|
||||||
public const string UnsupportedSyntaxVersion = "POLICY-DSL-PARSE-004";
|
public const string UnexpectedToken = "POLICY-DSL-PARSE-001";
|
||||||
public const string DuplicateRuleName = "POLICY-DSL-PARSE-005";
|
public const string DuplicateSection = "POLICY-DSL-PARSE-002";
|
||||||
public const string MissingBecauseClause = "POLICY-DSL-PARSE-006";
|
public const string MissingPolicyHeader = "POLICY-DSL-PARSE-003";
|
||||||
public const string MissingTerminator = "POLICY-DSL-PARSE-007";
|
public const string UnsupportedSyntaxVersion = "POLICY-DSL-PARSE-004";
|
||||||
public const string InvalidAction = "POLICY-DSL-PARSE-008";
|
public const string DuplicateRuleName = "POLICY-DSL-PARSE-005";
|
||||||
public const string InvalidLiteral = "POLICY-DSL-PARSE-009";
|
public const string MissingBecauseClause = "POLICY-DSL-PARSE-006";
|
||||||
public const string UnexpectedSection = "POLICY-DSL-PARSE-010";
|
public const string MissingTerminator = "POLICY-DSL-PARSE-007";
|
||||||
}
|
public const string InvalidAction = "POLICY-DSL-PARSE-008";
|
||||||
|
public const string InvalidLiteral = "POLICY-DSL-PARSE-009";
|
||||||
|
public const string UnexpectedSection = "POLICY-DSL-PARSE-010";
|
||||||
|
}
|
||||||
src/Policy/StellaOps.PolicyDsl/DslToken.cs (new file, 70 lines)
@@ -0,0 +1,70 @@
namespace StellaOps.PolicyDsl;

/// <summary>
/// Represents the kind of token in the policy DSL.
/// </summary>
public enum TokenKind
{
    EndOfFile = 0,
    Identifier,
    StringLiteral,
    NumberLiteral,
    BooleanLiteral,
    LeftBrace,
    RightBrace,
    LeftParen,
    RightParen,
    LeftBracket,
    RightBracket,
    Comma,
    Semicolon,
    Colon,
    Arrow,   // =>
    Assign,  // =
    Define,  // :=
    Dot,
    KeywordPolicy,
    KeywordSyntax,
    KeywordMetadata,
    KeywordProfile,
    KeywordRule,
    KeywordMap,
    KeywordSource,
    KeywordEnv,
    KeywordIf,
    KeywordThen,
    KeywordWhen,
    KeywordAnd,
    KeywordOr,
    KeywordNot,
    KeywordPriority,
    KeywordElse,
    KeywordBecause,
    KeywordSettings,
    KeywordIgnore,
    KeywordUntil,
    KeywordEscalate,
    KeywordTo,
    KeywordRequireVex,
    KeywordWarn,
    KeywordMessage,
    KeywordDefer,
    KeywordAnnotate,
    KeywordIn,
    EqualEqual,
    NotEqual,
    LessThan,
    LessThanOrEqual,
    GreaterThan,
    GreaterThanOrEqual,
    Unknown,
}

/// <summary>
/// Represents a single token in the policy DSL.
/// </summary>
public readonly record struct DslToken(
    TokenKind Kind,
    string Text,
    SourceSpan Span,
    object? Value = null);
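For orientation, a minimal sketch of how these tokens might be built by hand, for example in lexer tests. It is illustrative only: `SourceSpan` is defined elsewhere in this change, so the sketch assumes it is a value type and leaves it at `default`, and carrying the numeric payload as `decimal` in `Value` is likewise an assumption.

using System;
using StellaOps.PolicyDsl;

// Hand-built token stream for the fragment `priority = 1` (illustrative only;
// SourceSpan left at default because its shape is not shown in this diff).
var tokens = new[]
{
    new DslToken(TokenKind.KeywordPriority, "priority", default),
    new DslToken(TokenKind.Assign, "=", default),
    new DslToken(TokenKind.NumberLiteral, "1", default, Value: 1m),
    new DslToken(TokenKind.EndOfFile, string.Empty, default),
};

foreach (var token in tokens)
{
    Console.WriteLine($"{token.Kind}: '{token.Text}'");
}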
File diff suppressed because it is too large
@@ -1,169 +1,174 @@
using System.Collections.Immutable;
-using System.Linq;
using System.Security.Cryptography;
using StellaOps.Policy;

-namespace StellaOps.Policy.Engine.Compilation;
+namespace StellaOps.PolicyDsl;

+/// <summary>
+/// Compiles policy DSL source code into an intermediate representation.
+/// </summary>
public sealed class PolicyCompiler
{
    public PolicyCompilationResult Compile(string source)
    {
        if (source is null)
        {
            throw new ArgumentNullException(nameof(source));
        }

        var parseResult = PolicyParser.Parse(source);
        if (parseResult.Document is null)
        {
            return new PolicyCompilationResult(
                Success: false,
                Document: null,
                Checksum: null,
                CanonicalRepresentation: ImmutableArray<byte>.Empty,
                Diagnostics: parseResult.Diagnostics);
        }

        if (parseResult.Diagnostics.Any(static issue => issue.Severity == PolicyIssueSeverity.Error))
        {
            return new PolicyCompilationResult(
                Success: false,
                Document: null,
                Checksum: null,
                CanonicalRepresentation: ImmutableArray<byte>.Empty,
                Diagnostics: parseResult.Diagnostics);
        }

        var irDocument = BuildIntermediateRepresentation(parseResult.Document);
        var canonical = PolicyIrSerializer.Serialize(irDocument);
        var checksum = Convert.ToHexString(SHA256.HashData(canonical.AsSpan())).ToLowerInvariant();

        return new PolicyCompilationResult(
            Success: true,
            Document: irDocument,
            Checksum: checksum,
            CanonicalRepresentation: canonical,
            Diagnostics: parseResult.Diagnostics);
    }

    private static PolicyIrDocument BuildIntermediateRepresentation(PolicyDocumentNode node)
    {
        var metadata = node.Metadata
            .OrderBy(static kvp => kvp.Key, StringComparer.Ordinal)
            .ToImmutableSortedDictionary(static kvp => kvp.Key, kvp => ToIrLiteral(kvp.Value), StringComparer.Ordinal);

        var settings = node.Settings
            .OrderBy(static kvp => kvp.Key, StringComparer.Ordinal)
            .ToImmutableSortedDictionary(static kvp => kvp.Key, kvp => ToIrLiteral(kvp.Value), StringComparer.Ordinal);

        var profiles = ImmutableArray.CreateBuilder<PolicyIrProfile>(node.Profiles.Length);
        foreach (var profile in node.Profiles)
        {
            var maps = ImmutableArray.CreateBuilder<PolicyIrProfileMap>();
            var envs = ImmutableArray.CreateBuilder<PolicyIrProfileEnv>();
            var scalars = ImmutableArray.CreateBuilder<PolicyIrProfileScalar>();

            foreach (var item in profile.Items)
            {
                switch (item)
                {
                    case PolicyProfileMapNode map:
                        maps.Add(new PolicyIrProfileMap(
                            map.Name,
                            map.Entries
                                .Select(entry => new PolicyIrProfileMapEntry(entry.Source, entry.Weight))
                                .ToImmutableArray()));
                        break;
                    case PolicyProfileEnvNode env:
                        envs.Add(new PolicyIrProfileEnv(
                            env.Name,
                            env.Entries
                                .Select(entry => new PolicyIrProfileEnvEntry(entry.Condition, entry.Weight))
                                .ToImmutableArray()));
                        break;
                    case PolicyProfileScalarNode scalar:
                        scalars.Add(new PolicyIrProfileScalar(scalar.Name, ToIrLiteral(scalar.Value)));
                        break;
                }
            }

            profiles.Add(new PolicyIrProfile(
                profile.Name,
                maps.ToImmutable(),
                envs.ToImmutable(),
                scalars.ToImmutable()));
        }

        var rules = ImmutableArray.CreateBuilder<PolicyIrRule>(node.Rules.Length);
        foreach (var rule in node.Rules)
        {
            var thenActions = ImmutableArray.CreateBuilder<PolicyIrAction>(rule.ThenActions.Length);
            foreach (var action in rule.ThenActions)
            {
                var converted = ToIrAction(action);
                if (converted is not null)
                {
                    thenActions.Add(converted);
                }
            }

            var elseActions = ImmutableArray.CreateBuilder<PolicyIrAction>(rule.ElseActions.Length);
            foreach (var action in rule.ElseActions)
            {
                var converted = ToIrAction(action);
                if (converted is not null)
                {
                    elseActions.Add(converted);
                }
            }

            rules.Add(new PolicyIrRule(
                rule.Name,
                rule.Priority,
                rule.When,
                thenActions.ToImmutable(),
                elseActions.ToImmutable(),
                rule.Because ?? string.Empty));
        }

        return new PolicyIrDocument(
            node.Name,
            node.Syntax,
            metadata,
            profiles.ToImmutable(),
            settings,
            rules.ToImmutable());
    }

    private static PolicyIrLiteral ToIrLiteral(PolicyLiteralValue value) => value switch
    {
        PolicyStringLiteral s => new PolicyIrStringLiteral(s.Value),
        PolicyNumberLiteral n => new PolicyIrNumberLiteral(n.Value),
        PolicyBooleanLiteral b => new PolicyIrBooleanLiteral(b.Value),
        PolicyListLiteral list => new PolicyIrListLiteral(list.Items.Select(ToIrLiteral).ToImmutableArray()),
        _ => new PolicyIrStringLiteral(string.Empty),
    };

    private static PolicyIrAction? ToIrAction(PolicyActionNode action) => action switch
    {
        PolicyAssignmentActionNode assign => new PolicyIrAssignmentAction(assign.Target.Segments, assign.Value),
        PolicyAnnotateActionNode annotate => new PolicyIrAnnotateAction(annotate.Target.Segments, annotate.Value),
        PolicyIgnoreActionNode ignore => new PolicyIrIgnoreAction(ignore.Until, ignore.Because),
        PolicyEscalateActionNode escalate => new PolicyIrEscalateAction(escalate.To, escalate.When),
        PolicyRequireVexActionNode require => new PolicyIrRequireVexAction(
            require.Conditions
                .OrderBy(static kvp => kvp.Key, StringComparer.Ordinal)
                .ToImmutableSortedDictionary(static kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal)),
        PolicyWarnActionNode warn => new PolicyIrWarnAction(warn.Message),
        PolicyDeferActionNode defer => new PolicyIrDeferAction(defer.Until),
        _ => null,
    };
}

+/// <summary>
+/// Result of compiling a policy DSL source.
+/// </summary>
public sealed record PolicyCompilationResult(
    bool Success,
    PolicyIrDocument? Document,
    string? Checksum,
    ImmutableArray<byte> CanonicalRepresentation,
    ImmutableArray<PolicyIssue> Diagnostics);
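A usage sketch of the compiler above: it takes a DSL source string (not reproduced here, since the accepted grammar is defined by PolicyParser and the lexer in this same change) and reports either diagnostics or the content-addressed checksum.

using System;
using StellaOps.PolicyDsl;

public static class PolicyCompilerUsageSketch
{
    // Compiles DSL source and prints either the diagnostics or the checksum.
    public static void CompileAndReport(string source)
    {
        var compiler = new PolicyCompiler();
        var result = compiler.Compile(source);

        if (!result.Success || result.Document is null)
        {
            foreach (var issue in result.Diagnostics)
            {
                Console.WriteLine(issue); // each PolicyIssue has a severity; errors block compilation
            }
            return;
        }

        // Checksum is the lowercase SHA-256 of the canonical IR bytes, so identical
        // source always yields an identical checksum.
        Console.WriteLine($"{result.Document.Name} ({result.Document.Syntax}): {result.Checksum}");
    }
}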
src/Policy/StellaOps.PolicyDsl/PolicyEngineFactory.cs (new file, 213 lines)
@@ -0,0 +1,213 @@
|
|||||||
|
namespace StellaOps.PolicyDsl;
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Factory for creating policy evaluation engines from compiled policy documents.
|
||||||
|
/// </summary>
|
||||||
|
public sealed class PolicyEngineFactory
|
||||||
|
{
|
||||||
|
private readonly PolicyCompiler _compiler = new();
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Creates a policy engine from source code.
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="source">The policy DSL source code.</param>
|
||||||
|
/// <returns>A policy engine if compilation succeeds, otherwise null with diagnostics.</returns>
|
||||||
|
public PolicyEngineResult CreateFromSource(string source)
|
||||||
|
{
|
||||||
|
var compilation = _compiler.Compile(source);
|
||||||
|
if (!compilation.Success || compilation.Document is null)
|
||||||
|
{
|
||||||
|
return new PolicyEngineResult(null, compilation.Diagnostics);
|
||||||
|
}
|
||||||
|
|
||||||
|
var engine = new PolicyEngine(compilation.Document, compilation.Checksum!);
|
||||||
|
return new PolicyEngineResult(engine, compilation.Diagnostics);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Creates a policy engine from a pre-compiled IR document.
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="document">The compiled policy IR document.</param>
|
||||||
|
/// <param name="checksum">The policy checksum.</param>
|
||||||
|
/// <returns>A policy engine.</returns>
|
||||||
|
public PolicyEngine CreateFromDocument(PolicyIrDocument document, string checksum)
|
||||||
|
{
|
||||||
|
return new PolicyEngine(document, checksum);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Result of creating a policy engine.
|
||||||
|
/// </summary>
|
||||||
|
public sealed record PolicyEngineResult(
|
||||||
|
PolicyEngine? Engine,
|
||||||
|
System.Collections.Immutable.ImmutableArray<StellaOps.Policy.PolicyIssue> Diagnostics);
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// A lightweight policy evaluation engine.
|
||||||
|
/// </summary>
|
||||||
|
public sealed class PolicyEngine
|
||||||
|
{
|
||||||
|
internal PolicyEngine(PolicyIrDocument document, string checksum)
|
||||||
|
{
|
||||||
|
Document = document;
|
||||||
|
Checksum = checksum;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Gets the compiled policy document.
|
||||||
|
/// </summary>
|
||||||
|
public PolicyIrDocument Document { get; }
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Gets the policy checksum (SHA-256 of canonical representation).
|
||||||
|
/// </summary>
|
||||||
|
public string Checksum { get; }
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Gets the policy name.
|
||||||
|
/// </summary>
|
||||||
|
public string Name => Document.Name;
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Gets the policy syntax version.
|
||||||
|
/// </summary>
|
||||||
|
public string Syntax => Document.Syntax;
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Gets the number of rules in the policy.
|
||||||
|
/// </summary>
|
||||||
|
public int RuleCount => Document.Rules.Length;
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Evaluates the policy against the given signal context.
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="context">The signal context to evaluate against.</param>
|
||||||
|
/// <returns>The evaluation result.</returns>
|
||||||
|
public PolicyEvaluationResult Evaluate(SignalContext context)
|
||||||
|
{
|
||||||
|
if (context is null)
|
||||||
|
{
|
||||||
|
throw new ArgumentNullException(nameof(context));
|
||||||
|
}
|
||||||
|
|
||||||
|
var matchedRules = new List<string>();
|
||||||
|
var actions = new List<EvaluatedAction>();
|
||||||
|
|
||||||
|
foreach (var rule in Document.Rules.OrderByDescending(r => r.Priority))
|
||||||
|
{
|
||||||
|
var matched = EvaluateExpression(rule.When, context);
|
||||||
|
if (matched)
|
||||||
|
{
|
||||||
|
matchedRules.Add(rule.Name);
|
||||||
|
foreach (var action in rule.ThenActions)
|
||||||
|
{
|
||||||
|
actions.Add(new EvaluatedAction(rule.Name, action, WasElseBranch: false));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
foreach (var action in rule.ElseActions)
|
||||||
|
{
|
||||||
|
actions.Add(new EvaluatedAction(rule.Name, action, WasElseBranch: true));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return new PolicyEvaluationResult(
|
||||||
|
PolicyName: Name,
|
||||||
|
PolicyChecksum: Checksum,
|
||||||
|
MatchedRules: matchedRules.ToArray(),
|
||||||
|
Actions: actions.ToArray());
|
||||||
|
}
|
||||||
|
|
||||||
|
private static bool EvaluateExpression(PolicyExpression expression, SignalContext context)
|
||||||
|
{
|
||||||
|
return expression switch
|
||||||
|
{
|
||||||
|
PolicyBinaryExpression binary => EvaluateBinary(binary, context),
|
||||||
|
PolicyUnaryExpression unary => EvaluateUnary(unary, context),
|
||||||
|
PolicyLiteralExpression literal => literal.Value is bool b && b,
|
||||||
|
PolicyIdentifierExpression identifier => context.HasSignal(identifier.Name),
|
||||||
|
PolicyMemberAccessExpression member => EvaluateMemberAccess(member, context),
|
||||||
|
_ => false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private static bool EvaluateBinary(PolicyBinaryExpression binary, SignalContext context)
|
||||||
|
{
|
||||||
|
return binary.Operator switch
|
||||||
|
{
|
||||||
|
PolicyBinaryOperator.And => EvaluateExpression(binary.Left, context) && EvaluateExpression(binary.Right, context),
|
||||||
|
PolicyBinaryOperator.Or => EvaluateExpression(binary.Left, context) || EvaluateExpression(binary.Right, context),
|
||||||
|
PolicyBinaryOperator.Equal => EvaluateEquality(binary.Left, binary.Right, context, negate: false),
|
||||||
|
PolicyBinaryOperator.NotEqual => EvaluateEquality(binary.Left, binary.Right, context, negate: true),
|
||||||
|
_ => false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private static bool EvaluateUnary(PolicyUnaryExpression unary, SignalContext context)
|
||||||
|
{
|
||||||
|
return unary.Operator switch
|
||||||
|
{
|
||||||
|
PolicyUnaryOperator.Not => !EvaluateExpression(unary.Operand, context),
|
||||||
|
_ => false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private static bool EvaluateMemberAccess(PolicyMemberAccessExpression member, SignalContext context)
|
||||||
|
{
|
||||||
|
var value = ResolveValue(member.Target, context);
|
||||||
|
if (value is IDictionary<string, object?> dict)
|
||||||
|
{
|
||||||
|
return dict.TryGetValue(member.Member, out var v) && v is bool b && b;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static bool EvaluateEquality(PolicyExpression left, PolicyExpression right, SignalContext context, bool negate)
|
||||||
|
{
|
||||||
|
var leftValue = ResolveValue(left, context);
|
||||||
|
var rightValue = ResolveValue(right, context);
|
||||||
|
var equal = Equals(leftValue, rightValue);
|
||||||
|
return negate ? !equal : equal;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static object? ResolveValue(PolicyExpression expression, SignalContext context)
|
||||||
|
{
|
||||||
|
return expression switch
|
||||||
|
{
|
||||||
|
PolicyLiteralExpression literal => literal.Value,
|
||||||
|
PolicyIdentifierExpression identifier => context.GetSignal(identifier.Name),
|
||||||
|
PolicyMemberAccessExpression member => ResolveMemberValue(member, context),
|
||||||
|
_ => null,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private static object? ResolveMemberValue(PolicyMemberAccessExpression member, SignalContext context)
|
||||||
|
{
|
||||||
|
var target = ResolveValue(member.Target, context);
|
||||||
|
if (target is IDictionary<string, object?> dict)
|
||||||
|
{
|
||||||
|
return dict.TryGetValue(member.Member, out var v) ? v : null;
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Result of evaluating a policy.
|
||||||
|
/// </summary>
|
||||||
|
public sealed record PolicyEvaluationResult(
|
||||||
|
string PolicyName,
|
||||||
|
string PolicyChecksum,
|
||||||
|
string[] MatchedRules,
|
||||||
|
EvaluatedAction[] Actions);
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// An action that was evaluated as part of policy execution.
|
||||||
|
/// </summary>
|
||||||
|
public sealed record EvaluatedAction(
|
||||||
|
string RuleName,
|
||||||
|
PolicyIrAction Action,
|
||||||
|
bool WasElseBranch);
|
||||||
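A sketch of the factory-plus-engine flow introduced above. How a SignalContext is built is not part of this file, so the sketch assumes an already-constructed instance supplied by the host that gathers signals.

using System;
using StellaOps.PolicyDsl;

public static class PolicyEngineUsageSketch
{
    // Compiles `source` and evaluates it against an already-built SignalContext.
    public static PolicyEvaluationResult Run(string source, SignalContext context)
    {
        var factory = new PolicyEngineFactory();
        var created = factory.CreateFromSource(source);

        if (created.Engine is null)
        {
            // Compilation failed; the diagnostics describe the lexer/parser issues.
            throw new InvalidOperationException(
                $"Policy did not compile ({created.Diagnostics.Length} diagnostic(s)).");
        }

        // Rules run in descending priority; actions taken from a rule's else branch
        // are flagged with WasElseBranch = true.
        var evaluation = created.Engine.Evaluate(context);
        Console.WriteLine(
            $"{evaluation.PolicyName} ({evaluation.PolicyChecksum}): " +
            $"{evaluation.MatchedRules.Length} rule(s) matched, {evaluation.Actions.Length} action(s).");
        return evaluation;
    }
}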
@@ -1,61 +1,64 @@
using System.Collections.Immutable;

-namespace StellaOps.Policy.Engine.Compilation;
+namespace StellaOps.PolicyDsl;

+/// <summary>
+/// Intermediate representation of a compiled policy document.
+/// </summary>
public sealed record PolicyIrDocument(
    string Name,
    string Syntax,
    ImmutableSortedDictionary<string, PolicyIrLiteral> Metadata,
    ImmutableArray<PolicyIrProfile> Profiles,
    ImmutableSortedDictionary<string, PolicyIrLiteral> Settings,
    ImmutableArray<PolicyIrRule> Rules);

public abstract record PolicyIrLiteral;

public sealed record PolicyIrStringLiteral(string Value) : PolicyIrLiteral;

public sealed record PolicyIrNumberLiteral(decimal Value) : PolicyIrLiteral;

public sealed record PolicyIrBooleanLiteral(bool Value) : PolicyIrLiteral;

public sealed record PolicyIrListLiteral(ImmutableArray<PolicyIrLiteral> Items) : PolicyIrLiteral;

public sealed record PolicyIrProfile(
    string Name,
    ImmutableArray<PolicyIrProfileMap> Maps,
    ImmutableArray<PolicyIrProfileEnv> Environments,
    ImmutableArray<PolicyIrProfileScalar> Scalars);

public sealed record PolicyIrProfileMap(string Name, ImmutableArray<PolicyIrProfileMapEntry> Entries);

public sealed record PolicyIrProfileMapEntry(string Source, decimal Weight);

public sealed record PolicyIrProfileEnv(string Name, ImmutableArray<PolicyIrProfileEnvEntry> Entries);

public sealed record PolicyIrProfileEnvEntry(PolicyExpression Condition, decimal Weight);

public sealed record PolicyIrProfileScalar(string Name, PolicyIrLiteral Value);

public sealed record PolicyIrRule(
    string Name,
    int Priority,
    PolicyExpression When,
    ImmutableArray<PolicyIrAction> ThenActions,
    ImmutableArray<PolicyIrAction> ElseActions,
    string Because);

public abstract record PolicyIrAction;

public sealed record PolicyIrAssignmentAction(ImmutableArray<string> Target, PolicyExpression Value) : PolicyIrAction;

public sealed record PolicyIrAnnotateAction(ImmutableArray<string> Target, PolicyExpression Value) : PolicyIrAction;

public sealed record PolicyIrIgnoreAction(PolicyExpression? Until, string? Because) : PolicyIrAction;

public sealed record PolicyIrEscalateAction(PolicyExpression? To, PolicyExpression? When) : PolicyIrAction;

public sealed record PolicyIrRequireVexAction(ImmutableSortedDictionary<string, PolicyExpression> Conditions) : PolicyIrAction;

public sealed record PolicyIrWarnAction(PolicyExpression? Message) : PolicyIrAction;

public sealed record PolicyIrDeferAction(PolicyExpression? Until) : PolicyIrAction;
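Because the IR layer is plain records, fixtures can be assembled directly in tests. A small sketch using only the shapes visible above; the names and weights are made up.

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using StellaOps.PolicyDsl;

// Hand-assembled IR fragments (illustrative values only).
var metadata = ImmutableSortedDictionary.CreateRange(StringComparer.Ordinal, new[]
{
    KeyValuePair.Create("owner", (PolicyIrLiteral)new PolicyIrStringLiteral("policy-guild")),
    KeyValuePair.Create("revision", (PolicyIrLiteral)new PolicyIrNumberLiteral(3m)),
});

var profile = new PolicyIrProfile(
    "default",
    Maps: ImmutableArray.Create(new PolicyIrProfileMap(
        "source-weights",
        ImmutableArray.Create(
            new PolicyIrProfileMapEntry("nvd", 0.6m),
            new PolicyIrProfileMapEntry("vendor", 1.0m)))),
    Environments: ImmutableArray<PolicyIrProfileEnv>.Empty,
    Scalars: ImmutableArray<PolicyIrProfileScalar>.Empty);

Console.WriteLine($"{profile.Name}: {profile.Maps[0].Entries.Length} weighted sources, {metadata.Count} metadata entries");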
@@ -1,415 +1,418 @@
using System.Buffers;
using System.Collections.Immutable;
using System.Text.Json;

-namespace StellaOps.Policy.Engine.Compilation;
+namespace StellaOps.PolicyDsl;

-internal static class PolicyIrSerializer
+/// <summary>
+/// Serializes policy IR documents to a canonical JSON representation for hashing.
+/// </summary>
+public static class PolicyIrSerializer
{
    public static ImmutableArray<byte> Serialize(PolicyIrDocument document)
    {
        var buffer = new ArrayBufferWriter<byte>();
        using var writer = new Utf8JsonWriter(buffer, new JsonWriterOptions
        {
            Indented = false,
            SkipValidation = false
        });

        WriteDocument(writer, document);
        writer.Flush();

        return buffer.WrittenSpan.ToArray().ToImmutableArray();
    }

    private static void WriteDocument(Utf8JsonWriter writer, PolicyIrDocument document)
    {
        writer.WriteStartObject();
        writer.WriteString("name", document.Name);
        writer.WriteString("syntax", document.Syntax);

        writer.WritePropertyName("metadata");
        WriteLiteralDictionary(writer, document.Metadata);

        writer.WritePropertyName("profiles");
        writer.WriteStartArray();
        foreach (var profile in document.Profiles)
        {
            WriteProfile(writer, profile);
        }

        writer.WriteEndArray();

        writer.WritePropertyName("settings");
        WriteLiteralDictionary(writer, document.Settings);

        writer.WritePropertyName("rules");
        writer.WriteStartArray();
        foreach (var rule in document.Rules)
        {
            WriteRule(writer, rule);
        }

        writer.WriteEndArray();
        writer.WriteEndObject();
    }

    private static void WriteProfile(Utf8JsonWriter writer, PolicyIrProfile profile)
    {
        writer.WriteStartObject();
        writer.WriteString("name", profile.Name);

        writer.WritePropertyName("maps");
        writer.WriteStartArray();
        foreach (var map in profile.Maps)
        {
            writer.WriteStartObject();
            writer.WriteString("name", map.Name);
            writer.WritePropertyName("entries");
            writer.WriteStartArray();
            foreach (var entry in map.Entries)
            {
                writer.WriteStartObject();
                writer.WriteString("source", entry.Source);
                writer.WriteNumber("weight", entry.Weight);
                writer.WriteEndObject();
            }

            writer.WriteEndArray();
            writer.WriteEndObject();
        }

        writer.WriteEndArray();

        writer.WritePropertyName("env");
        writer.WriteStartArray();
        foreach (var env in profile.Environments)
        {
            writer.WriteStartObject();
            writer.WriteString("name", env.Name);
            writer.WritePropertyName("entries");
            writer.WriteStartArray();
            foreach (var entry in env.Entries)
            {
                writer.WriteStartObject();
                writer.WritePropertyName("condition");
                WriteExpression(writer, entry.Condition);
                writer.WriteNumber("weight", entry.Weight);
                writer.WriteEndObject();
            }

            writer.WriteEndArray();
            writer.WriteEndObject();
        }

        writer.WriteEndArray();

        writer.WritePropertyName("scalars");
        writer.WriteStartArray();
        foreach (var scalar in profile.Scalars)
        {
            writer.WriteStartObject();
            writer.WriteString("name", scalar.Name);
            writer.WritePropertyName("value");
            WriteLiteral(writer, scalar.Value);
            writer.WriteEndObject();
        }

        writer.WriteEndArray();
        writer.WriteEndObject();
    }

    private static void WriteRule(Utf8JsonWriter writer, PolicyIrRule rule)
    {
        writer.WriteStartObject();
        writer.WriteString("name", rule.Name);
        writer.WriteNumber("priority", rule.Priority);
        writer.WritePropertyName("when");
        WriteExpression(writer, rule.When);

        writer.WritePropertyName("then");
        WriteActions(writer, rule.ThenActions);

        writer.WritePropertyName("else");
        WriteActions(writer, rule.ElseActions);

        writer.WriteString("because", rule.Because);
        writer.WriteEndObject();
    }

    private static void WriteActions(Utf8JsonWriter writer, ImmutableArray<PolicyIrAction> actions)
    {
        writer.WriteStartArray();
        foreach (var action in actions)
        {
            WriteAction(writer, action);
        }

        writer.WriteEndArray();
    }

    private static void WriteAction(Utf8JsonWriter writer, PolicyIrAction action)
    {
        switch (action)
        {
            case PolicyIrAssignmentAction assign:
                writer.WriteStartObject();
                writer.WriteString("type", "assign");
                WriteReference(writer, assign.Target);
                writer.WritePropertyName("value");
                WriteExpression(writer, assign.Value);
                writer.WriteEndObject();
                break;
            case PolicyIrAnnotateAction annotate:
                writer.WriteStartObject();
                writer.WriteString("type", "annotate");
                WriteReference(writer, annotate.Target);
                writer.WritePropertyName("value");
                WriteExpression(writer, annotate.Value);
                writer.WriteEndObject();
                break;
            case PolicyIrIgnoreAction ignore:
                writer.WriteStartObject();
                writer.WriteString("type", "ignore");
                writer.WritePropertyName("until");
                WriteOptionalExpression(writer, ignore.Until);
                writer.WriteString("because", ignore.Because ?? string.Empty);
                writer.WriteEndObject();
                break;
            case PolicyIrEscalateAction escalate:
                writer.WriteStartObject();
                writer.WriteString("type", "escalate");
                writer.WritePropertyName("to");
                WriteOptionalExpression(writer, escalate.To);
                writer.WritePropertyName("when");
                WriteOptionalExpression(writer, escalate.When);
                writer.WriteEndObject();
                break;
            case PolicyIrRequireVexAction require:
                writer.WriteStartObject();
                writer.WriteString("type", "requireVex");
                writer.WritePropertyName("conditions");
                writer.WriteStartObject();
                foreach (var kvp in require.Conditions)
                {
                    writer.WritePropertyName(kvp.Key);
                    WriteExpression(writer, kvp.Value);
                }

                writer.WriteEndObject();
                writer.WriteEndObject();
                break;
            case PolicyIrWarnAction warn:
                writer.WriteStartObject();
                writer.WriteString("type", "warn");
                writer.WritePropertyName("message");
                WriteOptionalExpression(writer, warn.Message);
                writer.WriteEndObject();
                break;
            case PolicyIrDeferAction defer:
                writer.WriteStartObject();
                writer.WriteString("type", "defer");
                writer.WritePropertyName("until");
                WriteOptionalExpression(writer, defer.Until);
                writer.WriteEndObject();
                break;
        }
    }

    private static void WriteReference(Utf8JsonWriter writer, ImmutableArray<string> segments)
    {
        writer.WritePropertyName("target");
        writer.WriteStartArray();
        foreach (var segment in segments)
        {
            writer.WriteStringValue(segment);
        }

        writer.WriteEndArray();
    }

    private static void WriteOptionalExpression(Utf8JsonWriter writer, PolicyExpression? expression)
    {
        if (expression is null)
        {
            writer.WriteNullValue();
            return;
        }

        WriteExpression(writer, expression);
    }

    private static void WriteExpression(Utf8JsonWriter writer, PolicyExpression expression)
    {
        switch (expression)
        {
            case PolicyLiteralExpression literal:
                writer.WriteStartObject();
                writer.WriteString("type", "literal");
                writer.WritePropertyName("value");
                WriteLiteralValue(writer, literal.Value);
                writer.WriteEndObject();
                break;
            case PolicyListExpression list:
                writer.WriteStartObject();
                writer.WriteString("type", "list");
                writer.WritePropertyName("items");
                writer.WriteStartArray();
                foreach (var item in list.Items)
                {
                    WriteExpression(writer, item);
                }

                writer.WriteEndArray();
                writer.WriteEndObject();
                break;
            case PolicyIdentifierExpression identifier:
                writer.WriteStartObject();
                writer.WriteString("type", "identifier");
                writer.WriteString("name", identifier.Name);
                writer.WriteEndObject();
                break;
            case PolicyMemberAccessExpression member:
                writer.WriteStartObject();
                writer.WriteString("type", "member");
                writer.WritePropertyName("target");
                WriteExpression(writer, member.Target);
                writer.WriteString("member", member.Member);
                writer.WriteEndObject();
                break;
            case PolicyInvocationExpression invocation:
                writer.WriteStartObject();
                writer.WriteString("type", "call");
                writer.WritePropertyName("target");
                WriteExpression(writer, invocation.Target);
                writer.WritePropertyName("args");
                writer.WriteStartArray();
                foreach (var arg in invocation.Arguments)
                {
                    WriteExpression(writer, arg);
                }

                writer.WriteEndArray();
                writer.WriteEndObject();
                break;
            case PolicyIndexerExpression indexer:
                writer.WriteStartObject();
                writer.WriteString("type", "indexer");
                writer.WritePropertyName("target");
                WriteExpression(writer, indexer.Target);
                writer.WritePropertyName("index");
                WriteExpression(writer, indexer.Index);
                writer.WriteEndObject();
                break;
            case PolicyUnaryExpression unary:
                writer.WriteStartObject();
                writer.WriteString("type", "unary");
                writer.WriteString("op", unary.Operator switch
                {
                    PolicyUnaryOperator.Not => "not",
                    _ => unary.Operator.ToString().ToLowerInvariant(),
                });
                writer.WritePropertyName("operand");
                WriteExpression(writer, unary.Operand);
                writer.WriteEndObject();
                break;
            case PolicyBinaryExpression binary:
                writer.WriteStartObject();
                writer.WriteString("type", "binary");
                writer.WriteString("op", GetBinaryOperator(binary.Operator));
                writer.WritePropertyName("left");
                WriteExpression(writer, binary.Left);
                writer.WritePropertyName("right");
                WriteExpression(writer, binary.Right);
                writer.WriteEndObject();
                break;
            default:
                writer.WriteStartObject();
                writer.WriteString("type", "unknown");
                writer.WriteEndObject();
                break;
        }
    }

    private static string GetBinaryOperator(PolicyBinaryOperator op) => op switch
    {
        PolicyBinaryOperator.And => "and",
        PolicyBinaryOperator.Or => "or",
        PolicyBinaryOperator.Equal => "eq",
        PolicyBinaryOperator.NotEqual => "neq",
        PolicyBinaryOperator.LessThan => "lt",
        PolicyBinaryOperator.LessThanOrEqual => "lte",
        PolicyBinaryOperator.GreaterThan => "gt",
        PolicyBinaryOperator.GreaterThanOrEqual => "gte",
        PolicyBinaryOperator.In => "in",
        PolicyBinaryOperator.NotIn => "not_in",
        _ => op.ToString().ToLowerInvariant(),
    };

    private static void WriteLiteralDictionary(Utf8JsonWriter writer, ImmutableSortedDictionary<string, PolicyIrLiteral> dictionary)
    {
        writer.WriteStartObject();
        foreach (var kvp in dictionary)
        {
            writer.WritePropertyName(kvp.Key);
            WriteLiteral(writer, kvp.Value);
        }

        writer.WriteEndObject();
    }

    private static void WriteLiteral(Utf8JsonWriter writer, PolicyIrLiteral literal)
    {
        switch (literal)
        {
            case PolicyIrStringLiteral s:
                writer.WriteStringValue(s.Value);
                break;
            case PolicyIrNumberLiteral n:
                writer.WriteNumberValue(n.Value);
                break;
            case PolicyIrBooleanLiteral b:
                writer.WriteBooleanValue(b.Value);
                break;
            case PolicyIrListLiteral list:
                writer.WriteStartArray();
                foreach (var item in list.Items)
                {
                    WriteLiteral(writer, item);
                }

                writer.WriteEndArray();
                break;
            default:
                writer.WriteNullValue();
                break;
        }
    }

    private static void WriteLiteralValue(Utf8JsonWriter writer, object? value)
    {
        switch (value)
        {
            case null:
                writer.WriteNullValue();
                break;
            case string s:
                writer.WriteStringValue(s);
                break;
            case bool b:
                writer.WriteBooleanValue(b);
                break;
            case decimal dec:
                writer.WriteNumberValue(dec);
                break;
            case double dbl:
                writer.WriteNumberValue(dbl);
                break;
            case int i:
                writer.WriteNumberValue(i);
                break;
            default:
                writer.WriteStringValue(value.ToString());
                break;
        }
    }
}
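The serializer's only purpose is to give hashing a stable byte stream. A sketch of a determinism check built on the public compiler surface; the source string is assumed to be valid DSL, since the grammar itself is not shown here.

using System;
using System.Security.Cryptography;
using StellaOps.PolicyDsl;

public static class CanonicalChecksumSketch
{
    // Serializing the same compiled document should always yield the same canonical
    // bytes, hence the same checksum, regardless of how often it is recompiled.
    public static bool IsStable(string source)
    {
        var compiler = new PolicyCompiler();
        var first = compiler.Compile(source);
        var second = compiler.Compile(source);

        if (!first.Success || !second.Success)
        {
            return false;
        }

        // PolicyCompiler already lower-hexes SHA-256 over the canonical bytes;
        // recomputing it here shows the relationship explicitly.
        var recomputed = Convert.ToHexString(
            SHA256.HashData(first.CanonicalRepresentation.AsSpan())).ToLowerInvariant();

        return first.Checksum == second.Checksum && first.Checksum == recomputed;
    }
}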
File diff suppressed because it is too large
@@ -1,141 +1,141 @@
|
using System.Collections.Immutable;

-namespace StellaOps.Policy.Engine.Compilation;
+namespace StellaOps.PolicyDsl;

public abstract record SyntaxNode(SourceSpan Span);

public sealed record PolicyDocumentNode(
    string Name,
    string Syntax,
    ImmutableDictionary<string, PolicyLiteralValue> Metadata,
    ImmutableArray<PolicyProfileNode> Profiles,
    ImmutableDictionary<string, PolicyLiteralValue> Settings,
    ImmutableArray<PolicyRuleNode> Rules,
    SourceSpan Span) : SyntaxNode(Span);

public sealed record PolicyProfileNode(
    string Name,
    ImmutableArray<PolicyProfileItemNode> Items,
    SourceSpan Span) : SyntaxNode(Span);

public abstract record PolicyProfileItemNode(SourceSpan Span);

public sealed record PolicyProfileMapNode(
    string Name,
    ImmutableArray<PolicyProfileMapEntryNode> Entries,
    SourceSpan Span) : PolicyProfileItemNode(Span);

public sealed record PolicyProfileMapEntryNode(
    string Source,
    decimal Weight,
    SourceSpan Span) : SyntaxNode(Span);

public sealed record PolicyProfileEnvNode(
    string Name,
    ImmutableArray<PolicyProfileEnvEntryNode> Entries,
    SourceSpan Span) : PolicyProfileItemNode(Span);

public sealed record PolicyProfileEnvEntryNode(
    PolicyExpression Condition,
    decimal Weight,
    SourceSpan Span) : SyntaxNode(Span);

public sealed record PolicyProfileScalarNode(
    string Name,
    PolicyLiteralValue Value,
    SourceSpan Span) : PolicyProfileItemNode(Span);

public sealed record PolicyRuleNode(
    string Name,
    int Priority,
    PolicyExpression When,
    ImmutableArray<PolicyActionNode> ThenActions,
    ImmutableArray<PolicyActionNode> ElseActions,
    string? Because,
    SourceSpan Span) : SyntaxNode(Span);

public abstract record PolicyActionNode(SourceSpan Span);

public sealed record PolicyAssignmentActionNode(
    PolicyReference Target,
    PolicyExpression Value,
    SourceSpan Span) : PolicyActionNode(Span);

public sealed record PolicyAnnotateActionNode(
    PolicyReference Target,
    PolicyExpression Value,
    SourceSpan Span) : PolicyActionNode(Span);

public sealed record PolicyIgnoreActionNode(
    PolicyExpression? Until,
    string? Because,
    SourceSpan Span) : PolicyActionNode(Span);

public sealed record PolicyEscalateActionNode(
    PolicyExpression? To,
    PolicyExpression? When,
    SourceSpan Span) : PolicyActionNode(Span);

public sealed record PolicyRequireVexActionNode(
    ImmutableDictionary<string, PolicyExpression> Conditions,
    SourceSpan Span) : PolicyActionNode(Span);

public sealed record PolicyWarnActionNode(
    PolicyExpression? Message,
    SourceSpan Span) : PolicyActionNode(Span);

public sealed record PolicyDeferActionNode(
    PolicyExpression? Until,
    SourceSpan Span) : PolicyActionNode(Span);

public abstract record PolicyExpression(SourceSpan Span);

public sealed record PolicyLiteralExpression(object? Value, SourceSpan Span) : PolicyExpression(Span);

public sealed record PolicyListExpression(ImmutableArray<PolicyExpression> Items, SourceSpan Span) : PolicyExpression(Span);

public sealed record PolicyIdentifierExpression(string Name, SourceSpan Span) : PolicyExpression(Span);

public sealed record PolicyMemberAccessExpression(PolicyExpression Target, string Member, SourceSpan Span) : PolicyExpression(Span);

public sealed record PolicyInvocationExpression(PolicyExpression Target, ImmutableArray<PolicyExpression> Arguments, SourceSpan Span) : PolicyExpression(Span);

public sealed record PolicyIndexerExpression(PolicyExpression Target, PolicyExpression Index, SourceSpan Span) : PolicyExpression(Span);

public sealed record PolicyUnaryExpression(PolicyUnaryOperator Operator, PolicyExpression Operand, SourceSpan Span) : PolicyExpression(Span);

public sealed record PolicyBinaryExpression(PolicyExpression Left, PolicyBinaryOperator Operator, PolicyExpression Right, SourceSpan Span) : PolicyExpression(Span);

public enum PolicyUnaryOperator
{
    Not,
}

public enum PolicyBinaryOperator
{
    And,
    Or,
    Equal,
    NotEqual,
    LessThan,
    LessThanOrEqual,
    GreaterThan,
    GreaterThanOrEqual,
    In,
    NotIn,
}

public sealed record PolicyReference(ImmutableArray<string> Segments, SourceSpan Span)
{
    public override string ToString() => string.Join(".", Segments);
}

public abstract record PolicyLiteralValue(SourceSpan Span);

public sealed record PolicyStringLiteral(string Value, SourceSpan Span) : PolicyLiteralValue(Span);

public sealed record PolicyNumberLiteral(decimal Value, SourceSpan Span) : PolicyLiteralValue(Span);

public sealed record PolicyBooleanLiteral(bool Value, SourceSpan Span) : PolicyLiteralValue(Span);

public sealed record PolicyListLiteral(ImmutableArray<PolicyLiteralValue> Items, SourceSpan Span) : PolicyLiteralValue(Span);
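For orientation, a minimal sketch of how these node records compose into a document tree, assuming the SourceSpan/SourceLocation types further below in this commit; the zero-width span, rule name, and literal values are illustrative only, and in practice the compiler builds these nodes from DSL source text rather than by hand:

using System.Collections.Immutable;
using StellaOps.PolicyDsl;

// Hypothetical zero-width span for hand-built nodes (a real parser records actual positions).
var span = new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(0, 1, 1));

// Condition: severity.normalized == "Critical"
var condition = new PolicyBinaryExpression(
    new PolicyMemberAccessExpression(new PolicyIdentifierExpression("severity", span), "normalized", span),
    PolicyBinaryOperator.Equal,
    new PolicyLiteralExpression("Critical", span),
    span);

// Rule: then status := "blocked"
var rule = new PolicyRuleNode(
    Name: "block_critical",
    Priority: 1,
    When: condition,
    ThenActions: ImmutableArray.Create<PolicyActionNode>(
        new PolicyAssignmentActionNode(
            new PolicyReference(ImmutableArray.Create("status"), span),
            new PolicyLiteralExpression("blocked", span),
            span)),
    ElseActions: ImmutableArray<PolicyActionNode>.Empty,
    Because: "Critical findings cannot ship.",
    Span: span);

var document = new PolicyDocumentNode(
    Name: "Example Policy",
    Syntax: "stella-dsl@1",
    Metadata: ImmutableDictionary<string, PolicyLiteralValue>.Empty,
    Profiles: ImmutableArray<PolicyProfileNode>.Empty,
    Settings: ImmutableDictionary<string, PolicyLiteralValue>.Empty,
    Rules: ImmutableArray.Create(rule),
    Span: span);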
src/Policy/StellaOps.PolicyDsl/SignalContext.cs (new file, 216 lines)
@@ -0,0 +1,216 @@
|
namespace StellaOps.PolicyDsl;

/// <summary>
/// Provides signal values for policy evaluation.
/// </summary>
public sealed class SignalContext
{
    private readonly Dictionary<string, object?> _signals;

    /// <summary>
    /// Creates an empty signal context.
    /// </summary>
    public SignalContext()
    {
        _signals = new Dictionary<string, object?>(StringComparer.Ordinal);
    }

    /// <summary>
    /// Creates a signal context with initial values.
    /// </summary>
    /// <param name="signals">Initial signal values.</param>
    public SignalContext(IDictionary<string, object?> signals)
    {
        _signals = new Dictionary<string, object?>(signals, StringComparer.Ordinal);
    }

    /// <summary>
    /// Gets whether a signal exists.
    /// </summary>
    /// <param name="name">The signal name.</param>
    /// <returns>True if the signal exists.</returns>
    public bool HasSignal(string name) => _signals.ContainsKey(name);

    /// <summary>
    /// Gets a signal value.
    /// </summary>
    /// <param name="name">The signal name.</param>
    /// <returns>The signal value, or null if not found.</returns>
    public object? GetSignal(string name) => _signals.TryGetValue(name, out var value) ? value : null;

    /// <summary>
    /// Gets a signal value as a specific type.
    /// </summary>
    /// <typeparam name="T">The expected type.</typeparam>
    /// <param name="name">The signal name.</param>
    /// <returns>The signal value, or default if not found or wrong type.</returns>
    public T? GetSignal<T>(string name) => _signals.TryGetValue(name, out var value) && value is T t ? t : default;

    /// <summary>
    /// Sets a signal value.
    /// </summary>
    /// <param name="name">The signal name.</param>
    /// <param name="value">The signal value.</param>
    /// <returns>This context for chaining.</returns>
    public SignalContext SetSignal(string name, object? value)
    {
        _signals[name] = value;
        return this;
    }

    /// <summary>
    /// Removes a signal.
    /// </summary>
    /// <param name="name">The signal name.</param>
    /// <returns>This context for chaining.</returns>
    public SignalContext RemoveSignal(string name)
    {
        _signals.Remove(name);
        return this;
    }

    /// <summary>
    /// Gets all signal names.
    /// </summary>
    public IEnumerable<string> SignalNames => _signals.Keys;

    /// <summary>
    /// Gets all signals as a read-only dictionary.
    /// </summary>
    public IReadOnlyDictionary<string, object?> Signals => _signals;

    /// <summary>
    /// Creates a copy of this context.
    /// </summary>
    /// <returns>A new context with the same signals.</returns>
    public SignalContext Clone() => new(_signals);

    /// <summary>
    /// Creates a signal context builder for fluent construction.
    /// </summary>
    /// <returns>A new builder.</returns>
    public static SignalContextBuilder Builder() => new();
}

/// <summary>
/// Builder for creating signal contexts with fluent API.
/// </summary>
public sealed class SignalContextBuilder
{
    private readonly Dictionary<string, object?> _signals = new(StringComparer.Ordinal);

    /// <summary>
    /// Adds a signal to the context.
    /// </summary>
    /// <param name="name">The signal name.</param>
    /// <param name="value">The signal value.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithSignal(string name, object? value)
    {
        _signals[name] = value;
        return this;
    }

    /// <summary>
    /// Adds a boolean signal to the context.
    /// </summary>
    /// <param name="name">The signal name.</param>
    /// <param name="value">The boolean value.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithFlag(string name, bool value = true)
    {
        _signals[name] = value;
        return this;
    }

    /// <summary>
    /// Adds a numeric signal to the context.
    /// </summary>
    /// <param name="name">The signal name.</param>
    /// <param name="value">The numeric value.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithNumber(string name, decimal value)
    {
        _signals[name] = value;
        return this;
    }

    /// <summary>
    /// Adds a string signal to the context.
    /// </summary>
    /// <param name="name">The signal name.</param>
    /// <param name="value">The string value.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithString(string name, string value)
    {
        _signals[name] = value;
        return this;
    }

    /// <summary>
    /// Adds a nested object signal to the context.
    /// </summary>
    /// <param name="name">The signal name.</param>
    /// <param name="properties">The nested properties.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithObject(string name, IDictionary<string, object?> properties)
    {
        _signals[name] = new Dictionary<string, object?>(properties, StringComparer.Ordinal);
        return this;
    }

    /// <summary>
    /// Adds common finding signals.
    /// </summary>
    /// <param name="severity">The finding severity (e.g., "critical", "high", "medium", "low").</param>
    /// <param name="confidence">The confidence score (0.0 to 1.0).</param>
    /// <param name="cveId">Optional CVE identifier.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithFinding(string severity, decimal confidence, string? cveId = null)
    {
        _signals["finding"] = new Dictionary<string, object?>(StringComparer.Ordinal)
        {
            ["severity"] = severity,
            ["confidence"] = confidence,
            ["cve_id"] = cveId,
        };
        return this;
    }

    /// <summary>
    /// Adds common reachability signals.
    /// </summary>
    /// <param name="state">The reachability state (e.g., "reachable", "unreachable", "unknown").</param>
    /// <param name="confidence">The confidence score (0.0 to 1.0).</param>
    /// <param name="hasRuntimeEvidence">Whether there is runtime evidence.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithReachability(string state, decimal confidence, bool hasRuntimeEvidence = false)
    {
        _signals["reachability"] = new Dictionary<string, object?>(StringComparer.Ordinal)
        {
            ["state"] = state,
            ["confidence"] = confidence,
            ["has_runtime_evidence"] = hasRuntimeEvidence,
        };
        return this;
    }

    /// <summary>
    /// Adds common trust score signals.
    /// </summary>
    /// <param name="score">The trust score (0.0 to 1.0).</param>
    /// <param name="verified">Whether the source is verified.</param>
    /// <returns>This builder for chaining.</returns>
    public SignalContextBuilder WithTrustScore(decimal score, bool verified = false)
    {
        _signals["trust_score"] = score;
        _signals["trust_verified"] = verified;
        return this;
    }

    /// <summary>
    /// Builds the signal context.
    /// </summary>
    /// <returns>A new signal context with the configured signals.</returns>
    public SignalContext Build() => new(_signals);
}
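A brief usage sketch of the builder API above; the signal names come from the helpers ("finding", "reachability", "trust_score", "trust_verified", plus a caller-chosen flag), while the concrete values and the CVE identifier are made up for illustration:

var signals = SignalContext.Builder()
    .WithFinding(severity: "high", confidence: 0.8m, cveId: "CVE-2025-0001") // hypothetical CVE id
    .WithReachability(state: "reachable", confidence: 0.7m, hasRuntimeEvidence: true)
    .WithTrustScore(0.9m, verified: true)
    .WithFlag("in_production")
    .Build();

// Structured signals come back as the dictionaries the builder stored.
var finding = signals.GetSignal<Dictionary<string, object?>>("finding");
bool inProduction = signals.GetSignal<bool>("in_production");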
@@ -1,160 +1,97 @@
using System.Diagnostics.CodeAnalysis;

-namespace StellaOps.Policy.Engine.Compilation;
+namespace StellaOps.PolicyDsl;

/// <summary>
/// Represents a precise source location within a policy DSL document.
/// </summary>
public readonly struct SourceLocation : IEquatable<SourceLocation>, IComparable<SourceLocation>
{
    public SourceLocation(int offset, int line, int column)
    {
        if (offset < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(offset));
        }

        if (line < 1)
        {
            throw new ArgumentOutOfRangeException(nameof(line));
        }

        if (column < 1)
        {
            throw new ArgumentOutOfRangeException(nameof(column));
        }

        Offset = offset;
        Line = line;
        Column = column;
    }

    public int Offset { get; }

    public int Line { get; }

    public int Column { get; }

    public override string ToString() => $"(L{Line}, C{Column})";

    public bool Equals(SourceLocation other) =>
        Offset == other.Offset && Line == other.Line && Column == other.Column;

    public override bool Equals([NotNullWhen(true)] object? obj) =>
        obj is SourceLocation other && Equals(other);

    public override int GetHashCode() => HashCode.Combine(Offset, Line, Column);

    public int CompareTo(SourceLocation other) => Offset.CompareTo(other.Offset);

    public static bool operator ==(SourceLocation left, SourceLocation right) => left.Equals(right);

    public static bool operator !=(SourceLocation left, SourceLocation right) => !left.Equals(right);

    public static bool operator <(SourceLocation left, SourceLocation right) => left.CompareTo(right) < 0;

    public static bool operator <=(SourceLocation left, SourceLocation right) => left.CompareTo(right) <= 0;

    public static bool operator >(SourceLocation left, SourceLocation right) => left.CompareTo(right) > 0;

    public static bool operator >=(SourceLocation left, SourceLocation right) => left.CompareTo(right) >= 0;
}

/// <summary>
/// Represents a start/end location pair within a policy DSL source document.
/// </summary>
public readonly struct SourceSpan : IEquatable<SourceSpan>
{
    public SourceSpan(SourceLocation start, SourceLocation end)
    {
        if (start.Offset > end.Offset)
        {
            throw new ArgumentException("Start must not be after end.", nameof(start));
        }

        Start = start;
        End = end;
    }

    public SourceLocation Start { get; }

    public SourceLocation End { get; }

    public override string ToString() => $"{Start}->{End}";

    public bool Equals(SourceSpan other) => Start.Equals(other.Start) && End.Equals(other.End);

    public override bool Equals([NotNullWhen(true)] object? obj) => obj is SourceSpan other && Equals(other);

    public override int GetHashCode() => HashCode.Combine(Start, End);

    public static SourceSpan Combine(SourceSpan first, SourceSpan second)
    {
        var start = first.Start <= second.Start ? first.Start : second.Start;
        var end = first.End >= second.End ? first.End : second.End;
        return new SourceSpan(start, end);
    }
}
-
-internal enum TokenKind
-{
-    EndOfFile = 0,
-    Identifier,
-    StringLiteral,
-    NumberLiteral,
-    BooleanLiteral,
-    LeftBrace,
-    RightBrace,
-    LeftParen,
-    RightParen,
-    LeftBracket,
-    RightBracket,
-    Comma,
-    Semicolon,
-    Colon,
-    Arrow, // =>
-    Assign, // =
-    Define, // :=
-    Dot,
-    KeywordPolicy,
-    KeywordSyntax,
-    KeywordMetadata,
-    KeywordProfile,
-    KeywordRule,
-    KeywordMap,
-    KeywordSource,
-    KeywordEnv,
-    KeywordIf,
-    KeywordThen,
-    KeywordWhen,
-    KeywordAnd,
-    KeywordOr,
-    KeywordNot,
-    KeywordPriority,
-    KeywordElse,
-    KeywordBecause,
-    KeywordSettings,
-    KeywordIgnore,
-    KeywordUntil,
-    KeywordEscalate,
-    KeywordTo,
-    KeywordRequireVex,
-    KeywordWarn,
-    KeywordMessage,
-    KeywordDefer,
-    KeywordAnnotate,
-    KeywordIn,
-    EqualEqual,
-    NotEqual,
-    LessThan,
-    LessThanOrEqual,
-    GreaterThan,
-    GreaterThanOrEqual,
-    Unknown,
-}
-
-internal readonly record struct DslToken(
-    TokenKind Kind,
-    string Text,
-    SourceSpan Span,
-    object? Value = null);
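As a quick illustration of how spans merge when a parser wraps child nodes into a parent (the offsets below are hypothetical; a real lexer computes them while scanning):

var keyword = new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(4, 1, 5));
var name = new SourceSpan(new SourceLocation(5, 1, 6), new SourceLocation(20, 1, 21));

var ruleSpan = SourceSpan.Combine(keyword, name);
Console.WriteLine(ruleSpan); // prints "(L1, C1)->(L1, C21)"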
src/Policy/StellaOps.PolicyDsl/StellaOps.PolicyDsl.csproj (new file, 20 lines)
@@ -0,0 +1,20 @@
|
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>

  <ItemGroup>
    <ProjectReference Include="../__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />
  </ItemGroup>

  <ItemGroup>
    <InternalsVisibleTo Include="StellaOps.PolicyDsl.Tests" />
    <InternalsVisibleTo Include="StellaOps.Policy.Engine" />
    <InternalsVisibleTo Include="StellaOps.Policy.Engine.Tests" />
  </ItemGroup>
</Project>
@@ -6,12 +6,12 @@ namespace StellaOps.Policy;

public static class PolicyEvaluation
{
    public static PolicyVerdict EvaluateFinding(
        PolicyDocument document,
        PolicyScoringConfig scoringConfig,
        PolicyFinding finding,
        out PolicyExplanation? explanation)
    {
        if (document is null)
        {
            throw new ArgumentNullException(nameof(document));
@@ -44,49 +44,49 @@ public static class PolicyEvaluation
            resolvedReachabilityKey);
        var unknownConfidence = ComputeUnknownConfidence(scoringConfig.UnknownConfidence, finding);

        foreach (var rule in document.Rules)
        {
            if (!RuleMatches(rule, finding))
            {
                continue;
            }

            return BuildVerdict(rule, finding, scoringConfig, components, unknownConfidence, out explanation);
        }

        explanation = new PolicyExplanation(
            finding.FindingId,
-           PolicyVerdictStatus.Allowed,
+           PolicyVerdictStatus.Pass,
            null,
            "No rule matched; baseline applied",
            ImmutableArray.Create(PolicyExplanationNode.Leaf("rule", "No matching rule")));

        var baseline = PolicyVerdict.CreateBaseline(finding.FindingId, scoringConfig);
        return ApplyUnknownConfidence(baseline, unknownConfidence);
    }

    private static PolicyVerdict BuildVerdict(
        PolicyRule rule,
        PolicyFinding finding,
        PolicyScoringConfig config,
        ScoringComponents components,
        UnknownConfidenceResult? unknownConfidence,
        out PolicyExplanation explanation)
    {
        var action = rule.Action;
        var status = MapAction(action);
        var notes = BuildNotes(action);
        var explanationNodes = ImmutableArray.CreateBuilder<PolicyExplanationNode>();
        explanationNodes.Add(PolicyExplanationNode.Leaf("rule", $"Matched rule '{rule.Name}'", rule.Identifier));
        var inputs = ImmutableDictionary.CreateBuilder<string, double>(StringComparer.OrdinalIgnoreCase);
        inputs["severityWeight"] = components.SeverityWeight;
        inputs["trustWeight"] = components.TrustWeight;
        inputs["reachabilityWeight"] = components.ReachabilityWeight;
        inputs["baseScore"] = components.BaseScore;
        explanationNodes.Add(PolicyExplanationNode.Branch("score", "Base score", components.BaseScore.ToString(CultureInfo.InvariantCulture),
            PolicyExplanationNode.Leaf("severityWeight", "Severity weight", components.SeverityWeight.ToString(CultureInfo.InvariantCulture)),
            PolicyExplanationNode.Leaf("trustWeight", "Trust weight", components.TrustWeight.ToString(CultureInfo.InvariantCulture)),
            PolicyExplanationNode.Leaf("reachabilityWeight", "Reachability weight", components.ReachabilityWeight.ToString(CultureInfo.InvariantCulture))));
        if (!string.IsNullOrWhiteSpace(components.TrustKey))
        {
            inputs[$"trustWeight.{components.TrustKey}"] = components.TrustWeight;
@@ -97,14 +97,14 @@ public static class PolicyEvaluation
        }
        if (unknownConfidence is { Band.Description: { Length: > 0 } description })
        {
            notes = AppendNote(notes, description);
            explanationNodes.Add(PolicyExplanationNode.Leaf("unknown", description));
        }
        if (unknownConfidence is { } unknownDetails)
        {
            inputs["unknownConfidence"] = unknownDetails.Confidence;
            inputs["unknownAgeDays"] = unknownDetails.AgeDays;
        }

        double score = components.BaseScore;
        string? quietedBy = null;
@@ -113,8 +113,8 @@ public static class PolicyEvaluation
        var quietRequested = action.Quiet;
        var quietAllowed = quietRequested && (action.RequireVex is not null || action.Type == PolicyActionType.RequireVex);

        if (quietRequested && !quietAllowed)
        {
            var warnInputs = ImmutableDictionary.CreateBuilder<string, double>(StringComparer.OrdinalIgnoreCase);
            foreach (var pair in inputs)
            {
@@ -131,17 +131,17 @@ public static class PolicyEvaluation
            var warnScore = Math.Max(0, components.BaseScore - warnPenalty);
            var warnNotes = AppendNote(notes, "Quiet flag ignored: rule must specify requireVex justifications.");

            explanation = new PolicyExplanation(
                finding.FindingId,
                PolicyVerdictStatus.Warned,
                rule.Name,
                "Quiet flag ignored; requireVex not provided",
                explanationNodes.ToImmutable());

            return new PolicyVerdict(
                finding.FindingId,
                PolicyVerdictStatus.Warned,
                rule.Name,
                action.Type.ToString(),
                warnNotes,
                warnScore,
@@ -156,56 +156,49 @@ public static class PolicyEvaluation
                Reachability: components.ReachabilityKey);
        }

-       if (status != PolicyVerdictStatus.Allowed)
+       if (status != PolicyVerdictStatus.Pass)
        {
            explanationNodes.Add(PolicyExplanationNode.Leaf("action", $"Action {action.Type}", status.ToString()));
        }

        switch (status)
        {
            case PolicyVerdictStatus.Ignored:
                score = ApplyPenalty(score, config.IgnorePenalty, inputs, "ignorePenalty");
                explanationNodes.Add(PolicyExplanationNode.Leaf("penalty", "Ignore penalty", config.IgnorePenalty.ToString(CultureInfo.InvariantCulture)));
                break;
            case PolicyVerdictStatus.Warned:
                score = ApplyPenalty(score, config.WarnPenalty, inputs, "warnPenalty");
                explanationNodes.Add(PolicyExplanationNode.Leaf("penalty", "Warn penalty", config.WarnPenalty.ToString(CultureInfo.InvariantCulture)));
                break;
            case PolicyVerdictStatus.Deferred:
                var deferPenalty = config.WarnPenalty / 2;
                score = ApplyPenalty(score, deferPenalty, inputs, "deferPenalty");
                explanationNodes.Add(PolicyExplanationNode.Leaf("penalty", "Defer penalty", deferPenalty.ToString(CultureInfo.InvariantCulture)));
                break;
        }

        if (quietAllowed)
        {
            score = ApplyPenalty(score, config.QuietPenalty, inputs, "quietPenalty");
            quietedBy = rule.Name;
            quiet = true;
            explanationNodes.Add(PolicyExplanationNode.Leaf("quiet", "Quiet applied", config.QuietPenalty.ToString(CultureInfo.InvariantCulture)));
        }

-       explanation = new PolicyExplanation(
-           finding.FindingId,
-           status,
-           rule.Name,
-           notes,
-           explanationNodes.ToImmutable());
-
        explanation = new PolicyExplanation(
            finding.FindingId,
            status,
            rule.Name,
-           notes,
+           notes ?? string.Empty,
            explanationNodes.ToImmutable());

        return new PolicyVerdict(
            finding.FindingId,
            status,
            rule.Name,
            action.Type.ToString(),
            notes,
            score,
            config.Version,
            inputs.ToImmutable(),
@@ -229,12 +222,12 @@ public static class PolicyEvaluation
        return Math.Max(0, score - penalty);
    }

    private static PolicyVerdict ApplyUnknownConfidence(PolicyVerdict verdict, UnknownConfidenceResult? unknownConfidence)
    {
        if (unknownConfidence is null)
        {
            return verdict;
        }

        var inputsBuilder = ImmutableDictionary.CreateBuilder<string, double>(StringComparer.OrdinalIgnoreCase);
        foreach (var pair in verdict.GetInputs())
@@ -245,12 +238,12 @@ public static class PolicyEvaluation
        inputsBuilder["unknownConfidence"] = unknownConfidence.Value.Confidence;
        inputsBuilder["unknownAgeDays"] = unknownConfidence.Value.AgeDays;

        return verdict with
        {
            Inputs = inputsBuilder.ToImmutable(),
            UnknownConfidence = unknownConfidence.Value.Confidence,
            ConfidenceBand = unknownConfidence.Value.Band.Name,
            UnknownAgeDays = unknownConfidence.Value.AgeDays,
        };
    }
@@ -18,7 +18,7 @@ public sealed record PolicyExplanation(
    ImmutableArray<PolicyExplanationNode> Nodes)
{
    public static PolicyExplanation Allow(string findingId, string? ruleName, string reason, params PolicyExplanationNode[] nodes) =>
-       new(findingId, PolicyVerdictStatus.Allowed, ruleName, reason, nodes.ToImmutableArray());
+       new(findingId, PolicyVerdictStatus.Pass, ruleName, reason, nodes.ToImmutableArray());

    public static PolicyExplanation Block(string findingId, string? ruleName, string reason, params PolicyExplanationNode[] nodes) =>
        new(findingId, PolicyVerdictStatus.Blocked, ruleName, reason, nodes.ToImmutableArray());
@@ -29,7 +29,7 @@ public static class SplCanonicalizer

    public static byte[] CanonicalizeToUtf8(ReadOnlySpan<byte> json)
    {
-       using var document = JsonDocument.Parse(json, DocumentOptions);
+       using var document = JsonDocument.Parse(json.ToArray().AsMemory(), DocumentOptions);
        var buffer = new ArrayBufferWriter<byte>();

        using (var writer = new Utf8JsonWriter(buffer, WriterOptions))
@@ -49,8 +49,8 @@ public static class SplLayeringEngine

    private static JsonNode MergeToJsonNode(ReadOnlySpan<byte> basePolicyUtf8, ReadOnlySpan<byte> overlayPolicyUtf8)
    {
-       using var baseDoc = JsonDocument.Parse(basePolicyUtf8, DocumentOptions);
+       using var baseDoc = JsonDocument.Parse(basePolicyUtf8.ToArray().AsMemory(), DocumentOptions);
-       using var overlayDoc = JsonDocument.Parse(overlayPolicyUtf8, DocumentOptions);
+       using var overlayDoc = JsonDocument.Parse(overlayPolicyUtf8.ToArray().AsMemory(), DocumentOptions);

        var baseRoot = baseDoc.RootElement;
        var overlayRoot = overlayDoc.RootElement;
@@ -209,4 +209,14 @@ public static class SplLayeringEngine

        return element.Value.TryGetProperty(name, out var value) ? value : (JsonElement?)null;
    }
+
+   private static JsonElement? GetPropertyOrNull(this JsonElement element, string name)
+   {
+       if (element.ValueKind != JsonValueKind.Object)
+       {
+           return null;
+       }
+
+       return element.TryGetProperty(name, out var value) ? value : (JsonElement?)null;
+   }
}
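A sketch of what the new helper does; it is private to SplLayeringEngine, so the calls below stand in for how the merge code uses it internally, and the sample JSON is invented for illustration:

using System.Text.Json;

using var doc = JsonDocument.Parse("{\"rules\":{\"max_severity\":\"high\"}}");

JsonElement? rules = doc.RootElement.GetPropertyOrNull("rules");      // object -> element
JsonElement? missing = doc.RootElement.GetPropertyOrNull("profiles"); // absent property -> null
JsonElement? notObject = doc.RootElement
    .GetProperty("rules").GetProperty("max_severity")
    .GetPropertyOrNull("anything");                                    // non-object element -> null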
@@ -1,3 +1,4 @@
+using Xunit;
using StellaOps.Policy.Engine.AdvisoryAI;

namespace StellaOps.Policy.Engine.Tests;
@@ -1,3 +1,4 @@
+using Xunit;
using StellaOps.Policy.Engine.Domain;
using StellaOps.Policy.Engine.Services;

@@ -1,3 +1,4 @@
+using Xunit;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Policy.Engine.Ledger;
using StellaOps.Policy.Engine.Orchestration;
@@ -1,3 +1,4 @@
+using Xunit;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Policy.Engine.Orchestration;

@@ -1,3 +1,4 @@
+using Xunit;
using System.Threading.Tasks;
using StellaOps.Policy.Engine.Overlay;
using StellaOps.Policy.Engine.Services;
@@ -1,3 +1,4 @@
+using Xunit;
using System.Text.Json;
using System.Threading.Tasks;
using StellaOps.Policy.Engine.Overlay;
@@ -1,3 +1,4 @@
+using Xunit;
using System.Linq;
using System.Threading.Tasks;
using StellaOps.Policy.Engine.Streaming;
@@ -1,3 +1,4 @@
+using Xunit;
using System.Collections.Immutable;
using System.Collections.Immutable;
using Microsoft.Extensions.Options;
@@ -1,7 +1,7 @@
using System;
using Microsoft.Extensions.Options;
using StellaOps.Policy;
-using StellaOps.Policy.Engine.Compilation;
+using StellaOps.PolicyDsl;
using StellaOps.Policy.Engine.Options;
using StellaOps.Policy.Engine.Services;
using Xunit;
@@ -1,7 +1,7 @@
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Policy;
-using StellaOps.Policy.Engine.Compilation;
+using StellaOps.PolicyDsl;
using Xunit;
using Xunit.Sdk;

@@ -3,7 +3,7 @@ using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Policy;
-using StellaOps.Policy.Engine.Compilation;
+using StellaOps.PolicyDsl;
using StellaOps.Policy.Engine.Evaluation;
using StellaOps.Policy.Engine.Services;
using Xunit;
@@ -51,26 +51,26 @@ policy "Baseline Production Policy" syntax "stella-dsl@1" {
      because "Respect strong vendor VEX claims."
  }

  rule alert_warn_eol_runtime priority 1 {
    when severity.normalized <= "Medium"
      and sbom.has_tag("runtime:eol")
    then warn message "Runtime marked as EOL; upgrade recommended."
    because "Deprecated runtime should be upgraded."
  }

  rule block_ruby_dev priority 4 {
    when sbom.any_component(ruby.group("development") and ruby.declared_only())
    then status := "blocked"
    because "Development-only Ruby gems without install evidence cannot ship."
  }

  rule warn_ruby_git_sources {
    when sbom.any_component(ruby.source("git"))
    then warn message "Git-sourced Ruby gem present; review required."
    because "Git-sourced Ruby dependencies require explicit review."
  }
}
""";

    private readonly PolicyCompiler compiler = new();
    private readonly PolicyEvaluationService evaluationService = new();
@@ -125,11 +125,11 @@ policy "Baseline Production Policy" syntax "stella-dsl@1" {
    public void Evaluate_WarnRuleEmitsWarning()
    {
        var document = CompileBaseline();
        var tags = ImmutableHashSet.Create("runtime:eol");
        var context = CreateContext("Medium", "internal") with
        {
            Sbom = new PolicyEvaluationSbom(tags)
        };

        var result = evaluationService.Evaluate(document, context);

@@ -273,74 +273,74 @@ policy "Baseline Production Policy" syntax "stella-dsl@1" {
        Assert.NotNull(result.AppliedException);
        Assert.Equal("exc-rule", result.AppliedException!.ExceptionId);
        Assert.Equal("Rule Critical Suppress", result.AppliedException!.Metadata["effectName"]);
        Assert.Equal("alice", result.AppliedException!.Metadata["requestedBy"]);
        Assert.Equal("alice", result.Annotations["exception.meta.requestedBy"]);
    }

    [Fact]
    public void Evaluate_RubyDevComponentBlocked()
    {
        var document = CompileBaseline();
        var component = CreateRubyComponent(
            name: "dev-only",
            version: "1.0.0",
            groups: "development;test",
            declaredOnly: true,
            source: "https://rubygems.org/",
            capabilities: new[] { "exec" });

        var context = CreateContext("Medium", "internal") with
        {
            Sbom = new PolicyEvaluationSbom(
                ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase),
                ImmutableArray.Create(component))
        };

        var result = evaluationService.Evaluate(document, context);

        Assert.True(result.Matched);
        Assert.Equal("block_ruby_dev", result.RuleName);
        Assert.Equal("blocked", result.Status);
    }

    [Fact]
    public void Evaluate_RubyGitComponentWarns()
    {
        var document = CompileBaseline();
        var component = CreateRubyComponent(
            name: "git-gem",
            version: "0.5.0",
            groups: "default",
            declaredOnly: false,
            source: "git:https://github.com/example/git-gem.git@0123456789abcdef0123456789abcdef01234567",
            capabilities: Array.Empty<string>(),
            schedulerCapabilities: new[] { "sidekiq" });

        var context = CreateContext("Low", "internal") with
        {
            Sbom = new PolicyEvaluationSbom(
                ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase),
                ImmutableArray.Create(component))
        };

        var result = evaluationService.Evaluate(document, context);

        Assert.True(result.Matched);
        Assert.Equal("warn_ruby_git_sources", result.RuleName);
        Assert.Equal("warned", result.Status);
        Assert.Contains(result.Warnings, warning => warning.Contains("Git-sourced", StringComparison.OrdinalIgnoreCase));
    }

    private PolicyIrDocument CompileBaseline()
    {
        var compilation = compiler.Compile(BaselinePolicy);
        if (!compilation.Success)
        {
            Console.WriteLine(Describe(compilation.Diagnostics));
        }
        Assert.True(compilation.Success, Describe(compilation.Diagnostics));
        return Assert.IsType<PolicyIrDocument>(compilation.Document);
    }

    private static PolicyEvaluationContext CreateContext(string severity, string exposure, PolicyEvaluationExceptions? exceptions = null)
    {
@@ -352,67 +352,67 @@ policy "Baseline Production Policy" syntax "stella-dsl@1" {
            }.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase)),
            new PolicyEvaluationAdvisory("GHSA", ImmutableDictionary<string, string>.Empty),
            PolicyEvaluationVexEvidence.Empty,
            PolicyEvaluationSbom.Empty,
            exceptions ?? PolicyEvaluationExceptions.Empty);
    }

    private static string Describe(ImmutableArray<PolicyIssue> issues) =>
        string.Join(" | ", issues.Select(issue => $"{issue.Severity}:{issue.Code}:{issue.Message}"));

    private static PolicyEvaluationComponent CreateRubyComponent(
        string name,
        string version,
        string groups,
        bool declaredOnly,
        string source,
        IEnumerable<string>? capabilities = null,
        IEnumerable<string>? schedulerCapabilities = null)
    {
        var metadataBuilder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.OrdinalIgnoreCase);
        if (!string.IsNullOrWhiteSpace(groups))
        {
            metadataBuilder["groups"] = groups;
        }

        metadataBuilder["declaredOnly"] = declaredOnly ? "true" : "false";

        if (!string.IsNullOrWhiteSpace(source))
        {
            metadataBuilder["source"] = source.Trim();
        }

        if (capabilities is not null)
        {
            foreach (var capability in capabilities)
            {
                if (!string.IsNullOrWhiteSpace(capability))
                {
                    metadataBuilder[$"capability.{capability.Trim()}"] = "true";
                }
            }
        }

        if (schedulerCapabilities is not null)
        {
            var schedulerList = string.Join(
                ';',
                schedulerCapabilities
                    .Where(static s => !string.IsNullOrWhiteSpace(s))
                    .Select(static s => s.Trim()));

            if (!string.IsNullOrWhiteSpace(schedulerList))
            {
                metadataBuilder["capability.scheduler"] = schedulerList;
            }
        }

        metadataBuilder["lockfile"] = "Gemfile.lock";

        return new PolicyEvaluationComponent(
            name,
            version,
            "gem",
            $"pkg:gem/{name}@{version}",
            metadataBuilder.ToImmutable());
|
metadataBuilder.ToImmutable());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
+using Xunit;
 using System.Collections.Immutable;
 using StellaOps.Policy.Engine.Domain;
 using StellaOps.Policy.Engine.Services;

@@ -1,3 +1,4 @@
+using Xunit;
 using Microsoft.Extensions.Time.Testing;
 using StellaOps.Policy.Engine.Orchestration;

@@ -1,3 +1,4 @@
+using Xunit;
 using Microsoft.Extensions.Time.Testing;
 using StellaOps.Policy.Engine.Ledger;
 using StellaOps.Policy.Engine.Orchestration;

@@ -6,9 +6,25 @@
     <Nullable>enable</Nullable>
     <ImplicitUsings>enable</ImplicitUsings>
     <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
+    <IsPackable>false</IsPackable>
+    <IsTestProject>true</IsTestProject>
     <UseConcelierTestInfra>false</UseConcelierTestInfra>
   </PropertyGroup>

+  <ItemGroup>
+    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
+    <PackageReference Include="xunit" Version="2.9.3" />
+    <PackageReference Include="xunit.runner.visualstudio" Version="3.0.1">
+      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
+      <PrivateAssets>all</PrivateAssets>
+    </PackageReference>
+    <PackageReference Include="coverlet.collector" Version="6.0.4">
+      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
+      <PrivateAssets>all</PrivateAssets>
+    </PackageReference>
+    <PackageReference Include="FluentAssertions" Version="6.12.0" />
+  </ItemGroup>
+
   <ItemGroup>
     <ProjectReference Include="../../StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj" />
   </ItemGroup>

@@ -1,3 +1,4 @@
+using Xunit;
 using StellaOps.Policy.Engine.TrustWeighting;

 namespace StellaOps.Policy.Engine.Tests;

@@ -1,3 +1,4 @@
+using Xunit;
 using Microsoft.Extensions.Time.Testing;
 using StellaOps.Policy.Engine.Ledger;
 using StellaOps.Policy.Engine.Orchestration;
@@ -0,0 +1,183 @@
using FluentAssertions;
using StellaOps.PolicyDsl;
using Xunit;

namespace StellaOps.PolicyDsl.Tests;

/// <summary>
/// Tests for the policy DSL compiler.
/// </summary>
public class PolicyCompilerTests
{
    private readonly PolicyCompiler _compiler = new();

    [Fact]
    public void Compile_MinimalPolicy_Succeeds()
    {
        // Arrange - rule name is an identifier, not a string; then block has no braces; := for assignment
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule always priority 1 {
                when true
                then
                  severity := "info"
                because "always applies"
              }
            }
            """;

        // Act
        var result = _compiler.Compile(source);

        // Assert
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        result.Document.Should().NotBeNull();
        result.Document!.Name.Should().Be("test");
        result.Document.Syntax.Should().Be("stella-dsl@1");
        result.Document.Rules.Should().HaveCount(1);
        result.Checksum.Should().NotBeNullOrEmpty();
    }

    [Fact]
    public void Compile_WithMetadata_ParsesCorrectly()
    {
        // Arrange
        var source = """
            policy "with-meta" syntax "stella-dsl@1" {
              metadata {
                version = "1.0.0"
                author = "test"
              }
              rule r1 priority 1 {
                when true
                then
                  severity := "low"
                because "required"
              }
            }
            """;

        // Act
        var result = _compiler.Compile(source);

        // Assert
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        result.Document!.Metadata.Should().ContainKey("version");
        result.Document.Metadata.Should().ContainKey("author");
    }

    [Fact]
    public void Compile_WithProfile_ParsesCorrectly()
    {
        // Arrange
        var source = """
            policy "with-profile" syntax "stella-dsl@1" {
              profile standard {
                trust_score = 0.85
              }
              rule r1 priority 1 {
                when true
                then
                  severity := "low"
                because "required"
              }
            }
            """;

        // Act
        var result = _compiler.Compile(source);

        // Assert
        result.Success.Should().BeTrue(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        result.Document!.Profiles.Should().HaveCount(1);
        result.Document.Profiles[0].Name.Should().Be("standard");
    }

    [Fact]
    public void Compile_EmptySource_ReturnsError()
    {
        // Arrange
        var source = "";

        // Act
        var result = _compiler.Compile(source);

        // Assert
        result.Success.Should().BeFalse();
        result.Diagnostics.Should().NotBeEmpty();
    }

    [Fact]
    public void Compile_InvalidSyntax_ReturnsError()
    {
        // Arrange
        var source = """
            policy "bad" syntax "invalid@1" {
            }
            """;

        // Act
        var result = _compiler.Compile(source);

        // Assert
        result.Success.Should().BeFalse();
    }

    [Fact]
    public void Compile_SameSource_ProducesSameChecksum()
    {
        // Arrange
        var source = """
            policy "deterministic" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when true
                then
                  severity := "info"
                because "always"
              }
            }
            """;

        // Act
        var result1 = _compiler.Compile(source);
        var result2 = _compiler.Compile(source);

        // Assert
        result1.Success.Should().BeTrue(string.Join("; ", result1.Diagnostics.Select(d => d.Message)));
        result2.Success.Should().BeTrue(string.Join("; ", result2.Diagnostics.Select(d => d.Message)));
        result1.Checksum.Should().Be(result2.Checksum);
    }

    [Fact]
    public void Compile_DifferentSource_ProducesDifferentChecksum()
    {
        // Arrange
        var source1 = """
            policy "test1" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when true
                then
                  severity := "info"
                because "always"
              }
            }
            """;
        var source2 = """
            policy "test2" syntax "stella-dsl@1" {
              rule r1 priority 1 {
                when true
                then
                  severity := "info"
                because "always"
              }
            }
            """;

        // Act
        var result1 = _compiler.Compile(source1);
        var result2 = _compiler.Compile(source2);

        // Assert
        result1.Checksum.Should().NotBe(result2.Checksum);
    }
}
@@ -0,0 +1,193 @@
using FluentAssertions;
using StellaOps.PolicyDsl;
using Xunit;

namespace StellaOps.PolicyDsl.Tests;

/// <summary>
/// Tests for the policy evaluation engine.
/// </summary>
public class PolicyEngineTests
{
    private readonly PolicyEngineFactory _factory = new();

    [Fact]
    public void Evaluate_RuleMatches_ReturnsMatchedRules()
    {
        // Arrange
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule critical_rule priority 100 {
                when finding.severity == "critical"
                then
                  severity := "critical"
                because "critical finding detected"
              }
            }
            """;
        var result = _factory.CreateFromSource(source);
        result.Engine.Should().NotBeNull(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        var engine = result.Engine!;
        var context = SignalContext.Builder()
            .WithObject("finding", new Dictionary<string, object?> { ["severity"] = "critical" })
            .Build();

        // Act
        var evalResult = engine.Evaluate(context);

        // Assert
        evalResult.MatchedRules.Should().Contain("critical_rule");
        evalResult.PolicyChecksum.Should().NotBeNullOrEmpty();
    }

    [Fact]
    public void Evaluate_RuleDoesNotMatch_ExecutesElseBranch()
    {
        // Arrange
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule critical_only priority 100 {
                when finding.severity == "critical"
                then
                  severity := "critical"
                else
                  severity := "info"
                because "classify by severity"
              }
            }
            """;
        var result = _factory.CreateFromSource(source);
        result.Engine.Should().NotBeNull(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        var engine = result.Engine!;
        var context = SignalContext.Builder()
            .WithObject("finding", new Dictionary<string, object?> { ["severity"] = "low" })
            .Build();

        // Act
        var evalResult = engine.Evaluate(context);

        // Assert
        evalResult.MatchedRules.Should().BeEmpty();
        evalResult.Actions.Should().NotBeEmpty();
        evalResult.Actions[0].WasElseBranch.Should().BeTrue();
    }

    [Fact]
    public void Evaluate_MultipleRules_EvaluatesInPriorityOrder()
    {
        // Arrange
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule low_priority priority 10 {
                when true
                then
                  severity := "low"
                because "low priority rule"
              }
              rule high_priority priority 100 {
                when true
                then
                  severity := "high"
                because "high priority rule"
              }
            }
            """;
        var result = _factory.CreateFromSource(source);
        result.Engine.Should().NotBeNull(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        var engine = result.Engine!;
        var context = new SignalContext();

        // Act
        var evalResult = engine.Evaluate(context);

        // Assert
        evalResult.MatchedRules.Should().HaveCount(2);
        evalResult.MatchedRules[0].Should().Be("high_priority");
        evalResult.MatchedRules[1].Should().Be("low_priority");
    }

    [Fact]
    public void Evaluate_WithAndCondition_MatchesWhenBothTrue()
    {
        // Arrange
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule combined priority 100 {
                when finding.severity == "critical" and reachability.state == "reachable"
                then
                  severity := "critical"
                because "critical and reachable"
              }
            }
            """;
        var result = _factory.CreateFromSource(source);
        result.Engine.Should().NotBeNull(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        var engine = result.Engine!;
        var context = SignalContext.Builder()
            .WithFinding("critical", 0.95m)
            .WithReachability("reachable", 0.9m)
            .Build();

        // Act
        var evalResult = engine.Evaluate(context);

        // Assert
        evalResult.MatchedRules.Should().Contain("combined");
    }

    [Fact]
    public void Evaluate_WithOrCondition_MatchesWhenEitherTrue()
    {
        // Arrange
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule either priority 100 {
                when finding.severity == "critical" or finding.severity == "high"
                then
                  severity := "elevated"
                because "elevated severity"
              }
            }
            """;
        var result = _factory.CreateFromSource(source);
        result.Engine.Should().NotBeNull(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        var engine = result.Engine!;
        var context = SignalContext.Builder()
            .WithObject("finding", new Dictionary<string, object?> { ["severity"] = "high" })
            .Build();

        // Act
        var evalResult = engine.Evaluate(context);

        // Assert
        evalResult.MatchedRules.Should().Contain("either");
    }

    [Fact]
    public void Evaluate_WithNotCondition_InvertsResult()
    {
        // Arrange
        var source = """
            policy "test" syntax "stella-dsl@1" {
              rule not_critical priority 100 {
                when not finding.is_critical
                then
                  severity := "low"
                because "not critical"
              }
            }
            """;
        var result = _factory.CreateFromSource(source);
        result.Engine.Should().NotBeNull(string.Join("; ", result.Diagnostics.Select(d => d.Message)));
        var engine = result.Engine!;
        var context = SignalContext.Builder()
            .WithObject("finding", new Dictionary<string, object?> { ["is_critical"] = false })
            .Build();

        // Act
        var evalResult = engine.Evaluate(context);

        // Assert
        evalResult.MatchedRules.Should().Contain("not_critical");
    }
}
@@ -0,0 +1,181 @@
using FluentAssertions;
using StellaOps.PolicyDsl;
using Xunit;

namespace StellaOps.PolicyDsl.Tests;

/// <summary>
/// Tests for the signal context API.
/// </summary>
public class SignalContextTests
{
    [Fact]
    public void Builder_WithSignal_SetsSignalValue()
    {
        // Arrange & Act
        var context = SignalContext.Builder()
            .WithSignal("test", "value")
            .Build();

        // Assert
        context.GetSignal("test").Should().Be("value");
    }

    [Fact]
    public void Builder_WithFlag_SetsBooleanSignal()
    {
        // Arrange & Act
        var context = SignalContext.Builder()
            .WithFlag("enabled")
            .Build();

        // Assert
        context.GetSignal<bool>("enabled").Should().BeTrue();
    }

    [Fact]
    public void Builder_WithNumber_SetsDecimalSignal()
    {
        // Arrange & Act
        var context = SignalContext.Builder()
            .WithNumber("score", 0.95m)
            .Build();

        // Assert
        context.GetSignal<decimal>("score").Should().Be(0.95m);
    }

    [Fact]
    public void Builder_WithString_SetsStringSignal()
    {
        // Arrange & Act
        var context = SignalContext.Builder()
            .WithString("name", "test")
            .Build();

        // Assert
        context.GetSignal<string>("name").Should().Be("test");
    }

    [Fact]
    public void Builder_WithFinding_SetsNestedFindingObject()
    {
        // Arrange & Act
        var context = SignalContext.Builder()
            .WithFinding("critical", 0.95m, "CVE-2024-1234")
            .Build();

        // Assert
        context.HasSignal("finding").Should().BeTrue();
        var finding = context.GetSignal("finding") as IDictionary<string, object?>;
        finding.Should().NotBeNull();
        finding!["severity"].Should().Be("critical");
        finding["confidence"].Should().Be(0.95m);
        finding["cve_id"].Should().Be("CVE-2024-1234");
    }

    [Fact]
    public void Builder_WithReachability_SetsNestedReachabilityObject()
    {
        // Arrange & Act
        var context = SignalContext.Builder()
            .WithReachability("reachable", 0.9m, hasRuntimeEvidence: true)
            .Build();

        // Assert
        context.HasSignal("reachability").Should().BeTrue();
        var reachability = context.GetSignal("reachability") as IDictionary<string, object?>;
        reachability.Should().NotBeNull();
        reachability!["state"].Should().Be("reachable");
        reachability["confidence"].Should().Be(0.9m);
        reachability["has_runtime_evidence"].Should().Be(true);
    }

    [Fact]
    public void Builder_WithTrustScore_SetsTrustSignals()
    {
        // Arrange & Act
        var context = SignalContext.Builder()
            .WithTrustScore(0.85m, verified: true)
            .Build();

        // Assert
        context.GetSignal<decimal>("trust_score").Should().Be(0.85m);
        context.GetSignal<bool>("trust_verified").Should().BeTrue();
    }

    [Fact]
    public void SetSignal_UpdatesExistingValue()
    {
        // Arrange
        var context = new SignalContext();
        context.SetSignal("key", "value1");

        // Act
        context.SetSignal("key", "value2");

        // Assert
        context.GetSignal("key").Should().Be("value2");
    }

    [Fact]
    public void RemoveSignal_RemovesExistingSignal()
    {
        // Arrange
        var context = new SignalContext();
        context.SetSignal("key", "value");

        // Act
        context.RemoveSignal("key");

        // Assert
        context.HasSignal("key").Should().BeFalse();
    }

    [Fact]
    public void Clone_CreatesIndependentCopy()
    {
        // Arrange
        var original = SignalContext.Builder()
            .WithSignal("key", "value")
            .Build();

        // Act
        var clone = original.Clone();
        clone.SetSignal("key", "modified");

        // Assert
        original.GetSignal("key").Should().Be("value");
        clone.GetSignal("key").Should().Be("modified");
    }

    [Fact]
    public void SignalNames_ReturnsAllSignalKeys()
    {
        // Arrange
        var context = SignalContext.Builder()
            .WithSignal("a", 1)
            .WithSignal("b", 2)
            .WithSignal("c", 3)
            .Build();

        // Act & Assert
        context.SignalNames.Should().BeEquivalentTo(new[] { "a", "b", "c" });
    }

    [Fact]
    public void Signals_ReturnsReadOnlyDictionary()
    {
        // Arrange
        var context = SignalContext.Builder()
            .WithSignal("key", "value")
            .Build();

        // Act
        var signals = context.Signals;

        // Assert
        signals.Should().ContainKey("key");
        signals["key"].Should().Be("value");
    }
}
@@ -0,0 +1,35 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <!-- Disable Concelier test infra to avoid duplicate package references -->
    <UseConcelierTestInfra>false</UseConcelierTestInfra>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
    <PackageReference Include="xunit" Version="2.9.3" />
    <PackageReference Include="xunit.runner.visualstudio" Version="3.0.1">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="coverlet.collector" Version="6.0.4">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="../../StellaOps.PolicyDsl/StellaOps.PolicyDsl.csproj" />
  </ItemGroup>
  <ItemGroup>
    <None Update="TestData\*.dsl">
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
    </None>
  </ItemGroup>
</Project>
@@ -0,0 +1,56 @@
// Default reachability-aware policy
// syntax: stella-dsl@1

policy "default-reachability" syntax "stella-dsl@1" {
  metadata {
    version = "1.0.0"
    description = "Default policy with reachability-aware rules"
    author = "StellaOps"
  }

  settings {
    default_action = "warn"
    fail_on_critical = true
  }

  profile standard {
    trust_score = 0.85
  }

  // Critical vulnerabilities with confirmed reachability
  rule critical_reachable priority 100 {
    when finding.severity == "critical" and reachability.state == "reachable"
    then
      severity := "critical"
      annotate finding.priority := "immediate"
      escalate to "security-team" when reachability.confidence > 0.9
    because "Critical vulnerabilities with confirmed reachability require immediate action"
  }

  // High severity with runtime evidence
  rule high_with_evidence priority 90 {
    when finding.severity == "high" and reachability.has_runtime_evidence
    then
      severity := "high"
      annotate finding.evidence := "runtime-confirmed"
    else
      defer until "reachability-assessment"
    because "High severity findings need runtime evidence for prioritization"
  }

  // Low severity unreachable can be ignored
  rule low_unreachable priority 50 {
    when finding.severity == "low" and reachability.state == "unreachable"
    then
      ignore until "next-scan" because "Low severity unreachable code"
    because "Low severity unreachable vulnerabilities can be safely deferred"
  }

  // Unknown reachability requires VEX
  rule unknown_reachability priority 40 {
    when not reachability.state
    then
      warn message "Reachability assessment pending"
    because "Unknown reachability requires manual assessment"
  }
}
@@ -0,0 +1,11 @@
// Minimal valid policy
// syntax: stella-dsl@1

policy "minimal" syntax "stella-dsl@1" {
  rule always_pass priority 1 {
    when true
    then
      severity := "info"
    because "always applies"
  }
}
@@ -0,0 +1,10 @@
using System.Collections.Generic;

namespace StellaOps.Scanner.Worker.Determinism;

/// <summary>
/// Deterministic metadata for a surface manifest: per-payload hashes and a Merkle-like root.
/// </summary>
public sealed record DeterminismEvidence(
    IReadOnlyDictionary<string, string> PayloadHashes,
    string MerkleRootSha256);
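One way to derive MerkleRootSha256 from PayloadHashes is to fold the payload digests in a canonical (sorted) order into a single SHA-256. The snippet below is a minimal sketch of that idea only; the helper name ComputeMerkleLikeRoot and the "key:hash" concatenation format are assumptions for illustration, not this change's actual derivation.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;

internal static class DeterminismEvidenceRootSketch
{
    // Hypothetical helper: hashes sorted "key:hash" pairs into one SHA-256 root (hex, lowercase).
    internal static string ComputeMerkleLikeRoot(IReadOnlyDictionary<string, string> payloadHashes)
    {
        var canonical = string.Join(
            "\n",
            payloadHashes
                .OrderBy(kv => kv.Key, StringComparer.Ordinal)
                .Select(kv => $"{kv.Key}:{kv.Value}"));

        var root = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
        return Convert.ToHexString(root).ToLowerInvariant();
    }
}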
@@ -0,0 +1,79 @@
|
|||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
|
||||||
|
namespace StellaOps.Scanner.Worker.Determinism;
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Represents a determinism score report produced by the worker replay harness.
|
||||||
|
/// This mirrors the determinism.json shape used in release bundles.
|
||||||
|
/// </summary>
|
||||||
|
public sealed record DeterminismReport(
|
||||||
|
string Version,
|
||||||
|
string Release,
|
||||||
|
string Platform,
|
||||||
|
string? PolicySha,
|
||||||
|
string? FeedsSha,
|
||||||
|
string? ScannerSha,
|
||||||
|
double OverallScore,
|
||||||
|
double ThresholdOverall,
|
||||||
|
double ThresholdImage,
|
||||||
|
IReadOnlyList<DeterminismImageReport> Images)
|
||||||
|
{
|
||||||
|
public static DeterminismReport FromHarness(Harness.DeterminismReport harnessReport,
|
||||||
|
string release,
|
||||||
|
string platform,
|
||||||
|
string? policySha = null,
|
||||||
|
string? feedsSha = null,
|
||||||
|
string? scannerSha = null,
|
||||||
|
string version = "1")
|
||||||
|
{
|
||||||
|
ArgumentNullException.ThrowIfNull(harnessReport);
|
||||||
|
|
||||||
|
return new DeterminismReport(
|
||||||
|
Version: version,
|
||||||
|
Release: release,
|
||||||
|
Platform: platform,
|
||||||
|
PolicySha: policySha,
|
||||||
|
FeedsSha: feedsSha,
|
||||||
|
ScannerSha: scannerSha,
|
||||||
|
OverallScore: harnessReport.OverallScore,
|
||||||
|
ThresholdOverall: harnessReport.OverallThreshold,
|
||||||
|
ThresholdImage: harnessReport.ImageThreshold,
|
||||||
|
Images: harnessReport.Images.Select(DeterminismImageReport.FromHarness).ToList());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public sealed record DeterminismImageReport(
|
||||||
|
string Image,
|
||||||
|
int Runs,
|
||||||
|
int Identical,
|
||||||
|
double Score,
|
||||||
|
IReadOnlyDictionary<string, string> ArtifactHashes,
|
||||||
|
IReadOnlyList<DeterminismRunReport> RunsDetail)
|
||||||
|
{
|
||||||
|
public static DeterminismImageReport FromHarness(Harness.DeterminismImageReport report)
|
||||||
|
{
|
||||||
|
return new DeterminismImageReport(
|
||||||
|
Image: report.ImageDigest,
|
||||||
|
Runs: report.Runs,
|
||||||
|
Identical: report.Identical,
|
||||||
|
Score: report.Score,
|
||||||
|
ArtifactHashes: report.BaselineHashes,
|
||||||
|
RunsDetail: report.RunReports.Select(DeterminismRunReport.FromHarness).ToList());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public sealed record DeterminismRunReport(
|
||||||
|
int RunIndex,
|
||||||
|
IReadOnlyDictionary<string, string> ArtifactHashes,
|
||||||
|
IReadOnlyList<string> NonDeterministic)
|
||||||
|
{
|
||||||
|
public static DeterminismRunReport FromHarness(Harness.DeterminismRunReport report)
|
||||||
|
{
|
||||||
|
return new DeterminismRunReport(
|
||||||
|
RunIndex: report.RunIndex,
|
||||||
|
ArtifactHashes: report.ArtifactHashes,
|
||||||
|
NonDeterministic: report.NonDeterministicArtifacts);
|
||||||
|
}
|
||||||
|
}
|
||||||
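For orientation, mapping a harness report into this release-bundle record and serializing it could look like the sketch below. It is illustrative only: the Harness using-alias, the sample release/platform/sha values, and the camelCase serializer options are assumptions, not the shipped wiring.

using System.Text.Json;
using Harness = StellaOps.Scanner.Worker.Tests.Determinism; // assumed alias to the harness types

internal static class DeterminismReportSketch
{
    // Assumes harnessReport came from Harness.DeterminismHarness.Compute(runs).
    internal static string ToDeterminismJson(Harness.DeterminismReport harnessReport)
    {
        var report = StellaOps.Scanner.Worker.Determinism.DeterminismReport.FromHarness(
            harnessReport,
            release: "2025.11",            // example value
            platform: "linux/amd64",       // example value
            policySha: "sha256:deadbeef"); // example value

        // Approximates the determinism.json payload described in the record's summary.
        return JsonSerializer.Serialize(report, new JsonSerializerOptions(JsonSerializerDefaults.Web));
    }
}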
@@ -0,0 +1,11 @@
using System;

namespace StellaOps.Scanner.Worker.Processing.Replay;

public sealed record ReplayBundleContext(ReplaySealedBundleMetadata Metadata, string BundlePath)
{
    // Validate the positional parameters by redeclaring the properties with guarded initializers.
    public ReplaySealedBundleMetadata Metadata { get; init; } =
        Metadata ?? throw new ArgumentNullException(nameof(Metadata));

    public string BundlePath { get; init; } =
        string.IsNullOrWhiteSpace(BundlePath)
            ? throw new ArgumentException("BundlePath required", nameof(BundlePath))
            : BundlePath;
}
@@ -0,0 +1,97 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Replay.Core;
using StellaOps.Scanner.Storage;
using StellaOps.Scanner.Storage.ObjectStore;

namespace StellaOps.Scanner.Worker.Processing.Replay;

/// <summary>
/// Fetches a sealed replay bundle from the configured object store, verifies its SHA-256 hash,
/// and returns a local file path for downstream analyzers.
/// </summary>
internal sealed class ReplayBundleFetcher
{
    private readonly IArtifactObjectStore _objectStore;
    private readonly ScannerStorageOptions _storageOptions;
    private readonly ILogger<ReplayBundleFetcher> _logger;

    public ReplayBundleFetcher(IArtifactObjectStore objectStore, ScannerStorageOptions storageOptions, ILogger<ReplayBundleFetcher> logger)
    {
        _objectStore = objectStore ?? throw new ArgumentNullException(nameof(objectStore));
        _storageOptions = storageOptions ?? throw new ArgumentNullException(nameof(storageOptions));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task<string?> FetchAsync(ReplaySealedBundleMetadata metadata, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(metadata);

        if (string.IsNullOrWhiteSpace(metadata.BundleUri))
        {
            return null;
        }

        var (bucket, key) = ResolveDescriptor(metadata.BundleUri);
        var descriptor = new ArtifactObjectDescriptor(bucket, key, Immutable: true);

        await using var stream = await _objectStore.GetAsync(descriptor, cancellationToken).ConfigureAwait(false);
        if (stream is null)
        {
            throw new InvalidOperationException($"Replay bundle not found: {metadata.BundleUri}");
        }

        var tempPath = Path.Combine(Path.GetTempPath(), "stellaops", "replay", metadata.ManifestHash + ".tar.zst");
        Directory.CreateDirectory(Path.GetDirectoryName(tempPath)!);

        await using (var file = File.Create(tempPath))
        {
            await stream.CopyToAsync(file, cancellationToken).ConfigureAwait(false);
        }

        // Verify hash
        await using (var file = File.OpenRead(tempPath))
        {
            var actualHex = DeterministicHash.Sha256Hex(file);
            var expected = NormalizeHash(metadata.ManifestHash);
            if (!string.Equals(actualHex, expected, StringComparison.OrdinalIgnoreCase))
            {
                File.Delete(tempPath);
                throw new InvalidOperationException($"Replay bundle hash mismatch. Expected {expected} got {actualHex}");
            }
        }

        _logger.LogInformation("Fetched sealed replay bundle {Uri} (hash {Hash}) to {Path}", metadata.BundleUri, metadata.ManifestHash, tempPath);
        return tempPath;
    }

    private (string Bucket, string Key) ResolveDescriptor(string uri)
    {
        // Expect cas://bucket/key
        if (!uri.StartsWith("cas://", StringComparison.OrdinalIgnoreCase))
        {
            // fallback to configured bucket + direct key
            return (_storageOptions.ObjectStore.BucketName, uri.Trim('/'));
        }

        var trimmed = uri.Substring("cas://".Length);
        var slash = trimmed.IndexOf('/');
        if (slash < 0)
        {
            return (_storageOptions.ObjectStore.BucketName, trimmed);
        }

        var bucket = trimmed[..slash];
        var key = trimmed[(slash + 1)..];
        return (bucket, key);
    }

    private static string NormalizeHash(string hash)
    {
        var value = hash.Trim().ToLowerInvariant();
        return value.StartsWith("sha256:", StringComparison.Ordinal) ? value[7..] : value;
    }
}
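A minimal sketch of how a caller might pair the fetcher with the ReplayBundleMount type defined in the next file; the surrounding host/DI wiring and the helper name are assumptions, not part of this change.

using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.Worker.Processing.Replay;

internal static class ReplayBundleUsageSketch
{
    // Assumes `fetcher` was resolved from DI and `metadata` came off the job lease.
    internal static async Task<ReplayBundleMount?> MountSealedBundleAsync(
        ReplayBundleFetcher fetcher,
        ReplaySealedBundleMetadata metadata,
        CancellationToken ct)
    {
        var path = await fetcher.FetchAsync(metadata, ct).ConfigureAwait(false);
        if (path is null)
        {
            return null; // no bundle pinned on this lease
        }

        // Dispose the mount when the scan finishes to delete the temp file.
        return new ReplayBundleMount(path);
    }
}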
@@ -0,0 +1,32 @@
using System;
using System.IO;

namespace StellaOps.Scanner.Worker.Processing.Replay;

/// <summary>
/// Represents a fetched replay bundle mounted on the local filesystem.
/// </summary>
public sealed class ReplayBundleMount : IDisposable
{
    public ReplayBundleMount(string bundlePath)
    {
        BundlePath = bundlePath ?? throw new ArgumentNullException(nameof(bundlePath));
    }

    public string BundlePath { get; }

    public void Dispose()
    {
        try
        {
            if (File.Exists(BundlePath))
            {
                File.Delete(BundlePath);
            }
        }
        catch
        {
            // best-effort cleanup
        }
    }
}
@@ -0,0 +1,11 @@
namespace StellaOps.Scanner.Worker.Processing.Replay;

/// <summary>
/// Captures sealed replay bundle metadata supplied via the job lease.
/// Used to keep analyzer execution hermetic and to emit Merkle metadata downstream.
/// </summary>
public sealed record ReplaySealedBundleMetadata(
    string ManifestHash,
    string BundleUri,
    string? PolicySnapshotId,
    string? FeedSnapshotId);
@@ -0,0 +1,65 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Core.Contracts;

namespace StellaOps.Scanner.Worker.Processing.Replay;

/// <summary>
/// Reads sealed replay bundle metadata from the job lease and stores it in the analysis context.
/// This does not fetch the bundle contents (handled by upstream) but ensures downstream stages
/// know they must stay hermetic and use the provided bundle identifiers.
/// </summary>
public sealed class ReplaySealedBundleStageExecutor : IScanStageExecutor
{
    public const string BundleUriKey = "replay.bundle.uri";
    public const string BundleHashKey = "replay.bundle.sha256";
    private const string PolicyPinKey = "determinism.policy";
    private const string FeedPinKey = "determinism.feed";

    private readonly ILogger<ReplaySealedBundleStageExecutor> _logger;

    public ReplaySealedBundleStageExecutor(ILogger<ReplaySealedBundleStageExecutor> logger)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public string StageName => ScanStageNames.IngestReplay;

    public ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);

        var metadata = context.Lease.Metadata;
        if (!metadata.TryGetValue(BundleUriKey, out var bundleUri) || string.IsNullOrWhiteSpace(bundleUri))
        {
            _logger.LogDebug("Replay bundle URI not provided; skipping sealed bundle ingestion.");
            return ValueTask.CompletedTask;
        }

        if (!metadata.TryGetValue(BundleHashKey, out var bundleHash) || string.IsNullOrWhiteSpace(bundleHash))
        {
            _logger.LogWarning("Replay bundle URI provided without hash; skipping sealed bundle ingestion to avoid unverifiable input.");
            return ValueTask.CompletedTask;
        }

        var policyPin = metadata.TryGetValue(PolicyPinKey, out var policy) && !string.IsNullOrWhiteSpace(policy)
            ? policy
            : null;
        var feedPin = metadata.TryGetValue(FeedPinKey, out var feed) && !string.IsNullOrWhiteSpace(feed)
            ? feed
            : null;

        var sealedMetadata = new ReplaySealedBundleMetadata(
            ManifestHash: bundleHash.Trim(),
            BundleUri: bundleUri.Trim(),
            PolicySnapshotId: policyPin,
            FeedSnapshotId: feedPin);

        context.Analysis.Set(ScanAnalysisKeys.ReplaySealedBundleMetadata, sealedMetadata);
        _logger.LogInformation("Replay sealed bundle pinned: uri={BundleUri} hash={BundleHash} policy={PolicyPin} feed={FeedPin}", bundleUri, bundleHash, policyPin, feedPin);

        return ValueTask.CompletedTask;
    }
}
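For reference, the lease metadata an orchestrator would have to supply for this stage to pin a bundle looks roughly like the dictionary below; only the key names come from the executor above, every value is an invented example.

using System.Collections.Generic;
using StellaOps.Scanner.Worker.Processing.Replay;

internal static class ReplayLeaseMetadataExample
{
    // Example-only values; "determinism.policy"/"determinism.feed" mirror the executor's private pin keys.
    internal static IReadOnlyDictionary<string, string> Build() => new Dictionary<string, string>
    {
        [ReplaySealedBundleStageExecutor.BundleUriKey] = "cas://replay-bundles/sha256/ab/cdef.tar.zst",
        [ReplaySealedBundleStageExecutor.BundleHashKey] = "sha256:abcdef0123456789",
        ["determinism.policy"] = "policy-snapshot-42",
        ["determinism.feed"] = "feeds-snapshot-2025-11"
    };
}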
@@ -21,11 +21,13 @@ public sealed class ScanJobContext

     public DateTimeOffset StartUtc { get; }

     public CancellationToken CancellationToken { get; }

     public string JobId => Lease.JobId;

     public string ScanId => Lease.ScanId;

+    public string? ReplayBundlePath { get; set; }
+
     public ScanAnalysisStore Analysis { get; }
 }
@@ -7,21 +7,24 @@ using StellaOps.Scanner.Reachability;

 namespace StellaOps.Scanner.Worker.Processing;

 public sealed class ScanJobProcessor
 {
     private readonly IReadOnlyDictionary<string, IScanStageExecutor> _executors;
     private readonly ScanProgressReporter _progressReporter;
     private readonly ILogger<ScanJobProcessor> _logger;
     private readonly IReachabilityUnionPublisherService _reachabilityPublisher;
+    private readonly Replay.ReplayBundleFetcher _replayBundleFetcher;

     public ScanJobProcessor(
         IEnumerable<IScanStageExecutor> executors,
         ScanProgressReporter progressReporter,
         IReachabilityUnionPublisherService reachabilityPublisher,
+        Replay.ReplayBundleFetcher replayBundleFetcher,
         ILogger<ScanJobProcessor> logger)
     {
         _progressReporter = progressReporter ?? throw new ArgumentNullException(nameof(progressReporter));
         _reachabilityPublisher = reachabilityPublisher ?? throw new ArgumentNullException(nameof(reachabilityPublisher));
+        _replayBundleFetcher = replayBundleFetcher ?? throw new ArgumentNullException(nameof(replayBundleFetcher));
         _logger = logger ?? throw new ArgumentNullException(nameof(logger));

         var map = new Dictionary<string, IScanStageExecutor>(StringComparer.OrdinalIgnoreCase);

@@ -52,18 +55,17 @@ public sealed class ScanJobProcessor
     public async ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken)
     {
         ArgumentNullException.ThrowIfNull(context);
-        // Placeholder: reachability publisher will be fed once lifter outputs are routed here.
-        _ = _reachabilityPublisher;
+        await EnsureReplayBundleFetchedAsync(context, cancellationToken).ConfigureAwait(false);

         foreach (var stage in ScanStageNames.Ordered)
         {
             cancellationToken.ThrowIfCancellationRequested();

             if (!_executors.TryGetValue(stage, out var executor))
             {
                 continue;
             }

             await _progressReporter.ExecuteStageAsync(
                 context,
                 stage,

@@ -71,4 +73,19 @@ public sealed class ScanJobProcessor
                 cancellationToken).ConfigureAwait(false);
         }
     }
+
+    private async Task EnsureReplayBundleFetchedAsync(ScanJobContext context, CancellationToken cancellationToken)
+    {
+        if (context.Analysis.TryGet<Replay.ReplaySealedBundleMetadata>(ScanAnalysisKeys.ReplaySealedBundleMetadata, out var sealedMetadata) && sealedMetadata is not null)
+        {
+            // Already fetched in this context
+            if (!string.IsNullOrWhiteSpace(context.ReplayBundlePath) && File.Exists(context.ReplayBundlePath))
+            {
+                return;
+            }
+
+            var path = await _replayBundleFetcher.FetchAsync(sealedMetadata, cancellationToken).ConfigureAwait(false);
+            context.ReplayBundlePath = path;
+        }
+    }
 }
@@ -2,9 +2,10 @@ using System.Collections.Generic;

 namespace StellaOps.Scanner.Worker.Processing;

 public static class ScanStageNames
 {
-    public const string ResolveImage = "resolve-image";
+    public const string IngestReplay = "ingest-replay";
+    public const string ResolveImage = "resolve-image";
     public const string PullLayers = "pull-layers";
     public const string BuildFilesystem = "build-filesystem";
     public const string ExecuteAnalyzers = "execute-analyzers";

@@ -14,6 +15,7 @@ public static class ScanStageNames

     public static readonly IReadOnlyList<string> Ordered = new[]
     {
+        IngestReplay,
         ResolveImage,
         PullLayers,
         BuildFilesystem,
@@ -36,7 +36,12 @@ internal sealed record SurfaceManifestRequest(
     IReadOnlyList<SurfaceManifestPayload> Payloads,
     string Component,
     string? Version,
-    string? WorkerInstance);
+    string? WorkerInstance,
+    string? DeterminismMerkleRoot = null,
+    string? ReplayBundleUri = null,
+    string? ReplayBundleHash = null,
+    string? ReplayPolicyPin = null,
+    string? ReplayFeedPin = null);

 internal interface ISurfaceManifestPublisher
 {

@@ -112,7 +117,17 @@ internal sealed class SurfaceManifestPublisher : ISurfaceManifestPublisher
                 WorkerInstance = request.WorkerInstance,
                 Attempt = request.Attempt
             },
-            Artifacts = artifacts.ToImmutableArray()
+            Artifacts = artifacts.ToImmutableArray(),
+            DeterminismMerkleRoot = request.DeterminismMerkleRoot,
+            ReplayBundle = string.IsNullOrWhiteSpace(request.ReplayBundleUri)
+                ? null
+                : new ReplayBundleReference
+                {
+                    Uri = request.ReplayBundleUri!,
+                    Sha256 = request.ReplayBundleHash ?? string.Empty,
+                    PolicySnapshotId = request.ReplayPolicyPin,
+                    FeedSnapshotId = request.ReplayFeedPin
+                }
         };

         var manifestBytes = JsonSerializer.SerializeToUtf8Bytes(manifestDocument, SerializerOptions);

@@ -177,7 +192,8 @@ internal sealed class SurfaceManifestPublisher : ISurfaceManifestPublisher
             ManifestDigest: manifestDigest,
             ManifestUri: manifestUri,
             ArtifactId: artifactId,
-            Document: manifestDocument);
+            Document: manifestDocument,
+            DeterminismMerkleRoot: request.DeterminismMerkleRoot);
     }

     private async Task<SurfaceManifestArtifact> StorePayloadAsync(SurfaceManifestPayload payload, string tenant, CancellationToken cancellationToken)
@@ -32,4 +32,8 @@ public static class ScanAnalysisKeys
     public const string FileEntries = "analysis.files.entries";
     public const string EntropyReport = "analysis.entropy.report";
     public const string EntropyLayerSummary = "analysis.entropy.layer.summary";
+
+    public const string DeterminismEvidence = "analysis.determinism.evidence";
+
+    public const string ReplaySealedBundleMetadata = "analysis.replay.sealed.bundle";
 }
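A brief sketch of how the new keys would be used from a stage executor: determinism evidence is stored once and read back when the surface manifest is published. The digests, namespaces in the using directives, and the surrounding stage wiring are assumptions for illustration only.

using System.Collections.Generic;
using StellaOps.Scanner.Worker.Determinism;
using StellaOps.Scanner.Worker.Processing; // assumed namespaces for ScanJobContext/ScanAnalysisKeys

internal static class DeterminismEvidenceUsageSketch
{
    internal static void RecordAndReadBack(ScanJobContext context)
    {
        // Store determinism evidence for downstream stages.
        var payloadHashes = new Dictionary<string, string>
        {
            ["sbom.json"] = "4f2a...",      // placeholder digest
            ["findings.ndjson"] = "9c1b..." // placeholder digest
        };
        context.Analysis.Set(
            ScanAnalysisKeys.DeterminismEvidence,
            new DeterminismEvidence(payloadHashes, MerkleRootSha256: "0ab3...")); // placeholder root

        // Later, read it back when building the SurfaceManifestRequest.
        if (context.Analysis.TryGet<DeterminismEvidence>(ScanAnalysisKeys.DeterminismEvidence, out var evidence) && evidence is not null)
        {
            _ = evidence.MerkleRootSha256; // e.g. forwarded as DeterminismMerkleRoot
        }
    }
}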
@@ -104,7 +104,7 @@ public sealed class FileSurfaceManifestStore :
             normalized.Tenant,
             digest);

-        return new SurfaceManifestPublishResult(digest, uri, artifactId, normalized);
+        return new SurfaceManifestPublishResult(digest, uri, artifactId, normalized, null);
     }

     public async Task<SurfaceManifestDocument?> TryGetByDigestAsync(
@@ -40,6 +40,35 @@ public sealed record SurfaceManifestDocument
     [JsonPropertyName("artifacts")]
     public IReadOnlyList<SurfaceManifestArtifact> Artifacts { get; init; }
         = ImmutableArray<SurfaceManifestArtifact>.Empty;
+
+    [JsonPropertyName("determinismRoot")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public string? DeterminismMerkleRoot { get; init; }
+        = null;
+
+    [JsonPropertyName("replayBundle")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public ReplayBundleReference? ReplayBundle { get; init; }
+        = null;
+}
+
+public sealed record ReplayBundleReference
+{
+    [JsonPropertyName("uri")]
+    public string Uri { get; init; } = string.Empty;
+
+    [JsonPropertyName("sha256")]
+    public string Sha256 { get; init; } = string.Empty;
+
+    [JsonPropertyName("policyPin")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public string? PolicySnapshotId { get; init; }
+        = null;
+
+    [JsonPropertyName("feedPin")]
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public string? FeedSnapshotId { get; init; }
+        = null;
 }

 /// <summary>

@@ -139,4 +168,5 @@ public sealed record SurfaceManifestPublishResult(
     string ManifestDigest,
     string ManifestUri,
     string ArtifactId,
-    SurfaceManifestDocument Document);
+    SurfaceManifestDocument Document,
+    string? DeterminismMerkleRoot = null);
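Putting the new fields together, a manifest carrying determinism and replay pins would serialize roughly as sketched below. The values are invented, the serializer options are an assumption, and the sketch presumes the document's remaining members have usable defaults.

using System.Text.Json;

internal static class SurfaceManifestReplayFieldsSketch
{
    internal static string Build()
    {
        var document = new SurfaceManifestDocument
        {
            DeterminismMerkleRoot = "0ab3...", // example root hash
            ReplayBundle = new ReplayBundleReference
            {
                Uri = "cas://replay-bundles/sha256/ab/cdef.tar.zst", // example URI
                Sha256 = "abcdef0123456789",                         // example digest
                PolicySnapshotId = "policy-snapshot-42",
                FeedSnapshotId = "feeds-snapshot-2025-11"
            }
        };

        // Produces JSON containing "determinismRoot" and a "replayBundle" object
        // (alongside the existing manifest fields, omitted here).
        return JsonSerializer.Serialize(document, new JsonSerializerOptions { WriteIndented = true });
    }
}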
@@ -0,0 +1,127 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Scanner.Worker.Tests.Determinism;

/// <summary>
/// Lightweight determinism harness used in tests to score repeated scanner runs.
/// Groups runs by image digest, compares artefact hashes to the baseline (run index 0),
/// and produces a report compatible with determinism.json expectations.
/// </summary>
internal static class DeterminismHarness
{
    public static DeterminismReport Compute(IEnumerable<DeterminismRunInput> runs, double imageThreshold = 0.90, double overallThreshold = 0.95)
    {
        ArgumentNullException.ThrowIfNull(runs);

        var grouped = runs
            .GroupBy(r => r.ImageDigest, StringComparer.OrdinalIgnoreCase)
            .ToDictionary(g => g.Key, g => g.OrderBy(r => r.RunIndex).ToList(), StringComparer.OrdinalIgnoreCase);

        var imageReports = new List<DeterminismImageReport>();
        var totalRuns = 0;
        var totalIdentical = 0;

        foreach (var (image, entries) in grouped)
        {
            if (entries.Count == 0)
            {
                continue;
            }

            var baseline = entries[0];
            var baselineHashes = HashArtifacts(baseline.Artifacts);
            var runReports = new List<DeterminismRunReport>();
            var identical = 0;

            foreach (var run in entries)
            {
                var hashes = HashArtifacts(run.Artifacts);
                var diff = hashes
                    .Where(kv => !baselineHashes.TryGetValue(kv.Key, out var baselineHash) || !string.Equals(baselineHash, kv.Value, StringComparison.Ordinal))
                    .Select(kv => kv.Key)
                    .OrderBy(k => k, StringComparer.Ordinal)
                    .ToArray();

                var isIdentical = diff.Length == 0;
                if (isIdentical)
                {
                    identical++;
                }

                runReports.Add(new DeterminismRunReport(run.RunIndex, hashes, diff));
            }

            var score = entries.Count == 0 ? 0d : (double)identical / entries.Count;
            imageReports.Add(new DeterminismImageReport(image, entries.Count, identical, score, baselineHashes, runReports));

            totalRuns += entries.Count;
            totalIdentical += identical;
        }

        var overallScore = totalRuns == 0 ? 0d : (double)totalIdentical / totalRuns;

        return new DeterminismReport(
            OverallScore: overallScore,
            OverallThreshold: overallThreshold,
            ImageThreshold: imageThreshold,
            Images: imageReports.OrderBy(r => r.ImageDigest, StringComparer.Ordinal).ToList());
    }

    private static IReadOnlyDictionary<string, string> HashArtifacts(IReadOnlyDictionary<string, string> artifacts)
    {
        var map = new Dictionary<string, string>(StringComparer.Ordinal);
        foreach (var kv in artifacts)
        {
            var digest = Sha256Hex(kv.Value);
            map[kv.Key] = digest;
        }

        return map;
    }

    private static string Sha256Hex(string content)
    {
        using var sha = SHA256.Create();
        var bytes = Encoding.UTF8.GetBytes(content ?? string.Empty);
        var hash = sha.ComputeHash(bytes);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}

internal sealed record DeterminismRunInput(string ImageDigest, int RunIndex, IReadOnlyDictionary<string, string> Artifacts);

internal sealed record DeterminismReport(
    double OverallScore,
    double OverallThreshold,
    double ImageThreshold,
    IReadOnlyList<DeterminismImageReport> Images)
{
    public string ToJson()
    {
        var options = new JsonSerializerOptions(JsonSerializerDefaults.Web)
        {
            WriteIndented = false,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
        };
        return JsonSerializer.Serialize(this, options);
    }
}

internal sealed record DeterminismImageReport(
    string ImageDigest,
    int Runs,
    int Identical,
    double Score,
    IReadOnlyDictionary<string, string> BaselineHashes,
    IReadOnlyList<DeterminismRunReport> RunReports);

internal sealed record DeterminismRunReport(
    int RunIndex,
    IReadOnlyDictionary<string, string> ArtifactHashes,
    IReadOnlyList<string> NonDeterministicArtifacts);
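The harness above is self-contained, so a caller can score any set of repeated runs and emit the determinism.json payload directly. A minimal sketch of driving it from inside the same test assembly (the types are internal there); the image digest and artefact payloads are placeholders:

using System.Collections.Generic;
using System.IO;
using System.Linq;
using StellaOps.Scanner.Worker.Tests.Determinism;

// Two runs of the same image with identical artefact payloads (placeholder strings).
var runs = new List<DeterminismRunInput>
{
    new("sha256:img", 0, new Dictionary<string, string> { ["sbom.json"] = "payload-a" }),
    new("sha256:img", 1, new Dictionary<string, string> { ["sbom.json"] = "payload-a" }),
};

var report = DeterminismHarness.Compute(runs, imageThreshold: 0.90, overallThreshold: 0.95);

// Gate on both the overall score and every per-image score.
var passed = report.OverallScore >= report.OverallThreshold
             && report.Images.All(i => i.Score >= report.ImageThreshold);

// Persist the report in the determinism.json shape the harness targets.
File.WriteAllText("determinism.json", report.ToJson());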
@@ -0,0 +1,44 @@
using System.Collections.Generic;
using System.Linq;
using StellaOps.Scanner.Worker.Tests.Determinism;
using Xunit;

namespace StellaOps.Scanner.Worker.Tests.DeterminismTests;

public sealed class DeterminismHarnessTests
{
    [Fact]
    public void ComputeScores_FlagsDivergentArtifacts()
    {
        var runs = new[]
        {
            new DeterminismRunInput("sha256:image", 0, new Dictionary<string, string>
            {
                ["sbom.json"] = "sbom-a",
                ["findings.ndjson"] = "findings-a",
                ["log.ndjson"] = "log-1"
            }),
            new DeterminismRunInput("sha256:image", 1, new Dictionary<string, string>
            {
                ["sbom.json"] = "sbom-a",
                ["findings.ndjson"] = "findings-a",
                ["log.ndjson"] = "log-1"
            }),
            new DeterminismRunInput("sha256:image", 2, new Dictionary<string, string>
            {
                ["sbom.json"] = "sbom-a",
                ["findings.ndjson"] = "findings-a",
                ["log.ndjson"] = "log-2" // divergent
            })
        };

        var report = DeterminismHarness.Compute(runs);

        Assert.Equal(1.0 * 2 / 3, report.Images.Single().Score, precision: 3);
        Assert.Equal(2, report.Images.Single().Identical);

        var divergent = report.Images.Single().RunReports.Single(r => r.RunIndex == 2);
        Assert.Contains("log.ndjson", divergent.NonDeterministicArtifacts);
        Assert.DoesNotContain("sbom.json", divergent.NonDeterministicArtifacts);
    }
}
@@ -0,0 +1,70 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Worker.Processing.Replay;
using Xunit;

namespace StellaOps.Scanner.Worker.Tests.Replay;

public sealed class ReplaySealedBundleStageExecutorTests
{
    [Fact]
    public async Task ExecuteAsync_SetsMetadata_WhenUriAndHashProvided()
    {
        var executor = new ReplaySealedBundleStageExecutor(NullLogger<ReplaySealedBundleStageExecutor>.Instance);
        var context = TestContexts.Create();
        context.Lease.Metadata["replay.bundle.uri"] = "cas://replay/input.tar.zst";
        context.Lease.Metadata["replay.bundle.sha256"] = "abc123";
        context.Lease.Metadata["determinism.policy"] = "rev-1";
        context.Lease.Metadata["determinism.feed"] = "feed-2";

        await executor.ExecuteAsync(context, CancellationToken.None);

        Assert.True(context.Analysis.TryGet<ReplaySealedBundleMetadata>(ScanAnalysisKeys.ReplaySealedBundleMetadata, out var metadata));
        Assert.Equal("abc123", metadata.ManifestHash);
        Assert.Equal("cas://replay/input.tar.zst", metadata.BundleUri);
        Assert.Equal("rev-1", metadata.PolicySnapshotId);
        Assert.Equal("feed-2", metadata.FeedSnapshotId);
    }

    [Fact]
    public async Task ExecuteAsync_Skips_WhenHashMissing()
    {
        var executor = new ReplaySealedBundleStageExecutor(NullLogger<ReplaySealedBundleStageExecutor>.Instance);
        var context = TestContexts.Create();
        context.Lease.Metadata["replay.bundle.uri"] = "cas://replay/input.tar.zst";

        await executor.ExecuteAsync(context, CancellationToken.None);

        Assert.False(context.Analysis.TryGet<ReplaySealedBundleMetadata>(ScanAnalysisKeys.ReplaySealedBundleMetadata, out _));
    }
}

internal static class TestContexts
{
    public static ScanJobContext Create()
    {
        var lease = new TestScanJobLease();
        return new ScanJobContext(lease, TimeProvider.System, TimeProvider.System.GetUtcNow(), CancellationToken.None);
    }

    private sealed class TestScanJobLease : IScanJobLease
    {
        public string JobId => "job-1";
        public string ScanId => "scan-1";
        public int Attempt => 1;
        public DateTimeOffset EnqueuedAtUtc => DateTimeOffset.UtcNow;
        public DateTimeOffset LeasedAtUtc => DateTimeOffset.UtcNow;
        public TimeSpan LeaseDuration => TimeSpan.FromMinutes(5);
        public Dictionary<string, string> MutableMetadata { get; } = new();
        public IReadOnlyDictionary<string, string> Metadata => MutableMetadata;

        public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask;
        public ValueTask CompleteAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask;
        public ValueTask DisposeAsync() => ValueTask.CompletedTask;
        public ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask;
        public ValueTask RenewAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask;
    }
}
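The two tests above pin down the lease-metadata contract the executor reads. For reference, a sketch of the keys a producer would need to stamp on the lease; key names come from the tests, values are illustrative:

var leaseMetadata = new Dictionary<string, string>
{
    ["replay.bundle.uri"] = "cas://replay/input.tar.zst", // sealed input bundle location
    ["replay.bundle.sha256"] = "<bundle digest>",         // required: the executor skips the stage without it
    ["determinism.policy"] = "<policy snapshot id>",      // optional pin, surfaced as PolicySnapshotId
    ["determinism.feed"] = "<feed snapshot id>",          // optional pin, surfaced as FeedSnapshotId
};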
src/StellaOps.Events.Mongo/EventProvenanceBackfillService.cs (208 lines, Normal file)
@@ -0,0 +1,208 @@
using System.Runtime.CompilerServices;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Provenance.Mongo;

namespace StellaOps.Events.Mongo;

/// <summary>
/// Service for backfilling historical events with DSSE provenance metadata.
/// Queries events missing provenance, resolves attestations, and updates events in place.
/// </summary>
public sealed class EventProvenanceBackfillService
{
    private readonly IMongoCollection<BsonDocument> _events;
    private readonly IAttestationResolver _resolver;
    private readonly EventProvenanceWriter _writer;

    public EventProvenanceBackfillService(
        IMongoDatabase database,
        IAttestationResolver resolver,
        string collectionName = "events")
    {
        if (database is null) throw new ArgumentNullException(nameof(database));
        _resolver = resolver ?? throw new ArgumentNullException(nameof(resolver));

        _events = database.GetCollection<BsonDocument>(collectionName);
        _writer = new EventProvenanceWriter(database, collectionName);
    }

    /// <summary>
    /// Find events missing provenance for the specified kinds.
    /// </summary>
    public async IAsyncEnumerable<UnprovenEvent> FindUnprovenEventsAsync(
        IEnumerable<string> kinds,
        int? limit = null,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        var filter = ProvenanceMongoExtensions.BuildUnprovenEvidenceFilter(kinds);
        var options = new FindOptions<BsonDocument>
        {
            Sort = Builders<BsonDocument>.Sort.Descending("ts"),
            Limit = limit
        };

        using var cursor = await _events.FindAsync(filter, options, cancellationToken).ConfigureAwait(false);

        while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false))
        {
            foreach (var doc in cursor.Current)
            {
                var eventId = ExtractEventId(doc);
                var kind = doc.GetValue("kind", BsonNull.Value).AsString;
                var subjectDigest = ExtractSubjectDigest(doc);

                if (eventId is not null && kind is not null && subjectDigest is not null)
                {
                    yield return new UnprovenEvent(eventId, kind, subjectDigest, doc);
                }
            }
        }
    }

    /// <summary>
    /// Backfill provenance for a single event by resolving its attestation.
    /// </summary>
    public async Task<BackfillResult> BackfillEventAsync(
        UnprovenEvent unprovenEvent,
        CancellationToken cancellationToken = default)
    {
        if (unprovenEvent is null) throw new ArgumentNullException(nameof(unprovenEvent));

        try
        {
            var resolution = await _resolver.ResolveAsync(
                unprovenEvent.SubjectDigestSha256,
                unprovenEvent.Kind,
                cancellationToken).ConfigureAwait(false);

            if (resolution is null)
            {
                return new BackfillResult(unprovenEvent.EventId, BackfillStatus.NotFound);
            }

            await _writer.AttachAsync(
                unprovenEvent.EventId,
                resolution.Dsse,
                resolution.Trust,
                cancellationToken).ConfigureAwait(false);

            return new BackfillResult(unprovenEvent.EventId, BackfillStatus.Success, resolution.AttestationId);
        }
        catch (Exception ex)
        {
            return new BackfillResult(unprovenEvent.EventId, BackfillStatus.Error, ErrorMessage: ex.Message);
        }
    }

    /// <summary>
    /// Backfill all unproven events for the specified kinds.
    /// </summary>
    public async Task<BackfillSummary> BackfillAllAsync(
        IEnumerable<string> kinds,
        int? limit = null,
        IProgress<BackfillResult>? progress = null,
        CancellationToken cancellationToken = default)
    {
        var summary = new BackfillSummary();

        await foreach (var unprovenEvent in FindUnprovenEventsAsync(kinds, limit, cancellationToken).ConfigureAwait(false))
        {
            summary.TotalProcessed++;

            var result = await BackfillEventAsync(unprovenEvent, cancellationToken).ConfigureAwait(false);
            progress?.Report(result);

            switch (result.Status)
            {
                case BackfillStatus.Success:
                    summary.SuccessCount++;
                    break;
                case BackfillStatus.NotFound:
                    summary.NotFoundCount++;
                    break;
                case BackfillStatus.Error:
                    summary.ErrorCount++;
                    break;
            }
        }

        return summary;
    }

    /// <summary>
    /// Count events missing provenance for reporting/estimation.
    /// </summary>
    public async Task<long> CountUnprovenEventsAsync(
        IEnumerable<string> kinds,
        CancellationToken cancellationToken = default)
    {
        var filter = ProvenanceMongoExtensions.BuildUnprovenEvidenceFilter(kinds);
        return await _events.CountDocumentsAsync(filter, cancellationToken: cancellationToken).ConfigureAwait(false);
    }

    private static string? ExtractEventId(BsonDocument doc)
    {
        if (!doc.TryGetValue("_id", out var idValue))
            return null;

        return idValue.BsonType == BsonType.ObjectId
            ? idValue.AsObjectId.ToString()
            : idValue.AsString;
    }

    private static string? ExtractSubjectDigest(BsonDocument doc)
    {
        if (!doc.TryGetValue("subject", out var subject) || subject.BsonType != BsonType.Document)
            return null;

        var subjectDoc = subject.AsBsonDocument;
        if (!subjectDoc.TryGetValue("digest", out var digest) || digest.BsonType != BsonType.Document)
            return null;

        var digestDoc = digest.AsBsonDocument;
        if (!digestDoc.TryGetValue("sha256", out var sha256))
            return null;

        return sha256.AsString;
    }
}

/// <summary>
/// Represents an event that needs provenance backfilled.
/// </summary>
public sealed record UnprovenEvent(
    string EventId,
    string Kind,
    string SubjectDigestSha256,
    BsonDocument Document);

/// <summary>
/// Result of a single backfill operation.
/// </summary>
public sealed record BackfillResult(
    string EventId,
    BackfillStatus Status,
    string? AttestationId = null,
    string? ErrorMessage = null);

/// <summary>
/// Status of a backfill operation.
/// </summary>
public enum BackfillStatus
{
    Success,
    NotFound,
    Error
}

/// <summary>
/// Summary statistics from a backfill batch.
/// </summary>
public sealed class BackfillSummary
{
    public int TotalProcessed { get; set; }
    public int SuccessCount { get; set; }
    public int NotFoundCount { get; set; }
    public int ErrorCount { get; set; }
}
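A sketch of how an operator-facing tool might drive the service. The connection string, database name, and event kinds are placeholders, and the resolver shown is the stub added later in this diff; a real IAttestationResolver would be used in production:

using MongoDB.Driver;
using StellaOps.Events.Mongo;

var client = new MongoClient("mongodb://localhost:27017");      // placeholder connection string
var database = client.GetDatabase("stellaops");                 // placeholder database name

IAttestationResolver resolver = new StubAttestationResolver();  // swap for a real resolver
var backfill = new EventProvenanceBackfillService(database, resolver);

var kinds = new[] { "SBOM", "VEX", "SCAN" };
var pending = await backfill.CountUnprovenEventsAsync(kinds);
Console.WriteLine($"Events missing provenance: {pending}");

var progress = new Progress<BackfillResult>(r => Console.WriteLine($"{r.EventId}: {r.Status}"));
var summary = await backfill.BackfillAllAsync(kinds, limit: 500, progress);

Console.WriteLine(
    $"processed={summary.TotalProcessed} ok={summary.SuccessCount} " +
    $"missing={summary.NotFoundCount} errors={summary.ErrorCount}");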
src/StellaOps.Events.Mongo/IAttestationResolver.cs (33 lines, Normal file)
@@ -0,0 +1,33 @@
using StellaOps.Provenance.Mongo;

namespace StellaOps.Events.Mongo;

/// <summary>
/// Resolves attestation provenance metadata for a given subject.
/// Implementations may query Rekor, CAS, local attestation stores, or external APIs.
/// </summary>
public interface IAttestationResolver
{
    /// <summary>
    /// Attempt to resolve provenance metadata for the given subject digest.
    /// </summary>
    /// <param name="subjectDigestSha256">SHA-256 digest of the subject (image, SBOM, etc.).</param>
    /// <param name="eventKind">Event kind hint (SBOM, VEX, SCAN, etc.) for filtering.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Resolved provenance and trust info, or null if not found.</returns>
    Task<AttestationResolution?> ResolveAsync(
        string subjectDigestSha256,
        string eventKind,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Result of attestation resolution containing DSSE provenance and trust metadata.
/// </summary>
public sealed class AttestationResolution
{
    public required DsseProvenance Dsse { get; init; }
    public required TrustInfo Trust { get; init; }
    public string? AttestationId { get; init; }
    public DateTimeOffset? ResolvedAtUtc { get; init; }
}
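A production resolver only has to satisfy this one method. A minimal sketch of a delegating adapter; the class name, namespace, and lookup delegate are hypothetical, and a real implementation would wrap a Rekor or CAS query where the delegate sits:

using StellaOps.Provenance.Mongo;

namespace StellaOps.Events.Mongo.Examples;

/// <summary>
/// Hypothetical adapter that defers the actual attestation lookup to a caller-supplied delegate,
/// keeping the example self-contained while matching the IAttestationResolver contract.
/// </summary>
public sealed class DelegatingAttestationResolver : IAttestationResolver
{
    private readonly Func<string, string, CancellationToken, Task<AttestationResolution?>> _lookup;

    public DelegatingAttestationResolver(
        Func<string, string, CancellationToken, Task<AttestationResolution?>> lookup)
        => _lookup = lookup ?? throw new ArgumentNullException(nameof(lookup));

    public Task<AttestationResolution?> ResolveAsync(
        string subjectDigestSha256,
        string eventKind,
        CancellationToken cancellationToken = default)
        => _lookup(subjectDigestSha256, eventKind, cancellationToken);
}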
@@ -37,6 +37,25 @@ public static class MongoIndexes
             new CreateIndexOptions
             {
                 Name = "events_by_rekor_logindex"
+            }),
+
+        new CreateIndexModel<BsonDocument>(
+            Builders<BsonDocument>.IndexKeys
+                .Ascending("provenance.dsse.envelopeDigest"),
+            new CreateIndexOptions
+            {
+                Name = "events_by_envelope_digest",
+                Sparse = true
+            }),
+
+        new CreateIndexModel<BsonDocument>(
+            Builders<BsonDocument>.IndexKeys
+                .Descending("ts")
+                .Ascending("kind")
+                .Ascending("trust.verified"),
+            new CreateIndexOptions
+            {
+                Name = "events_by_ts_kind_verified"
             })
     };
src/StellaOps.Events.Mongo/StubAttestationResolver.cs (72 lines, Normal file)
@@ -0,0 +1,72 @@
using StellaOps.Provenance.Mongo;

namespace StellaOps.Events.Mongo;

/// <summary>
/// Stub implementation of <see cref="IAttestationResolver"/> for testing and local development.
/// Always returns null (no attestation found) unless configured with test data.
/// </summary>
public sealed class StubAttestationResolver : IAttestationResolver
{
    private readonly Dictionary<string, AttestationResolution> _testData = new(StringComparer.OrdinalIgnoreCase);

    public Task<AttestationResolution?> ResolveAsync(
        string subjectDigestSha256,
        string eventKind,
        CancellationToken cancellationToken = default)
    {
        var key = $"{subjectDigestSha256}:{eventKind}";
        _testData.TryGetValue(key, out var resolution);
        return Task.FromResult(resolution);
    }

    /// <summary>
    /// Add test data for a subject/kind combination.
    /// </summary>
    public void AddTestResolution(string subjectDigestSha256, string eventKind, AttestationResolution resolution)
    {
        var key = $"{subjectDigestSha256}:{eventKind}";
        _testData[key] = resolution;
    }

    /// <summary>
    /// Create a sample resolution for testing.
    /// </summary>
    public static AttestationResolution CreateSampleResolution(
        string envelopeDigest,
        long? rekorLogIndex = null,
        string? rekorUuid = null)
    {
        return new AttestationResolution
        {
            Dsse = new DsseProvenance
            {
                EnvelopeDigest = envelopeDigest,
                PayloadType = "application/vnd.in-toto+json",
                Key = new DsseKeyInfo
                {
                    KeyId = "cosign:SHA256-PKIX:test-key-id",
                    Issuer = "test-issuer",
                    Algo = "ECDSA"
                },
                Rekor = rekorLogIndex is not null && rekorUuid is not null
                    ? new DsseRekorInfo
                    {
                        LogIndex = rekorLogIndex.Value,
                        Uuid = rekorUuid,
                        IntegratedTime = DateTimeOffset.UtcNow.ToUnixTimeSeconds()
                    }
                    : null
            },
            Trust = new TrustInfo
            {
                Verified = true,
                Verifier = "Authority@stella",
                Witnesses = 1,
                PolicyScore = 0.95
            },
            AttestationId = $"att:{Guid.NewGuid():N}",
            ResolvedAtUtc = DateTimeOffset.UtcNow
        };
    }
}
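A short sketch of seeding the stub in a unit test; the digests, kind, and Rekor values below are illustrative:

var resolver = new StubAttestationResolver();

var sample = StubAttestationResolver.CreateSampleResolution(
    envelopeDigest: "sha256:feedbeef",
    rekorLogIndex: 4242,
    rekorUuid: "00000000-0000-0000-0000-000000000000");

resolver.AddTestResolution("sha256:subject-digest", "SBOM", sample);

// The seeded subject/kind pair resolves; any other combination returns null.
var hit = await resolver.ResolveAsync("sha256:subject-digest", "SBOM");
var miss = await resolver.ResolveAsync("sha256:other", "SBOM");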
@@ -0,0 +1,8 @@
<Project>
  <PropertyGroup>
    <!-- Override repo defaults to keep telemetry tests self-contained -->
    <UseConcelierTestInfra>false</UseConcelierTestInfra>
    <ConcelierTestingPath></ConcelierTestingPath>
    <ConcelierSharedTestsPath></ConcelierSharedTestsPath>
  </PropertyGroup>
</Project>
@@ -0,0 +1,27 @@
<Project>
  <!-- Prevent global plugin/test copy targets from firing for telemetry tests -->
  <Target Name="DisablePluginCopyTargets" BeforeTargets="ConcelierCopyPluginArtifacts;AuthorityCopyPluginArtifacts;NotifyCopyPluginArtifacts;ScannerCopyBuildxPluginArtifacts;ScannerCopyOsAnalyzerPluginArtifacts;ScannerCopyLangAnalyzerPluginArtifacts">
    <PropertyGroup>
      <ConcelierPluginOutputRoot></ConcelierPluginOutputRoot>
      <AuthorityPluginOutputRoot></AuthorityPluginOutputRoot>
      <NotifyPluginOutputRoot></NotifyPluginOutputRoot>
      <ScannerBuildxPluginOutputRoot></ScannerBuildxPluginOutputRoot>
      <ScannerOsAnalyzerPluginOutputRoot></ScannerOsAnalyzerPluginOutputRoot>
      <ScannerLangAnalyzerPluginOutputRoot></ScannerLangAnalyzerPluginOutputRoot>
      <IsConcelierPlugin>false</IsConcelierPlugin>
      <IsAuthorityPlugin>false</IsAuthorityPlugin>
      <IsNotifyPlugin>false</IsNotifyPlugin>
      <IsScannerBuildxPlugin>false</IsScannerBuildxPlugin>
      <IsScannerOsAnalyzerPlugin>false</IsScannerOsAnalyzerPlugin>
      <IsScannerLangAnalyzerPlugin>false</IsScannerLangAnalyzerPlugin>
    </PropertyGroup>
    <ItemGroup>
      <ConcelierPluginArtifacts Remove="@(ConcelierPluginArtifacts)" />
      <AuthorityPluginArtifacts Remove="@(AuthorityPluginArtifacts)" />
      <NotifyPluginArtifacts Remove="@(NotifyPluginArtifacts)" />
      <ScannerBuildxPluginArtifacts Remove="@(ScannerBuildxPluginArtifacts)" />
      <ScannerOsAnalyzerPluginArtifacts Remove="@(ScannerOsAnalyzerPluginArtifacts)" />
      <ScannerLangAnalyzerPluginArtifacts Remove="@(ScannerLangAnalyzerPluginArtifacts)" />
    </ItemGroup>
  </Target>
</Project>
@@ -0,0 +1,51 @@
using System.Diagnostics.Metrics;
using Microsoft.Extensions.Options;
using StellaOps.Telemetry.Core;

public class MetricLabelGuardTests
{
    [Fact]
    public void Coerce_Enforces_Cardinality_Limit()
    {
        var options = Options.Create(new StellaOpsTelemetryOptions
        {
            Labels = new StellaOpsTelemetryOptions.MetricLabelOptions
            {
                MaxDistinctValuesPerLabel = 2,
                MaxLabelLength = 8
            }
        });

        var guard = new MetricLabelGuard(options);

        var first = guard.Coerce("route", "/api/a");
        var second = guard.Coerce("route", "/api/b");
        var third = guard.Coerce("route", "/api/c");

        Assert.Equal("/api/a", first);
        Assert.Equal("/api/b", second);
        Assert.Equal("other", third); // budget exceeded
    }

    [Fact]
    public void RecordRequestDuration_Truncates_Long_Labels()
    {
        var options = Options.Create(new StellaOpsTelemetryOptions
        {
            Labels = new StellaOpsTelemetryOptions.MetricLabelOptions
            {
                MaxDistinctValuesPerLabel = 5,
                MaxLabelLength = 5
            }
        });

        var guard = new MetricLabelGuard(options);
        using var meter = new Meter("test");
        var histogram = meter.CreateHistogram<double>("request.duration");

        histogram.RecordRequestDuration(guard, 42, "verylongroute", "GET", "200", "ok");

        // No exception means recording succeeded; label value should be truncated internally to 5 chars.
        Assert.Equal("veryl", guard.Coerce("route", "verylongroute"));
    }
}
@@ -5,8 +5,18 @@
     <ImplicitUsings>enable</ImplicitUsings>
     <Nullable>enable</Nullable>
     <IsPackable>false</IsPackable>
+    <!-- Opt out of Concelier test infra to avoid pulling large cross-module graph -->
+    <UseConcelierTestInfra>false</UseConcelierTestInfra>
   </PropertyGroup>
+
+  <ItemGroup>
+    <FrameworkReference Include="Microsoft.AspNetCore.App" />
+    <!-- Prevent repo-wide test infra from pulling Concelier shared test packages -->
+    <PackageReference Remove="Mongo2Go" />
+    <PackageReference Remove="Microsoft.AspNetCore.Mvc.Testing" />
+    <PackageReference Remove="Microsoft.Extensions.TimeProvider.Testing" />
+  </ItemGroup>
+
   <ItemGroup>
     <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
     <PackageReference Include="xunit" Version="2.9.2" />
@@ -0,0 +1,52 @@
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using Microsoft.Extensions.Options;
using StellaOps.Telemetry.Core;

public class TelemetryPropagationHandlerTests
{
    [Fact]
    public async Task Handler_Forwards_Context_Headers()
    {
        var options = Options.Create(new StellaOpsTelemetryOptions());
        var accessor = new TelemetryContextAccessor
        {
            Current = new TelemetryContext(
                "00-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-bbbbbbbbbbbbbbbb-01",
                "tenant-b",
                "actor-b",
                "rule-b")
        };

        var terminal = new RecordingHandler();
        var handler = new TelemetryPropagationHandler(accessor, options)
        {
            InnerHandler = terminal
        };

        var invoker = new HttpMessageInvoker(handler);
        await invoker.SendAsync(new HttpRequestMessage(HttpMethod.Get, "http://example.com"), CancellationToken.None);

        Assert.Equal("00-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-bbbbbbbbbbbbbbbb-01", terminal.SeenHeaders[options.Value.Propagation.TraceIdHeader]);
        Assert.Equal("tenant-b", terminal.SeenHeaders[options.Value.Propagation.TenantHeader]);
        Assert.Equal("actor-b", terminal.SeenHeaders[options.Value.Propagation.ActorHeader]);
        Assert.Equal("rule-b", terminal.SeenHeaders[options.Value.Propagation.ImposedRuleHeader]);
    }

    private sealed class RecordingHandler : HttpMessageHandler
    {
        public Dictionary<string, string?> SeenHeaders { get; } = new();

        protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
        {
            foreach (var header in request.Headers)
            {
                SeenHeaders[header.Key.ToLowerInvariant()] = header.Value.FirstOrDefault();
            }

            return Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK));
        }
    }
}
@@ -0,0 +1,43 @@
using System.Diagnostics;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Telemetry.Core;

public class TelemetryPropagationMiddlewareTests
{
    [Fact]
    public async Task Middleware_Populates_Accessor_And_Activity_Tags()
    {
        var options = Options.Create(new StellaOpsTelemetryOptions());
        var accessor = new TelemetryContextAccessor();
        var middleware = new TelemetryPropagationMiddleware(
            async context =>
            {
                // Assert inside the pipeline while context is set.
                Assert.NotNull(accessor.Current);
                Assert.Equal("tenant-a", accessor.Current!.TenantId);
                Assert.Equal("service-x", accessor.Current.Actor);
                Assert.Equal("policy-42", accessor.Current.ImposedRule);
                await Task.CompletedTask;
            },
            accessor,
            options,
            NullLogger<TelemetryPropagationMiddleware>.Instance);

        var httpContext = new DefaultHttpContext();
        httpContext.Request.Headers[options.Value.Propagation.TenantHeader] = "tenant-a";
        httpContext.Request.Headers[options.Value.Propagation.ActorHeader] = "service-x";
        httpContext.Request.Headers[options.Value.Propagation.ImposedRuleHeader] = "policy-42";
        httpContext.Request.Headers[options.Value.Propagation.TraceIdHeader] = "00-0123456789abcdef0123456789abcdef-0123456789abcdef-01";

        Assert.Null(accessor.Current);
        await middleware.InvokeAsync(httpContext);
        Assert.Null(accessor.Current); // cleared after invocation

        Assert.NotNull(Activity.Current);
        Assert.Equal("tenant-a", Activity.Current!.GetTagItem("tenant_id"));
        Assert.Equal("service-x", Activity.Current.GetTagItem("actor"));
        Assert.Equal("policy-42", Activity.Current.GetTagItem("imposed_rule"));
    }
}
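The middleware and handler are constructed by hand in the tests above; in a service they would normally come from DI. A hypothetical host wiring sketch, assuming no dedicated StellaOps extension methods exist for this (the "Telemetry" configuration section name and the "downstream" client name are placeholders):

using Microsoft.AspNetCore.Builder;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Telemetry.Core;

var builder = WebApplication.CreateBuilder(args);

// Bind the telemetry options (section name assumed) and register the propagation pieces.
builder.Services.Configure<StellaOpsTelemetryOptions>(builder.Configuration.GetSection("Telemetry"));
builder.Services.AddSingleton<TelemetryContextAccessor>();
builder.Services.AddTransient<TelemetryPropagationHandler>();

// Outbound: forward tenant/actor/imposed-rule/trace headers on a named client.
builder.Services.AddHttpClient("downstream")
    .AddHttpMessageHandler<TelemetryPropagationHandler>();

var app = builder.Build();

// Inbound: populate the accessor and Activity tags from request headers.
app.UseMiddleware<TelemetryPropagationMiddleware>();

app.Run();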
@@ -0,0 +1,81 @@
using System.Collections.Concurrent;
using System.Diagnostics.Metrics;
using Microsoft.Extensions.Options;

namespace StellaOps.Telemetry.Core;

/// <summary>
/// Guards metric label cardinality to keep exporters deterministic and affordable.
/// </summary>
public sealed class MetricLabelGuard
{
    private readonly int _maxValuesPerLabel;
    private readonly int _maxLabelLength;
    private readonly ConcurrentDictionary<string, ConcurrentDictionary<string, byte>> _seen;

    /// <summary>
    /// Initializes a new instance of the <see cref="MetricLabelGuard"/> class.
    /// </summary>
    public MetricLabelGuard(IOptions<StellaOpsTelemetryOptions> options)
    {
        var labelOptions = options?.Value?.Labels ?? new StellaOpsTelemetryOptions.MetricLabelOptions();
        _maxValuesPerLabel = Math.Max(1, labelOptions.MaxDistinctValuesPerLabel);
        _maxLabelLength = Math.Max(1, labelOptions.MaxLabelLength);
        _seen = new ConcurrentDictionary<string, ConcurrentDictionary<string, byte>>(StringComparer.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Adds a label value if within budget; otherwise falls back to a deterministic bucket label.
    /// </summary>
    public string Coerce(string key, string? value)
    {
        if (string.IsNullOrWhiteSpace(key))
        {
            return key;
        }

        var sanitized = (value ?? string.Empty).Trim();
        if (sanitized.Length > _maxLabelLength)
        {
            sanitized = sanitized[.._maxLabelLength];
        }

        var perKey = _seen.GetOrAdd(key, _ => new ConcurrentDictionary<string, byte>(StringComparer.Ordinal));
        if (perKey.Count >= _maxValuesPerLabel && !perKey.ContainsKey(sanitized))
        {
            return "other";
        }

        perKey.TryAdd(sanitized, 0);
        return sanitized;
    }
}

/// <summary>
/// Metric helpers aligned with StellaOps golden-signal defaults.
/// </summary>
public static class TelemetryMetrics
{
    /// <summary>
    /// Records a request duration histogram with cardinality-safe labels.
    /// </summary>
    public static void RecordRequestDuration(
        this Histogram<double> histogram,
        MetricLabelGuard guard,
        double durationMs,
        string route,
        string verb,
        string statusCode,
        string result)
    {
        var tags = new KeyValuePair<string, object?>[]
        {
            new("route", guard.Coerce("route", route)),
            new("verb", guard.Coerce("verb", verb)),
            new("status_code", guard.Coerce("status_code", statusCode)),
            new("result", guard.Coerce("result", result)),
        };

        histogram.Record(durationMs, tags);
    }
}
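A minimal sketch of using the guard with a meter; the meter name and raw label values are illustrative:

using System.Diagnostics.Metrics;
using Microsoft.Extensions.Options;
using StellaOps.Telemetry.Core;

var options = Options.Create(new StellaOpsTelemetryOptions()); // defaults: 50 values per label, 64 chars
var guard = new MetricLabelGuard(options);

using var meter = new Meter("StellaOps.ExampleService");       // meter name is a placeholder
var requestDuration = meter.CreateHistogram<double>("request.duration");

// Labels are coerced before recording: values beyond the per-label budget collapse to "other",
// and anything longer than MaxLabelLength is trimmed.
requestDuration.RecordRequestDuration(guard, 12.7, "/api/policies", "GET", "200", "ok");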
@@ -6,6 +6,10 @@
     <Nullable>enable</Nullable>
   </PropertyGroup>
+
+  <ItemGroup>
+    <FrameworkReference Include="Microsoft.AspNetCore.App" />
+  </ItemGroup>
+
   <ItemGroup>
     <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
     <PackageReference Include="OpenTelemetry.Extensions.Hosting" Version="1.12.0" />
@@ -12,6 +12,16 @@ public sealed class StellaOpsTelemetryOptions
     /// </summary>
     public CollectorOptions Collector { get; set; } = new();
+
+    /// <summary>
+    /// Gets propagation-specific settings used by middleware and handlers.
+    /// </summary>
+    public PropagationOptions Propagation { get; set; } = new();
+
+    /// <summary>
+    /// Gets metric label guard settings to prevent cardinality explosions.
+    /// </summary>
+    public MetricLabelOptions Labels { get; set; } = new();

     /// <summary>
     /// Options describing how the OTLP collector exporter should be configured.
     /// </summary>
@@ -63,6 +73,48 @@ public sealed class StellaOpsTelemetryOptions
             return Uri.TryCreate(Endpoint.Trim(), UriKind.Absolute, out endpoint);
         }
     }
+
+    /// <summary>
+    /// Options controlling telemetry context propagation.
+    /// </summary>
+    public sealed class PropagationOptions
+    {
+        /// <summary>
+        /// Gets or sets the header name carrying the tenant identifier.
+        /// </summary>
+        public string TenantHeader { get; set; } = "x-stella-tenant";
+
+        /// <summary>
+        /// Gets or sets the header name carrying the actor (user/service) identifier.
+        /// </summary>
+        public string ActorHeader { get; set; } = "x-stella-actor";
+
+        /// <summary>
+        /// Gets or sets the header name carrying imposed rule/decision metadata.
+        /// </summary>
+        public string ImposedRuleHeader { get; set; } = "x-stella-imposed-rule";
+
+        /// <summary>
+        /// Gets or sets the header name carrying the trace identifier when no Activity is present.
+        /// </summary>
+        public string TraceIdHeader { get; set; } = "x-stella-traceid";
+    }
+
+    /// <summary>
+    /// Options used to constrain metric label cardinality.
+    /// </summary>
+    public sealed class MetricLabelOptions
+    {
+        /// <summary>
+        /// Gets or sets the maximum number of distinct values tracked per label key.
+        /// </summary>
+        public int MaxDistinctValuesPerLabel { get; set; } = 50;
+
+        /// <summary>
+        /// Gets or sets the maximum length of any individual label value; longer values are trimmed.
+        /// </summary>
+        public int MaxLabelLength { get; set; } = 64;
+    }
 }

 /// <summary>
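The new propagation and label-guard defaults can be overridden at startup like any other options block. A sketch, assuming an IServiceCollection named services and illustrative values:

using Microsoft.Extensions.DependencyInjection;
using StellaOps.Telemetry.Core;

services.Configure<StellaOpsTelemetryOptions>(telemetry =>
{
    telemetry.Propagation.TenantHeader = "x-stella-tenant";   // defaults shown for clarity
    telemetry.Propagation.TraceIdHeader = "x-stella-traceid";
    telemetry.Labels.MaxDistinctValuesPerLabel = 100;         // loosen the per-label budget
    telemetry.Labels.MaxLabelLength = 64;
});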
Some files were not shown because too many files have changed in this diff.