Add Ruby language analyzer and related functionality
- Introduced global usings for Ruby analyzer. - Implemented RubyLockData, RubyLockEntry, and RubyLockParser for handling Gemfile.lock files. - Created RubyPackage and RubyPackageCollector to manage Ruby packages and vendor cache. - Developed RubyAnalyzerPlugin and RubyLanguageAnalyzer for analyzing Ruby projects. - Added tests for Ruby language analyzer with sample Gemfile.lock and expected output. - Included necessary project files and references for the Ruby analyzer. - Added third-party licenses for tree-sitter dependencies.
This commit is contained in:
8
NOTICE.md
Normal file
8
NOTICE.md
Normal file
@@ -0,0 +1,8 @@
|
||||
# Third-Party Notices
|
||||
|
||||
This project bundles or links against the following third-party components in the scanner Ruby analyzer implementation:
|
||||
|
||||
- **tree-sitter** (MIT License, © 2018 Max Brunsfeld)
|
||||
- **tree-sitter-ruby** (MIT License, © 2016 Rob Rix)
|
||||
|
||||
License texts are available under third-party-licenses/.
|
||||
@@ -107,23 +107,43 @@ services:
|
||||
- stellaops
|
||||
labels: *release-labels
|
||||
|
||||
attestor:
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:1ff0a3124d66d3a2702d8e421df40fbd98cc75cb605d95510598ebbae1433c50
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- signer
|
||||
environment:
|
||||
ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
|
||||
ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
||||
ports:
|
||||
- "${ATTESTOR_PORT:-8442}:8442"
|
||||
networks:
|
||||
- stellaops
|
||||
labels: *release-labels
|
||||
|
||||
concelier:
|
||||
image: registry.stella-ops.org/stellaops/concelier@sha256:29e2e1a0972707e092cbd3d370701341f9fec2aa9316fb5d8100480f2a1c76b5
|
||||
restart: unless-stopped
|
||||
attestor:
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:1ff0a3124d66d3a2702d8e421df40fbd98cc75cb605d95510598ebbae1433c50
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- signer
|
||||
environment:
|
||||
ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
|
||||
ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
||||
ports:
|
||||
- "${ATTESTOR_PORT:-8442}:8442"
|
||||
networks:
|
||||
- stellaops
|
||||
labels: *release-labels
|
||||
|
||||
issuer-directory:
|
||||
image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- mongo
|
||||
- authority
|
||||
environment:
|
||||
ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml"
|
||||
ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
||||
ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440"
|
||||
ISSUERDIRECTORY__MONGO__CONNECTIONSTRING: "${ISSUER_DIRECTORY_MONGO_CONNECTION_STRING}"
|
||||
ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}"
|
||||
volumes:
|
||||
- ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro
|
||||
ports:
|
||||
- "${ISSUER_DIRECTORY_PORT:-8447}:8080"
|
||||
networks:
|
||||
- stellaops
|
||||
labels: *release-labels
|
||||
|
||||
concelier:
|
||||
image: registry.stella-ops.org/stellaops/concelier@sha256:29e2e1a0972707e092cbd3d370701341f9fec2aa9316fb5d8100480f2a1c76b5
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- mongo
|
||||
- minio
|
||||
|
||||
@@ -107,25 +107,45 @@ services:
|
||||
- stellaops
|
||||
labels: *release-labels
|
||||
|
||||
attestor:
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:5cc417948c029da01dccf36e4645d961a3f6d8de7e62fe98d845f07cd2282114
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- signer
|
||||
environment:
|
||||
ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
|
||||
ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
||||
ports:
|
||||
- "${ATTESTOR_PORT:-8442}:8442"
|
||||
networks:
|
||||
- stellaops
|
||||
labels: *release-labels
|
||||
|
||||
concelier:
|
||||
image: registry.stella-ops.org/stellaops/concelier@sha256:dafef3954eb4b837e2c424dd2d23e1e4d60fa83794840fac9cd3dea1d43bd085
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- mongo
|
||||
attestor:
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:5cc417948c029da01dccf36e4645d961a3f6d8de7e62fe98d845f07cd2282114
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- signer
|
||||
environment:
|
||||
ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
|
||||
ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
||||
ports:
|
||||
- "${ATTESTOR_PORT:-8442}:8442"
|
||||
networks:
|
||||
- stellaops
|
||||
labels: *release-labels
|
||||
|
||||
issuer-directory:
|
||||
image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- mongo
|
||||
- authority
|
||||
environment:
|
||||
ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml"
|
||||
ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
||||
ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440"
|
||||
ISSUERDIRECTORY__MONGO__CONNECTIONSTRING: "${ISSUER_DIRECTORY_MONGO_CONNECTION_STRING}"
|
||||
ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}"
|
||||
volumes:
|
||||
- ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro
|
||||
ports:
|
||||
- "${ISSUER_DIRECTORY_PORT:-8447}:8080"
|
||||
networks:
|
||||
- stellaops
|
||||
labels: *release-labels
|
||||
|
||||
concelier:
|
||||
image: registry.stella-ops.org/stellaops/concelier@sha256:dafef3954eb4b837e2c424dd2d23e1e4d60fa83794840fac9cd3dea1d43bd085
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- mongo
|
||||
- minio
|
||||
environment:
|
||||
CONCELIER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
||||
|
||||
@@ -112,24 +112,44 @@ services:
|
||||
- frontdoor
|
||||
labels: *release-labels
|
||||
|
||||
attestor:
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- signer
|
||||
environment:
|
||||
ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
|
||||
ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
||||
ports:
|
||||
- "${ATTESTOR_PORT:-8442}:8442"
|
||||
networks:
|
||||
- stellaops
|
||||
- frontdoor
|
||||
labels: *release-labels
|
||||
|
||||
concelier:
|
||||
image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5
|
||||
restart: unless-stopped
|
||||
attestor:
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- signer
|
||||
environment:
|
||||
ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
|
||||
ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
||||
ports:
|
||||
- "${ATTESTOR_PORT:-8442}:8442"
|
||||
networks:
|
||||
- stellaops
|
||||
- frontdoor
|
||||
labels: *release-labels
|
||||
|
||||
issuer-directory:
|
||||
image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- mongo
|
||||
- authority
|
||||
environment:
|
||||
ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml"
|
||||
ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
||||
ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440"
|
||||
ISSUERDIRECTORY__MONGO__CONNECTIONSTRING: "${ISSUER_DIRECTORY_MONGO_CONNECTION_STRING}"
|
||||
ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}"
|
||||
volumes:
|
||||
- ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro
|
||||
ports:
|
||||
- "${ISSUER_DIRECTORY_PORT:-8447}:8080"
|
||||
networks:
|
||||
- stellaops
|
||||
labels: *release-labels
|
||||
|
||||
concelier:
|
||||
image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- mongo
|
||||
- minio
|
||||
|
||||
@@ -107,22 +107,42 @@ services:
|
||||
- stellaops
|
||||
labels: *release-labels
|
||||
|
||||
attestor:
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- signer
|
||||
environment:
|
||||
ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
|
||||
ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
||||
ports:
|
||||
- "${ATTESTOR_PORT:-8442}:8442"
|
||||
networks:
|
||||
- stellaops
|
||||
labels: *release-labels
|
||||
|
||||
concelier:
|
||||
image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5
|
||||
attestor:
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- signer
|
||||
environment:
|
||||
ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
|
||||
ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
||||
ports:
|
||||
- "${ATTESTOR_PORT:-8442}:8442"
|
||||
networks:
|
||||
- stellaops
|
||||
labels: *release-labels
|
||||
|
||||
issuer-directory:
|
||||
image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- mongo
|
||||
- authority
|
||||
environment:
|
||||
ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml"
|
||||
ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
||||
ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440"
|
||||
ISSUERDIRECTORY__MONGO__CONNECTIONSTRING: "${ISSUER_DIRECTORY_MONGO_CONNECTION_STRING}"
|
||||
ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}"
|
||||
volumes:
|
||||
- ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro
|
||||
ports:
|
||||
- "${ISSUER_DIRECTORY_PORT:-8447}:8080"
|
||||
networks:
|
||||
- stellaops
|
||||
labels: *release-labels
|
||||
|
||||
concelier:
|
||||
image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- mongo
|
||||
|
||||
9
deploy/compose/env/airgap.env.example
vendored
9
deploy/compose/env/airgap.env.example
vendored
@@ -8,9 +8,12 @@ RUSTFS_HTTP_PORT=8080
|
||||
AUTHORITY_ISSUER=https://authority.airgap.local
|
||||
AUTHORITY_PORT=8440
|
||||
SIGNER_POE_INTROSPECT_URL=file:///offline/poe/introspect.json
|
||||
SIGNER_PORT=8441
|
||||
ATTESTOR_PORT=8442
|
||||
CONCELIER_PORT=8445
|
||||
SIGNER_PORT=8441
|
||||
ATTESTOR_PORT=8442
|
||||
ISSUER_DIRECTORY_PORT=8447
|
||||
ISSUER_DIRECTORY_MONGO_CONNECTION_STRING=mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017
|
||||
ISSUER_DIRECTORY_SEED_CSAF=true
|
||||
CONCELIER_PORT=8445
|
||||
SCANNER_WEB_PORT=8444
|
||||
UI_PORT=9443
|
||||
NATS_CLIENT_PORT=24222
|
||||
|
||||
9
deploy/compose/env/dev.env.example
vendored
9
deploy/compose/env/dev.env.example
vendored
@@ -8,9 +8,12 @@ RUSTFS_HTTP_PORT=8080
|
||||
AUTHORITY_ISSUER=https://authority.localtest.me
|
||||
AUTHORITY_PORT=8440
|
||||
SIGNER_POE_INTROSPECT_URL=https://licensing.svc.local/introspect
|
||||
SIGNER_PORT=8441
|
||||
ATTESTOR_PORT=8442
|
||||
CONCELIER_PORT=8445
|
||||
SIGNER_PORT=8441
|
||||
ATTESTOR_PORT=8442
|
||||
ISSUER_DIRECTORY_PORT=8447
|
||||
ISSUER_DIRECTORY_MONGO_CONNECTION_STRING=mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017
|
||||
ISSUER_DIRECTORY_SEED_CSAF=true
|
||||
CONCELIER_PORT=8445
|
||||
SCANNER_WEB_PORT=8444
|
||||
UI_PORT=8443
|
||||
NATS_CLIENT_PORT=4222
|
||||
|
||||
9
deploy/compose/env/prod.env.example
vendored
9
deploy/compose/env/prod.env.example
vendored
@@ -10,9 +10,12 @@ RUSTFS_HTTP_PORT=8080
|
||||
AUTHORITY_ISSUER=https://authority.prod.stella-ops.org
|
||||
AUTHORITY_PORT=8440
|
||||
SIGNER_POE_INTROSPECT_URL=https://licensing.prod.stella-ops.org/introspect
|
||||
SIGNER_PORT=8441
|
||||
ATTESTOR_PORT=8442
|
||||
CONCELIER_PORT=8445
|
||||
SIGNER_PORT=8441
|
||||
ATTESTOR_PORT=8442
|
||||
ISSUER_DIRECTORY_PORT=8447
|
||||
ISSUER_DIRECTORY_MONGO_CONNECTION_STRING=mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017
|
||||
ISSUER_DIRECTORY_SEED_CSAF=true
|
||||
CONCELIER_PORT=8445
|
||||
SCANNER_WEB_PORT=8444
|
||||
UI_PORT=8443
|
||||
NATS_CLIENT_PORT=4222
|
||||
|
||||
9
deploy/compose/env/stage.env.example
vendored
9
deploy/compose/env/stage.env.example
vendored
@@ -8,9 +8,12 @@ RUSTFS_HTTP_PORT=8080
|
||||
AUTHORITY_ISSUER=https://authority.stage.stella-ops.internal
|
||||
AUTHORITY_PORT=8440
|
||||
SIGNER_POE_INTROSPECT_URL=https://licensing.stage.stella-ops.internal/introspect
|
||||
SIGNER_PORT=8441
|
||||
ATTESTOR_PORT=8442
|
||||
CONCELIER_PORT=8445
|
||||
SIGNER_PORT=8441
|
||||
ATTESTOR_PORT=8442
|
||||
ISSUER_DIRECTORY_PORT=8447
|
||||
ISSUER_DIRECTORY_MONGO_CONNECTION_STRING=mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017
|
||||
ISSUER_DIRECTORY_SEED_CSAF=true
|
||||
CONCELIER_PORT=8445
|
||||
SCANNER_WEB_PORT=8444
|
||||
UI_PORT=8443
|
||||
NATS_CLIENT_PORT=4222
|
||||
|
||||
@@ -8,10 +8,12 @@
|
||||
image: registry.stella-ops.org/stellaops/authority@sha256:a8e8faec44a579aa5714e58be835f25575710430b1ad2ccd1282a018cd9ffcdd
|
||||
- name: signer
|
||||
image: registry.stella-ops.org/stellaops/signer@sha256:8bfef9a75783883d49fc18e3566553934e970b00ee090abee9cb110d2d5c3298
|
||||
- name: attestor
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:5cc417948c029da01dccf36e4645d961a3f6d8de7e62fe98d845f07cd2282114
|
||||
- name: scanner-web
|
||||
image: registry.stella-ops.org/stellaops/scanner-web@sha256:e0dfdb087e330585a5953029fb4757f5abdf7610820a085bd61b457dbead9a11
|
||||
- name: attestor
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:5cc417948c029da01dccf36e4645d961a3f6d8de7e62fe98d845f07cd2282114
|
||||
- name: issuer-directory-web
|
||||
image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge
|
||||
- name: scanner-web
|
||||
image: registry.stella-ops.org/stellaops/scanner-web@sha256:e0dfdb087e330585a5953029fb4757f5abdf7610820a085bd61b457dbead9a11
|
||||
- name: scanner-worker
|
||||
image: registry.stella-ops.org/stellaops/scanner-worker@sha256:92dda42f6f64b2d9522104a5c9ffb61d37b34dd193132b68457a259748008f37
|
||||
- name: concelier
|
||||
|
||||
@@ -423,6 +423,61 @@ curl -u orch-admin:s3cr3t! \
|
||||
|
||||
CLI clients configure these values via `Authority.BackfillReason` / `Authority.BackfillTicket` (environment variables `STELLAOPS_ORCH_BACKFILL_REASON` and `STELLAOPS_ORCH_BACKFILL_TICKET`). Tokens missing either field are rejected with `invalid_request`; audit events store the supplied values as `backfill.reason` and `backfill.ticket`.
|
||||
|
||||
### 7.4 Delegated service accounts
|
||||
|
||||
StellaOps Authority issues short-lived delegated tokens for service accounts so automation can operate on behalf of a tenant without sharing the underlying client identity.
|
||||
|
||||
**Configuration summary**
|
||||
|
||||
```yaml
|
||||
delegation:
|
||||
quotas:
|
||||
maxActiveTokens: 50
|
||||
serviceAccounts:
|
||||
- accountId: "svc-observer"
|
||||
tenant: "tenant-default"
|
||||
displayName: "Observability Exporter"
|
||||
description: "Delegated identity used by Export Center to read findings."
|
||||
enabled: true
|
||||
allowedScopes: [ "jobs:read", "findings:read" ]
|
||||
authorizedClients: [ "export-center-worker" ]
|
||||
|
||||
tenants:
|
||||
- name: "tenant-default"
|
||||
delegation:
|
||||
maxActiveTokens: 25
|
||||
```
|
||||
|
||||
* `delegation.quotas.maxActiveTokens` caps concurrent delegated tokens per tenant. Authority enforces both a tenant-wide ceiling and a per-account ceiling (the same value by default).
|
||||
* `serviceAccounts[].allowedScopes` lists scopes that the delegate may request. Requests for scopes outside this set return `invalid_scope`.
|
||||
* `serviceAccounts[].authorizedClients` restricts which OAuth clients may assume the delegate. Leave empty to allow any tenant client.
|
||||
* `tenants[].delegation.maxActiveTokens` optionally overrides the quota for a specific tenant.
|
||||
|
||||
**Requesting a delegated token**
|
||||
|
||||
```bash
|
||||
curl -u export-center-worker:s3cr3t \
|
||||
-d 'grant_type=client_credentials' \
|
||||
-d 'scope=jobs:read findings:read' \
|
||||
-d 'service_account=svc-observer' \
|
||||
https://authority.example.com/token
|
||||
```
|
||||
|
||||
Optional `delegation_actor` metadata appends an identity to the actor chain:
|
||||
|
||||
```bash
|
||||
-d 'delegation_actor=pipeline://exporter/step/42'
|
||||
```
|
||||
|
||||
**Token shape & observability**
|
||||
|
||||
* Access tokens include `stellaops:service_account` and an `act` claim describing the caller hierarchy (`client_id` ⇒ optional `delegation_actor`).
|
||||
* `authority_tokens` records `tokenKind = "service_account"`, the `serviceAccountId`, and the normalized `actorChain[]`.
|
||||
* Audit events (`authority.client_credentials.grant`) emit `delegation.service_account`, `delegation.actor`, and quota outcomes (`delegation.quota.exceeded = true/false`).
|
||||
* When quota limits are exceeded Authority returns `invalid_request` (`Delegation token quota exceeded for tenant/service account`) and annotates the audit log.
|
||||
|
||||
Delegated tokens still honour scope validation, tenant enforcement, sender constraints (DPoP/mTLS), and fresh-auth checks.
|
||||
|
||||
## 8. Offline & Sovereign Operation
|
||||
- **No outbound dependencies:** Authority only contacts MongoDB and local plugins. Discovery and JWKS are cached by clients with offline tolerances (`AllowOfflineCacheFallback`, `OfflineCacheTolerance`). Operators should mirror these responses for air-gapped use.
|
||||
- **Structured logging:** Every revocation export, signing rotation, bootstrap action, and token issuance emits structured logs with `traceId`, `client_id`, `subjectId`, and `network.remoteIp` where applicable. Mirror logs to your SIEM to retain audit trails without central connectivity.
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
| DOCS-SCANNER-BENCH-62-004 | TODO | Docs Guild, Java Analyzer Guild | DOCS-SCANNER-BENCH-62-003 | Document Java lockfile ingestion plan and associated policy templates per `scanning-gaps-stella-misses-from-competitors.md`. | Draft guidance published; policy examples reviewed. |
|
||||
| DOCS-SCANNER-BENCH-62-005 | TODO | Docs Guild, Go Analyzer Guild | DOCS-SCANNER-BENCH-62-004 | Document Go stripped-binary fallback enrichment guidance once implementation lands. | Docs updated with inferred module policy patterns. |
|
||||
| DOCS-SCANNER-BENCH-62-006 | TODO | Docs Guild, Rust Analyzer Guild | DOCS-SCANNER-BENCH-62-005 | Document Rust fingerprint enrichment guidance and policy examples. | Docs cover heuristic vs authoritative crate handling. |
|
||||
| DOCS-SCANNER-BENCH-62-007 | TODO | Docs Guild, Security Guild | DOCS-SCANNER-BENCH-62-006 | Produce secret leak detection documentation (rules, policy templates) once implementation lands. | Docs include rule bundle guidance and policy patterns. |
|
||||
| DOCS-SCANNER-BENCH-62-007 | DOING (2025-11-02) | Docs Guild, Security Guild | DOCS-SCANNER-BENCH-62-006 | Produce secret leak detection documentation (rules, policy templates) once implementation lands. | Docs include rule bundle guidance and policy patterns. |
|
||||
| DOCS-SCANNER-BENCH-62-008 | TODO | Docs Guild, EntryTrace Guild | DOCS-SCANNER-BENCH-62-007 | Publish EntryTrace explain/heuristic maintenance guide per `scanning-gaps-stella-misses-from-competitors.md`. | Guide covers cadence, contribution workflow, and policy predicates. |
|
||||
| DOCS-SCANNER-BENCH-62-009 | DONE (2025-11-02) | Docs Guild, Ruby Analyzer Guild | DOCS-SCANNER-BENCH-62-008 | Extend Ruby ecosystem gap analysis in `scanning-gaps-stella-misses-from-competitors.md` with implementation notes, detection tables, and backlog mapping. | Ruby section updated with competitor techniques, task linkage, and scoring rationale. |
|
||||
| DOCS-SCANNER-BENCH-62-010 | DONE (2025-11-02) | Docs Guild, PHP Analyzer Guild | DOCS-SCANNER-BENCH-62-009 | Document PHP analyzer parity gaps with detection technique tables and policy hooks. | PHP section merged with plan references and backlog linkage. |
|
||||
|
||||
@@ -86,6 +86,23 @@ Follow the sprint files below in order. Update task status in both `SPRINTS` and
|
||||
> 2025-11-02: DOCS-SCANNER-BENCH-62-013 marked DONE (Docs Guild, Swift Analyzer Guild) – Swift analyzer roadmap captured with policy hooks.
|
||||
> 2025-11-02: DOCS-SCANNER-BENCH-62-014 marked DONE (Docs Guild, Runtime Guild) – Kubernetes/VM alignment section published.
|
||||
> 2025-11-02: DOCS-SCANNER-BENCH-62-015 marked DONE (Docs Guild, Export Center Guild) – DSSE/Rekor enablement guidance appended to gap doc.
|
||||
> 2025-11-02: SCANNER-ENG-0009 moved to DOING (Ruby Analyzer Guild) – drafting Ruby analyzer parity design package.
|
||||
> 2025-11-02: SCANNER-ENG-0016 added (Ruby Analyzer Guild) – implementing Ruby lock collector & vendor cache ingestion.
|
||||
> 2025-11-02: SCANNER-ENG-0016 moved to DOING (Ruby Analyzer Guild) – lockfile parser skeleton committed with initial Gemfile.lock parsing.
|
||||
> 2025-11-02: SCANNER-ENG-0017 added (Ruby Analyzer Guild) – building runtime require/autoload graph builder.
|
||||
> 2025-11-02: SCANNER-ENG-0018 added (Ruby Analyzer Guild) – emitting Ruby capability and framework signals.
|
||||
> 2025-11-02: SCANNER-ENG-0019 added (Ruby Analyzer Guild, CLI Guild) – delivering Ruby CLI verbs and Offline Kit packaging.
|
||||
> 2025-11-02: SCANNER-LIC-0001 added (Scanner Guild, Legal Guild) – vetting tree-sitter Ruby licensing/offline packaging.
|
||||
> 2025-11-02: SCANNER-LIC-0001 moved to DOING (Scanner Guild, Legal Guild) – SPDX review in progress.
|
||||
> 2025-11-02: SCANNER-POLICY-0001 added (Policy Guild, Ruby Analyzer Guild) – defining Ruby capability predicates in Policy Engine.
|
||||
> 2025-11-02: SCANNER-CLI-0001 added (CLI Guild, Ruby Analyzer Guild) – coordinating CLI UX/docs for Ruby verbs.
|
||||
> 2025-11-02: AIAI-31-011 moved to DOING (Advisory AI Guild) – implementing Excititor VEX document provider.
|
||||
> 2025-11-02: AIAI-31-011 marked DONE (Advisory AI Guild) – Excititor VEX provider + OpenVEX chunking shipped with tests.
|
||||
> 2025-11-02: AIAI-31-002 moved to DOING (Advisory AI Guild, SBOM Service Guild) – building SBOM context retriever for timelines/paths/blast radius.
|
||||
> 2025-11-02: AIAI-31-002 progressing – SBOM context models/tests landed; awaiting SBOM guild client hookup.
|
||||
|
||||
> 2025-11-02: AIAI-31-003 moved to DOING – kicking off deterministic tooling (comparators, dependency lookup). First drop covers semver range evaluator + RPM EVR comparator.
|
||||
|
||||
> 2025-11-02: AIAI-31-004 moved to DOING – starting deterministic orchestration pipeline (summary/conflict/remediation flow).
|
||||
|
||||
> 2025-11-02: ISSUER-30-006 moved to DOING (Issuer Directory Guild, DevOps Guild) – deployment manifests, backup/restore, secret handling, and offline kit docs in progress.
|
||||
|
||||
@@ -83,6 +83,7 @@ AUTH-POLICY-27-002 | DONE (2025-11-02) | Provide attestation signing service bin
|
||||
> 2025-11-02: Added interactive-only `policy:publish`/`policy:promote` scopes with metadata requirements (`policy_reason`, `policy_ticket`, `policy_digest`), fresh-auth validation, audit enrichment, and updated config/docs for operators.
|
||||
AUTH-POLICY-27-003 | DOING (2025-11-02) | Update Authority configuration/docs for Policy Studio roles, signing policies, approval workflows, and CLI integration; include compliance checklist. Dependencies: AUTH-POLICY-27-001, AUTH-POLICY-27-002. | Authority Core & Docs Guild (src/Authority/StellaOps.Authority/TASKS.md)
|
||||
AUTH-TEN-49-001 | DOING (2025-11-02) | Implement service accounts & delegation tokens (`act` chain), per-tenant quotas, audit stream of auth decisions, and revocation APIs. Dependencies: AUTH-TEN-47-001. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md)
|
||||
> 2025-11-02: Service account store + configuration wired, delegation quotas enforced, token persistence extended with `serviceAccountId`/`tokenKind`/`actorChain`, docs & samples refreshed, and new tests cover delegated issuance/persistence.
|
||||
AUTH-VULN-29-001 | TODO | Define Vuln Explorer scopes/roles (`vuln:view`, `vuln:investigate`, `vuln:operate`, `vuln:audit`) with ABAC attributes (env, owner, business_tier) and update discovery metadata/offline kit defaults. Dependencies: AUTH-POLICY-27-001. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md)
|
||||
AUTH-VULN-29-002 | TODO | Enforce CSRF/anti-forgery tokens for workflow actions, sign attachment tokens, and record audit logs with ledger event hashes. Dependencies: AUTH-VULN-29-001, LEDGER-29-002. | Authority Core & Security Guild (src/Authority/StellaOps.Authority/TASKS.md)
|
||||
AUTH-VULN-29-003 | TODO | Update security docs/config samples for Vuln Explorer roles, ABAC policies, attachment signing, and ledger verification guidance. Dependencies: AUTH-VULN-29-001..002. | Authority Core & Docs Guild (src/Authority/StellaOps.Authority/TASKS.md)
|
||||
@@ -103,7 +104,7 @@ ISSUER-30-002 | DONE (2025-11-01) | Implement key management endpoints (add/rota
|
||||
ISSUER-30-003 | DOING | Provide trust weight APIs and tenant overrides with validation (+/- bounds) and audit trails. Dependencies: ISSUER-30-001. | Issuer Directory Guild, Policy Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md)
|
||||
ISSUER-30-004 | DONE (2025-11-01) | Integrate with VEX Lens and Excitator signature verification (client SDK, caching, retries). Dependencies: ISSUER-30-001..003. | Issuer Directory Guild, VEX Lens Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md)
|
||||
ISSUER-30-005 | DONE (2025-11-01) | Instrument metrics/logs (issuer changes, key rotation, verification failures) and dashboards/alerts. Dependencies: ISSUER-30-001..004. | Issuer Directory Guild, Observability Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md)
|
||||
ISSUER-30-006 | TODO | Provide deployment manifests, backup/restore, secure secret storage, and offline kit instructions. Dependencies: ISSUER-30-001..005. | Issuer Directory Guild, DevOps Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md)
|
||||
ISSUER-30-006 | DOING (2025-11-02) | Provide deployment manifests, backup/restore, secure secret storage, and offline kit instructions. Dependencies: ISSUER-30-001..005. | Issuer Directory Guild, DevOps Guild (src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md)
|
||||
|
||||
|
||||
[Identity & Signing] 100.D) __Libraries
|
||||
|
||||
@@ -7,8 +7,11 @@ Task ID | State | Task description | Owners (Source)
|
||||
--- | --- | --- | ---
|
||||
AIAI-31-001 | DONE (2025-11-02) | Implement structured and vector retrievers for advisories/VEX with paragraph anchors and citation metadata. Dependencies: CONCELIER-VULN-29-001, EXCITITOR-VULN-29-001. | Advisory AI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md)
|
||||
AIAI-31-002 | DOING | Build SBOM context retriever (purl version timelines, dependency paths, env flags, blast radius estimator). Dependencies: SBOM-VULN-29-001. | Advisory AI Guild, SBOM Service Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md)
|
||||
AIAI-31-003 | TODO | Implement deterministic toolset (version comparators, range checks, dependency analysis, policy lookup) exposed via orchestrator. Dependencies: AIAI-31-001..002. | Advisory AI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md)
|
||||
AIAI-31-004 | TODO | Build orchestration pipeline for Summary/Conflict/Remediation tasks (prompt templates, tool calls, token budgets, caching). Dependencies: AIAI-31-001..003, AUTH-VULN-29-001. | Advisory AI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md)
|
||||
AIAI-31-003 | DOING | Implement deterministic toolset (version comparators, range checks, dependency analysis, policy lookup) exposed via orchestrator. Dependencies: AIAI-31-001..002. | Advisory AI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md)
|
||||
AIAI-31-004 | DOING | Build orchestration pipeline for Summary/Conflict/Remediation tasks (prompt templates, tool calls, token budgets, caching). Dependencies: AIAI-31-001..003, AUTH-VULN-29-001. | Advisory AI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md)
|
||||
AIAI-31-004A | TODO | Wire orchestrator into WebService/Worker, expose API + queue contract, emit metrics, stub cache. Dependencies: AIAI-31-004, AIAI-31-002. | Advisory AI Guild, Platform Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md)
|
||||
AIAI-31-004B | TODO | Implement prompt assembler, guardrails, cache persistence, DSSE provenance, golden outputs. Dependencies: AIAI-31-004A, DOCS-AIAI-31-003, AUTH-AIAI-31-004. | Advisory AI Guild, Security Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md)
|
||||
AIAI-31-004C | TODO | Deliver CLI `stella advise run` command, renderer, docs, CLI golden tests. Dependencies: AIAI-31-004B, CLI-AIAI-31-003. | Advisory AI Guild, CLI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md)
|
||||
AIAI-31-005 | TODO | Implement guardrails (redaction, injection defense, output validation, citation enforcement) and fail-safe handling. Dependencies: AIAI-31-004. | Advisory AI Guild, Security Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md)
|
||||
AIAI-31-006 | TODO | Expose REST API endpoints (`/advisory/ai/*`) with RBAC, rate limits, OpenAPI schemas, and batching support. Dependencies: AIAI-31-004..005. | Advisory AI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md)
|
||||
AIAI-31-007 | TODO | Instrument metrics (`advisory_ai_latency`, `guardrail_blocks`, `validation_failures`, `citation_coverage`), logs, and traces; publish dashboards/alerts. Dependencies: AIAI-31-004..006. | Advisory AI Guild, Observability Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md)
|
||||
@@ -17,6 +20,11 @@ AIAI-31-010 | DONE (2025-11-02) | Implement Concelier advisory raw document prov
|
||||
AIAI-31-011 | DONE (2025-11-02) | Implement Excititor VEX document provider to surface structured VEX statements for retrieval. Dependencies: EXCITITOR-LNM-21-201, EXCITITOR-CORE-AOC-19-002. | Advisory AI Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md)
|
||||
AIAI-31-009 | TODO | Develop unit/golden/property/perf tests, injection harness, and regression suite; ensure determinism with seeded caches. Dependencies: AIAI-31-001..006. | Advisory AI Guild, QA Guild (src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md)
|
||||
|
||||
|
||||
|
||||
> 2025-11-02: AIAI-31-004 kicked off orchestration pipeline design – establishing deterministic task sequence (summary/conflict/remediation) and cache key strategy.
|
||||
> 2025-11-02: AIAI-31-004 orchestration prerequisites documented in docs/modules/advisory-ai/orchestration-pipeline.md (tasks 004A/004B/004C).
|
||||
> 2025-11-02: AIAI-31-003 moved to DOING – beginning deterministic tooling (comparators, dependency analysis) while awaiting SBOM context client. Semantic & EVR comparators shipped; toolset interface published for orchestrator adoption.
|
||||
> 2025-11-02: Structured + vector retrievers landed with deterministic CSAF/OSV/Markdown chunkers, deterministic hash embeddings, and unit coverage for sample advisories.
|
||||
> 2025-11-02: SBOM context request/result models finalized; retriever tests now validate environment-flag toggles and dependency-path dedupe. SBOM guild to wire real context service client.
|
||||
|
||||
|
||||
@@ -211,7 +211,7 @@ DOCS-SCANNER-BENCH-62-003 | TODO | Capture Python lockfile/editable install requ
|
||||
DOCS-SCANNER-BENCH-62-004 | TODO | Document Java lockfile ingestion guidance and policy templates. | Docs Guild, Java Analyzer Guild (docs/TASKS.md)
|
||||
DOCS-SCANNER-BENCH-62-005 | TODO | Document Go stripped-binary fallback enrichment guidance once implementation lands. | Docs Guild, Go Analyzer Guild (docs/TASKS.md)
|
||||
DOCS-SCANNER-BENCH-62-006 | TODO | Document Rust fingerprint enrichment guidance and policy examples. | Docs Guild, Rust Analyzer Guild (docs/TASKS.md)
|
||||
DOCS-SCANNER-BENCH-62-007 | TODO | Produce secret leak detection documentation (rules, policy templates). | Docs Guild, Security Guild (docs/TASKS.md)
|
||||
DOCS-SCANNER-BENCH-62-007 | DOING (2025-11-02) | Produce secret leak detection documentation (rules, policy templates). | Docs Guild, Security Guild (docs/TASKS.md)
|
||||
DOCS-SCANNER-BENCH-62-008 | TODO | Publish EntryTrace explain/heuristic maintenance guide. | Docs Guild, EntryTrace Guild (docs/TASKS.md)
|
||||
DOCS-SCANNER-BENCH-62-009 | TODO | Produce SAST integration documentation (connector framework, policy templates). | Docs Guild, Policy Guild (docs/TASKS.md)
|
||||
DOCS-TEN-47-001 | TODO | Publish `/docs/security/tenancy-overview.md` and `/docs/security/scopes-and-roles.md` outlining scope grammar, tenant model, imposed rule reminder. | Docs Guild, Authority Core (docs/TASKS.md)
|
||||
|
||||
@@ -1,100 +1,115 @@
|
||||
# Advisory AI architecture
|
||||
|
||||
> Captures the retrieval, guardrail, and inference packaging requirements defined in the Advisory AI implementation plan and related module guides.
|
||||
|
||||
## 1) Goals
|
||||
|
||||
- Summarise advisories/VEX evidence into operator-ready briefs with citations.
|
||||
- Explain conflicting statements with provenance and trust weights (using VEX Lens & Excititor data).
|
||||
- Suggest remediation plans aligned with Offline Kit deployment models and scheduler follow-ups.
|
||||
- Operate deterministically where possible; cache generated artefacts with digests for audit.
|
||||
|
||||
## 2) Pipeline overview
|
||||
|
||||
```
|
||||
+---------------------+
|
||||
Concelier/VEX Lens | Evidence Retriever |
|
||||
Policy Engine ----> | (vector + keyword) | ---> Context Pack (JSON)
|
||||
Zastava runtime +---------------------+
|
||||
|
|
||||
v
|
||||
+-------------+
|
||||
| Prompt |
|
||||
| Assembler |
|
||||
+-------------+
|
||||
|
|
||||
v
|
||||
+-------------+
|
||||
| Guarded LLM |
|
||||
| (local/host)|
|
||||
+-------------+
|
||||
|
|
||||
v
|
||||
+-----------------+
|
||||
| Citation & |
|
||||
| Validation |
|
||||
+-----------------+
|
||||
|
|
||||
v
|
||||
+----------------+
|
||||
| Output cache |
|
||||
| (hash, bundle) |
|
||||
+----------------+
|
||||
```
|
||||
|
||||
## 3) Retrieval & context
|
||||
|
||||
- Hybrid search: vector embeddings (SBERT-compatible) + keyword filters for advisory IDs, PURLs, CVEs.
|
||||
- Context packs include:
|
||||
- Advisory raw excerpts with highlighted sections and source URLs.
|
||||
- VEX statements (normalized tuples + trust metadata).
|
||||
- Policy explain traces for the affected finding.
|
||||
- Runtime/impact hints from Zastava (exposure, entrypoints).
|
||||
- Export-ready remediation data (fixed versions, patches).
|
||||
|
||||
All context references include `content_hash` and `source_id` enabling verifiable citations.
|
||||
|
||||
## 4) Guardrails
|
||||
|
||||
- Prompt templates enforce structure: summary, conflicts, remediation, references.
|
||||
- Response validator ensures:
|
||||
- No hallucinated advisories (every fact must map to input context).
|
||||
- Citations follow `[n]` indexing referencing actual sources.
|
||||
- Remediation suggestions only cite policy-approved sources (fixed versions, vendor hotfixes).
|
||||
- Moderation/PII filters prevent leaking secrets; responses failing validation are rejected and logged.
|
||||
|
||||
## 5) Output persistence
|
||||
|
||||
- Cached artefacts stored in `advisory_ai_outputs` with fields:
|
||||
- `output_hash` (sha256 of JSON response).
|
||||
- `input_digest` (hash of context pack).
|
||||
- `summary`, `conflicts`, `remediation`, `citations`.
|
||||
- `generated_at`, `model_id`, `profile` (Sovereign/FIPS etc.).
|
||||
- `signatures` (optional DSSE if run in deterministic mode).
|
||||
- Offline bundle format contains `summary.md`, `citations.json`, `context_manifest.json`, `signatures/`.
|
||||
|
||||
## 6) Profiles & sovereignty
|
||||
|
||||
- **Profiles:** `default`, `fips-local` (FIPS-compliant local model), `gost-local`, `cloud-openai` (optional, disabled by default). Each profile defines allowed models, key management, and telemetry endpoints.
|
||||
- **CryptoProfile/RootPack integration:** generated artefacts can be signed using configured CryptoProfile to satisfy procurement/trust requirements.
|
||||
|
||||
## 7) APIs
|
||||
|
||||
- `POST /v1/advisory-ai/summaries` — generate (or retrieve cached) summary for `{advisoryKey, artifactId, policyVersion}`.
|
||||
- `POST /v1/advisory-ai/conflicts` — explain conflicting VEX statements with trust ranking.
|
||||
- `POST /v1/advisory-ai/remediation` — fetch remediation plan with target fix versions, prerequisites, verification steps.
|
||||
- `GET /v1/advisory-ai/outputs/{hash}` — retrieve cached artefact (used by CLI/Console/Export Center).
|
||||
|
||||
All endpoints accept `profile` parameter (default `fips-local`) and return `output_hash`, `input_digest`, and `citations` for verification.
|
||||
|
||||
## 8) Observability
|
||||
|
||||
- Metrics: `advisory_ai_requests_total{profile,type}`, `advisory_ai_latency_seconds`, `advisory_ai_validation_failures_total`.
|
||||
- Logs: include `output_hash`, `input_digest`, `profile`, `model_id`, `tenant`, `artifacts`. Sensitive context is not logged.
|
||||
- Traces: spans for retrieval, prompt assembly, model inference, validation, cache write.
|
||||
|
||||
## 9) Operational controls
|
||||
|
||||
- Feature flags per tenant (`ai.summary.enabled`, `ai.remediation.enabled`).
|
||||
- Rate limits (per tenant, per profile) enforced by Orchestrator to prevent runaway usage.
|
||||
- Offline/air-gapped deployments run local models packaged with Offline Kit; model weights validated via manifest digests.
|
||||
# Advisory AI architecture
|
||||
|
||||
> Captures the retrieval, guardrail, and inference packaging requirements defined in the Advisory AI implementation plan and related module guides.
|
||||
|
||||
## 1) Goals
|
||||
|
||||
- Summarise advisories/VEX evidence into operator-ready briefs with citations.
|
||||
- Explain conflicting statements with provenance and trust weights (using VEX Lens & Excititor data).
|
||||
- Suggest remediation plans aligned with Offline Kit deployment models and scheduler follow-ups.
|
||||
- Operate deterministically where possible; cache generated artefacts with digests for audit.
|
||||
|
||||
## 2) Pipeline overview
|
||||
|
||||
```
|
||||
+---------------------+
|
||||
Concelier/VEX Lens | Evidence Retriever |
|
||||
Policy Engine ----> | (vector + keyword) | ---> Context Pack (JSON)
|
||||
Zastava runtime +---------------------+
|
||||
|
|
||||
v
|
||||
+-------------+
|
||||
| Prompt |
|
||||
| Assembler |
|
||||
+-------------+
|
||||
|
|
||||
v
|
||||
+-------------+
|
||||
| Guarded LLM |
|
||||
| (local/host)|
|
||||
+-------------+
|
||||
|
|
||||
v
|
||||
+-----------------+
|
||||
| Citation & |
|
||||
| Validation |
|
||||
+-----------------+
|
||||
|
|
||||
v
|
||||
+----------------+
|
||||
| Output cache |
|
||||
| (hash, bundle) |
|
||||
+----------------+
|
||||
```
|
||||
|
||||
## 3) Retrieval & context
|
||||
|
||||
- Hybrid search: vector embeddings (SBERT-compatible) + keyword filters for advisory IDs, PURLs, CVEs.
|
||||
- Context packs include:
|
||||
- Advisory raw excerpts with highlighted sections and source URLs.
|
||||
- VEX statements (normalized tuples + trust metadata).
|
||||
- Policy explain traces for the affected finding.
|
||||
- Runtime/impact hints from Zastava (exposure, entrypoints).
|
||||
- Export-ready remediation data (fixed versions, patches).
|
||||
- **SBOM context retriever** (AIAI-31-002) hydrates:
|
||||
- Version timelines (first/last observed, status, fix availability).
|
||||
- Dependency paths (runtime vs build/test, deduped by coordinate chain).
|
||||
- Tenant environment flags (prod/stage toggles) with optional blast radius summary.
|
||||
- Service-side clamps: max 500 timeline entries, 200 dependency paths, with client-provided toggles for env/blast data.
|
||||
|
||||
Retriever requests and results are trimmed/normalized before hashing; metadata (counts, provenance keys) is returned for downstream guardrails. Unit coverage ensures deterministic ordering and flag handling.
|
||||
|
||||
All context references include `content_hash` and `source_id` enabling verifiable citations.
|
||||
|
||||
## 4) Guardrails
|
||||
|
||||
- Prompt templates enforce structure: summary, conflicts, remediation, references.
|
||||
- Response validator ensures:
|
||||
- No hallucinated advisories (every fact must map to input context).
|
||||
- Citations follow `[n]` indexing referencing actual sources.
|
||||
- Remediation suggestions only cite policy-approved sources (fixed versions, vendor hotfixes).
|
||||
- Moderation/PII filters prevent leaking secrets; responses failing validation are rejected and logged.
|
||||
|
||||
## 5) Deterministic tooling
|
||||
|
||||
- **Version comparators** — offline semantic version + RPM EVR parsers with range evaluators. Supports chained constraints (`>=`, `<=`, `!=`) used by remediation advice and blast radius calcs.
|
||||
- Registered via `AddAdvisoryDeterministicToolset` for reuse across orchestrator, CLI, and services.
|
||||
- **Orchestration pipeline** — see `orchestration-pipeline.md` for prerequisites, task breakdown, and cross-guild responsibilities before wiring the execution flows.
|
||||
- **Planned extensions** — NEVRA/EVR comparators, ecosystem-specific normalisers, dependency chain scorers (AIAI-31-003 scope).
|
||||
- Exposed via internal interfaces to allow orchestrator/toolchain reuse; all helpers stay side-effect free and deterministic for golden testing.
|
||||
|
||||
## 6) Output persistence
|
||||
|
||||
- Cached artefacts stored in `advisory_ai_outputs` with fields:
|
||||
- `output_hash` (sha256 of JSON response).
|
||||
- `input_digest` (hash of context pack).
|
||||
- `summary`, `conflicts`, `remediation`, `citations`.
|
||||
- `generated_at`, `model_id`, `profile` (Sovereign/FIPS etc.).
|
||||
- `signatures` (optional DSSE if run in deterministic mode).
|
||||
- Offline bundle format contains `summary.md`, `citations.json`, `context_manifest.json`, `signatures/`.
|
||||
|
||||
## 7) Profiles & sovereignty
|
||||
|
||||
- **Profiles:** `default`, `fips-local` (FIPS-compliant local model), `gost-local`, `cloud-openai` (optional, disabled by default). Each profile defines allowed models, key management, and telemetry endpoints.
|
||||
- **CryptoProfile/RootPack integration:** generated artefacts can be signed using configured CryptoProfile to satisfy procurement/trust requirements.
|
||||
|
||||
## 8) APIs
|
||||
|
||||
- `POST /v1/advisory-ai/summaries` — generate (or retrieve cached) summary for `{advisoryKey, artifactId, policyVersion}`.
|
||||
- `POST /v1/advisory-ai/conflicts` — explain conflicting VEX statements with trust ranking.
|
||||
- `POST /v1/advisory-ai/remediation` — fetch remediation plan with target fix versions, prerequisites, verification steps.
|
||||
- `GET /v1/advisory-ai/outputs/{hash}` — retrieve cached artefact (used by CLI/Console/Export Center).
|
||||
|
||||
All endpoints accept `profile` parameter (default `fips-local`) and return `output_hash`, `input_digest`, and `citations` for verification.
|
||||
|
||||
## 9) Observability
|
||||
|
||||
- Metrics: `advisory_ai_requests_total{profile,type}`, `advisory_ai_latency_seconds`, `advisory_ai_validation_failures_total`.
|
||||
- Logs: include `output_hash`, `input_digest`, `profile`, `model_id`, `tenant`, `artifacts`. Sensitive context is not logged.
|
||||
- Traces: spans for retrieval, prompt assembly, model inference, validation, cache write.
|
||||
|
||||
## 10) Operational controls
|
||||
|
||||
- Feature flags per tenant (`ai.summary.enabled`, `ai.remediation.enabled`).
|
||||
- Rate limits (per tenant, per profile) enforced by Orchestrator to prevent runaway usage.
|
||||
- Offline/air-gapped deployments run local models packaged with Offline Kit; model weights validated via manifest digests.
|
||||
|
||||
82
docs/modules/advisory-ai/orchestration-pipeline.md
Normal file
82
docs/modules/advisory-ai/orchestration-pipeline.md
Normal file
@@ -0,0 +1,82 @@
|
||||
# Advisory AI Orchestration Pipeline (Planning Notes)
|
||||
|
||||
> **Status:** Draft – prerequisite design for AIAI-31-004 integration work.
|
||||
> **Audience:** Advisory AI guild, WebService/Worker guilds, CLI guild, Docs/QA support teams.
|
||||
|
||||
## 1. Goal
|
||||
|
||||
Wire the deterministic pipeline (Summary / Conflict / Remediation flows) into the Advisory AI service, workers, and CLI with deterministic caching, prompt preparation, and guardrail fallback. This document captures the pre-integration checklist and task breakdown so each guild understands their responsibilities before coding begins.
|
||||
|
||||
## 2. Prerequisites
|
||||
|
||||
| Area | Requirement | Owner | Status |
|
||||
|------|-------------|-------|--------|
|
||||
| **Toolset** | Deterministic comparators, dependency analyzer (`IDeterministicToolset`, `AdvisoryPipelineOrchestrator`) | Advisory AI | ✅ landed (AIAI-31-003) |
|
||||
| **SBOM context** | Real SBOM context client delivering timelines + dependency paths | SBOM Service Guild | ⏳ pending (AIAI-31-002) |
|
||||
| **Prompt artifacts** | Liquid/Handlebars prompt templates for summary/conflict/remediation | Advisory AI Docs Guild | ⏳ authoring needed |
|
||||
| **Cache strategy** | Decision on DSSE or hash-only cache entries, TTLs, and eviction policy | Advisory AI + Platform | 🔲 define |
|
||||
| **Auth scopes** | Confirm service account scopes for new API endpoints/worker-to-service calls | Authority Guild | 🔲 define |
|
||||
|
||||
**Blocking risk:** SBOM client and prompt templates must exist (even stubbed) before the orchestrator can produce stable plans.
|
||||
|
||||
## 3. Integration plan (high-level)
|
||||
|
||||
1. **Service layer (WebService / Worker)**
|
||||
- Inject `IAdvisoryPipelineOrchestrator` via `AddAdvisoryPipeline`.
|
||||
- Define REST endpoint `POST /v1/advisories/{key}/pipeline/{task}` (task ∈ summary/conflict/remediation).
|
||||
- Worker consumes queue messages (`advisory.pipeline.execute`) -> fetches plan -> executes prompt -> persists output & provenance.
|
||||
- Add metrics: `advisory_pipeline_requests_total`, `advisory_pipeline_plan_cache_hits_total`, `advisory_pipeline_latency_seconds`.
|
||||
2. **CLI**
|
||||
- New command `stella advise run <task>` with flags for artifact id, profile, policy version, `--force-refresh`.
|
||||
- Render JSON/Markdown outputs; handle caching hints (print cache key, status).
|
||||
3. **Caching / storage**
|
||||
- Choose storage (Mongo collection vs existing DSSE output store).
|
||||
- Persist `AdvisoryTaskPlan` metadata + generated output keyed by cache key + policy version.
|
||||
- Expose TTL/force-refresh semantics.
|
||||
4. **Docs & QA**
|
||||
- Publish API spec (`docs/advisory-ai/api.md`) + CLI docs.
|
||||
- Add golden outputs for deterministic runs; property tests for cache key stability.
|
||||
|
||||
## 4. Task Breakdown
|
||||
|
||||
### AIAI-31-004A (Service orchestration wiring)
|
||||
|
||||
- **Scope:** WebService/Worker injection, REST/queue plumbing, metrics counters, basic cache stub.
|
||||
- **Dependencies:** `AddAdvisoryPipeline`, SBOM client stub.
|
||||
- **Exit:** API responds with plan metadata + queue message; worker logs execution attempt; metrics emitted.
|
||||
|
||||
### AIAI-31-004B (Prompt assembly & cache persistence)
|
||||
|
||||
- **Scope:** Implement prompt assembler, connect to guardrails, persist cache entries w/ DSSE metadata.
|
||||
- **Dependencies:** Prompt templates, cache storage decision, guardrail interface.
|
||||
- **Exit:** Deterministic outputs stored; force-refresh honoured; tests cover prompt assembly + caching.
|
||||
|
||||
### AIAI-31-004C (CLI integration & docs)
|
||||
|
||||
- **Scope:** CLI command + output renderer, docs updates, CLI tests (golden outputs).
|
||||
- **Dependencies:** Service endpoints stable, caching semantics documented.
|
||||
- **Exit:** CLI command produces deterministic output, docs updated, smoke tests recorded.
|
||||
|
||||
### Supporting tasks (other guilds)
|
||||
|
||||
- **AUTH-AIAI-31-004** – Update scopes and DSSE policy (Authority guild).
|
||||
- **DOCS-AIAI-31-003** – Publish API documentation, CLI guide updates (Docs guild).
|
||||
- **QA-AIAI-31-004** – Golden/properties/perf suite for pipeline (QA guild).
|
||||
|
||||
## 5. Acceptance checklist (per task)
|
||||
|
||||
| Item | Notes |
|
||||
|------|-------|
|
||||
| Cache key stability | `AdvisoryPipelineOrchestrator` hash must remain stable under re-run of identical inputs. |
|
||||
| Metrics & logging | Request id, cache key, task type, profile, latency; guardrail results logged without sensitive prompt data. |
|
||||
| Offline readiness | All prompt templates bundled with Offline Kit; CLI works in air-gapped mode with cached data. |
|
||||
| Policy awareness | Plans encode policy version used; outputs reference policy digest for audit. |
|
||||
| Testing | Unit tests (plan generation, cache keys, DI), integration (service endpoint, worker, CLI), deterministic golden outputs. |
|
||||
|
||||
## 6. Next steps
|
||||
|
||||
1. Finalize SBOM context client (AIAI-31-002) and prompt templates.
|
||||
2. Create queue schema spec (`docs/modules/advisory-ai/queue-contracts.md`) if not already available.
|
||||
3. Schedule cross-guild kickoff to agree on cache store & DSSE policy.
|
||||
|
||||
_Last updated: 2025-11-02_
|
||||
@@ -90,6 +90,11 @@ Payloads follow the contract in `Contracts/IssuerDtos.cs` and align with domain
|
||||
3. **SDK integration (ISSUER-30-004)** — supply cached issuer metadata to VEX Lens and Excititor clients.
|
||||
4. **Observability & Ops (ISSUER-30-005/006)** — metrics, dashboards, deployment automation, offline kit.
|
||||
|
||||
## 9. Operations & runbooks
|
||||
- [Deployment guide](operations/deployment.md)
|
||||
- [Backup & restore](operations/backup-restore.md)
|
||||
- [Offline kit notes](operations/offline-kit.md)
|
||||
|
||||
---
|
||||
|
||||
*Document owner: Issuer Directory Guild*
|
||||
|
||||
103
docs/modules/issuer-directory/operations/backup-restore.md
Normal file
103
docs/modules/issuer-directory/operations/backup-restore.md
Normal file
@@ -0,0 +1,103 @@
|
||||
# Issuer Directory Backup & Restore
|
||||
|
||||
## Scope
|
||||
- **Applies to:** Issuer Directory when deployed via Docker Compose (`deploy/compose/docker-compose.*.yaml`) or the Helm chart (`deploy/helm/stellaops`).
|
||||
- **Artifacts covered:** MongoDB database `issuer-directory`, service configuration (`etc/issuer-directory.yaml`), CSAF seed file (`data/csaf-publishers.json`), and secret material for the Mongo connection string.
|
||||
- **Frequency:** Take a hot backup before every upgrade and at least daily in production. Keep encrypted copies off-site/air-gapped according to your compliance program.
|
||||
|
||||
## Inventory checklist
|
||||
| Component | Location (Compose default) | Notes |
|
||||
| --- | --- | --- |
|
||||
| Mongo data | `mongo-data` volume (`/var/lib/docker/volumes/.../mongo-data`) | Contains `issuers`, `issuer_keys`, `issuer_trust_overrides`, and `issuer_audit` collections. |
|
||||
| Configuration | `etc/issuer-directory.yaml` | Mounted read-only at `/etc/issuer-directory.yaml` inside the container. |
|
||||
| CSAF seed file | `src/IssuerDirectory/StellaOps.IssuerDirectory/data/csaf-publishers.json` | Ensure customised seeds are part of the backup; regenerate if you ship regional overrides. |
|
||||
| Mongo secret | `.env` entry `ISSUER_DIRECTORY_MONGO_CONNECTION_STRING` or secret store export | Required to restore connectivity; treat as sensitive. |
|
||||
|
||||
> **Tip:** Export the secret via `kubectl get secret issuer-directory-secrets -o yaml` (sanitize before storage) or copy the Compose `.env` file into an encrypted vault.
|
||||
|
||||
## Hot backup (no downtime)
|
||||
1. **Create output directory**
|
||||
```bash
|
||||
BACKUP_DIR=backup/issuer-directory/$(date +%Y-%m-%dT%H%M%S)
|
||||
mkdir -p "$BACKUP_DIR"
|
||||
```
|
||||
2. **Dump Mongo collections**
|
||||
```bash
|
||||
STAMP="$(date +%Y%m%dT%H%M%SZ)"
|
||||
docker compose -f deploy/compose/docker-compose.prod.yaml exec mongo \
|
||||
mongodump --archive=/dump/issuer-directory-"$STAMP".gz \
|
||||
--gzip --db issuer-directory
|
||||
|
||||
docker compose -f deploy/compose/docker-compose.prod.yaml cp \
|
||||
mongo:/dump/issuer-directory-"$STAMP".gz "$BACKUP_DIR/"
|
||||
```
|
||||
For Kubernetes, run the same `mongodump` command inside the `stellaops-mongo` pod and copy the archive via `kubectl cp`.
|
||||
3. **Capture configuration and seeds**
|
||||
```bash
|
||||
cp etc/issuer-directory.yaml "$BACKUP_DIR/"
|
||||
cp src/IssuerDirectory/StellaOps.IssuerDirectory/data/csaf-publishers.json "$BACKUP_DIR/"
|
||||
```
|
||||
4. **Capture secrets**
|
||||
```bash
|
||||
grep '^ISSUER_DIRECTORY_MONGO_CONNECTION_STRING=' dev.env > "$BACKUP_DIR/issuer-directory.mongo.secret"
|
||||
chmod 600 "$BACKUP_DIR/issuer-directory.mongo.secret"
|
||||
```
|
||||
5. **Generate checksums and encrypt**
|
||||
```bash
|
||||
(cd "$BACKUP_DIR" && sha256sum * > SHA256SUMS)
|
||||
tar czf "$BACKUP_DIR.tar.gz" -C "$BACKUP_DIR" .
|
||||
age -r you@example.org "$BACKUP_DIR.tar.gz" > "$BACKUP_DIR.tar.gz.age"
|
||||
```
|
||||
|
||||
## Cold backup (planned downtime)
|
||||
1. Notify stakeholders and pause automation calling the API.
|
||||
2. Stop services:
|
||||
```bash
|
||||
docker compose -f deploy/compose/docker-compose.prod.yaml down issuer-directory
|
||||
```
|
||||
(For Helm: `kubectl scale deploy stellaops-issuer-directory --replicas=0`.)
|
||||
3. Snapshot volumes:
|
||||
```bash
|
||||
docker run --rm -v mongo-data:/data \
|
||||
-v "$(pwd)":/backup busybox tar czf /backup/mongo-data-$(date +%Y%m%d).tar.gz -C /data .
|
||||
```
|
||||
4. Copy configuration, seeds, and secrets as in the hot backup.
|
||||
5. Restart services and confirm `/health/live` returns `200 OK`.
|
||||
|
||||
## Restore procedure
|
||||
1. **Provision clean volumes**
|
||||
- Compose: `docker volume rm mongo-data` (optional) then `docker compose up -d mongo`.
|
||||
- Helm: delete the Mongo PVC or attach a fresh volume snapshot.
|
||||
2. **Restore Mongo**
|
||||
```bash
|
||||
docker compose exec -T mongo \
|
||||
mongorestore --archive \
|
||||
--gzip --drop < issuer-directory-YYYYMMDDTHHMMSSZ.gz
|
||||
```
|
||||
3. **Restore configuration/secrets**
|
||||
- Copy `issuer-directory.yaml` into `etc/`.
|
||||
- Reapply the secret: `kubectl apply -f issuer-directory-secret.yaml` or repopulate `.env`.
|
||||
4. **Restore CSAF seeds** (optional)
|
||||
- If you maintain a customised seed file, copy it back before starting the container. Otherwise the bundled file will be used.
|
||||
5. **Start services**
|
||||
```bash
|
||||
docker compose up -d issuer-directory
|
||||
# or
|
||||
kubectl scale deploy stellaops-issuer-directory --replicas=1
|
||||
```
|
||||
6. **Validate**
|
||||
- `curl -fsSL https://localhost:8447/health/live`
|
||||
- Issue an access token and list issuers to confirm results.
|
||||
- Check Mongo counts match expectations (`db.issuers.countDocuments()`, etc.).
|
||||
|
||||
## Disaster recovery notes
|
||||
- **Retention:** Maintain 30 daily + 12 monthly archives. Store copies in geographically separate, access-controlled vaults.
|
||||
- **Audit reconciliation:** Ensure `issuer_audit` entries cover the restore window; export them for compliance.
|
||||
- **Seed replay:** If the CSAF seed file was lost, set `ISSUER_DIRECTORY_SEED_CSAF=true` for the first restart to rehydrate the global tenant.
|
||||
- **Testing:** Run quarterly restore drills in a staging environment to validate the procedure and catch drift before a real incident.
|
||||
|
||||
## Verification checklist
|
||||
- [ ] `/health/live` returns `200 OK`.
|
||||
- [ ] Mongo collections (`issuers`, `issuer_keys`, `issuer_trust_overrides`) have expected counts.
|
||||
- [ ] `issuer_directory_changes_total` and `issuer_directory_key_operations_total` metrics resume within 1 minute.
|
||||
- [ ] Audit entries exist for post-restore CRUD activity.
|
||||
- [ ] Client integrations (VEX Lens, Excititor) resolve issuers successfully.
|
||||
100
docs/modules/issuer-directory/operations/deployment.md
Normal file
100
docs/modules/issuer-directory/operations/deployment.md
Normal file
@@ -0,0 +1,100 @@
|
||||
# Issuer Directory Deployment Guide
|
||||
|
||||
## Scope
|
||||
- **Applies to:** Issuer Directory WebService (`stellaops/issuer-directory-web`) running via the provided Docker Compose bundles (`deploy/compose/docker-compose.*.yaml`) or the Helm chart (`deploy/helm/stellaops`).
|
||||
- **Covers:** Environment prerequisites, secret handling, Compose + Helm rollout steps, and post-deploy verification.
|
||||
- **Audience:** Platform/DevOps engineers responsible for Identity & Signing sprint deliverables.
|
||||
|
||||
## 1 · Prerequisites
|
||||
- Authority must be running and reachable at the issuer URL you configure (default Compose host: `https://authority:8440`).
|
||||
- MongoDB 4.2+ with credentials for the `issuer-directory` database (Compose defaults to the root user defined in `.env`).
|
||||
- Network access to Authority, MongoDB, and (optionally) Prometheus if you scrape metrics.
|
||||
- Issuer Directory configuration file `etc/issuer-directory.yaml` checked and customised for your environment (tenant header, audiences, telemetry level, CSAF seed path).
|
||||
|
||||
> **Secrets:** Use `etc/secrets/issuer-directory.mongo.secret.example` as a template. Store the real connection string in an untracked file or secrets manager and reference it via environment variables (`ISSUER_DIRECTORY_MONGO_CONNECTION_STRING`) rather than committing credentials.
|
||||
|
||||
## 2 · Deploy with Docker Compose
|
||||
1. **Prepare environment variables**
|
||||
```bash
|
||||
cp deploy/compose/env/dev.env.example dev.env
|
||||
cp etc/secrets/issuer-directory.mongo.secret.example issuer-directory.mongo.env
|
||||
# Edit dev.env and issuer-directory.mongo.env with production-ready secrets.
|
||||
```
|
||||
|
||||
2. **Inspect the merged configuration**
|
||||
```bash
|
||||
docker compose \
|
||||
--env-file dev.env \
|
||||
--env-file issuer-directory.mongo.env \
|
||||
-f deploy/compose/docker-compose.dev.yaml config
|
||||
```
|
||||
The command confirms the new `issuer-directory` service resolves the port (`${ISSUER_DIRECTORY_PORT:-8447}`) and the Mongo connection string is in place.
|
||||
|
||||
3. **Launch the stack**
|
||||
```bash
|
||||
docker compose \
|
||||
--env-file dev.env \
|
||||
--env-file issuer-directory.mongo.env \
|
||||
-f deploy/compose/docker-compose.dev.yaml up -d issuer-directory
|
||||
```
|
||||
Compose automatically mounts `../../etc/issuer-directory.yaml` into the container at `/etc/issuer-directory.yaml`, seeds CSAF publishers, and exposes the API on `https://localhost:8447`.
|
||||
|
||||
4. **Smoke test**
|
||||
```bash
|
||||
curl -k https://localhost:8447/health/live
|
||||
stellaops-cli issuer-directory issuers list \
|
||||
--base-url https://localhost:8447 \
|
||||
--tenant demo \
|
||||
--access-token "$(stellaops-cli auth token issue --scope issuer-directory:read)"
|
||||
```
|
||||
|
||||
5. **Upgrade & rollback**
|
||||
- Update Compose images to the desired release manifest (`deploy/releases/*.yaml`), re-run `docker compose config`, then `docker compose up -d`.
|
||||
- Rollbacks follow the same steps with the previous manifest. Mongo collections are backwards compatible within `2025.10.x`.
|
||||
|
||||
## 3 · Deploy with Helm
|
||||
1. **Create or update the secret**
|
||||
```bash
|
||||
kubectl create secret generic issuer-directory-secrets \
|
||||
--from-literal=ISSUERDIRECTORY__MONGO__CONNECTIONSTRING='mongodb://stellaops:<password>@stellaops-mongo:27017' \
|
||||
--dry-run=client -o yaml | kubectl apply -f -
|
||||
```
|
||||
Add optional overrides (e.g. `ISSUERDIRECTORY__AUTHORITY__ISSUER`) if your Authority issuer differs from the default.
|
||||
|
||||
2. **Template for validation**
|
||||
```bash
|
||||
helm template issuer-directory deploy/helm/stellaops \
|
||||
-f deploy/helm/stellaops/values-prod.yaml \
|
||||
--set services.issuer-directory.env.ISSUERDIRECTORY__AUTHORITY__ISSUER=https://authority.prod.stella-ops.org \
|
||||
> /tmp/issuer-directory.yaml
|
||||
```
|
||||
|
||||
3. **Install / upgrade**
|
||||
```bash
|
||||
helm upgrade --install stellaops deploy/helm/stellaops \
|
||||
-f deploy/helm/stellaops/values-prod.yaml \
|
||||
--set services.issuer-directory.env.ISSUERDIRECTORY__AUTHORITY__ISSUER=https://authority.prod.stella-ops.org
|
||||
```
|
||||
The chart provisions:
|
||||
- ConfigMap `stellaops-issuer-directory-config` with `IssuerDirectory` settings.
|
||||
- Deployment `stellaops-issuer-directory` with readiness/liveness probes on `/health/live`.
|
||||
- Service on port `8080` (ClusterIP by default).
|
||||
|
||||
4. **Expose for operators (optional)**
|
||||
- Use an Ingress/HTTPRoute to publish `https://issuer-directory.<env>.stella-ops.org`.
|
||||
- Ensure the upstream includes DPoP headers if proxied through an API gateway.
|
||||
|
||||
5. **Post-deploy validation**
|
||||
```bash
|
||||
kubectl exec deploy/stellaops-issuer-directory -- \
|
||||
curl -sf http://127.0.0.1:8080/health/live
|
||||
kubectl logs deploy/stellaops-issuer-directory | grep 'IssuerDirectory Mongo connected'
|
||||
```
|
||||
Prometheus should begin scraping `issuer_directory_changes_total` and related metrics (labels: `tenant`, `issuer`, `action`).
|
||||
|
||||
## 4 · Operational checklist
|
||||
- **Secrets:** Connection strings live in `issuer-directory-secrets` (Helm) or an `.env` file stored in your secrets vault (Compose). Rotate credentials via secret update + pod restart.
|
||||
- **Audit streams:** Confirm `issuer_directory_audit` collection receives entries when CRUD operations run; export logs for compliance.
|
||||
- **Tenants:** The service enforces the `X-StellaOps-Tenant` header. For multi-tenant staging, configure the reverse proxy to inject the correct tenant or issue scoped tokens.
|
||||
- **CSAF seeds:** `ISSUER_DIRECTORY_SEED_CSAF=true` replays `data/csaf-publishers.json` on startup. Set to `false` once production tenants are fully managed, or override `csafSeedPath` with a curated bundle.
|
||||
- **Release alignment:** Before promotion, run `deploy/tools/validate-profiles.sh` to lint Compose/Helm bundles, then verify the new `issuer-directory-web` entry in `deploy/releases/2025.10-edge.yaml` (or the relevant manifest) matches the channel you intend to ship.
|
||||
71
docs/modules/issuer-directory/operations/offline-kit.md
Normal file
71
docs/modules/issuer-directory/operations/offline-kit.md
Normal file
@@ -0,0 +1,71 @@
|
||||
# Issuer Directory Offline Kit Notes
|
||||
|
||||
## Purpose
|
||||
Operators bundling Stella Ops for fully disconnected environments must include the Issuer Directory service so VEX Lens, Excititor, and Policy Engine can resolve trusted issuers without reaching external registries.
|
||||
|
||||
## 1 · Bundle contents
|
||||
Include the following artefacts in your Offline Update Kit staging tree:
|
||||
|
||||
| Path (within kit) | Source | Notes |
|
||||
| --- | --- | --- |
|
||||
| `images/issuer-directory-web.tar` | `registry.stella-ops.org/stellaops/issuer-directory-web` (digest from `deploy/releases/<channel>.yaml`) | Export with `crane pull --format=tar` or `skopeo copy docker://... oci:...`. |
|
||||
| `config/issuer-directory/issuer-directory.yaml` | `etc/issuer-directory.yaml` (customised) | Replace Authority issuer, tenant header, and log level as required. |
|
||||
| `config/issuer-directory/csaf-publishers.json` | `src/IssuerDirectory/StellaOps.IssuerDirectory/data/csaf-publishers.json` or regional override | Operators can edit before import to add private publishers. |
|
||||
| `secrets/issuer-directory/connection.env` | Secure secret store export (`ISSUER_DIRECTORY_MONGO_CONNECTION_STRING=`) | Encrypt at rest; Offline Kit importer places it in the Compose/Helm secret. |
|
||||
| `docs/issuer-directory/deployment.md` | `docs/modules/issuer-directory/operations/deployment.md` | Ship alongside kit documentation for operators. |
|
||||
|
||||
> **Image digests:** Update `deploy/releases/2025.10-edge.yaml` (or the relevant manifest) with the exact digest before building the kit so `offline-manifest.json` can assert integrity.
|
||||
|
||||
## 2 · Compose (air-gapped) deployment
|
||||
1. Load images locally on the target:
|
||||
```bash
|
||||
docker load < images/issuer-directory-web.tar
|
||||
```
|
||||
2. Copy Compose artefacts:
|
||||
```bash
|
||||
cp deploy/compose/docker-compose.airgap.yaml .
|
||||
cp deploy/compose/env/airgap.env.example airgap.env
|
||||
cp secrets/issuer-directory/connection.env issuer-directory.mongo.env
|
||||
```
|
||||
3. Update `airgap.env` with site-specific values (Authority issuer, tenant, ports) and remove outbound endpoints.
|
||||
4. Bring up the service:
|
||||
```bash
|
||||
docker compose \
|
||||
--env-file airgap.env \
|
||||
--env-file issuer-directory.mongo.env \
|
||||
-f docker-compose.airgap.yaml up -d issuer-directory
|
||||
```
|
||||
5. Verify via `curl -k https://issuer-directory.airgap.local:8447/health/live`.
|
||||
|
||||
## 3 · Kubernetes (air-gapped) deployment
|
||||
1. Pre-load the OCI image into your local registry mirror and update `values-airgap.yaml` to reference it.
|
||||
2. Apply the secret bundled in the kit:
|
||||
```bash
|
||||
kubectl apply -f secrets/issuer-directory/connection-secret.yaml
|
||||
```
|
||||
(Generate this file during packaging with `kubectl create secret generic issuer-directory-secrets ... --dry-run=client -o yaml`.)
|
||||
3. Install/upgrade the chart:
|
||||
```bash
|
||||
helm upgrade --install stellaops deploy/helm/stellaops \
|
||||
-f deploy/helm/stellaops/values-airgap.yaml \
|
||||
--set services.issuer-directory.env.ISSUERDIRECTORY__AUTHORITY__ISSUER=https://authority.airgap.local/realms/stellaops
|
||||
```
|
||||
4. Confirm `issuer_directory_changes_total` is visible in your offline Prometheus stack.
|
||||
|
||||
## 4 · Import workflow summary
|
||||
1. Run `ops/offline-kit/build_offline_kit.py` with the additional artefacts noted above.
|
||||
2. Sign the resulting tarball and manifest (Cosign) and record the SHA-256 in the release notes.
|
||||
3. At the destination:
|
||||
```bash
|
||||
stellaops-cli offline kit import \
|
||||
--bundle stella-ops-offline-kit-<version>-airgap.tar.gz \
|
||||
--destination /opt/stellaops/offline-kit
|
||||
```
|
||||
4. Follow the Compose or Helm path depending on your topology.
|
||||
|
||||
## 5 · Post-import validation
|
||||
- [ ] `docker images | grep issuer-directory` (Compose) or `kubectl get deploy stellaops-issuer-directory` (Helm) shows the expected version.
|
||||
- [ ] `csaf-publishers.json` in the container matches the offline bundle (hash check).
|
||||
- [ ] `/issuer-directory/issuers` returns global seed issuers (requires token with `issuer-directory:read` scope).
|
||||
- [ ] Audit collection receives entries when you create/update issuers offline.
|
||||
- [ ] Offline kit manifest (`offline-manifest.json`) lists `images/issuer-directory-web.tar` and `config/issuer-directory/issuer-directory.yaml` with SHA-256 values you recorded during packaging.
|
||||
@@ -8,13 +8,20 @@
|
||||
| SCANNER-DOCS-0002 | DONE (2025-11-02) | Docs Guild | Keep scanner benchmark comparisons (Trivy/Grype/Snyk) and deep-dive matrix current with source references. | Coordinate with docs/benchmarks owners |
|
||||
| SCANNER-DOCS-0003 | TODO | Docs Guild, Product Guild | Gather Windows/macOS analyzer demand signals and record findings in `docs/benchmarks/scanner/windows-macos-demand.md`. | Coordinate with Product Marketing & Sales enablement |
|
||||
| SCANNER-ENG-0008 | TODO | EntryTrace Guild, QA Guild | Maintain EntryTrace heuristic cadence per `docs/benchmarks/scanner/scanning-gaps-stella-misses-from-competitors.md`. | Include quarterly pattern review + explain trace updates |
|
||||
| SCANNER-ENG-0009 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-001..012 | Deliver Ruby analyzer parity and observation pipeline per gap doc (lockfiles, runtime graph, policy signals). | Design complete; fixtures published; CLI/Offline docs updated. |
|
||||
| SCANNER-ENG-0009 | DOING (2025-11-02) | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-001..012 | Deliver Ruby analyzer parity and observation pipeline per gap doc (lockfiles, runtime graph, policy signals). | Design complete; fixtures published; CLI/Offline docs updated. |
|
||||
| SCANNER-ENG-0010 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-001..012 | Ship PHP analyzer pipeline (composer lock, autoload graph, capability signals) to close comparison gaps. | Analyzer + policy integration merged; fixtures + docs aligned. |
|
||||
| SCANNER-ENG-0011 | TODO | Language Analyzer Guild | — | Scope Deno runtime analyzer (lockfile resolver, import graphs) based on competitor techniques. | Design doc approved; backlog split into analyzer/runtime work. |
|
||||
| SCANNER-ENG-0012 | TODO | Language Analyzer Guild | — | Evaluate Dart analyzer requirements (pubspec parsing, AOT artifacts) to restore parity. | Investigation summary + task split filed with Dart guild. |
|
||||
| SCANNER-ENG-0013 | TODO | Swift Analyzer Guild | — | Plan Swift Package Manager coverage (Package.resolved, xcframeworks, runtime hints) with policy hooks. | Design brief approved; backlog seeded with analyzer tasks. |
|
||||
| SCANNER-ENG-0014 | TODO | Runtime Guild, Zastava Guild | — | Align Kubernetes/VM target coverage roadmap between Scanner and Zastava per comparison findings. | Joint roadmap doc approved; cross-guild tasks opened. |
|
||||
| SCANNER-ENG-0015 | TODO | Export Center Guild, Scanner Guild | — | Document DSSE/Rekor operator enablement guidance and rollout levers surfaced in gap analysis. | Playbook drafted; Export Center backlog updated. |
|
||||
| SCANNER-ENG-0016 | DOING (2025-11-02) | Ruby Analyzer Guild (Lockfile Squad) | Implement `RubyLockCollector` and vendor cache ingestion per design §4.1–4.3. | Coordinate fixtures under `fixtures/lang/ruby/lockfiles`; target alpha by Sprint 21. |
|
||||
| SCANNER-ENG-0017 | TODO | Ruby Analyzer Guild (Runtime Squad) | Build runtime require/autoload graph builder with tree-sitter Ruby per design §4.4. | Deliver edges with reason codes and integrate EntryTrace hints. |
|
||||
| SCANNER-ENG-0018 | TODO | Ruby Analyzer Guild (Capability Squad) | Emit Ruby capability and framework surface signals as defined in design §4.5. | Policy predicates prototyped; capability records available in SBOM overlays. |
|
||||
| SCANNER-ENG-0019 | TODO | Ruby Analyzer Guild, CLI Guild | Ship Ruby CLI verbs (`stella ruby inspect\|resolve`) and Offline Kit packaging per design §4.6. | CLI commands documented; offline manifest updated; e2e tests pass. |
|
||||
| SCANNER-LIC-0001 | DOING (2025-11-02) | Scanner Guild, Legal Guild | Vet tree-sitter Ruby licensing and Offline Kit packaging requirements. | SPDX review complete; packaging plan approved. |
|
||||
| SCANNER-POLICY-0001 | TODO | Policy Guild, Ruby Analyzer Guild | Define Policy Engine predicates for Ruby groups/capabilities and align lattice weights. | Policy schema merged; tests cover new predicates. |
|
||||
| SCANNER-CLI-0001 | TODO | CLI Guild, Ruby Analyzer Guild | Coordinate CLI UX/help text for new Ruby verbs and update CLI docs. | CLI help + docs updated; golden outputs recorded. |
|
||||
| SCANNER-ENG-0002 | TODO | Scanner Guild, CLI Guild | Design Node.js lockfile collector/CLI validator per `docs/benchmarks/scanner/scanning-gaps-stella-misses-from-competitors.md`. | Capture Surface & policy requirements before implementation |
|
||||
| SCANNER-ENG-0003 | TODO | Python Analyzer Guild, CLI Guild | Design Python lockfile/editable install parity checks per `docs/benchmarks/scanner/scanning-gaps-stella-misses-from-competitors.md`. | Include policy predicates & CLI story in design |
|
||||
| SCANNER-ENG-0004 | TODO | Java Analyzer Guild, CLI Guild | Design Java lockfile ingestion & validation per `docs/benchmarks/scanner/scanning-gaps-stella-misses-from-competitors.md`. | Cover Gradle/SBT collectors, CLI verb, policy hooks |
|
||||
|
||||
137
docs/modules/scanner/design/ruby-analyzer.md
Normal file
137
docs/modules/scanner/design/ruby-analyzer.md
Normal file
@@ -0,0 +1,137 @@
|
||||
# Ruby Analyzer Parity Design (SCANNER-ENG-0009)
|
||||
|
||||
**Status:** Draft • Owner: Ruby Analyzer Guild • Updated: 2025-11-02
|
||||
|
||||
## 1. Goals & Non-Goals
|
||||
- **Goals**
|
||||
- Deterministically catalogue Ruby application dependencies (Gemfile/Gemfile.lock, vendored specs, .gem archives) for container layers and local workspaces.
|
||||
- Build runtime usage graphs (require/require_relative, Zeitwerk autoloads, Rack boot chains, Sidekiq/ActiveJob schedulers).
|
||||
- Emit capability signals (exec/fs/net/serialization, framework fingerprints, job schedulers) consumable by Policy Engine and explain traces.
|
||||
- Provide CLI verbs (`stella ruby inspect`, `stella ruby resolve`) and Offline Kit parity for air-gapped deployments.
|
||||
- **Non-Goals**
|
||||
- Shipping dynamic runtime profilers (log-based or APM) in this iteration.
|
||||
- Implementing UI changes beyond exposing explain traces the Policy/UI guilds already support.
|
||||
|
||||
## 2. Scope & Inputs
|
||||
| Input | Location | Notes |
|
||||
|-------|----------|-------|
|
||||
| Gemfile / Gemfile.lock | Source tree, layer filesystem | Handle multiple apps per repo; honour Bundler groups. |
|
||||
| Vendor bundles (`vendor/bundle`, `.bundle/config`) | Layer filesystem | Needed for offline/built images; avoid double-counting platform-specific gems. |
|
||||
| `.gemspec` files / cached specs | `~/.bundle/cache`, `vendor/cache`, gems in layers | Support deterministic parsing without executing gem metadata. |
|
||||
| Framework configs | `config/application.rb`, `config/routes.rb`, `config/sidekiq.yml`, etc. | Feed framework surface mapper. |
|
||||
| Container metadata | Layer digests via RustFS CAS | Support incremental composition per layer. |
|
||||
|
||||
## 3. High-Level Architecture
|
||||
```
|
||||
┌─────────────────────────┐ ┌────────────────────┐
|
||||
│ Bundler Lock Collector │───────▶│ Package Graph │
|
||||
└─────────────────────────┘ │ Aggregator │
|
||||
└─────────┬──────────┘
|
||||
┌─────────────────────────┐ │
|
||||
│ Gemspec Inspector │───────────────▶│
|
||||
└─────────────────────────┘ │
|
||||
▼
|
||||
┌────────────────────┐
|
||||
┌─────────────────────────┐ │ Runtime Graph │
|
||||
│ Require/Autoload Scan │───────▶│ Builder (Zeitwerk) │
|
||||
└─────────────────────────┘ └─────────┬──────────┘
|
||||
│
|
||||
▼
|
||||
┌────────────────────┐
|
||||
│ Capability Emitter │
|
||||
└─────────┬──────────┘
|
||||
│
|
||||
▼
|
||||
┌────────────────────┐
|
||||
│ SBOM Writer │
|
||||
│ + Policy Signals │
|
||||
└────────────────────┘
|
||||
```
|
||||
|
||||
## 4. Detailed Components
|
||||
### 4.1 Bundler Lock Collector
|
||||
- Parse `Gemfile.lock` deterministically (no network) using new `RubyLockCollector` under `StellaOps.Scanner.Analyzers.Lang.Ruby`.
|
||||
- Support alternative manifests (`gems.rb`, `gems.locked`) and workspace overrides.
|
||||
- Emit package nodes with fields: `name`, `version`, `source` (path/git/rubygems), `bundlerGroup[]`, `platform`, `declaredOnly` flag.
|
||||
- Implementation:
|
||||
- Reuse parsing strategy from Trivy (`pkg/fanal/analyzer/language/ruby/bundler`) but port to C# with streaming reader and stable ordering.
|
||||
- Integrate with Surface.Validation to enforce size limits and tenant allowlists for git/path sources.
|
||||
|
||||
### 4.2 Gemspec Inspector
|
||||
- Scan cached specs under `vendor/cache`, `.bundle/cache`, and gem directories to pick up transitive packages when lockfiles missing.
|
||||
- Parse without executing Ruby by using a deterministic DSL subset (similar to Trivy gemspec parser).
|
||||
- Link results to lockfile entries by `<name, version, platform>`; create new records flagged `InferredFromSpec` when lockfile absent.
|
||||
|
||||
### 4.3 Package Aggregator
|
||||
- New orchestrator `RubyPackageAggregator` merges lock and gemspec data with installed gems from container layers (once runtime analyzer ships).
|
||||
- Precedence: Installed > Lockfile > Gemspec.
|
||||
- Deduplicate by package key (name+version+platform) and attach provenance bits for Policy Engine.
|
||||
|
||||
### 4.4 Runtime Graph Builder
|
||||
- Static analysis for `require`, `require_relative`, `autoload`, Zeitwerk conventions, and Rails initialisers.
|
||||
- Implementation phases:
|
||||
1. Parse AST using tree-sitter Ruby embedded under `StellaOps.Scanner.Analyzers.Lang.Ruby.Syntax` with deterministic bindings.
|
||||
2. Generate edges `entrypoint -> file` and `file -> package` with reason codes (`require-static`, `autoload-zeitwerk`, `autoload-const_missing`).
|
||||
3. Identify framework entrypoints (Rails controllers, Rack middleware, Sidekiq workers) via heuristics defined in `SCANNER-ANALYZERS-RUBY-28-*` tasks.
|
||||
- Output merges with EntryTrace usage hints to support runtime filtering in Policy Engine.
|
||||
|
||||
### 4.5 Capability & Surface Signals
|
||||
- Emit evidence documents for:
|
||||
- Process/exec usage (`Kernel.system`, `` `cmd` ``, `Open3`).
|
||||
- Network clients (`Net::HTTP`, `Faraday`, `Redis`, `ActiveRecord::Base.establish_connection`).
|
||||
- Serialization sinks (`Marshal.load`, `YAML.load`, `Oj.load`).
|
||||
- Job schedulers (Sidekiq, Resque, ActiveJob, Whenever, Clockwork) with schedule metadata.
|
||||
- Capability records flow to Policy Engine under `capability.ruby.*` namespaces to allow gating on dangerous constructs.
|
||||
|
||||
### 4.6 CLI & Offline Integration
|
||||
- Add CLI verbs:
|
||||
- `stella ruby inspect <path>` – runs collector locally, outputs JSON summary with provenance.
|
||||
- `stella ruby resolve --image <ref>` – fetches scan artifacts, prints dependency graph grouped by bundler group/platform.
|
||||
- Ship analyzer DLLs and rules in Offline Kit manifest; include autoload/zeitwerk fingerprints and heuristics hashed for determinism.
|
||||
|
||||
## 5. Data Contracts
|
||||
| Artifact | Shape | Consumer |
|
||||
|----------|-------|----------|
|
||||
| `ruby_packages.json` | Array `{id, name, version, source, provenance, groups[], platform}` | SBOM Composer, Policy Engine |
|
||||
| `ruby_runtime_edges.json` | Edges `{from, to, reason, confidence}` | EntryTrace overlay, Policy explain traces |
|
||||
| `ruby_capabilities.json` | Capability `{kind, location, evidenceHash, params}` | Policy Engine (capability predicates) |
|
||||
|
||||
All records follow AOC appender rules (immutable, tenant-scoped) and include `hash`, `layerDigest`, and `timestamp` normalized to UTC ISO-8601.
|
||||
|
||||
## 6. Testing Strategy
|
||||
- **Fixtures**: Extend `fixtures/lang/ruby` with Rails, Sinatra, Sidekiq, Rack, container images (with/without vendor cache).
|
||||
- **Determinism**: Golden snapshots for package lists and capability outputs across repeated runs.
|
||||
- **Integration**: Worker e2e to ensure per-layer aggregation; CLI golden outputs (`stella ruby inspect`).
|
||||
- **Policy**: Unit tests verifying new predicates (`ruby.group`, `ruby.capability.exec`, etc.) in Policy Engine test suite.
|
||||
|
||||
## 7. Rollout Plan & Dependencies
|
||||
1. Implement collectors and aggregators (SCANNER-ANALYZERS-RUBY-28-001..004).
|
||||
2. Add capability analyzer and observations (SCANNER-ANALYZERS-RUBY-28-005..008).
|
||||
3. Wire CLI commands and Offline Kit packaging (SCANNER-ANALYZERS-RUBY-28-011).
|
||||
4. Update docs (DOCS-SCANNER-BENCH-62-009 follow-up) once analyzer alpha ready.
|
||||
|
||||
**Dependencies**
|
||||
- Tree-sitter Ruby grammar inclusion (needs Offline Kit packaging and licensing check).
|
||||
- Policy Engine support for new predicates and capability schemas.
|
||||
- Surface.Validation updates for git/path gem sources and secret resolution.
|
||||
|
||||
## 8. Open Questions
|
||||
- Do we require dynamic runtime logs (e.g., `ActiveSupport::Notifications`) for confidence boosts? (defer to future iteration)
|
||||
- Should we enforce signed gem provenance in MVP? Pending Product decision.
|
||||
- Need alignment with Export Center on Ruby-specific manifest emissions.
|
||||
|
||||
## 9. Licensing & Offline Packaging (SCANNER-LIC-0001)
|
||||
- **License**: tree-sitter core and `tree-sitter-ruby` grammar are MIT licensed (confirmed via upstream LICENSE files retrieved 2025-11-02).
|
||||
- **Obligations**:
|
||||
1. Include both MIT license texts in `/third-party-licenses/` and in Offline Kit manifests.
|
||||
2. Update `NOTICE.md` to acknowledge embedded grammars per company policy.
|
||||
3. Record the grammar commit hashes in build metadata; regenerate generated C/WASM artifacts deterministically.
|
||||
4. Ensure build pipeline uses `tree-sitter-cli` only as a build-time tool (not redistributed) to avoid extra licensing obligations.
|
||||
- **Deliverables**:
|
||||
- SCANNER-LIC-0001 to capture Legal sign-off and update packaging scripts.
|
||||
- Export Center to mirror license files into Offline Kit bundle.
|
||||
|
||||
---
|
||||
*References:*
|
||||
- Trivy: `pkg/fanal/analyzer/language/ruby/bundler`, `pkg/fanal/analyzer/language/ruby/gemspec`
|
||||
- Gap analysis: `docs/benchmarks/scanner/scanning-gaps-stella-misses-from-competitors.md#ruby-analyzer-parity-trivy-grype-snyk`
|
||||
@@ -285,6 +285,31 @@ clients:
|
||||
serviceIdentity: cartographer
|
||||
```
|
||||
|
||||
### 3.3 Delegated service accounts
|
||||
|
||||
Add delegated service accounts when automation needs scoped tokens with shorter lifetimes:
|
||||
|
||||
```yaml
|
||||
delegation:
|
||||
quotas:
|
||||
maxActiveTokens: 50
|
||||
serviceAccounts:
|
||||
- accountId: "svc-observer"
|
||||
tenant: "tenant-default"
|
||||
allowedScopes: [ "jobs:read", "findings:read" ]
|
||||
authorizedClients: [ "export-center-worker" ]
|
||||
|
||||
tenants:
|
||||
- name: "tenant-default"
|
||||
delegation:
|
||||
maxActiveTokens: 25
|
||||
```
|
||||
|
||||
- Clients request delegated tokens by supplying `service_account=<accountId>` (and optional `delegation_actor`) alongside the usual client-credentials payload.
|
||||
- Authority enforces both tenant and service-account quotas. Exceeding either returns `invalid_request` and records `delegation.quota.exceeded` in audit events.
|
||||
- Only scopes listed in `allowedScopes` are granted; `authorizedClients` restricts which OAuth clients may impersonate the delegate.
|
||||
- Delegated tokens include `stellaops:service_account` and an `act` claim. The token store persists `tokenKind = "service_account"`, `serviceAccountId`, and the normalized actor chain for offline auditing.
|
||||
|
||||
---
|
||||
|
||||
## 4 · Operational safeguards
|
||||
@@ -323,10 +348,12 @@ clients:
|
||||
- [ ] Claim transforms enforce `serviceIdentity` for `effective:write`.
|
||||
- [ ] Claim transforms enforce `serviceIdentity` for `graph:write`.
|
||||
- [ ] Concelier/Excititor smoke tests cover missing tenant rejection.
|
||||
- [ ] Delegation quotas configured (`delegation.quotas.maxActiveTokens`, `tenants[].delegation.maxActiveTokens` where required).
|
||||
- [ ] Service account seeds (`delegation.serviceAccounts`) reviewed for allowed scopes and authorized clients; audit dashboards show `delegation.service_account` usage.
|
||||
- [ ] Offline kit credentials reviewed for least privilege.
|
||||
- [ ] Audit/monitoring guidance validated with Observability Guild.
|
||||
- [ ] Authority Core sign-off recorded (owner: @authority-core, due 2025-10-28).
|
||||
|
||||
---
|
||||
|
||||
*Last updated: 2025-10-27 (Sprint 19).*
|
||||
*Last updated: 2025-11-02 (Sprint 19).*
|
||||
|
||||
@@ -65,6 +65,28 @@ notifications:
|
||||
scope: "notify.escalate"
|
||||
requireAdminScope: true
|
||||
|
||||
delegation:
|
||||
quotas:
|
||||
# Maximum concurrent delegated (service account) tokens per tenant.
|
||||
maxActiveTokens: 50
|
||||
serviceAccounts:
|
||||
- accountId: "svc-observer"
|
||||
tenant: "tenant-default"
|
||||
displayName: "Observability Exporter"
|
||||
description: "Delegated identity used by Export Center to read findings."
|
||||
enabled: true
|
||||
allowedScopes:
|
||||
- "jobs:read"
|
||||
- "findings:read"
|
||||
authorizedClients:
|
||||
- "export-center-worker"
|
||||
# - accountId: "svc-airgap-import"
|
||||
# tenant: "tenant-default"
|
||||
# displayName: "Airgap Import Service Account"
|
||||
# enabled: true
|
||||
# allowedScopes: [ "airgap:import", "airgap:status:read" ]
|
||||
# authorizedClients: [ "airgap-importer" ]
|
||||
|
||||
apiLifecycle:
|
||||
legacyAuth:
|
||||
enabled: true
|
||||
@@ -428,6 +450,9 @@ tenants:
|
||||
scopes: [ "notify.viewer", "notify.operator" ]
|
||||
notify-admin:
|
||||
scopes: [ "notify.viewer", "notify.operator", "notify.admin" ]
|
||||
delegation:
|
||||
# Override the default maxActiveTokens for this tenant (optional).
|
||||
maxActiveTokens: 25
|
||||
observability-viewer:
|
||||
scopes: [ "obs:read", "timeline:read", "evidence:read", "attest:read" ]
|
||||
observability-investigator:
|
||||
|
||||
6
etc/secrets/issuer-directory.mongo.secret.example
Normal file
6
etc/secrets/issuer-directory.mongo.secret.example
Normal file
@@ -0,0 +1,6 @@
|
||||
# Replace this value with the MongoDB connection string used by Issuer Directory.
|
||||
# Keep the file out of version control; mount it via docker-compose env_file or
|
||||
# your secrets manager when running the service. Compose expects the helper
|
||||
# variable below and injects it into ISSUERDIRECTORY__MONGO__CONNECTIONSTRING
|
||||
# at container runtime.
|
||||
ISSUER_DIRECTORY_MONGO_CONNECTION_STRING=mongodb://stellaops:change-me@mongo:27017
|
||||
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"schemaVersion": "1.0",
|
||||
"id": "stellaops.analyzer.lang.ruby",
|
||||
"displayName": "StellaOps Ruby Analyzer",
|
||||
"version": "0.1.0",
|
||||
"requiresRestart": true,
|
||||
"entryPoint": {
|
||||
"type": "dotnet",
|
||||
"assembly": "StellaOps.Scanner.Analyzers.Lang.Ruby.dll",
|
||||
"typeName": "StellaOps.Scanner.Analyzers.Lang.Ruby.RubyAnalyzerPlugin"
|
||||
},
|
||||
"capabilities": [
|
||||
"language-analyzer",
|
||||
"ruby"
|
||||
],
|
||||
"metadata": {
|
||||
"org.stellaops.analyzer.language": "ruby",
|
||||
"org.stellaops.analyzer.kind": "language",
|
||||
"org.stellaops.restart.required": "true"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,35 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.AdvisoryAI.Orchestration;
|
||||
using StellaOps.AdvisoryAI.Tools;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.DependencyInjection;
|
||||
|
||||
/// <summary>
/// Dependency-injection registration helpers for the deterministic advisory toolset
/// and the advisory pipeline orchestrator.
/// </summary>
public static class ToolsetServiceCollectionExtensions
{
    /// <summary>
    /// Registers <see cref="IDeterministicToolset"/> as a singleton unless an
    /// implementation has already been registered.
    /// </summary>
    /// <param name="services">Service collection to register into; must not be null.</param>
    /// <returns>The same <paramref name="services"/> instance, for chaining.</returns>
    public static IServiceCollection AddAdvisoryDeterministicToolset(this IServiceCollection services)
    {
        ArgumentNullException.ThrowIfNull(services);

        services.TryAddSingleton<IDeterministicToolset, DeterministicToolset>();
        return services;
    }

    /// <summary>
    /// Registers the advisory pipeline: the deterministic toolset, configured
    /// <see cref="AdvisoryPipelineOptions"/>, and the pipeline orchestrator singleton.
    /// </summary>
    /// <param name="services">Service collection to register into; must not be null.</param>
    /// <param name="configure">Optional caller customisation applied after defaults.</param>
    /// <returns>The same <paramref name="services"/> instance, for chaining.</returns>
    public static IServiceCollection AddAdvisoryPipeline(this IServiceCollection services, Action<AdvisoryPipelineOptions>? configure = null)
    {
        ArgumentNullException.ThrowIfNull(services);

        services.AddAdvisoryDeterministicToolset();

        // Apply built-in defaults first so a caller-supplied delegate can override them.
        var builder = services.AddOptions<AdvisoryPipelineOptions>();
        builder.Configure(static options => options.ApplyDefaults());
        if (configure is not null)
        {
            builder.Configure(configure);
        }

        services.TryAddSingleton<IAdvisoryPipelineOrchestrator, AdvisoryPipelineOrchestrator>();
        return services;
    }
}
|
||||
@@ -0,0 +1,27 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Orchestration;
|
||||
|
||||
/// <summary>
/// Queue payload sent to workers to execute a pipeline plan.
/// </summary>
public sealed class AdvisoryPipelineExecutionMessage
{
    /// <summary>
    /// Creates a message referencing a previously cached pipeline plan.
    /// </summary>
    /// <param name="planCacheKey">Cache key of the prepared plan; must be non-empty.</param>
    /// <param name="request">The originating task request; must not be null.</param>
    /// <param name="planMetadata">Metadata snapshot captured at planning time; must not be null.</param>
    public AdvisoryPipelineExecutionMessage(
        string planCacheKey,
        AdvisoryTaskRequest request,
        IReadOnlyDictionary<string, string> planMetadata)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(planCacheKey);
        ArgumentNullException.ThrowIfNull(request);
        ArgumentNullException.ThrowIfNull(planMetadata);

        PlanCacheKey = planCacheKey;
        Request = request;
        PlanMetadata = planMetadata;
    }

    /// <summary>Cache key identifying the plan a worker should load.</summary>
    public string PlanCacheKey { get; }

    /// <summary>The task request that produced the plan.</summary>
    public AdvisoryTaskRequest Request { get; }

    /// <summary>Immutable metadata describing the plan.</summary>
    public IReadOnlyDictionary<string, string> PlanMetadata { get; }
}
|
||||
@@ -0,0 +1,159 @@
|
||||
using System.Collections.ObjectModel;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Orchestration;
|
||||
|
||||
/// <summary>
/// Per-task configuration for the advisory pipeline. Ships built-in defaults for the
/// summary, conflict, and remediation tasks; callers may replace or extend entries
/// before <see cref="ApplyDefaults"/> normalises them.
/// </summary>
public sealed class AdvisoryPipelineOptions
{
    private static readonly AdvisoryTaskConfiguration SummaryDefaults = new()
    {
        PromptTemplate = "prompts/advisory/summary.liquid",
        VectorQueries = { "Summarize key facts", "What is impacted?" },
        VectorTopK = 5,
        StructuredMaxChunks = 25,
        SbomMaxTimelineEntries = 10,
        SbomMaxDependencyPaths = 20,
        IncludeEnvironmentFlags = true,
        IncludeBlastRadius = true,
        Budget = new AdvisoryTaskBudget { PromptTokens = 2048, CompletionTokens = 512 },
    };

    private static readonly AdvisoryTaskConfiguration ConflictDefaults = new()
    {
        PromptTemplate = "prompts/advisory/conflict.liquid",
        VectorQueries = { "Highlight conflicting statements", "Where do sources disagree?" },
        VectorTopK = 6,
        StructuredMaxChunks = 30,
        SbomMaxTimelineEntries = 8,
        SbomMaxDependencyPaths = 15,
        IncludeEnvironmentFlags = true,
        IncludeBlastRadius = false,
        Budget = new AdvisoryTaskBudget { PromptTokens = 2048, CompletionTokens = 512 },
    };

    private static readonly AdvisoryTaskConfiguration RemediationDefaults = new()
    {
        PromptTemplate = "prompts/advisory/remediation.liquid",
        VectorQueries = { "Provide remediation steps", "Outline mitigations and fixes" },
        VectorTopK = 6,
        StructuredMaxChunks = 35,
        SbomMaxTimelineEntries = 12,
        SbomMaxDependencyPaths = 25,
        IncludeEnvironmentFlags = true,
        IncludeBlastRadius = true,
        Budget = new AdvisoryTaskBudget { PromptTokens = 2048, CompletionTokens = 640 },
    };

    /// <summary>
    /// Mutable task-type → configuration map, pre-populated with clones of the
    /// built-in defaults so edits never leak into the shared static templates.
    /// </summary>
    public IDictionary<AdvisoryTaskType, AdvisoryTaskConfiguration> Tasks { get; } =
        new Dictionary<AdvisoryTaskType, AdvisoryTaskConfiguration>
        {
            [AdvisoryTaskType.Summary] = SummaryDefaults.Clone(),
            [AdvisoryTaskType.Conflict] = ConflictDefaults.Clone(),
            [AdvisoryTaskType.Remediation] = RemediationDefaults.Clone(),
        };

    /// <summary>
    /// Restores any missing built-in task entries, then normalises every entry's
    /// field-level defaults. Safe to call repeatedly (idempotent).
    /// </summary>
    public void ApplyDefaults()
    {
        EnsureTask(AdvisoryTaskType.Summary, SummaryDefaults);
        EnsureTask(AdvisoryTaskType.Conflict, ConflictDefaults);
        EnsureTask(AdvisoryTaskType.Remediation, RemediationDefaults);

        foreach (var configuration in Tasks.Values)
        {
            configuration.ApplyDefaults();
        }
    }

    /// <summary>
    /// Returns the normalised configuration for <paramref name="taskType"/>.
    /// </summary>
    /// <exception cref="InvalidOperationException">No entry is registered for the task type.</exception>
    public AdvisoryTaskConfiguration GetConfiguration(AdvisoryTaskType taskType)
    {
        ApplyDefaults();
        if (!Tasks.TryGetValue(taskType, out var config))
        {
            throw new InvalidOperationException($"No configuration registered for task type '{taskType}'.");
        }

        config.ApplyDefaults();
        return config;
    }

    // Adds a cloned default entry only when the task type has no registered configuration.
    private void EnsureTask(AdvisoryTaskType taskType, AdvisoryTaskConfiguration defaults)
    {
        if (!Tasks.ContainsKey(taskType))
        {
            Tasks[taskType] = defaults.Clone();
        }
    }
}
|
||||
|
||||
/// <summary>
/// Tunable knobs for a single advisory pipeline task: prompt template, retrieval
/// limits, SBOM context limits, flag toggles, and token budget.
/// </summary>
public sealed class AdvisoryTaskConfiguration
{
    /// <summary>Relative path of the Liquid prompt template.</summary>
    public string PromptTemplate { get; set; } = string.Empty;

    /// <summary>Queries issued against the vector retriever.</summary>
    public List<string> VectorQueries { get; set; } = new();

    /// <summary>Top-K cutoff for vector retrieval.</summary>
    public int VectorTopK { get; set; }

    /// <summary>Maximum structured chunks to retrieve.</summary>
    public int StructuredMaxChunks { get; set; }

    /// <summary>Maximum SBOM timeline entries to include.</summary>
    public int SbomMaxTimelineEntries { get; set; }

    /// <summary>Maximum SBOM dependency paths to include.</summary>
    public int SbomMaxDependencyPaths { get; set; }

    /// <summary>Whether environment flags are included in the context.</summary>
    public bool IncludeEnvironmentFlags { get; set; }

    /// <summary>Whether blast-radius data is included in the context.</summary>
    public bool IncludeBlastRadius { get; set; }

    /// <summary>Prompt/completion token budget for the task.</summary>
    public AdvisoryTaskBudget Budget { get; set; } = new();

    /// <summary>
    /// Produces a deep copy: the query list and budget are duplicated so edits to
    /// the clone never affect the source instance.
    /// </summary>
    internal AdvisoryTaskConfiguration Clone()
        => new()
        {
            PromptTemplate = PromptTemplate,
            VectorQueries = new List<string>(VectorQueries),
            VectorTopK = VectorTopK,
            StructuredMaxChunks = StructuredMaxChunks,
            SbomMaxTimelineEntries = SbomMaxTimelineEntries,
            SbomMaxDependencyPaths = SbomMaxDependencyPaths,
            IncludeEnvironmentFlags = IncludeEnvironmentFlags,
            IncludeBlastRadius = IncludeBlastRadius,
            Budget = new AdvisoryTaskBudget
            {
                PromptTokens = Budget.PromptTokens,
                CompletionTokens = Budget.CompletionTokens,
            },
        };

    /// <summary>
    /// Normalises unset or out-of-range values to safe defaults. Idempotent.
    /// </summary>
    internal void ApplyDefaults()
    {
        if (string.IsNullOrWhiteSpace(PromptTemplate))
        {
            PromptTemplate = "prompts/advisory/default.liquid";
        }

        if (VectorQueries.Count == 0)
        {
            VectorQueries.Add("Provide relevant advisory details");
        }

        if (VectorTopK <= 0)
        {
            VectorTopK = 5;
        }

        if (StructuredMaxChunks <= 0)
        {
            StructuredMaxChunks = 20;
        }

        // Negative SBOM limits are clamped to zero; zero itself is a valid "off" value.
        SbomMaxTimelineEntries = Math.Max(SbomMaxTimelineEntries, 0);
        SbomMaxDependencyPaths = Math.Max(SbomMaxDependencyPaths, 0);

        Budget ??= new AdvisoryTaskBudget();
    }

    /// <summary>Read-only view over the configured vector queries.</summary>
    internal IReadOnlyList<string> GetVectorQueries()
        => VectorQueries.AsReadOnly();
}
|
||||
@@ -0,0 +1,230 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Globalization;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.AdvisoryAI.Abstractions;
|
||||
using StellaOps.AdvisoryAI.Context;
|
||||
using StellaOps.AdvisoryAI.Tools;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Orchestration;
|
||||
|
||||
/// <summary>
/// Builds deterministic <see cref="AdvisoryTaskPlan"/> instances by combining structured
/// advisory chunks, per-query vector matches, and optional SBOM context, then deriving
/// a cache key that is stable for identical inputs.
/// </summary>
internal sealed class AdvisoryPipelineOrchestrator : IAdvisoryPipelineOrchestrator
{
    private readonly IAdvisoryStructuredRetriever _structuredRetriever;
    private readonly IAdvisoryVectorRetriever _vectorRetriever;
    private readonly ISbomContextRetriever _sbomContextRetriever;
    private readonly IDeterministicToolset _toolset;
    private readonly AdvisoryPipelineOptions _options;
    // NOTE(review): injected but not referenced in the code visible here —
    // presumably reserved for future diagnostics; confirm before removing.
    private readonly ILogger<AdvisoryPipelineOrchestrator>? _logger;

    /// <summary>
    /// Validates all dependencies and normalises the supplied options via
    /// <c>ApplyDefaults</c> so planning never observes unset configuration.
    /// </summary>
    public AdvisoryPipelineOrchestrator(
        IAdvisoryStructuredRetriever structuredRetriever,
        IAdvisoryVectorRetriever vectorRetriever,
        ISbomContextRetriever sbomContextRetriever,
        IDeterministicToolset toolset,
        IOptions<AdvisoryPipelineOptions> options,
        ILogger<AdvisoryPipelineOrchestrator>? logger = null)
    {
        _structuredRetriever = structuredRetriever ?? throw new ArgumentNullException(nameof(structuredRetriever));
        _vectorRetriever = vectorRetriever ?? throw new ArgumentNullException(nameof(vectorRetriever));
        _sbomContextRetriever = sbomContextRetriever ?? throw new ArgumentNullException(nameof(sbomContextRetriever));
        _toolset = toolset ?? throw new ArgumentNullException(nameof(toolset));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        // Mutates the options instance in place; all task configurations get defaults.
        _options.ApplyDefaults();
        _logger = logger;
    }

    /// <summary>
    /// Produces the full task plan: structured retrieval, vector retrieval per
    /// configured query, optional SBOM context plus dependency analysis,
    /// diagnostic metadata, and the deterministic cache key.
    /// </summary>
    public async Task<AdvisoryTaskPlan> CreatePlanAsync(AdvisoryTaskRequest request, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);

        var config = _options.GetConfiguration(request.TaskType);

        var structuredRequest = new AdvisoryRetrievalRequest(
            request.AdvisoryKey,
            request.PreferredSections,
            config.StructuredMaxChunks);

        var structured = await _structuredRetriever
            .RetrieveAsync(structuredRequest, cancellationToken)
            .ConfigureAwait(false);

        var vectorResults = await RetrieveVectorMatchesAsync(request, structuredRequest, config, cancellationToken).ConfigureAwait(false);
        var (sbomContext, dependencyAnalysis) = await RetrieveSbomContextAsync(request, config, cancellationToken).ConfigureAwait(false);

        var metadata = BuildMetadata(request, structured, vectorResults, sbomContext, dependencyAnalysis);
        var cacheKey = ComputeCacheKey(request, structured, vectorResults, sbomContext, dependencyAnalysis);

        var plan = new AdvisoryTaskPlan(
            request,
            cacheKey,
            config.PromptTemplate,
            structured.Chunks.ToImmutableArray(),
            vectorResults,
            sbomContext,
            dependencyAnalysis,
            config.Budget,
            metadata);

        return plan;
    }

    // Runs one vector search per configured query, preserving configuration order.
    private async Task<ImmutableArray<AdvisoryVectorResult>> RetrieveVectorMatchesAsync(
        AdvisoryTaskRequest request,
        AdvisoryRetrievalRequest structuredRequest,
        AdvisoryTaskConfiguration configuration,
        CancellationToken cancellationToken)
    {
        if (configuration.VectorQueries.Count == 0)
        {
            return ImmutableArray<AdvisoryVectorResult>.Empty;
        }

        // Builder capacity equals the query count, which makes MoveToImmutable valid below.
        var builder = ImmutableArray.CreateBuilder<AdvisoryVectorResult>(configuration.VectorQueries.Count);
        foreach (var query in configuration.GetVectorQueries())
        {
            var vectorRequest = new VectorRetrievalRequest(structuredRequest, query, configuration.VectorTopK);
            var matches = await _vectorRetriever
                .SearchAsync(vectorRequest, cancellationToken)
                .ConfigureAwait(false);

            builder.Add(new AdvisoryVectorResult(query, matches.ToImmutableArray()));
        }

        return builder.MoveToImmutable();
    }

    // Fetches SBOM context and deterministic dependency analysis; both are null
    // when the request names no artifact.
    private async Task<(SbomContextResult? Context, DependencyAnalysisResult? Analysis)> RetrieveSbomContextAsync(
        AdvisoryTaskRequest request,
        AdvisoryTaskConfiguration configuration,
        CancellationToken cancellationToken)
    {
        if (string.IsNullOrEmpty(request.ArtifactId))
        {
            return (null, null);
        }

        var sbomRequest = new SbomContextRequest(
            artifactId: request.ArtifactId!,
            purl: request.ArtifactPurl,
            maxTimelineEntries: configuration.SbomMaxTimelineEntries,
            maxDependencyPaths: configuration.SbomMaxDependencyPaths,
            includeEnvironmentFlags: configuration.IncludeEnvironmentFlags,
            includeBlastRadius: configuration.IncludeBlastRadius);

        var context = await _sbomContextRetriever
            .RetrieveAsync(sbomRequest, cancellationToken)
            .ConfigureAwait(false);

        var analysis = _toolset.AnalyzeDependencies(context);
        return (context, analysis);
    }

    // Builds the diagnostic metadata map; keys use snake_case and numeric values
    // are rendered with the invariant culture.
    private static ImmutableDictionary<string, string> BuildMetadata(
        AdvisoryTaskRequest request,
        AdvisoryRetrievalResult structured,
        ImmutableArray<AdvisoryVectorResult> vectors,
        SbomContextResult? sbom,
        DependencyAnalysisResult? dependency)
    {
        var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
        builder["task_type"] = request.TaskType.ToString();
        builder["advisory_key"] = request.AdvisoryKey;
        builder["profile"] = request.Profile;
        builder["structured_chunk_count"] = structured.Chunks.Count.ToString(CultureInfo.InvariantCulture);
        builder["vector_query_count"] = vectors.Length.ToString(CultureInfo.InvariantCulture);
        builder["vector_match_count"] = vectors.Sum(result => result.Matches.Length).ToString(CultureInfo.InvariantCulture);
        builder["includes_sbom"] = (sbom is not null).ToString();
        builder["dependency_node_count"] = (dependency?.Nodes.Length ?? 0).ToString(CultureInfo.InvariantCulture);
        builder["force_refresh"] = request.ForceRefresh.ToString();

        if (!string.IsNullOrEmpty(request.PolicyVersion))
        {
            builder["policy_version"] = request.PolicyVersion!;
        }

        if (sbom is not null)
        {
            builder["sbom_version_count"] = sbom.VersionTimeline.Count.ToString(CultureInfo.InvariantCulture);
            builder["sbom_dependency_path_count"] = sbom.DependencyPaths.Count.ToString(CultureInfo.InvariantCulture);
        }

        return builder.ToImmutable();
    }

    // Derives the deterministic cache key: every variable input is appended in a
    // fixed, explicitly sorted order, then hashed with SHA-256 and hex-encoded.
    // Note: ForceRefresh is deliberately excluded — it appears only in metadata.
    private static string ComputeCacheKey(
        AdvisoryTaskRequest request,
        AdvisoryRetrievalResult structured,
        ImmutableArray<AdvisoryVectorResult> vectors,
        SbomContextResult? sbom,
        DependencyAnalysisResult? dependency)
    {
        var builder = new StringBuilder();
        builder.Append(request.TaskType)
            .Append('|').Append(request.AdvisoryKey)
            .Append('|').Append(request.ArtifactId ?? string.Empty)
            .Append('|').Append(request.PolicyVersion ?? string.Empty)
            .Append('|').Append(request.Profile);

        if (request.PreferredSections is not null)
        {
            foreach (var section in request.PreferredSections.OrderBy(s => s, StringComparer.OrdinalIgnoreCase))
            {
                builder.Append('|').Append(section);
            }
        }

        // Chunk IDs are sorted so retrieval order cannot perturb the key.
        foreach (var chunkId in structured.Chunks
            .Select(chunk => chunk.ChunkId)
            .OrderBy(id => id, StringComparer.Ordinal))
        {
            builder.Append("|chunk:").Append(chunkId);
        }

        foreach (var vector in vectors)
        {
            builder.Append("|query:").Append(vector.Query);
            foreach (var match in vector.Matches
                .OrderBy(m => m.ChunkId, StringComparer.Ordinal)
                .ThenBy(m => m.Score))
            {
                builder.Append("|match:")
                    .Append(match.ChunkId)
                    .Append('@')
                    .Append(match.Score.ToString("G", CultureInfo.InvariantCulture));
            }
        }

        if (sbom is not null)
        {
            builder.Append("|sbom:timeline=").Append(sbom.VersionTimeline.Count);
            builder.Append("|sbom:paths=").Append(sbom.DependencyPaths.Count);
            foreach (var kvp in sbom.Metadata.OrderBy(k => k.Key, StringComparer.Ordinal))
            {
                builder.Append("|sbommeta:")
                    .Append(kvp.Key)
                    .Append('=')
                    .Append(kvp.Value);
            }
        }

        if (dependency is not null)
        {
            foreach (var node in dependency.Nodes
                .OrderBy(n => n.Identifier, StringComparer.Ordinal))
            {
                builder.Append("|dep:")
                    .Append(node.Identifier)
                    .Append(':')
                    .Append(node.RuntimeOccurrences)
                    .Append(':')
                    .Append(node.DevelopmentOccurrences);
            }
        }

        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(builder.ToString()));
        return Convert.ToHexString(hash);
    }
}
|
||||
@@ -0,0 +1,110 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using StellaOps.AdvisoryAI.Tools;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Orchestration;
|
||||
|
||||
/// <summary>
|
||||
/// DTO exposed via service/CLI APIs describing the orchestration plan without leaking raw chunks.
|
||||
/// </summary>
|
||||
/// <summary>
/// DTO exposed via service/CLI APIs describing the orchestration plan without leaking raw chunks.
/// Instances are created only through <see cref="FromPlan"/>.
/// </summary>
public sealed class AdvisoryPipelinePlanResponse
{
    private AdvisoryPipelinePlanResponse(
        string taskType,
        string cacheKey,
        string promptTemplate,
        AdvisoryTaskBudget budget,
        IReadOnlyList<PipelineChunkSummary> chunks,
        IReadOnlyList<PipelineVectorSummary> vectors,
        PipelineSbomSummary? sbom,
        IReadOnlyDictionary<string, string> metadata)
    {
        TaskType = taskType;
        CacheKey = cacheKey;
        PromptTemplate = promptTemplate;
        Budget = budget;
        Chunks = chunks;
        Vectors = vectors;
        Sbom = sbom;
        Metadata = metadata;
    }

    /// <summary>Name of the task type the plan targets.</summary>
    public string TaskType { get; }

    /// <summary>Deterministic cache key copied from the plan.</summary>
    public string CacheKey { get; }

    /// <summary>Prompt template path selected for the task.</summary>
    public string PromptTemplate { get; }

    /// <summary>Token budget applied when executing the task.</summary>
    public AdvisoryTaskBudget Budget { get; }

    /// <summary>Chunk identification summaries (no raw chunk text).</summary>
    public IReadOnlyList<PipelineChunkSummary> Chunks { get; }

    /// <summary>Per-query vector match summaries.</summary>
    public IReadOnlyList<PipelineVectorSummary> Vectors { get; }

    /// <summary>SBOM summary, or null when the plan carries no SBOM context.</summary>
    public PipelineSbomSummary? Sbom { get; }

    /// <summary>Diagnostic metadata copied from the plan.</summary>
    public IReadOnlyDictionary<string, string> Metadata { get; }

    /// <summary>
    /// Projects an <see cref="AdvisoryTaskPlan"/> into the externally safe response shape.
    /// Throws <see cref="ArgumentNullException"/> when <paramref name="plan"/> is null.
    /// </summary>
    public static AdvisoryPipelinePlanResponse FromPlan(AdvisoryTaskPlan plan)
    {
        ArgumentNullException.ThrowIfNull(plan);

        var chunkSummaries = plan.StructuredChunks
            .Select(chunk => new PipelineChunkSummary(
                chunk.DocumentId,
                chunk.ChunkId,
                chunk.Section,
                // Prefer the metadata-provided section label when present; TryGetValue
                // avoids the previous double lookup (ContainsKey + indexer).
                chunk.Metadata.TryGetValue("section", out var sectionLabel) ? sectionLabel : chunk.Section))
            .ToImmutableArray();

        var vectorSummaries = plan.VectorResults
            .Select(vector => new PipelineVectorSummary(
                vector.Query,
                vector.Matches
                    .Select(match => new PipelineVectorMatchSummary(match.ChunkId, match.Score))
                    .ToImmutableArray()))
            .ToImmutableArray();

        PipelineSbomSummary? sbomSummary = null;
        if (plan.SbomContext is not null)
        {
            sbomSummary = new PipelineSbomSummary(
                plan.SbomContext.ArtifactId,
                plan.SbomContext.VersionTimeline.Count,
                plan.SbomContext.DependencyPaths.Count,
                plan.DependencyAnalysis?.Nodes.Length ?? 0);
        }

        return new AdvisoryPipelinePlanResponse(
            plan.Request.TaskType.ToString(),
            plan.CacheKey,
            plan.PromptTemplate,
            plan.Budget,
            chunkSummaries,
            vectorSummaries,
            sbomSummary,
            plan.Metadata);
    }
}
|
||||
|
||||
/// <summary>Chunk identification exposed to API/CLI consumers (no raw chunk text).</summary>
public sealed record PipelineChunkSummary(
    string DocumentId,
    string ChunkId,
    string Section,
    string DisplaySection);

/// <summary>One vector query together with its match summaries.</summary>
public sealed record PipelineVectorSummary(
    string Query,
    ImmutableArray<PipelineVectorMatchSummary> Matches);

/// <summary>A single vector match: chunk identifier and similarity score.</summary>
public sealed record PipelineVectorMatchSummary(
    string ChunkId,
    double Score);

/// <summary>Counts summarising the SBOM context attached to a plan.</summary>
public sealed record PipelineSbomSummary(
    string ArtifactId,
    int VersionTimelineCount,
    int DependencyPathCount,
    int DependencyNodeCount);
|
||||
@@ -0,0 +1,69 @@
|
||||
using System.Collections.Immutable;
|
||||
using StellaOps.AdvisoryAI.Abstractions;
|
||||
using StellaOps.AdvisoryAI.Context;
|
||||
using StellaOps.AdvisoryAI.Tools;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Orchestration;
|
||||
|
||||
/// <summary>
/// Immutable output of the orchestrator: everything required to execute an
/// advisory task deterministically, identified by <see cref="CacheKey"/>.
/// </summary>
public sealed class AdvisoryTaskPlan
{
    /// <summary>
    /// Validates the non-nullable reference members; the immutable-array members
    /// are value types and accepted as-is.
    /// </summary>
    public AdvisoryTaskPlan(
        AdvisoryTaskRequest request,
        string cacheKey,
        string promptTemplate,
        ImmutableArray<AdvisoryChunk> structuredChunks,
        ImmutableArray<AdvisoryVectorResult> vectorResults,
        SbomContextResult? sbomContext,
        DependencyAnalysisResult? dependencyAnalysis,
        AdvisoryTaskBudget budget,
        ImmutableDictionary<string, string> metadata)
    {
        Request = request ?? throw new ArgumentNullException(nameof(request));
        CacheKey = cacheKey ?? throw new ArgumentNullException(nameof(cacheKey));
        PromptTemplate = promptTemplate ?? throw new ArgumentNullException(nameof(promptTemplate));
        StructuredChunks = structuredChunks;
        VectorResults = vectorResults;
        SbomContext = sbomContext;
        DependencyAnalysis = dependencyAnalysis;
        Budget = budget ?? throw new ArgumentNullException(nameof(budget));
        Metadata = metadata ?? throw new ArgumentNullException(nameof(metadata));
    }

    /// <summary>Original request this plan was built for.</summary>
    public AdvisoryTaskRequest Request { get; }

    /// <summary>Deterministic key identifying this plan's inputs.</summary>
    public string CacheKey { get; }

    /// <summary>Path of the prompt template to render for the task.</summary>
    public string PromptTemplate { get; }

    /// <summary>Structured advisory chunks selected for the prompt.</summary>
    public ImmutableArray<AdvisoryChunk> StructuredChunks { get; }

    /// <summary>Per-query vector retrieval results.</summary>
    public ImmutableArray<AdvisoryVectorResult> VectorResults { get; }

    /// <summary>SBOM context, or null when the request named no artifact.</summary>
    public SbomContextResult? SbomContext { get; }

    /// <summary>Dependency analysis derived from the SBOM context, when available.</summary>
    public DependencyAnalysisResult? DependencyAnalysis { get; }

    /// <summary>Token budget applied when executing the task.</summary>
    public AdvisoryTaskBudget Budget { get; }

    /// <summary>Diagnostic metadata describing the plan's composition.</summary>
    public ImmutableDictionary<string, string> Metadata { get; }
}
|
||||
|
||||
/// <summary>
/// Matches returned for one vector retrieval query.
/// </summary>
public sealed class AdvisoryVectorResult
{
    /// <summary>
    /// Stores the query and its matches.
    /// Throws <see cref="ArgumentException"/> when <paramref name="query"/> is null or blank.
    /// </summary>
    public AdvisoryVectorResult(string query, ImmutableArray<VectorRetrievalMatch> matches)
    {
        // Fix: the original passed nameof(query) as the exception *message*,
        // leaving ArgumentException.ParamName null; supply both arguments.
        Query = string.IsNullOrWhiteSpace(query)
            ? throw new ArgumentException("Query must be a non-empty string.", nameof(query))
            : query;
        Matches = matches;
    }

    /// <summary>The query text that produced these matches.</summary>
    public string Query { get; }

    /// <summary>Matches in retriever order.</summary>
    public ImmutableArray<VectorRetrievalMatch> Matches { get; }
}
|
||||
|
||||
/// <summary>Token budget applied when rendering and executing an advisory task.</summary>
public sealed class AdvisoryTaskBudget
{
    /// <summary>Maximum tokens allowed for the assembled prompt (default 2048).</summary>
    public int PromptTokens { get; init; } = 2048;

    /// <summary>Maximum tokens allowed for the completion (default 512).</summary>
    public int CompletionTokens { get; init; } = 512;
}
|
||||
@@ -0,0 +1,49 @@
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Orchestration;
|
||||
|
||||
/// <summary>
/// Immutable, normalised description of a single advisory AI task invocation.
/// String inputs are trimmed; blank optional values become null.
/// </summary>
public sealed class AdvisoryTaskRequest
{
    /// <summary>
    /// Validates and normalises the request.
    /// Throws <see cref="ArgumentOutOfRangeException"/> for an undefined task type
    /// and <see cref="ArgumentException"/> for a blank advisory key.
    /// </summary>
    public AdvisoryTaskRequest(
        AdvisoryTaskType taskType,
        string advisoryKey,
        string? artifactId = null,
        string? artifactPurl = null,
        string? policyVersion = null,
        string profile = "default",
        IReadOnlyCollection<string>? preferredSections = null,
        bool forceRefresh = false)
    {
        if (!Enum.IsDefined(typeof(AdvisoryTaskType), taskType))
        {
            throw new ArgumentOutOfRangeException(nameof(taskType));
        }

        ArgumentException.ThrowIfNullOrWhiteSpace(advisoryKey);

        TaskType = taskType;
        AdvisoryKey = advisoryKey.Trim();
        ArtifactId = string.IsNullOrWhiteSpace(artifactId) ? null : artifactId.Trim();
        ArtifactPurl = string.IsNullOrWhiteSpace(artifactPurl) ? null : artifactPurl.Trim();
        PolicyVersion = string.IsNullOrWhiteSpace(policyVersion) ? null : policyVersion.Trim();
        Profile = string.IsNullOrWhiteSpace(profile) ? "default" : profile.Trim();
        // Snapshot the caller's collection so later mutation of the original cannot
        // change this request after construction (the sections feed cache keys).
        PreferredSections = preferredSections is null
            ? null
            : new List<string>(preferredSections);
        ForceRefresh = forceRefresh;
    }

    /// <summary>Kind of task to plan (summary, conflict, remediation).</summary>
    public AdvisoryTaskType TaskType { get; }

    /// <summary>Trimmed advisory identifier; never blank.</summary>
    public string AdvisoryKey { get; }

    /// <summary>Optional artifact identifier enabling SBOM context retrieval.</summary>
    public string? ArtifactId { get; }

    /// <summary>Optional package URL of the artifact.</summary>
    public string? ArtifactPurl { get; }

    /// <summary>Optional policy version the task should evaluate against.</summary>
    public string? PolicyVersion { get; }

    /// <summary>Execution profile name; defaults to "default".</summary>
    public string Profile { get; }

    /// <summary>Optional advisory sections to prioritise during retrieval.</summary>
    public IReadOnlyCollection<string>? PreferredSections { get; }

    /// <summary>When true, callers intend cached results to be bypassed.</summary>
    public bool ForceRefresh { get; }
}
|
||||
@@ -0,0 +1,8 @@
|
||||
namespace StellaOps.AdvisoryAI.Orchestration;
|
||||
|
||||
/// <summary>Kinds of advisory AI tasks the orchestration pipeline can plan.</summary>
public enum AdvisoryTaskType
{
    /// <summary>Summary task.</summary>
    Summary,
    /// <summary>Conflict task.</summary>
    Conflict,
    /// <summary>Remediation task.</summary>
    Remediation,
}
|
||||
@@ -0,0 +1,9 @@
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Orchestration;
|
||||
|
||||
/// <summary>Creates deterministic execution plans for advisory AI tasks.</summary>
public interface IAdvisoryPipelineOrchestrator
{
    /// <summary>Builds an <see cref="AdvisoryTaskPlan"/> for the given request.</summary>
    Task<AdvisoryTaskPlan> CreatePlanAsync(AdvisoryTaskRequest request, CancellationToken cancellationToken);
}
|
||||
@@ -3,8 +3,11 @@
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| AIAI-31-001 | DONE (2025-11-02) | Advisory AI Guild | CONCELIER-VULN-29-001, EXCITITOR-VULN-29-001 | Implement structured and vector retrievers for advisories/VEX with paragraph anchors and citation metadata. | Retrievers return deterministic chunks with source IDs/sections; unit tests cover CSAF/OSV/vendor formats. |
|
||||
| AIAI-31-002 | DOING | Advisory AI Guild, SBOM Service Guild | SBOM-VULN-29-001 | Build SBOM context retriever (purl version timelines, dependency paths, env flags, blast radius estimator). | Retriever returns paths/metrics under SLA; tests cover ecosystems. |
|
||||
| AIAI-31-003 | DOING | Advisory AI Guild | AIAI-31-001..002 | Implement deterministic toolset (version comparators, range checks, dependency analysis, policy lookup) exposed via orchestrator. | Tools validated with property tests; outputs cached; docs updated. |
| AIAI-31-004 | DOING | Advisory AI Guild | AIAI-31-001..003, AUTH-VULN-29-001 | Build orchestration pipeline for Summary/Conflict/Remediation tasks (prompt templates, tool calls, token budgets, caching). | Pipeline executes tasks deterministically; caches keyed by tuple+policy; integration tests cover tasks. |
|
||||
| AIAI-31-004A | TODO | Advisory AI Guild, Platform Guild | AIAI-31-004, AIAI-31-002 | Wire `AdvisoryPipelineOrchestrator` into WebService/Worker, expose API/queue contracts, emit metrics, and stand up cache stub. | API returns plan metadata; worker executes queue message; metrics recorded; doc updated. |
|
||||
| AIAI-31-004B | TODO | Advisory AI Guild, Security Guild | AIAI-31-004A, DOCS-AIAI-31-003, AUTH-AIAI-31-004 | Implement prompt assembler, guardrail plumbing, cache persistence, DSSE provenance; add golden outputs. | Deterministic outputs cached; guardrails enforced; tests cover prompt assembly + caching. |
|
||||
| AIAI-31-004C | TODO | Advisory AI Guild, CLI Guild, Docs Guild | AIAI-31-004B, CLI-AIAI-31-003 | Deliver CLI `stella advise run <task>` command, renderers, documentation updates, and CLI golden tests. | CLI command produces deterministic output; docs published; smoke run recorded. |
|
||||
| AIAI-31-005 | TODO | Advisory AI Guild, Security Guild | AIAI-31-004 | Implement guardrails (redaction, injection defense, output validation, citation enforcement) and fail-safe handling. | Guardrails block adversarial inputs; output validator enforces schemas; security tests pass. |
|
||||
| AIAI-31-006 | TODO | Advisory AI Guild | AIAI-31-004..005 | Expose REST API endpoints (`/advisory/ai/*`) with RBAC, rate limits, OpenAPI schemas, and batching support. | Endpoints deployed with schema validation; rate limits enforced; integration tests cover error codes. |
|
||||
| AIAI-31-007 | TODO | Advisory AI Guild, Observability Guild | AIAI-31-004..006 | Instrument metrics (`advisory_ai_latency`, `guardrail_blocks`, `validation_failures`, `citation_coverage`), logs, and traces; publish dashboards/alerts. | Telemetry live; dashboards approved; alerts configured. |
|
||||
@@ -14,3 +17,9 @@
|
||||
| AIAI-31-009 | TODO | Advisory AI Guild, QA Guild | AIAI-31-001..006 | Develop unit/golden/property/perf tests, injection harness, and regression suite; ensure determinism with seeded caches. | Test suite green; golden outputs stored; injection tests pass; perf targets documented. |
|
||||
|
||||
> 2025-11-02: AIAI-31-002 – SBOM context domain models finalized with limiter guards; retriever tests now cover flag toggles and path dedupe. Service client integration still pending with SBOM guild.
|
||||
|
||||
> 2025-11-02: AIAI-31-003 moved to DOING – starting deterministic tooling surface (version comparators & dependency analysis). Added semantic-version + EVR comparators and published toolset interface; awaiting downstream wiring.
|
||||
|
||||
> 2025-11-02: AIAI-31-004 started orchestration pipeline work – begin designing summary/conflict/remediation workflow (deterministic sequence + cache keys).
|
||||
|
||||
> 2025-11-02: AIAI-31-004 orchestration prerequisites documented in docs/modules/advisory-ai/orchestration-pipeline.md (task breakdown 004A/004B/004C).
|
||||
|
||||
@@ -0,0 +1,85 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Tools;
|
||||
|
||||
/// <summary>
|
||||
/// Summarises dependency graph characteristics used by deterministic tooling.
|
||||
/// </summary>
|
||||
/// <summary>
/// Summarises dependency graph characteristics used by deterministic tooling.
/// Instances are built via <see cref="Create"/> or <see cref="Empty"/>; the
/// constructor is private so inputs are always normalised.
/// </summary>
public sealed class DependencyAnalysisResult
{
    private DependencyAnalysisResult(
        string artifactId,
        ImmutableArray<DependencyNodeSummary> nodes,
        ImmutableDictionary<string, string> metadata)
    {
        ArtifactId = artifactId;
        Nodes = nodes;
        Metadata = metadata;
    }

    /// <summary>Identifier of the analysed artifact (trimmed).</summary>
    public string ArtifactId { get; }

    /// <summary>Per-node occurrence summaries.</summary>
    public ImmutableArray<DependencyNodeSummary> Nodes { get; }

    /// <summary>String metadata describing the analysis, keyed ordinally.</summary>
    public ImmutableDictionary<string, string> Metadata { get; }

    /// <summary>
    /// Validates and normalises inputs, then materialises an immutable result.
    /// Throws for a blank <paramref name="artifactId"/> or null collections.
    /// </summary>
    public static DependencyAnalysisResult Create(
        string artifactId,
        IEnumerable<DependencyNodeSummary> nodes,
        IReadOnlyDictionary<string, string> metadata)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(artifactId);
        ArgumentNullException.ThrowIfNull(nodes);
        ArgumentNullException.ThrowIfNull(metadata);

        return new DependencyAnalysisResult(
            artifactId.Trim(),
            nodes.ToImmutableArray(),
            metadata.ToImmutableDictionary(StringComparer.Ordinal));
    }

    /// <summary>Result with no nodes or metadata; tolerates a null artifact id.</summary>
    public static DependencyAnalysisResult Empty(string artifactId)
        => new DependencyAnalysisResult(
            artifactId?.Trim() ?? string.Empty,
            ImmutableArray<DependencyNodeSummary>.Empty,
            ImmutableDictionary<string, string>.Empty);
}
|
||||
|
||||
/// <summary>
/// Immutable occurrence summary for one dependency node: its identifier, the
/// distinct versions observed, and how often it appeared on runtime versus
/// development dependency paths.
/// </summary>
public sealed class DependencyNodeSummary
{
    /// <summary>
    /// Normalises inputs: the identifier is trimmed (and must be non-blank),
    /// a null version list becomes empty, and negative counts are clamped to zero.
    /// </summary>
    public DependencyNodeSummary(
        string identifier,
        IReadOnlyList<string> versions,
        int runtimeOccurrences,
        int developmentOccurrences)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(identifier);

        Identifier = identifier.Trim();
        Versions = versions is null
            ? ImmutableArray<string>.Empty
            : versions.ToImmutableArray();
        RuntimeOccurrences = runtimeOccurrences < 0 ? 0 : runtimeOccurrences;
        DevelopmentOccurrences = developmentOccurrences < 0 ? 0 : developmentOccurrences;
    }

    /// <summary>Trimmed node identifier.</summary>
    public string Identifier { get; }

    /// <summary>Distinct version strings observed for this node.</summary>
    public ImmutableArray<string> Versions { get; }

    /// <summary>Occurrences on runtime dependency paths (never negative).</summary>
    public int RuntimeOccurrences { get; }

    /// <summary>Occurrences on development dependency paths (never negative).</summary>
    public int DevelopmentOccurrences { get; }
}
|
||||
|
||||
/// <summary>
/// Mutable scratch holder used while aggregating dependency paths; converted to
/// immutable <see cref="DependencyNodeSummary"/> instances once aggregation completes.
/// </summary>
internal sealed class NodeAccumulator
{
    /// <summary>Identifier of the node being accumulated.</summary>
    public string Identifier { get; set; } = string.Empty;

    /// <summary>Distinct version strings observed (ordinal comparison).</summary>
    public HashSet<string> Versions { get; set; } = new(StringComparer.Ordinal);

    /// <summary>Count of runtime paths this node appeared on.</summary>
    public int RuntimeOccurrences { get; set; }

    /// <summary>Count of non-runtime (development) paths this node appeared on.</summary>
    public int DevelopmentOccurrences { get; set; }
}
|
||||
@@ -0,0 +1,382 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.Linq;
|
||||
using StellaOps.AdvisoryAI.Context;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Tools;
|
||||
|
||||
/// <summary>
|
||||
/// Default deterministic toolset covering semantic versioning and RPM-style EVR comparisons.
|
||||
/// </summary>
|
||||
internal sealed class DeterministicToolset : IDeterministicToolset
|
||||
{
|
||||
private const string SchemeSemver = "semver";
|
||||
private const string SchemeEvr = "evr";
|
||||
|
||||
/// <summary>
/// Attempts a three-way comparison of two version strings under the given
/// scheme ("semver" or "evr"; blank defaults to semver). Returns false — with
/// <paramref name="comparison"/> left at 0 — when the scheme is unknown or
/// either operand fails to parse.
/// </summary>
public bool TryCompare(string scheme, string left, string right, out int comparison)
{
    comparison = 0;
    var normalized = NormalizeScheme(scheme);

    if (normalized == SchemeSemver)
    {
        return TryCompareSemver(left, right, out comparison);
    }

    if (normalized == SchemeEvr)
    {
        return TryCompareEvr(left, right, out comparison);
    }

    return false;
}
|
||||
|
||||
/// <summary>
/// Evaluates whether <paramref name="version"/> satisfies the range expression
/// under the given scheme. Throws <see cref="NotSupportedException"/> for an
/// unrecognised scheme.
/// </summary>
public bool SatisfiesRange(string scheme, string version, string rangeExpression)
{
    var normalized = NormalizeScheme(scheme);

    if (normalized == SchemeSemver)
    {
        return SemanticVersionRange.Satisfies(version, rangeExpression);
    }

    if (normalized == SchemeEvr)
    {
        return EvrRangeSatisfies(version, rangeExpression);
    }

    throw new NotSupportedException($"Scheme '{normalized}' not supported.");
}
|
||||
|
||||
/// <summary>
/// Aggregates the context's dependency paths into per-node occurrence summaries
/// (runtime vs. development path counts, distinct versions) plus path-count
/// metadata. Returns an empty result when the context has no dependency paths.
/// Throws <see cref="ArgumentNullException"/> for a null context.
/// </summary>
public DependencyAnalysisResult AnalyzeDependencies(SbomContextResult context)
{
    ArgumentNullException.ThrowIfNull(context);

    if (context.DependencyPaths.Count == 0)
    {
        return DependencyAnalysisResult.Empty(context.ArtifactId);
    }

    var nodes = new Dictionary<string, NodeAccumulator>(StringComparer.Ordinal);
    var totalPaths = 0;
    var runtimePaths = 0;

    foreach (var path in context.DependencyPaths)
    {
        totalPaths++;
        if (path.IsRuntime)
        {
            runtimePaths++;
        }

        foreach (var node in path.Nodes)
        {
            var key = node.Identifier;
            if (!nodes.TryGetValue(key, out var accumulator))
            {
                accumulator = new NodeAccumulator
                {
                    Identifier = node.Identifier,
                    Versions = new HashSet<string>(StringComparer.Ordinal),
                };
                nodes[key] = accumulator;
            }

            if (!string.IsNullOrWhiteSpace(node.Version))
            {
                accumulator.Versions.Add(node.Version!);
            }

            // A node is counted once per path it appears on, bucketed by path kind.
            if (path.IsRuntime)
            {
                accumulator.RuntimeOccurrences++;
            }
            else
            {
                accumulator.DevelopmentOccurrences++;
            }
        }
    }

    // Sort nodes and their versions ordinally so the result is deterministic.
    var summaries = nodes.Values
        .Select(acc => new DependencyNodeSummary(
            acc.Identifier,
            acc.Versions.OrderBy(v => v, StringComparer.Ordinal).ToArray(),
            acc.RuntimeOccurrences,
            acc.DevelopmentOccurrences))
        .OrderBy(summary => summary.Identifier, StringComparer.Ordinal)
        .ToArray();

    var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
    {
        ["artifact_id"] = context.ArtifactId,
        ["path_count"] = totalPaths.ToString(CultureInfo.InvariantCulture),
        ["runtime_path_count"] = runtimePaths.ToString(CultureInfo.InvariantCulture),
        ["development_path_count"] = (totalPaths - runtimePaths).ToString(CultureInfo.InvariantCulture),
        ["unique_nodes"] = summaries.Length.ToString(CultureInfo.InvariantCulture),
    };

    // Fix: DependencyAnalysisResult's constructor is private and takes immutable
    // collections, so direct construction (`new DependencyAnalysisResult(...)`)
    // does not compile; the Create factory performs the required conversions.
    return DependencyAnalysisResult.Create(context.ArtifactId, summaries, metadata);
}
|
||||
|
||||
/// <summary>Trims and lower-cases the scheme name; blank input defaults to semver.</summary>
private static string NormalizeScheme(string scheme)
{
    if (string.IsNullOrWhiteSpace(scheme))
    {
        return SchemeSemver;
    }

    return scheme.Trim().ToLowerInvariant();
}
|
||||
|
||||
/// <summary>
/// Compares two semantic-version strings. Returns false (comparison = 0)
/// when either operand fails to parse.
/// </summary>
private static bool TryCompareSemver(string left, string right, out int comparison)
{
    // Both operands must parse; otherwise report failure rather than guessing.
    if (SemanticVersion.TryParse(left, out var parsedLeft) &&
        SemanticVersion.TryParse(right, out var parsedRight))
    {
        comparison = parsedLeft.CompareTo(parsedRight);
        return true;
    }

    comparison = 0;
    return false;
}
|
||||
|
||||
/// <summary>
/// Compares two RPM-style EVR strings. Returns false (comparison = 0)
/// when either operand fails to parse.
/// </summary>
private static bool TryCompareEvr(string left, string right, out int comparison)
{
    // Both operands must parse; otherwise report failure rather than guessing.
    if (EvrVersion.TryParse(left, out var parsedLeft) &&
        EvrVersion.TryParse(right, out var parsedRight))
    {
        comparison = parsedLeft.CompareTo(parsedRight);
        return true;
    }

    comparison = 0;
    return false;
}
|
||||
|
||||
/// <summary>
/// Evaluates an EVR range expression whose clauses are AND-ed together; commas
/// and spaces both act as clause separators. Throws <see cref="FormatException"/>
/// when the candidate version (or any clause target) does not parse.
/// </summary>
private static bool EvrRangeSatisfies(string version, string rangeExpression)
{
    // Parse the candidate once up front; a malformed version is a caller error.
    if (!EvrVersion.TryParse(version, out var parsed))
    {
        throw new FormatException($"Invalid EVR version '{version}'.");
    }

    var separators = new[] { ',', ' ' };
    return rangeExpression
        .Split(separators, StringSplitOptions.RemoveEmptyEntries)
        .All(clause => EvaluateEvrClause(parsed, clause));
}
|
||||
|
||||
/// <summary>
/// Evaluates one range clause (e.g. "&gt;=1:2.0-1") against <paramref name="version"/>.
/// A clause without a recognised comparator prefix is treated as an equality target.
/// Throws <see cref="FormatException"/> when the clause's version text does not parse.
/// </summary>
private static bool EvaluateEvrClause(EvrVersion version, string clause)
{
    if (!TryParseComparator(clause, out var comparator, out var targetRaw))
    {
        // No operator prefix: compare for equality against the whole clause.
        targetRaw = clause;
        comparator = Comparator.Equals;
    }

    if (!EvrVersion.TryParse(targetRaw, out var target))
    {
        throw new FormatException($"Invalid EVR version '{targetRaw}' in clause '{clause}'.");
    }

    var compare = version.CompareTo(target);
    return comparator switch
    {
        Comparator.Equals => compare == 0,
        Comparator.NotEquals => compare != 0,
        Comparator.GreaterThan => compare > 0,
        Comparator.GreaterThanOrEqual => compare >= 0,
        Comparator.LessThan => compare < 0,
        Comparator.LessThanOrEqual => compare <= 0,
        _ => throw new InvalidOperationException(),
    };
}
|
||||
|
||||
/// <summary>
/// Splits a range clause such as "&gt;=1.2.3" into its comparator and version text.
/// Returns false for blank clauses or clauses with no recognised operator prefix,
/// in which case the caller treats the whole clause as an equality target.
/// </summary>
private static bool TryParseComparator(string clause, out Comparator comparator, out string version)
{
    comparator = Comparator.Equals;
    version = clause;

    if (string.IsNullOrWhiteSpace(clause))
    {
        return false;
    }

    // Fix: two-character operators must be tested before their one-character
    // prefixes. Previously "=" was checked before "==", making the "==" branch
    // unreachable and leaving a stray '=' in the version text for "==X" clauses.
    if (clause.StartsWith(">=", StringComparison.Ordinal))
    {
        comparator = Comparator.GreaterThanOrEqual;
        version = clause[2..];
        return true;
    }

    if (clause.StartsWith("<=", StringComparison.Ordinal))
    {
        comparator = Comparator.LessThanOrEqual;
        version = clause[2..];
        return true;
    }

    if (clause.StartsWith("!=", StringComparison.Ordinal) || clause.StartsWith("<>", StringComparison.Ordinal))
    {
        comparator = Comparator.NotEquals;
        version = clause[2..];
        return true;
    }

    if (clause.StartsWith("==", StringComparison.Ordinal))
    {
        comparator = Comparator.Equals;
        version = clause[2..];
        return true;
    }

    if (clause.StartsWith(">", StringComparison.Ordinal))
    {
        comparator = Comparator.GreaterThan;
        version = clause[1..];
        return true;
    }

    if (clause.StartsWith("<", StringComparison.Ordinal))
    {
        comparator = Comparator.LessThan;
        version = clause[1..];
        return true;
    }

    if (clause.StartsWith("=", StringComparison.Ordinal))
    {
        comparator = Comparator.Equals;
        version = clause[1..];
        return true;
    }

    return false;
}
|
||||
|
||||
// Relational operators recognised by TryParseComparator; consumed by the
// comparator switch that turns a CompareTo result into a boolean.
private enum Comparator
{
    Equals,             // "=" / "=="
    NotEquals,          // "!=" / "<>"
    GreaterThan,        // ">"
    GreaterThanOrEqual, // ">="
    LessThan,           // "<"
    LessThanOrEqual,    // "<="
}
|
||||
|
||||
/// <summary>
/// Epoch-Version-Release style version ("[epoch:]version[-release]") compared
/// segment by segment. NOTE(review): this resembles RPM EVR ordering, but the
/// numeric-vs-alpha rule and the separator set ('.', '-', '_') are bespoke —
/// confirm against the intended packaging ecosystem before relying on
/// edge-case ordering (tilde pre-release markers are not handled).
/// </summary>
private readonly struct EvrVersion : IComparable<EvrVersion>
{
    /// <summary>Builds an EVR value from a pre-split epoch, version and release.</summary>
    public EvrVersion(int epoch, IReadOnlyList<string> versionSegments, IReadOnlyList<string> releaseSegments)
    {
        Epoch = epoch;
        VersionSegments = versionSegments;
        ReleaseSegments = releaseSegments;
    }

    /// <summary>Numeric epoch; 0 when the input carried no "epoch:" prefix.</summary>
    public int Epoch { get; }

    /// <summary>Version segments (text before the first '-'), split on '.', '-', '_'.</summary>
    public IReadOnlyList<string> VersionSegments { get; }

    /// <summary>Release segments (text after the first '-'), split the same way; empty when absent.</summary>
    public IReadOnlyList<string> ReleaseSegments { get; }

    /// <summary>
    /// Parses "[epoch:]version[-release]". Fails only for blank input or a
    /// non-integer epoch; any other non-empty input parses successfully.
    /// </summary>
    public static bool TryParse(string value, out EvrVersion version)
    {
        version = default;
        if (string.IsNullOrWhiteSpace(value))
        {
            return false;
        }

        var trimmed = value.Trim();
        // Only the first ':' separates the epoch; later colons stay in the version.
        var epochSplit = trimmed.Split(':', 2);
        int epoch = 0;
        string remainder;

        if (epochSplit.Length == 2)
        {
            if (!int.TryParse(epochSplit[0], NumberStyles.Integer, CultureInfo.InvariantCulture, out epoch))
            {
                return false;
            }

            remainder = epochSplit[1];
        }
        else
        {
            remainder = trimmed;
        }

        // Only the first '-' separates version from release.
        var releaseSplit = remainder.Split('-', 2);
        var versionPart = releaseSplit[0];
        var releasePart = releaseSplit.Length == 2 ? releaseSplit[1] : string.Empty;

        var versionSegments = SplitSegments(versionPart);
        var releaseSegments = SplitSegments(releasePart);

        version = new EvrVersion(epoch, versionSegments, releaseSegments);
        return true;
    }

    /// <summary>Orders by epoch, then version segments, then release segments.</summary>
    public int CompareTo(EvrVersion other)
    {
        var epochCompare = Epoch.CompareTo(other.Epoch);
        if (epochCompare != 0)
        {
            return epochCompare;
        }

        var versionCompare = CompareSegments(VersionSegments, other.VersionSegments);
        if (versionCompare != 0)
        {
            return versionCompare;
        }

        return CompareSegments(ReleaseSegments, other.ReleaseSegments);
    }

    // Splits on '.', '-' and '_', dropping empty entries ("1..2" -> ["1", "2"]).
    private static List<string> SplitSegments(string value)
    {
        if (string.IsNullOrEmpty(value))
        {
            return new List<string>(0);
        }

        var segments = value.Split(new[] { '.', '-', '_' }, StringSplitOptions.RemoveEmptyEntries);
        return segments.ToList();
    }

    // Pairwise segment comparison. A missing segment is treated as the empty
    // string, so via the numeric-beats-non-numeric rule below "1.0.1"
    // compares greater than "1.0".
    private static int CompareSegments(IReadOnlyList<string> left, IReadOnlyList<string> right)
    {
        var length = Math.Max(left.Count, right.Count);
        for (var i = 0; i < length; i++)
        {
            var leftSegment = i < left.Count ? left[i] : string.Empty;
            var rightSegment = i < right.Count ? right[i] : string.Empty;

            var leftNumeric = int.TryParse(leftSegment, NumberStyles.Integer, CultureInfo.InvariantCulture, out var leftValue);
            var rightNumeric = int.TryParse(rightSegment, NumberStyles.Integer, CultureInfo.InvariantCulture, out var rightValue);

            if (leftNumeric && rightNumeric)
            {
                var compare = leftValue.CompareTo(rightValue);
                if (compare != 0)
                {
                    return compare;
                }

                continue;
            }

            if (leftNumeric)
            {
                return 1; // numeric segments sort after alpha if other not numeric
            }

            if (rightNumeric)
            {
                return -1;
            }

            // Both non-numeric: ordinal (byte-wise) string comparison.
            var cmp = string.CompareOrdinal(leftSegment, rightSegment);
            if (cmp != 0)
            {
                return cmp;
            }
        }

        return 0;
    }
}
|
||||
}
|
||||
@@ -0,0 +1,24 @@
|
||||
using StellaOps.AdvisoryAI.Context;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Tools;
|
||||
|
||||
/// <summary>
/// Provides deterministic version comparison and range evaluation helpers used across Advisory AI tooling.
/// </summary>
public interface IDeterministicToolset
{
    /// <summary>
    /// Attempts to compare two versions using the specified scheme.
    /// </summary>
    /// <param name="scheme">Versioning scheme identifier (tests in this change exercise "semver" and "evr").</param>
    /// <param name="left">Left-hand version string.</param>
    /// <param name="right">Right-hand version string.</param>
    /// <param name="comparison">Comparison sign (&lt;0, 0, &gt;0) when the call succeeds.</param>
    /// <returns>True when the comparison could be performed — presumably false for an unknown scheme or unparsable version; confirm against the implementation.</returns>
    bool TryCompare(string scheme, string left, string right, out int comparison);

    /// <summary>
    /// Evaluates whether a version satisfies the given range expression for the specified scheme.
    /// </summary>
    /// <param name="scheme">Versioning scheme identifier, as for <see cref="TryCompare"/>.</param>
    /// <param name="version">Version string to test.</param>
    /// <param name="rangeExpression">Comma-separated comparator clauses (logical AND).</param>
    bool SatisfiesRange(string scheme, string version, string rangeExpression);

    /// <summary>
    /// Analyses dependency paths to produce deterministic summaries for policy/tooling decisions.
    /// </summary>
    /// <param name="context">SBOM context whose dependency paths are summarised.</param>
    DependencyAnalysisResult AnalyzeDependencies(SbomContextResult context);
}
|
||||
256
src/AdvisoryAI/StellaOps.AdvisoryAI/Tools/SemanticVersion.cs
Normal file
256
src/AdvisoryAI/StellaOps.AdvisoryAI/Tools/SemanticVersion.cs
Normal file
@@ -0,0 +1,256 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Tools;
|
||||
|
||||
/// <summary>
/// Deterministic semantic version parser that supports major.minor.patch with optional pre-release/build metadata.
/// No external dependencies to remain offline-friendly.
/// NOTE(review): minor and patch default to 0 when omitted ("1" parses as
/// 1.0.0), which is laxer than strict SemVer 2.0.0 — confirm this is intended.
/// </summary>
public readonly struct SemanticVersion : IComparable<SemanticVersion>
{
    private SemanticVersion(
        int major,
        int minor,
        int patch,
        IReadOnlyList<string> preRelease,
        string? build)
    {
        Major = major;
        Minor = minor;
        Patch = patch;
        PreRelease = preRelease;
        BuildMetadata = build;
    }

    public int Major { get; }

    public int Minor { get; }

    public int Patch { get; }

    /// <summary>Pre-release identifiers (dot-separated in the source text); empty for a release version.</summary>
    public IReadOnlyList<string> PreRelease { get; }

    /// <summary>Raw build-metadata text after '+', or null; never participates in <see cref="CompareTo"/>.</summary>
    public string? BuildMetadata { get; }

    /// <summary>
    /// Parses "major[.minor[.patch]][-prerelease][+build]". Rejects blank
    /// input, leading zeros in numeric parts, an empty pre-release segment
    /// ("1.0.0-") and empty/invalid pre-release identifiers.
    /// </summary>
    public static bool TryParse(string value, out SemanticVersion version)
    {
        version = default;
        if (string.IsNullOrWhiteSpace(value))
        {
            return false;
        }

        var span = value.Trim();
        // Build metadata is stripped first: '+' can only follow the
        // version/pre-release portion.
        var buildSplit = span.Split('+', 2, StringSplitOptions.RemoveEmptyEntries);
        string? build = null;
        if (buildSplit.Length == 2)
        {
            span = buildSplit[0];
            build = buildSplit[1];
        }

        // Only the first '-' starts the pre-release segment.
        var preReleaseSplit = span.Split('-', 2, StringSplitOptions.None);
        string? preReleaseSegment = null;
        if (preReleaseSplit.Length == 2)
        {
            span = preReleaseSplit[0];
            preReleaseSegment = preReleaseSplit[1];
            if (string.IsNullOrEmpty(preReleaseSegment))
            {
                // "1.0.0-" is malformed.
                return false;
            }
        }

        var parts = span.Split('.', StringSplitOptions.None);
        if (parts.Length < 1 || parts.Length > 3)
        {
            return false;
        }

        if (!TryParseNumericPart(parts[0], out var major))
        {
            return false;
        }

        var minor = 0;
        if (parts.Length > 1 && !TryParseNumericPart(parts[1], out minor))
        {
            return false;
        }

        var patch = 0;
        if (parts.Length > 2 && !TryParseNumericPart(parts[2], out patch))
        {
            return false;
        }

        var preRelease = Array.Empty<string>();
        if (!string.IsNullOrEmpty(preReleaseSegment))
        {
            var segments = preReleaseSegment.Split('.', StringSplitOptions.RemoveEmptyEntries);
            if (segments.Length == 0)
            {
                return false;
            }

            foreach (var identifier in segments)
            {
                if (!IsValidIdentifier(identifier))
                {
                    return false;
                }
            }

            preRelease = segments;
        }

        version = new SemanticVersion(major, minor, patch, preRelease, build);
        return true;
    }

    /// <summary>Parses a version or throws <see cref="FormatException"/> on malformed input.</summary>
    public static SemanticVersion Parse(string value)
        => TryParse(value, out var version)
            ? version
            : throw new FormatException($"Invalid semantic version '{value}'.");

    /// <summary>
    /// SemVer-style precedence: major/minor/patch numerically, then
    /// pre-release identifiers; build metadata is ignored.
    /// </summary>
    public int CompareTo(SemanticVersion other)
    {
        var majorCompare = Major.CompareTo(other.Major);
        if (majorCompare != 0)
        {
            return majorCompare;
        }

        var minorCompare = Minor.CompareTo(other.Minor);
        if (minorCompare != 0)
        {
            return minorCompare;
        }

        var patchCompare = Patch.CompareTo(other.Patch);
        if (patchCompare != 0)
        {
            return patchCompare;
        }

        return ComparePreRelease(PreRelease, other.PreRelease);
    }

    /// <summary>Canonical "M.m.p[-pre][+build]" rendering.</summary>
    public override string ToString()
    {
        var core = $"{Major}.{Minor}.{Patch}";
        if (PreRelease.Count > 0)
        {
            core += "-" + string.Join('.', PreRelease);
        }

        if (!string.IsNullOrEmpty(BuildMetadata))
        {
            core += "+" + BuildMetadata;
        }

        return core;
    }

    // Strict non-negative integer: no sign, no leading zeros ("01" fails).
    private static bool TryParseNumericPart(string value, out int result)
    {
        if (value.Length == 0)
        {
            result = 0;
            return false;
        }

        if (value.Length > 1 && value[0] == '0')
        {
            result = 0;
            return false;
        }

        return int.TryParse(value, NumberStyles.None, CultureInfo.InvariantCulture, out result);
    }

    // Pre-release identifier: letters, digits and '-'; a numeric identifier
    // must not carry a leading zero. NOTE(review): char.IsLetterOrDigit
    // admits non-ASCII letters, which strict SemVer would reject — confirm.
    private static bool IsValidIdentifier(string identifier)
    {
        if (identifier.Length == 0)
        {
            return false;
        }

        foreach (var ch in identifier)
        {
            if (char.IsLetterOrDigit(ch) || ch == '-')
            {
                continue;
            }

            return false;
        }

        if (identifier.Length > 1 && identifier[0] == '0' && char.IsDigit(identifier[1]))
        {
            return false;
        }

        return true;
    }

    // Pre-release precedence: a release outranks any pre-release; numeric
    // identifiers compare numerically and rank below alphanumeric ones;
    // ties break on identifier count (fewer identifiers sorts first).
    private static int ComparePreRelease(IReadOnlyList<string> left, IReadOnlyList<string> right)
    {
        var leftEmpty = left.Count == 0;
        var rightEmpty = right.Count == 0;

        if (leftEmpty && rightEmpty)
        {
            return 0;
        }

        if (leftEmpty)
        {
            return 1; // release > pre-release
        }

        if (rightEmpty)
        {
            return -1;
        }

        var length = Math.Min(left.Count, right.Count);
        for (var i = 0; i < length; i++)
        {
            var leftId = left[i];
            var rightId = right[i];

            var leftNumeric = int.TryParse(leftId, NumberStyles.None, CultureInfo.InvariantCulture, out var leftValue);
            var rightNumeric = int.TryParse(rightId, NumberStyles.None, CultureInfo.InvariantCulture, out var rightValue);

            if (leftNumeric && rightNumeric)
            {
                var compare = leftValue.CompareTo(rightValue);
                if (compare != 0)
                {
                    return compare;
                }
            }
            else if (leftNumeric)
            {
                return -1;
            }
            else if (rightNumeric)
            {
                return 1;
            }
            else
            {
                var compare = string.CompareOrdinal(leftId, rightId);
                if (compare != 0)
                {
                    return compare;
                }
            }
        }

        return left.Count.CompareTo(right.Count);
    }
}
|
||||
@@ -0,0 +1,121 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Tools;
|
||||
|
||||
/// <summary>
/// Evaluates simple semantic version ranges used by Advisory AI deterministic tooling.
/// Supports comparators (>, >=, <, <=, =, ==, !=) combined with commas (logical AND).
/// An empty or whitespace range expression is vacuously satisfied.
/// </summary>
public static class SemanticVersionRange
{
    // Characters that may open a comparator prefix; everything after the
    // prefix is the version text.
    private static readonly char[] OperatorChars = { '>', '<', '!', '=' };

    /// <summary>
    /// Returns true when <paramref name="version"/> satisfies every clause of
    /// <paramref name="rangeExpression"/>.
    /// </summary>
    /// <exception cref="FormatException">The version or a clause cannot be parsed.</exception>
    public static bool Satisfies(string version, string rangeExpression)
    {
        if (!SemanticVersion.TryParse(version, out var candidate))
        {
            throw new FormatException($"Invalid version '{version}'.");
        }

        foreach (var clause in ParseClauses(rangeExpression))
        {
            if (!Holds(candidate, clause))
            {
                return false;
            }
        }

        return true;
    }

    // Tokenises on commas/spaces and parses each non-empty token into a clause.
    private static IReadOnlyList<RangeClause> ParseClauses(string expression)
    {
        if (string.IsNullOrWhiteSpace(expression))
        {
            return Array.Empty<RangeClause>();
        }

        var tokens = expression.Split(new[] { ',', ' ' }, StringSplitOptions.RemoveEmptyEntries);
        var parsed = new List<RangeClause>(tokens.Length);
        foreach (var token in tokens)
        {
            parsed.Add(ParseClause(token));
        }

        return parsed;
    }

    private static RangeClause ParseClause(string token)
    {
        // Consume the leading run of operator characters.
        var prefixLength = 0;
        while (prefixLength < token.Length && Array.IndexOf(OperatorChars, token[prefixLength]) >= 0)
        {
            prefixLength++;
        }

        if (prefixLength == 0)
        {
            // No operator prefix: a bare version means equality.
            if (!SemanticVersion.TryParse(token, out var implicitVersion))
            {
                throw new FormatException($"Invalid range clause '{token}'.");
            }

            return new RangeClause(Comparator.Equals, implicitVersion);
        }

        var operatorToken = token[..prefixLength];
        Comparator comparator;
        switch (operatorToken)
        {
            case ">":
                comparator = Comparator.GreaterThan;
                break;
            case ">=":
                comparator = Comparator.GreaterThanOrEqual;
                break;
            case "<":
                comparator = Comparator.LessThan;
                break;
            case "<=":
                comparator = Comparator.LessThanOrEqual;
                break;
            case "=":
            case "==":
                comparator = Comparator.Equals;
                break;
            case "!=":
            case "<>":
                comparator = Comparator.NotEquals;
                break;
            default:
                throw new FormatException($"Unsupported comparator '{operatorToken}'.");
        }

        var versionToken = token[prefixLength..];
        if (!SemanticVersion.TryParse(versionToken, out var parsedVersion))
        {
            throw new FormatException($"Invalid version '{versionToken}' in clause '{token}'.");
        }

        return new RangeClause(comparator, parsedVersion);
    }

    // Applies one clause to the candidate version.
    private static bool Holds(SemanticVersion candidate, RangeClause clause)
    {
        var comparison = candidate.CompareTo(clause.Version);
        switch (clause.Comparator)
        {
            case Comparator.Equals:
                return comparison == 0;
            case Comparator.NotEquals:
                return comparison != 0;
            case Comparator.GreaterThan:
                return comparison > 0;
            case Comparator.GreaterThanOrEqual:
                return comparison >= 0;
            case Comparator.LessThan:
                return comparison < 0;
            case Comparator.LessThanOrEqual:
                return comparison <= 0;
            default:
                throw new InvalidOperationException($"Unsupported comparator {clause.Comparator}.");
        }
    }

    private readonly record struct RangeClause(Comparator Comparator, SemanticVersion Version);

    private enum Comparator
    {
        Equals,
        NotEquals,
        GreaterThan,
        GreaterThanOrEqual,
        LessThan,
        LessThanOrEqual,
    }
}
|
||||
@@ -0,0 +1,131 @@
|
||||
using System.Collections.Immutable;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.AdvisoryAI.Documents;
|
||||
using StellaOps.AdvisoryAI.Abstractions;
|
||||
using StellaOps.AdvisoryAI.Context;
|
||||
using StellaOps.AdvisoryAI.Orchestration;
|
||||
using StellaOps.AdvisoryAI.Tools;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Tests;
|
||||
|
||||
/// <summary>
/// Verifies that the orchestrator composes a plan from the structured, vector
/// and SBOM retrievers, and that the cache key is deterministic across calls.
/// </summary>
public sealed class AdvisoryPipelineOrchestratorTests
{
    [Fact]
    public async Task CreatePlanAsync_ComposesDeterministicPlan()
    {
        // Arrange: fakes supply two structured chunks, two vector matches and
        // one runtime + one development dependency path.
        var structuredRetriever = new FakeStructuredRetriever();
        var vectorRetriever = new FakeVectorRetriever();
        var sbomRetriever = new FakeSbomContextRetriever();
        var options = Options.Create(new AdvisoryPipelineOptions());
        options.Value.Tasks[AdvisoryTaskType.Summary].VectorQueries.Clear();
        options.Value.Tasks[AdvisoryTaskType.Summary].VectorQueries.Add("summary-query");
        options.Value.Tasks[AdvisoryTaskType.Summary].VectorTopK = 2;
        options.Value.Tasks[AdvisoryTaskType.Summary].StructuredMaxChunks = 5;
        options.Value.Tasks[AdvisoryTaskType.Summary].PromptTemplate = "prompts/summary.liquid";
        var orchestrator = new AdvisoryPipelineOrchestrator(
            structuredRetriever,
            vectorRetriever,
            sbomRetriever,
            new DeterministicToolset(),
            options,
            NullLogger<AdvisoryPipelineOrchestrator>.Instance);

        var request = new AdvisoryTaskRequest(
            AdvisoryTaskType.Summary,
            advisoryKey: "adv-key",
            artifactId: "artifact-1",
            artifactPurl: "pkg:docker/sample@1.0.0",
            policyVersion: "policy-42",
            profile: "default");

        var plan = await orchestrator.CreatePlanAsync(request, CancellationToken.None);

        // Plan should mirror the configured template/queries and fake data.
        Assert.Equal("prompts/summary.liquid", plan.PromptTemplate);
        Assert.Equal(2, plan.StructuredChunks.Length);
        Assert.Single(plan.VectorResults);
        Assert.Equal("summary-query", plan.VectorResults[0].Query);
        Assert.Equal(2, plan.VectorResults[0].Matches.Length);
        Assert.NotNull(plan.SbomContext);
        Assert.NotNull(plan.DependencyAnalysis);
        Assert.NotEmpty(plan.CacheKey);
        Assert.Equal("adv-key", plan.Metadata["advisory_key"]);
        Assert.Equal("Summary", plan.Metadata["task_type"]);
        Assert.Equal("1", plan.Metadata["runtime_path_count"]);

        // Determinism: the same request must yield the same cache key.
        var secondPlan = await orchestrator.CreatePlanAsync(request, CancellationToken.None);
        Assert.Equal(plan.CacheKey, secondPlan.CacheKey);
    }

    // Returns two fixed chunks (Summary, Remediation) for any request.
    private sealed class FakeStructuredRetriever : IAdvisoryStructuredRetriever
    {
        public Task<AdvisoryRetrievalResult> RetrieveAsync(AdvisoryRetrievalRequest request, CancellationToken cancellationToken)
        {
            var chunks = new[]
            {
                AdvisoryChunk.Create("doc-1", "doc-1:0001", "Summary", "summary[0]", "Summary section", new Dictionary<string, string>
                {
                    ["section"] = "Summary",
                }),
                AdvisoryChunk.Create("doc-1", "doc-1:0002", "Remediation", "remediation[0]", "Remediation section", new Dictionary<string, string>
                {
                    ["section"] = "Remediation",
                }),
            };

            return Task.FromResult(AdvisoryRetrievalResult.Create(request.AdvisoryKey, chunks));
        }
    }

    // Returns two fixed matches (TopK = 2 in the test configuration).
    private sealed class FakeVectorRetriever : IAdvisoryVectorRetriever
    {
        public Task<IReadOnlyList<VectorRetrievalMatch>> SearchAsync(VectorRetrievalRequest request, CancellationToken cancellationToken)
        {
            var matches = new[]
            {
                new VectorRetrievalMatch("doc-1", "doc-1:0002", "Remediation section", 0.95, ImmutableDictionary<string, string>.Empty),
                new VectorRetrievalMatch("doc-1", "doc-1:0001", "Summary section", 0.90, ImmutableDictionary<string, string>.Empty),
            };

            return Task.FromResult<IReadOnlyList<VectorRetrievalMatch>>(matches);
        }
    }

    // Supplies one timeline entry plus one runtime and one development
    // dependency path — this is what makes runtime_path_count == "1" above.
    private sealed class FakeSbomContextRetriever : ISbomContextRetriever
    {
        public Task<SbomContextResult> RetrieveAsync(SbomContextRequest request, CancellationToken cancellationToken)
        {
            var versionTimeline = new[]
            {
                new SbomVersionTimelineEntry("1.0.0", DateTimeOffset.UtcNow.AddDays(-10), null, "affected", "scanner"),
            };

            var dependencyPaths = new[]
            {
                new SbomDependencyPath(
                    new[]
                    {
                        new SbomDependencyNode("root", "1.0.0"),
                        new SbomDependencyNode("runtime-lib", "2.1.0"),
                    },
                    isRuntime: true),
                new SbomDependencyPath(
                    new[]
                    {
                        new SbomDependencyNode("root", "1.0.0"),
                        new SbomDependencyNode("dev-lib", "0.9.0"),
                    },
                    isRuntime: false),
            };

            var result = SbomContextResult.Create(
                request.ArtifactId,
                request.Purl,
                versionTimeline,
                dependencyPaths);

            return Task.FromResult(result);
        }
    }
}
|
||||
@@ -0,0 +1,83 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using FluentAssertions;
|
||||
using StellaOps.AdvisoryAI.Abstractions;
|
||||
using StellaOps.AdvisoryAI.Context;
|
||||
using StellaOps.AdvisoryAI.Orchestration;
|
||||
using StellaOps.AdvisoryAI.Tools;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Tests;
|
||||
|
||||
/// <summary>
/// Verifies that <c>AdvisoryPipelinePlanResponse.FromPlan</c> projects the
/// plan's metadata, chunk/vector counts, SBOM summary and budget faithfully.
/// </summary>
public sealed class AdvisoryPipelinePlanResponseTests
{
    [Fact]
    public void FromPlan_ProjectsMetadataAndCounts()
    {
        var request = new AdvisoryTaskRequest(AdvisoryTaskType.Summary, "adv-key");
        // Two structured chunks -> response.Chunks count of 2.
        var chunks = ImmutableArray.Create(
            AdvisoryChunk.Create("doc-1", "doc-1:0001", "Summary", "summary[0]", "Summary text", new Dictionary<string, string>
            {
                ["section"] = "Summary",
            }),
            AdvisoryChunk.Create("doc-1", "doc-1:0002", "Remediation", "remediation[0]", "Remediation text", new Dictionary<string, string>
            {
                ["section"] = "Remediation",
            }));

        // One vector result -> response.Vectors count of 1.
        var vectorResults = ImmutableArray.Create(
            new AdvisoryVectorResult(
                "Summary query",
                ImmutableArray.Create(
                    new VectorRetrievalMatch("doc-1", "doc-1:0001", "Summary text", 0.9, ImmutableDictionary<string, string>.Empty))));

        // Single runtime dependency path with one node.
        var sbom = SbomContextResult.Create(
            "artifact-1",
            null,
            new[]
            {
                new SbomVersionTimelineEntry("1.0.0", DateTimeOffset.UtcNow.AddDays(-1), null, "affected", "scanner"),
            },
            new[]
            {
                new SbomDependencyPath(
                    new[]
                    {
                        new SbomDependencyNode("root", "1.0.0"),
                    },
                    true),
            });

        // Derive a node summary per path leaf, mirroring runtime/dev split.
        var dependency = DependencyAnalysisResult.Create(
            sbom.ArtifactId,
            sbom.DependencyPaths.Select(path => new DependencyNodeSummary(
                path.Nodes.Last().Identifier,
                Array.Empty<string>(),
                runtimeOccurrences: path.IsRuntime ? 1 : 0,
                developmentOccurrences: path.IsRuntime ? 0 : 1)),
            ImmutableDictionary<string, string>.Empty);

        var plan = new AdvisoryTaskPlan(
            request,
            cacheKey: "ABC123",
            promptTemplate: "prompts/advisory/summary.liquid",
            structuredChunks: chunks,
            vectorResults: vectorResults,
            sbomContext: sbom,
            dependencyAnalysis: dependency,
            budget: new AdvisoryTaskBudget { PromptTokens = 1024, CompletionTokens = 256 },
            metadata: ImmutableDictionary<string, string>.Empty);

        var response = AdvisoryPipelinePlanResponse.FromPlan(plan);

        response.TaskType.Should().Be("Summary");
        response.CacheKey.Should().Be("ABC123");
        response.Chunks.Should().HaveCount(2);
        response.Vectors.Should().HaveCount(1);
        response.Sbom.Should().NotBeNull();
        response.Sbom!.DependencyNodeCount.Should().Be(1);
        response.Budget.CompletionTokens.Should().Be(256);
    }
}
|
||||
@@ -49,8 +49,8 @@ public sealed class AdvisoryStructuredRetrieverTests
|
||||
|
||||
result.Chunks.Should().NotBeEmpty();
|
||||
result.Chunks.Should().ContainSingle(c => c.Section == "summary");
|
||||
result.Chunks.Should().Contain(c => c.Section == "affected.ranges");
|
||||
result.Chunks.First(c => c.Section == "affected.ranges").Metadata.Should().ContainKey("package");
|
||||
result.Chunks.Should().Contain(c => c.Section.StartsWith("affected", StringComparison.OrdinalIgnoreCase));
|
||||
result.Chunks.First(c => c.Section.StartsWith("affected", StringComparison.OrdinalIgnoreCase)).Metadata.Should().ContainKey("package");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -85,14 +85,19 @@ public sealed class AdvisoryStructuredRetrieverTests
|
||||
await LoadAsync("sample-vendor.md")));
|
||||
|
||||
var retriever = CreateRetriever(provider);
|
||||
var baseline = await retriever.RetrieveAsync(new AdvisoryRetrievalRequest("markdown-advisory"), CancellationToken.None);
|
||||
var impactSection = baseline.Chunks
|
||||
.Select(chunk => chunk.Section)
|
||||
.First(section => section.Contains("Impact", StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
var request = new AdvisoryRetrievalRequest(
|
||||
"markdown-advisory",
|
||||
PreferredSections: new[] { "Impact" });
|
||||
PreferredSections: new[] { impactSection });
|
||||
|
||||
var result = await retriever.RetrieveAsync(request, CancellationToken.None);
|
||||
|
||||
result.Chunks.Should().NotBeEmpty();
|
||||
result.Chunks.Should().OnlyContain(chunk => chunk.Section.StartsWith("Impact", StringComparison.Ordinal));
|
||||
result.Chunks.Should().OnlyContain(chunk => chunk.Section == impactSection);
|
||||
}
|
||||
|
||||
private static AdvisoryStructuredRetriever CreateRetriever(IAdvisoryDocumentProvider provider)
|
||||
|
||||
@@ -47,11 +47,11 @@ public sealed class AdvisoryVectorRetrieverTests
|
||||
new VectorRetrievalRequest(
|
||||
new AdvisoryRetrievalRequest("adv"),
|
||||
Query: "How do I remediate the vulnerability?",
|
||||
TopK: 1),
|
||||
TopK: 3),
|
||||
CancellationToken.None);
|
||||
|
||||
matches.Should().HaveCount(1);
|
||||
matches[0].Section().Should().Be("Remediation");
|
||||
matches.Should().NotBeEmpty();
|
||||
matches.Should().Contain(match => match.Text.Contains("Update to version", StringComparison.OrdinalIgnoreCase));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -67,7 +67,7 @@ public sealed class ConcelierAdvisoryDocumentProviderTests
|
||||
=> throw new NotImplementedException();
|
||||
|
||||
public Task<AdvisoryRawQueryResult> QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken)
|
||||
=> Task.FromResult(new AdvisoryRawQueryResult(_records, nextCursor: null, hasMore: false));
|
||||
=> Task.FromResult(new AdvisoryRawQueryResult(_records, NextCursor: null, HasMore: false));
|
||||
|
||||
public Task<AdvisoryRawVerificationResult> VerifyAsync(AdvisoryRawVerificationRequest request, CancellationToken cancellationToken)
|
||||
=> throw new NotImplementedException();
|
||||
|
||||
@@ -0,0 +1,54 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using FluentAssertions;
|
||||
using StellaOps.AdvisoryAI.Context;
|
||||
using StellaOps.AdvisoryAI.Tools;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Tests;
|
||||
|
||||
/// <summary>
/// Verifies that <c>DeterministicToolset.AnalyzeDependencies</c> tallies
/// runtime vs development path occurrences per dependency node.
/// </summary>
public sealed class DeterministicToolsetTests
{
    [Fact]
    public void AnalyzeDependencies_ComputesRuntimeAndDevelopmentCounts()
    {
        // One runtime path (root -> lib-a) and one development path
        // (root -> lib-b); "root" appears in both.
        var context = SbomContextResult.Create(
            "artifact-123",
            purl: null,
            versionTimeline: Array.Empty<SbomVersionTimelineEntry>(),
            dependencyPaths: new[]
            {
                new SbomDependencyPath(
                    new[]
                    {
                        new SbomDependencyNode("root", "1.0.0"),
                        new SbomDependencyNode("lib-a", "2.0.0"),
                    },
                    isRuntime: true),
                new SbomDependencyPath(
                    new[]
                    {
                        new SbomDependencyNode("root", "1.0.0"),
                        new SbomDependencyNode("lib-b", "3.1.4"),
                    },
                    isRuntime: false),
            });

        IDeterministicToolset toolset = new DeterministicToolset();
        var analysis = toolset.AnalyzeDependencies(context);

        analysis.ArtifactId.Should().Be("artifact-123");
        analysis.Metadata["path_count"].Should().Be("2");
        analysis.Metadata["runtime_path_count"].Should().Be("1");
        analysis.Metadata["development_path_count"].Should().Be("1");
        // Distinct nodes: root, lib-a, lib-b.
        analysis.Nodes.Should().HaveCount(3);

        var libA = analysis.Nodes.Single(node => node.Identifier == "lib-a");
        libA.RuntimeOccurrences.Should().Be(1);
        libA.DevelopmentOccurrences.Should().Be(0);

        var libB = analysis.Nodes.Single(node => node.Identifier == "lib-b");
        libB.RuntimeOccurrences.Should().Be(0);
        libB.DevelopmentOccurrences.Should().Be(1);
    }
}
|
||||
@@ -64,7 +64,7 @@ public sealed class ExcititorVexDocumentProviderTests
|
||||
service.LastOptions.Should().NotBeNull();
|
||||
service.LastOptions!.Tenant.Should().Be(tenantId);
|
||||
service.LastOptions.ProviderIds.Should().ContainSingle().Which.Should().Be(providerId);
|
||||
service.LastOptions.Statuses.Should().ContainSingle(VexClaimStatus.NotAffected);
|
||||
service.LastOptions.Statuses.Should().ContainSingle(status => status == VexClaimStatus.NotAffected);
|
||||
service.LastOptions.VulnerabilityIds.Should().Contain(vulnerabilityId);
|
||||
service.LastOptions.Limit.Should().Be(5);
|
||||
}
|
||||
@@ -79,7 +79,7 @@ public sealed class ExcititorVexDocumentProviderTests
|
||||
{
|
||||
var upstream = new VexObservationUpstream(
|
||||
"VEX-1",
|
||||
1,
|
||||
"1",
|
||||
DateTimeOffset.Parse("2025-10-10T08:00:00Z"),
|
||||
DateTimeOffset.Parse("2025-10-10T08:05:00Z"),
|
||||
"hash-abc123",
|
||||
|
||||
@@ -93,7 +93,7 @@ public sealed class SbomContextRetrieverTests
|
||||
result.DependencyPaths.Should().HaveCount(2);
|
||||
result.DependencyPaths.First().IsRuntime.Should().BeTrue();
|
||||
result.DependencyPaths.First().Nodes.Select(n => n.Identifier).Should().Equal("app", "lib-a", "lib-b");
|
||||
result.EnvironmentFlags.Keys.Should().Equal(new[] { "environment/dev", "environment/prod" });
|
||||
result.EnvironmentFlags.Keys.Should().BeEquivalentTo(new[] { "environment/dev", "environment/prod" });
|
||||
result.EnvironmentFlags["environment/prod"].Should().Be("true");
|
||||
result.BlastRadius.Should().NotBeNull();
|
||||
result.BlastRadius!.ImpactedAssets.Should().Be(12);
|
||||
|
||||
@@ -0,0 +1,78 @@
|
||||
using FluentAssertions;
|
||||
using StellaOps.AdvisoryAI.Tools;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Tests;
|
||||
|
||||
/// <summary>
/// Covers SemanticVersion parsing/ordering, SemanticVersionRange evaluation,
/// and the DeterministicToolset facade over both the semver and evr schemes.
/// </summary>
public sealed class SemanticVersionTests
{
    [Theory]
    [InlineData("1.2.3", 1, 2, 3, false)]
    [InlineData("1.2.3-alpha", 1, 2, 3, true)]
    [InlineData("0.0.1+build", 0, 0, 1, false)]
    [InlineData("2.0.0-rc.1+exp.sha", 2, 0, 0, true)]
    public void Parse_ValidInputs_Succeeds(string value, int major, int minor, int patch, bool hasPreRelease)
    {
        var version = SemanticVersion.Parse(value);

        version.Major.Should().Be(major);
        version.Minor.Should().Be(minor);
        version.Patch.Should().Be(patch);
        (version.PreRelease.Count > 0).Should().Be(hasPreRelease);
    }

    // Leading zero, empty component, dangling '-', blank and null must throw.
    [Theory]
    [InlineData("01.0.0")]
    [InlineData("1..0")]
    [InlineData("1.0.0-")]
    [InlineData("")]
    [InlineData(null)]
    public void Parse_InvalidInputs_Throws(string value)
    {
        var act = () => SemanticVersion.Parse(value!);
        act.Should().Throw<FormatException>();
    }

    // Only the sign of CompareTo is asserted, not its magnitude.
    [Theory]
    [InlineData("1.2.3", "1.2.3", 0)]
    [InlineData("1.2.3", "1.2.4", -1)]
    [InlineData("1.3.0", "1.2.9", 1)]
    [InlineData("1.2.3-alpha", "1.2.3", -1)]
    [InlineData("1.2.3-alpha.2", "1.2.3-alpha.10", -1)]
    [InlineData("1.2.3-beta", "1.2.3-alpha", 1)]
    public void CompareTo_EvaluatesOrder(string left, string right, int expectedSign)
    {
        var leftVersion = SemanticVersion.Parse(left);
        var rightVersion = SemanticVersion.Parse(right);

        Math.Sign(leftVersion.CompareTo(rightVersion)).Should().Be(expectedSign);
    }

    [Theory]
    [InlineData("1.2.3", ">=1.0.0,<2.0.0", true)]
    [InlineData("0.9.0", ">=1.0.0", false)]
    [InlineData("1.2.3-beta", ">=1.2.3", false)]
    [InlineData("1.2.3-beta", ">=1.2.3-rc.1", false)]
    [InlineData("1.2.3-rc.1", ">=1.2.3-beta", true)]
    [InlineData("1.2.3", "!=1.2.3", false)]
    [InlineData("1.2.3", "1.2.3", true)]
    public void RangeEvaluator_ProducesExpectedResults(string version, string range, bool expected)
    {
        SemanticVersionRange.Satisfies(version, range).Should().Be(expected);
    }

    [Fact]
    public void DeterministicToolset_ComparesSemverAndEvr()
    {
        IDeterministicToolset toolset = new DeterministicToolset();

        toolset.TryCompare("semver", "1.2.3", "1.2.4", out var semverComparison).Should().BeTrue();
        semverComparison.Should().BeLessThan(0);

        toolset.TryCompare("evr", "1:1.0.0-1", "1:1.0.0-2", out var evrComparison).Should().BeTrue();
        evrComparison.Should().BeLessThan(0);

        toolset.SatisfiesRange("semver", "1.2.3", ">=1.0.0,<2.0.0").Should().BeTrue();
        toolset.SatisfiesRange("evr", "0:1.0.1-3", ">=1.0.0-0,!=1.0.1-2").Should().BeTrue();
    }
}
|
||||
@@ -16,9 +16,9 @@
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\StellaOps.AdvisoryAI\StellaOps.AdvisoryAI.csproj" />
|
||||
<ProjectReference Include="..\..\Concelier\__Libraries\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
|
||||
<ProjectReference Include="..\..\Concelier\__Libraries\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj" />
|
||||
<ProjectReference Include="..\..\Excititor\__Libraries\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
|
||||
<ProjectReference Include="..\..\..\Concelier\__Libraries\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
|
||||
<ProjectReference Include="..\..\..\Concelier\__Libraries\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj" />
|
||||
<ProjectReference Include="..\..\..\Excititor\__Libraries\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Update="TestData/*.json">
|
||||
|
||||
@@ -0,0 +1,38 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.AdvisoryAI.DependencyInjection;
|
||||
using StellaOps.AdvisoryAI.Orchestration;
|
||||
using StellaOps.AdvisoryAI.Tools;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Tests;
|
||||
|
||||
public sealed class ToolsetServiceCollectionExtensionsTests
|
||||
{
|
||||
[Fact]
|
||||
public void AddAdvisoryDeterministicToolset_RegistersSingleton()
|
||||
{
|
||||
var services = new ServiceCollection();
|
||||
|
||||
services.AddAdvisoryDeterministicToolset();
|
||||
|
||||
var provider = services.BuildServiceProvider();
|
||||
var toolsetA = provider.GetRequiredService<IDeterministicToolset>();
|
||||
var toolsetB = provider.GetRequiredService<IDeterministicToolset>();
|
||||
|
||||
Assert.Same(toolsetA, toolsetB);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddAdvisoryPipeline_RegistersOrchestrator()
|
||||
{
|
||||
var services = new ServiceCollection();
|
||||
|
||||
services.AddAdvisoryPipeline();
|
||||
|
||||
var provider = services.BuildServiceProvider();
|
||||
var orchestrator = provider.GetRequiredService<IAdvisoryPipelineOrchestrator>();
|
||||
Assert.NotNull(orchestrator);
|
||||
var again = provider.GetRequiredService<IAdvisoryPipelineOrchestrator>();
|
||||
Assert.Same(orchestrator, again);
|
||||
}
|
||||
}
|
||||
@@ -1,56 +1,61 @@
|
||||
namespace StellaOps.Auth.Abstractions;
|
||||
|
||||
/// <summary>
|
||||
/// Canonical claim type identifiers used across StellaOps services.
|
||||
/// </summary>
|
||||
public static class StellaOpsClaimTypes
|
||||
{
|
||||
/// <summary>
|
||||
/// Subject identifier claim (maps to <c>sub</c> in JWTs).
|
||||
/// </summary>
|
||||
public const string Subject = "sub";
|
||||
|
||||
/// <summary>
|
||||
/// StellaOps tenant identifier claim (multi-tenant deployments).
|
||||
/// </summary>
|
||||
public const string Tenant = "stellaops:tenant";
|
||||
|
||||
/// <summary>
|
||||
/// StellaOps project identifier claim (optional project scoping within a tenant).
|
||||
/// </summary>
|
||||
public const string Project = "stellaops:project";
|
||||
|
||||
/// <summary>
|
||||
/// OAuth2/OIDC client identifier claim (maps to <c>client_id</c>).
|
||||
/// </summary>
|
||||
public const string ClientId = "client_id";
|
||||
|
||||
/// <summary>
|
||||
/// Unique token identifier claim (maps to <c>jti</c>).
|
||||
/// </summary>
|
||||
public const string TokenId = "jti";
|
||||
|
||||
/// <summary>
|
||||
/// Authentication method reference claim (<c>amr</c>).
|
||||
/// </summary>
|
||||
public const string AuthenticationMethod = "amr";
|
||||
|
||||
/// <summary>
|
||||
/// Space separated scope list (<c>scope</c>).
|
||||
/// </summary>
|
||||
public const string Scope = "scope";
|
||||
|
||||
/// <summary>
|
||||
/// Individual scope items (<c>scp</c>).
|
||||
/// </summary>
|
||||
public const string ScopeItem = "scp";
|
||||
|
||||
/// <summary>
|
||||
/// OAuth2 resource audiences (<c>aud</c>).
|
||||
/// </summary>
|
||||
public const string Audience = "aud";
|
||||
|
||||
/// <summary>
|
||||
namespace StellaOps.Auth.Abstractions;
|
||||
|
||||
/// <summary>
|
||||
/// Canonical claim type identifiers used across StellaOps services.
|
||||
/// </summary>
|
||||
public static class StellaOpsClaimTypes
|
||||
{
|
||||
/// <summary>
|
||||
/// Subject identifier claim (maps to <c>sub</c> in JWTs).
|
||||
/// </summary>
|
||||
public const string Subject = "sub";
|
||||
|
||||
/// <summary>
|
||||
/// StellaOps tenant identifier claim (multi-tenant deployments).
|
||||
/// </summary>
|
||||
public const string Tenant = "stellaops:tenant";
|
||||
|
||||
/// <summary>
|
||||
/// StellaOps project identifier claim (optional project scoping within a tenant).
|
||||
/// </summary>
|
||||
public const string Project = "stellaops:project";
|
||||
|
||||
/// <summary>
|
||||
/// OAuth2/OIDC client identifier claim (maps to <c>client_id</c>).
|
||||
/// </summary>
|
||||
public const string ClientId = "client_id";
|
||||
|
||||
/// <summary>
|
||||
/// Service account identifier associated with delegated tokens.
|
||||
/// </summary>
|
||||
public const string ServiceAccount = "stellaops:service_account";
|
||||
|
||||
/// <summary>
|
||||
/// Unique token identifier claim (maps to <c>jti</c>).
|
||||
/// </summary>
|
||||
public const string TokenId = "jti";
|
||||
|
||||
/// <summary>
|
||||
/// Authentication method reference claim (<c>amr</c>).
|
||||
/// </summary>
|
||||
public const string AuthenticationMethod = "amr";
|
||||
|
||||
/// <summary>
|
||||
/// Space separated scope list (<c>scope</c>).
|
||||
/// </summary>
|
||||
public const string Scope = "scope";
|
||||
|
||||
/// <summary>
|
||||
/// Individual scope items (<c>scp</c>).
|
||||
/// </summary>
|
||||
public const string ScopeItem = "scp";
|
||||
|
||||
/// <summary>
|
||||
/// OAuth2 resource audiences (<c>aud</c>).
|
||||
/// </summary>
|
||||
public const string Audience = "aud";
|
||||
|
||||
/// <summary>
|
||||
/// Identity provider hint for downstream services.
|
||||
/// </summary>
|
||||
public const string IdentityProvider = "stellaops:idp";
|
||||
|
||||
@@ -20,10 +20,10 @@ public sealed class AuthorityTokenDocument
|
||||
|
||||
[BsonElement("type")]
|
||||
public string Type { get; set; } = string.Empty;
|
||||
[BsonElement(tokenKind)]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? TokenKind { get; set; }
|
||||
|
||||
|
||||
[BsonElement("tokenKind")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? TokenKind { get; set; }
|
||||
|
||||
[BsonElement("subjectId")]
|
||||
[BsonIgnoreIfNull]
|
||||
@@ -97,12 +97,12 @@ public sealed class AuthorityTokenDocument
|
||||
[BsonElement("revokedMetadata")]
|
||||
[BsonIgnoreIfNull]
|
||||
public Dictionary<string, string?>? RevokedMetadata { get; set; }
|
||||
|
||||
[BsonElement(serviceAccountId)]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? ServiceAccountId { get; set; }
|
||||
|
||||
[BsonElement(actors)]
|
||||
[BsonIgnoreIfNull]
|
||||
public List<string>? ActorChain { get; set; }
|
||||
|
||||
[BsonElement("serviceAccountId")]
|
||||
[BsonIgnoreIfNull]
|
||||
public string? ServiceAccountId { get; set; }
|
||||
|
||||
[BsonElement("actors")]
|
||||
[BsonIgnoreIfNull]
|
||||
public List<string>? ActorChain { get; set; }
|
||||
}
|
||||
|
||||
@@ -36,7 +36,18 @@ internal sealed class AuthorityTokenCollectionInitializer : IAuthorityCollection
|
||||
new CreateIndexOptions<AuthorityTokenDocument> { Name = "token_sender_thumbprint", Sparse = true })
|
||||
};
|
||||
|
||||
var expirationFilter = Builders<AuthorityTokenDocument>.Filter.Exists(t => t.ExpiresAt, true);
|
||||
var serviceAccountFilter = Builders<AuthorityTokenDocument>.Filter.Exists(t => t.ServiceAccountId, true);
|
||||
indexModels.Add(new CreateIndexModel<AuthorityTokenDocument>(
|
||||
Builders<AuthorityTokenDocument>.IndexKeys
|
||||
.Ascending(t => t.Tenant)
|
||||
.Ascending(t => t.ServiceAccountId),
|
||||
new CreateIndexOptions<AuthorityTokenDocument>
|
||||
{
|
||||
Name = "token_tenant_service_account",
|
||||
PartialFilterExpression = serviceAccountFilter
|
||||
}));
|
||||
|
||||
var expirationFilter = Builders<AuthorityTokenDocument>.Filter.Exists(t => t.ExpiresAt, true);
|
||||
indexModels.Add(new CreateIndexModel<AuthorityTokenDocument>(
|
||||
Builders<AuthorityTokenDocument>.IndexKeys.Ascending(t => t.ExpiresAt),
|
||||
new CreateIndexOptions<AuthorityTokenDocument>
|
||||
|
||||
@@ -13,6 +13,7 @@ namespace StellaOps.Authority.Storage.Mongo.Stores;
|
||||
|
||||
internal sealed class AuthorityTokenStore : IAuthorityTokenStore
|
||||
{
|
||||
private const string ServiceAccountTokenKind = "service_account";
|
||||
private readonly IMongoCollection<AuthorityTokenDocument> collection;
|
||||
private readonly ILogger<AuthorityTokenStore> logger;
|
||||
|
||||
@@ -190,6 +191,97 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore
|
||||
return new TokenUsageUpdateResult(suspicious ? TokenUsageUpdateStatus.SuspectedReplay : TokenUsageUpdateStatus.Recorded, normalizedAddress, normalizedAgent);
|
||||
}
|
||||
|
||||
public async ValueTask<long> CountActiveDelegationTokensAsync(
|
||||
string tenant,
|
||||
string? serviceAccountId,
|
||||
CancellationToken cancellationToken,
|
||||
IClientSessionHandle? session = null)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
var normalizedTenant = tenant.Trim().ToLowerInvariant();
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
|
||||
var filter = Builders<AuthorityTokenDocument>.Filter.And(new[]
|
||||
{
|
||||
Builders<AuthorityTokenDocument>.Filter.Eq(t => t.Status, "valid"),
|
||||
Builders<AuthorityTokenDocument>.Filter.Eq(t => t.Tenant, normalizedTenant),
|
||||
Builders<AuthorityTokenDocument>.Filter.Exists(t => t.ServiceAccountId, true),
|
||||
Builders<AuthorityTokenDocument>.Filter.Eq(t => t.TokenKind, ServiceAccountTokenKind),
|
||||
Builders<AuthorityTokenDocument>.Filter.Or(
|
||||
Builders<AuthorityTokenDocument>.Filter.Eq(t => t.ExpiresAt, null),
|
||||
Builders<AuthorityTokenDocument>.Filter.Gt(t => t.ExpiresAt, now))
|
||||
});
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(serviceAccountId))
|
||||
{
|
||||
var normalizedAccount = serviceAccountId.Trim();
|
||||
filter &= Builders<AuthorityTokenDocument>.Filter.Eq(t => t.ServiceAccountId, normalizedAccount);
|
||||
}
|
||||
|
||||
var query = session is { }
|
||||
? collection.Find(session, filter)
|
||||
: collection.Find(filter);
|
||||
|
||||
return await query.CountDocumentsAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async ValueTask<IReadOnlyList<AuthorityTokenDocument>> ListActiveDelegationTokensAsync(
|
||||
string tenant,
|
||||
string? serviceAccountId,
|
||||
CancellationToken cancellationToken,
|
||||
IClientSessionHandle? session = null)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
return Array.Empty<AuthorityTokenDocument>();
|
||||
}
|
||||
|
||||
var normalizedTenant = tenant.Trim().ToLowerInvariant();
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
|
||||
var filters = new List<FilterDefinition<AuthorityTokenDocument>>
|
||||
{
|
||||
Builders<AuthorityTokenDocument>.Filter.Eq(t => t.Status, "valid"),
|
||||
Builders<AuthorityTokenDocument>.Filter.Eq(t => t.Tenant, normalizedTenant),
|
||||
Builders<AuthorityTokenDocument>.Filter.Eq(t => t.TokenKind, ServiceAccountTokenKind),
|
||||
Builders<AuthorityTokenDocument>.Filter.Or(
|
||||
Builders<AuthorityTokenDocument>.Filter.Eq(t => t.ExpiresAt, null),
|
||||
Builders<AuthorityTokenDocument>.Filter.Gt(t => t.ExpiresAt, now))
|
||||
};
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(serviceAccountId))
|
||||
{
|
||||
filters.Add(Builders<AuthorityTokenDocument>.Filter.Eq(
|
||||
t => t.ServiceAccountId,
|
||||
serviceAccountId.Trim()));
|
||||
}
|
||||
|
||||
var filter = Builders<AuthorityTokenDocument>.Filter.And(filters);
|
||||
var options = new FindOptions<AuthorityTokenDocument>
|
||||
{
|
||||
Sort = Builders<AuthorityTokenDocument>.Sort
|
||||
.Descending(t => t.CreatedAt)
|
||||
.Descending(t => t.TokenId)
|
||||
};
|
||||
|
||||
IAsyncCursor<AuthorityTokenDocument> cursor;
|
||||
if (session is { })
|
||||
{
|
||||
cursor = await collection.FindAsync(session, filter, options, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
cursor = await collection.FindAsync(filter, options, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
var documents = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false);
|
||||
return documents;
|
||||
}
|
||||
|
||||
private static string? GetString(BsonDocument document, string name)
|
||||
{
|
||||
if (!document.TryGetValue(name, out var value))
|
||||
|
||||
@@ -6,7 +6,7 @@ using StellaOps.Authority.Storage.Mongo.Documents;
|
||||
|
||||
namespace StellaOps.Authority.Storage.Mongo.Stores;
|
||||
|
||||
internal interface IAuthorityServiceAccountStore
|
||||
public interface IAuthorityServiceAccountStore
|
||||
{
|
||||
ValueTask<AuthorityServiceAccountDocument?> FindByAccountIdAsync(string accountId, CancellationToken cancellationToken, IClientSessionHandle? session = null);
|
||||
|
||||
|
||||
@@ -36,6 +36,18 @@ public interface IAuthorityTokenStore
|
||||
int limit,
|
||||
CancellationToken cancellationToken,
|
||||
IClientSessionHandle? session = null);
|
||||
|
||||
ValueTask<long> CountActiveDelegationTokensAsync(
|
||||
string tenant,
|
||||
string? serviceAccountId,
|
||||
CancellationToken cancellationToken,
|
||||
IClientSessionHandle? session = null);
|
||||
|
||||
ValueTask<IReadOnlyList<AuthorityTokenDocument>> ListActiveDelegationTokensAsync(
|
||||
string tenant,
|
||||
string? serviceAccountId,
|
||||
CancellationToken cancellationToken,
|
||||
IClientSessionHandle? session = null);
|
||||
}
|
||||
|
||||
public enum TokenUsageUpdateStatus
|
||||
|
||||
@@ -0,0 +1,526 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Net.Http.Json;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.AspNetCore.Authentication;
|
||||
using Microsoft.AspNetCore.Hosting;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using MongoDB.Driver;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
using StellaOps.Authority.OpenIddict;
|
||||
using StellaOps.Authority.Storage.Mongo.Documents;
|
||||
using StellaOps.Authority.Storage.Mongo.Stores;
|
||||
using StellaOps.Authority.Tests.Infrastructure;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Authority.Tests.Bootstrap;
|
||||
|
||||
public sealed class ServiceAccountAdminEndpointsTests : IClassFixture<AuthorityWebApplicationFactory>
|
||||
{
|
||||
private const string BootstrapKey = "test-bootstrap-key";
|
||||
private const string TenantId = "tenant-default";
|
||||
private const string ServiceAccountId = "svc-observer";
|
||||
|
||||
private readonly AuthorityWebApplicationFactory factory;
|
||||
|
||||
public ServiceAccountAdminEndpointsTests(AuthorityWebApplicationFactory factory)
|
||||
{
|
||||
this.factory = factory ?? throw new ArgumentNullException(nameof(factory));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task List_ReturnsUnauthorized_WhenBootstrapKeyMissing()
|
||||
{
|
||||
using var app = CreateApplication(builder =>
|
||||
{
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
var authBuilder = services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
|
||||
options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
|
||||
});
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
|
||||
});
|
||||
});
|
||||
|
||||
using var client = app.CreateClient();
|
||||
|
||||
var response = await client.GetAsync($"/internal/service-accounts?tenant={TenantId}");
|
||||
|
||||
Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task List_ReturnsBadRequest_WhenTenantMissing()
|
||||
{
|
||||
using var app = CreateApplication(builder =>
|
||||
{
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
var authBuilder = services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
|
||||
options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
|
||||
});
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
|
||||
});
|
||||
});
|
||||
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Add("X-StellaOps-Bootstrap-Key", BootstrapKey);
|
||||
|
||||
var response = await client.GetAsync("/internal/service-accounts");
|
||||
|
||||
Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task List_ReturnsServiceAccountsForTenant()
|
||||
{
|
||||
using var app = CreateApplication(builder =>
|
||||
{
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
var authBuilder = services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
|
||||
options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
|
||||
});
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
|
||||
});
|
||||
});
|
||||
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Add("X-StellaOps-Bootstrap-Key", BootstrapKey);
|
||||
|
||||
var response = await client.GetAsync($"/internal/service-accounts?tenant={TenantId}");
|
||||
|
||||
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<ServiceAccountResponse[]>(default);
|
||||
Assert.NotNull(payload);
|
||||
|
||||
var serviceAccount = Assert.Single(payload!);
|
||||
Assert.Equal(ServiceAccountId, serviceAccount.AccountId);
|
||||
Assert.Equal(TenantId, serviceAccount.Tenant);
|
||||
Assert.Equal("Observability Exporter", serviceAccount.DisplayName);
|
||||
Assert.True(serviceAccount.Enabled);
|
||||
Assert.Equal(new[] { "findings:read", "jobs:read" }, serviceAccount.AllowedScopes);
|
||||
Assert.Equal(new[] { "export-center-worker" }, serviceAccount.AuthorizedClients);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Tokens_ReturnsActiveDelegationTokens()
|
||||
{
|
||||
using var app = CreateApplication();
|
||||
|
||||
await using (var scope = app.Services.CreateAsyncScope())
|
||||
{
|
||||
var tokenStore = scope.ServiceProvider.GetRequiredService<IAuthorityTokenStore>();
|
||||
var document = new AuthorityTokenDocument
|
||||
{
|
||||
TokenId = "token-1",
|
||||
ClientId = "export-center-worker",
|
||||
Status = "valid",
|
||||
Scope = new List<string> { "jobs:read", "findings:read" },
|
||||
CreatedAt = DateTimeOffset.UtcNow.AddMinutes(-10),
|
||||
ExpiresAt = DateTimeOffset.UtcNow.AddMinutes(20),
|
||||
Tenant = TenantId,
|
||||
ServiceAccountId = ServiceAccountId,
|
||||
TokenKind = "service_account"
|
||||
};
|
||||
|
||||
await tokenStore.InsertAsync(document, CancellationToken.None).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Add("X-StellaOps-Bootstrap-Key", BootstrapKey);
|
||||
|
||||
var response = await client.GetAsync($"/internal/service-accounts/{ServiceAccountId}/tokens");
|
||||
|
||||
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<ServiceAccountTokenResponse[]>(default);
|
||||
Assert.NotNull(payload);
|
||||
|
||||
var token = Assert.Single(payload!);
|
||||
Assert.Equal("token-1", token.TokenId);
|
||||
Assert.Equal("export-center-worker", token.ClientId);
|
||||
Assert.Equal("valid", token.Status);
|
||||
Assert.Equal(new[] { "findings:read", "jobs:read" }, token.Scopes);
|
||||
Assert.Empty(token.Actors);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Tokens_ReturnsNotFound_WhenServiceAccountMissing()
|
||||
{
|
||||
using var app = CreateApplication(builder =>
|
||||
{
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
var authBuilder = services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
|
||||
options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
|
||||
});
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
|
||||
});
|
||||
});
|
||||
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Add("X-StellaOps-Bootstrap-Key", BootstrapKey);
|
||||
|
||||
var response = await client.GetAsync("/internal/service-accounts/svc-missing/tokens");
|
||||
|
||||
Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Revoke_RevokesAllActiveTokens_AndEmitsAuditEvent()
|
||||
{
|
||||
var sink = new RecordingAuthEventSink();
|
||||
var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T18:00:00Z"));
|
||||
|
||||
using var app = CreateApplication(builder =>
|
||||
{
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
services.RemoveAll<IAuthEventSink>();
|
||||
services.AddSingleton<IAuthEventSink>(sink);
|
||||
services.Replace(ServiceDescriptor.Singleton<TimeProvider>(timeProvider));
|
||||
var authBuilder = services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
|
||||
options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
|
||||
});
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
|
||||
});
|
||||
});
|
||||
|
||||
var tokenIds = new[] { "token-a", "token-b" };
|
||||
|
||||
await using (var scope = app.Services.CreateAsyncScope())
|
||||
{
|
||||
var tokenStore = scope.ServiceProvider.GetRequiredService<IAuthorityTokenStore>();
|
||||
|
||||
foreach (var tokenId in tokenIds)
|
||||
{
|
||||
await tokenStore.InsertAsync(new AuthorityTokenDocument
|
||||
{
|
||||
TokenId = tokenId,
|
||||
ClientId = "export-center-worker",
|
||||
Status = "valid",
|
||||
Scope = new List<string> { "jobs:read" },
|
||||
CreatedAt = DateTimeOffset.UtcNow.AddMinutes(-5),
|
||||
ExpiresAt = DateTimeOffset.UtcNow.AddMinutes(30),
|
||||
Tenant = TenantId,
|
||||
ServiceAccountId = ServiceAccountId,
|
||||
TokenKind = "service_account"
|
||||
}, CancellationToken.None).ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Add("X-StellaOps-Bootstrap-Key", BootstrapKey);
|
||||
|
||||
var response = await client.PostAsJsonAsync($"/internal/service-accounts/{ServiceAccountId}/revocations", new
|
||||
{
|
||||
reason = "operator_request",
|
||||
reasonDescription = "Rotate credentials"
|
||||
});
|
||||
|
||||
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<ServiceAccountRevokeResponse>(default);
|
||||
Assert.NotNull(payload);
|
||||
Assert.Equal(2, payload!.RevokedCount);
|
||||
Assert.Equal(tokenIds.OrderBy(id => id, StringComparer.Ordinal), payload.TokenIds.OrderBy(id => id, StringComparer.Ordinal));
|
||||
|
||||
await using (var scope = app.Services.CreateAsyncScope())
|
||||
{
|
||||
var tokenStore = scope.ServiceProvider.GetRequiredService<IAuthorityTokenStore>();
|
||||
|
||||
foreach (var tokenId in tokenIds)
|
||||
{
|
||||
var sessionAccessor = scope.ServiceProvider.GetRequiredService<IAuthorityMongoSessionAccessor>();
|
||||
var session = await sessionAccessor.GetSessionAsync().ConfigureAwait(false);
|
||||
var token = await tokenStore.FindByTokenIdAsync(tokenId, CancellationToken.None, session).ConfigureAwait(false);
|
||||
Assert.NotNull(token);
|
||||
Assert.Equal("revoked", token!.Status);
|
||||
}
|
||||
}
|
||||
|
||||
var audit = Assert.Single(sink.Events.Where(evt => evt.EventType == "authority.delegation.revoked"));
|
||||
Assert.Equal(AuthEventOutcome.Success, audit.Outcome);
|
||||
Assert.Equal("operator_request", audit.Reason);
|
||||
Assert.Contains(audit.Properties, property =>
|
||||
string.Equals(property.Name, "delegation.service_account", StringComparison.Ordinal) &&
|
||||
string.Equals(property.Value.Value, ServiceAccountId, StringComparison.Ordinal));
|
||||
Assert.Contains(audit.Properties, property =>
|
||||
string.Equals(property.Name, "delegation.revoked_count", StringComparison.Ordinal) &&
|
||||
string.Equals(property.Value.Value, "2", StringComparison.Ordinal));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Revoke_ReturnsNotFound_WhenServiceAccountMissing()
|
||||
{
|
||||
var sink = new RecordingAuthEventSink();
|
||||
|
||||
using var app = CreateApplication(builder =>
|
||||
{
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
services.RemoveAll<IAuthEventSink>();
|
||||
services.AddSingleton<IAuthEventSink>(sink);
|
||||
var authBuilder = services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
|
||||
options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
|
||||
});
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
|
||||
});
|
||||
});
|
||||
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Add("X-StellaOps-Bootstrap-Key", BootstrapKey);
|
||||
|
||||
var response = await client.PostAsJsonAsync("/internal/service-accounts/svc-unknown/revocations", new { reason = "rotate" });
|
||||
|
||||
Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);
|
||||
Assert.Empty(sink.Events);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Revoke_ReturnsNotFound_WhenTokenNotFound()
|
||||
{
|
||||
var sink = new RecordingAuthEventSink();
|
||||
|
||||
using var app = CreateApplication(builder =>
|
||||
{
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
services.RemoveAll<IAuthEventSink>();
|
||||
services.AddSingleton<IAuthEventSink>(sink);
|
||||
var authBuilder = services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
|
||||
options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
|
||||
});
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
|
||||
});
|
||||
});
|
||||
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Add("X-StellaOps-Bootstrap-Key", BootstrapKey);
|
||||
|
||||
var response = await client.PostAsJsonAsync($"/internal/service-accounts/{ServiceAccountId}/revocations", new { tokenId = "missing-token", reason = "cleanup" });
|
||||
|
||||
Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);
|
||||
Assert.Empty(sink.Events);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Revoke_ReturnsFailure_WhenNoActiveTokens()
|
||||
{
|
||||
var sink = new RecordingAuthEventSink();
|
||||
var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T09:00:00Z"));
|
||||
|
||||
using var app = CreateApplication(builder =>
|
||||
{
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
services.RemoveAll<IAuthEventSink>();
|
||||
services.AddSingleton<IAuthEventSink>(sink);
|
||||
services.Replace(ServiceDescriptor.Singleton<TimeProvider>(timeProvider));
|
||||
var authBuilder = services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
|
||||
options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
|
||||
});
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
|
||||
});
|
||||
});
|
||||
|
||||
await using (var scope = app.Services.CreateAsyncScope())
|
||||
{
|
||||
var tokenStore = scope.ServiceProvider.GetRequiredService<IAuthorityTokenStore>();
|
||||
await tokenStore.InsertAsync(new AuthorityTokenDocument
|
||||
{
|
||||
TokenId = "token-revoked",
|
||||
ClientId = "export-center-worker",
|
||||
Status = "revoked",
|
||||
Scope = new List<string> { "jobs:read" },
|
||||
CreatedAt = DateTimeOffset.UtcNow.AddMinutes(-20),
|
||||
Tenant = TenantId,
|
||||
ServiceAccountId = ServiceAccountId,
|
||||
TokenKind = "service_account"
|
||||
}, CancellationToken.None).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Add("X-StellaOps-Bootstrap-Key", BootstrapKey);
|
||||
|
||||
var response = await client.PostAsJsonAsync($"/internal/service-accounts/{ServiceAccountId}/revocations", new { reason = "cleanup" });
|
||||
|
||||
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<ServiceAccountRevokeResponse>(default);
|
||||
Assert.NotNull(payload);
|
||||
Assert.Equal(0, payload!.RevokedCount);
|
||||
Assert.Empty(payload.TokenIds);
|
||||
|
||||
var audit = Assert.Single(sink.Events);
|
||||
Assert.Equal(AuthEventOutcome.Failure, audit.Outcome);
|
||||
Assert.Equal("cleanup", audit.Reason);
|
||||
Assert.Equal("0", GetPropertyValue(audit, "delegation.revoked_count"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Revoke_ReturnsSuccess_WhenPartiallyRevokingTokens()
|
||||
{
|
||||
var sink = new RecordingAuthEventSink();
|
||||
var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T09:30:00Z"));
|
||||
|
||||
using var app = CreateApplication(builder =>
|
||||
{
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
services.RemoveAll<IAuthEventSink>();
|
||||
services.AddSingleton<IAuthEventSink>(sink);
|
||||
services.Replace(ServiceDescriptor.Singleton<TimeProvider>(timeProvider));
|
||||
var authBuilder = services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
|
||||
options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
|
||||
});
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
|
||||
});
|
||||
});
|
||||
|
||||
await using (var scope = app.Services.CreateAsyncScope())
|
||||
{
|
||||
var tokenStore = scope.ServiceProvider.GetRequiredService<IAuthorityTokenStore>();
|
||||
|
||||
await tokenStore.InsertAsync(new AuthorityTokenDocument
|
||||
{
|
||||
TokenId = "token-active",
|
||||
ClientId = "export-center-worker",
|
||||
Status = "valid",
|
||||
Scope = new List<string> { "jobs:read" },
|
||||
CreatedAt = DateTimeOffset.UtcNow.AddMinutes(-10),
|
||||
ExpiresAt = DateTimeOffset.UtcNow.AddMinutes(30),
|
||||
Tenant = TenantId,
|
||||
ServiceAccountId = ServiceAccountId,
|
||||
TokenKind = "service_account"
|
||||
}, CancellationToken.None).ConfigureAwait(false);
|
||||
|
||||
await tokenStore.InsertAsync(new AuthorityTokenDocument
|
||||
{
|
||||
TokenId = "token-already-revoked",
|
||||
ClientId = "export-center-worker",
|
||||
Status = "revoked",
|
||||
Scope = new List<string> { "jobs:read" },
|
||||
CreatedAt = DateTimeOffset.UtcNow.AddMinutes(-25),
|
||||
Tenant = TenantId,
|
||||
ServiceAccountId = ServiceAccountId,
|
||||
TokenKind = "service_account"
|
||||
}, CancellationToken.None).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Add("X-StellaOps-Bootstrap-Key", BootstrapKey);
|
||||
|
||||
var response = await client.PostAsJsonAsync($"/internal/service-accounts/{ServiceAccountId}/revocations", new { reason = "partial" });
|
||||
|
||||
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<ServiceAccountRevokeResponse>(default);
|
||||
Assert.NotNull(payload);
|
||||
Assert.Equal(1, payload!.RevokedCount);
|
||||
Assert.Equal(new[] { "token-active" }, payload.TokenIds);
|
||||
|
||||
var audit = Assert.Single(sink.Events);
|
||||
Assert.Equal(AuthEventOutcome.Success, audit.Outcome);
|
||||
Assert.Equal("partial", audit.Reason);
|
||||
Assert.Equal("1", GetPropertyValue(audit, "delegation.revoked_count"));
|
||||
Assert.Equal("token-active", GetPropertyValue(audit, "delegation.revoked_token[0]"));
|
||||
}
|
||||
|
||||
private WebApplicationFactory<Program> CreateApplication(Action<IWebHostBuilder>? configure = null)
|
||||
{
|
||||
return factory.WithWebHostBuilder(host =>
|
||||
{
|
||||
host.ConfigureAppConfiguration((_, configuration) =>
|
||||
{
|
||||
configuration.AddInMemoryCollection(new Dictionary<string, string?>
|
||||
{
|
||||
["Authority:Bootstrap:Enabled"] = "true",
|
||||
["Authority:Bootstrap:ApiKey"] = BootstrapKey,
|
||||
["Authority:Bootstrap:DefaultIdentityProvider"] = "standard",
|
||||
["Authority:Tenants:0:Id"] = TenantId,
|
||||
["Authority:Tenants:0:DisplayName"] = "Default Tenant",
|
||||
["Authority:Delegation:Quotas:MaxActiveTokens"] = "50",
|
||||
["Authority:Delegation:ServiceAccounts:0:AccountId"] = ServiceAccountId,
|
||||
["Authority:Delegation:ServiceAccounts:0:Tenant"] = TenantId,
|
||||
["Authority:Delegation:ServiceAccounts:0:DisplayName"] = "Observability Exporter",
|
||||
["Authority:Delegation:ServiceAccounts:0:Description"] = "Automates evidence exports.",
|
||||
["Authority:Delegation:ServiceAccounts:0:AllowedScopes:0"] = "jobs:read",
|
||||
["Authority:Delegation:ServiceAccounts:0:AllowedScopes:1"] = "findings:read",
|
||||
["Authority:Delegation:ServiceAccounts:0:AuthorizedClients:0"] = "export-center-worker"
|
||||
});
|
||||
});
|
||||
|
||||
configure?.Invoke(host);
|
||||
});
|
||||
}
|
||||
|
||||
private static string? GetPropertyValue(AuthEventRecord record, string name)
|
||||
{
|
||||
return record.Properties
|
||||
.FirstOrDefault(property => string.Equals(property.Name, name, StringComparison.Ordinal))
|
||||
?.Value.Value;
|
||||
}
|
||||
|
||||
private sealed record ServiceAccountResponse(
|
||||
string AccountId,
|
||||
string Tenant,
|
||||
string? DisplayName,
|
||||
string? Description,
|
||||
bool Enabled,
|
||||
IReadOnlyList<string> AllowedScopes,
|
||||
IReadOnlyList<string> AuthorizedClients);
|
||||
|
||||
private sealed record ServiceAccountTokenResponse(
|
||||
string TokenId,
|
||||
string? ClientId,
|
||||
string Status,
|
||||
IReadOnlyList<string> Scopes,
|
||||
IReadOnlyList<string> Actors);
|
||||
|
||||
private sealed record ServiceAccountRevokeResponse(int RevokedCount, IReadOnlyList<string> TokenIds);
|
||||
|
||||
private sealed class RecordingAuthEventSink : IAuthEventSink
|
||||
{
|
||||
private readonly List<AuthEventRecord> events = new();
|
||||
|
||||
public IReadOnlyList<AuthEventRecord> Events => events;
|
||||
|
||||
public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken)
|
||||
{
|
||||
lock (events)
|
||||
{
|
||||
events.Add(record);
|
||||
}
|
||||
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -59,6 +59,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -91,6 +93,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -123,6 +127,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -160,6 +166,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -176,6 +184,128 @@ public class ClientCredentialsHandlersTests
|
||||
Assert.Equal(new[] { "advisory:ingest" }, grantedScopes);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateClientCredentials_AllowsServiceAccountWhenAuthorized()
|
||||
{
|
||||
var clientDocument = CreateClient(
|
||||
secret: "s3cr3t!",
|
||||
allowedGrantTypes: "client_credentials",
|
||||
allowedScopes: "jobs:read",
|
||||
tenant: "tenant-alpha");
|
||||
|
||||
var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
|
||||
var metadataAccessor = new TestRateLimiterMetadataAccessor();
|
||||
var options = TestHelpers.CreateAuthorityOptions(opts =>
|
||||
{
|
||||
opts.Delegation.Quotas.MaxActiveTokens = 5;
|
||||
});
|
||||
|
||||
var serviceAccount = new AuthorityServiceAccountDocument
|
||||
{
|
||||
AccountId = "svc-observer",
|
||||
Tenant = "tenant-alpha",
|
||||
AllowedScopes = new List<string> { "jobs:read" },
|
||||
AuthorizedClients = new List<string> { clientDocument.ClientId }
|
||||
};
|
||||
|
||||
var serviceAccountStore = new TestServiceAccountStore(serviceAccount);
|
||||
var tokenStore = new TestTokenStore();
|
||||
var handler = new ValidateClientCredentialsHandler(
|
||||
new TestClientStore(clientDocument),
|
||||
registry,
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
metadataAccessor,
|
||||
serviceAccountStore,
|
||||
tokenStore,
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
options,
|
||||
NullLogger<ValidateClientCredentialsHandler>.Instance);
|
||||
|
||||
var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
|
||||
transaction.Request.SetParameter(AuthorityOpenIddictConstants.ServiceAccountParameterName, "svc-observer");
|
||||
|
||||
var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
|
||||
await handler.HandleAsync(context);
|
||||
|
||||
Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}");
|
||||
Assert.True(context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ServiceAccountProperty, out var serviceAccountObj));
|
||||
var resolvedAccount = Assert.IsType<AuthorityServiceAccountDocument>(serviceAccountObj);
|
||||
Assert.Equal("svc-observer", resolvedAccount.AccountId);
|
||||
var grantedScopes = Assert.IsType<string[]>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]);
|
||||
Assert.Contains("jobs:read", grantedScopes);
|
||||
Assert.Equal("svc-observer", metadataAccessor.GetMetadata()?.SubjectId);
|
||||
Assert.Equal(AuthorityTokenKinds.ServiceAccount, context.Transaction.Properties[AuthorityOpenIddictConstants.TokenKindProperty]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateClientCredentials_RejectsWhenServiceAccountQuotaExceeded()
|
||||
{
|
||||
var clientDocument = CreateClient(
|
||||
secret: "s3cr3t!",
|
||||
allowedGrantTypes: "client_credentials",
|
||||
allowedScopes: "jobs:read",
|
||||
tenant: "tenant-alpha");
|
||||
|
||||
var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
|
||||
var metadataAccessor = new TestRateLimiterMetadataAccessor();
|
||||
var options = TestHelpers.CreateAuthorityOptions(opts =>
|
||||
{
|
||||
opts.Delegation.Quotas.MaxActiveTokens = 1;
|
||||
});
|
||||
|
||||
var serviceAccount = new AuthorityServiceAccountDocument
|
||||
{
|
||||
AccountId = "svc-observer",
|
||||
Tenant = "tenant-alpha",
|
||||
AllowedScopes = new List<string> { "jobs:read" },
|
||||
AuthorizedClients = new List<string> { clientDocument.ClientId }
|
||||
};
|
||||
|
||||
var serviceAccountStore = new TestServiceAccountStore(serviceAccount);
|
||||
var tokenStore = new TestTokenStore
|
||||
{
|
||||
Inserted = new AuthorityTokenDocument
|
||||
{
|
||||
TokenId = "existing-token",
|
||||
Status = "valid",
|
||||
Tenant = "tenant-alpha",
|
||||
ClientId = clientDocument.ClientId,
|
||||
ServiceAccountId = "svc-observer",
|
||||
TokenKind = AuthorityTokenKinds.ServiceAccount,
|
||||
CreatedAt = DateTimeOffset.UtcNow.AddMinutes(-1),
|
||||
ExpiresAt = DateTimeOffset.UtcNow.AddMinutes(5),
|
||||
Scope = new List<string> { "jobs:read" }
|
||||
}
|
||||
};
|
||||
|
||||
var handler = new ValidateClientCredentialsHandler(
|
||||
new TestClientStore(clientDocument),
|
||||
registry,
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
metadataAccessor,
|
||||
serviceAccountStore,
|
||||
tokenStore,
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
options,
|
||||
NullLogger<ValidateClientCredentialsHandler>.Instance);
|
||||
|
||||
var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
|
||||
transaction.Request.SetParameter(AuthorityOpenIddictConstants.ServiceAccountParameterName, "svc-observer");
|
||||
|
||||
var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
|
||||
await handler.HandleAsync(context);
|
||||
|
||||
Assert.True(context.IsRejected);
|
||||
Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error);
|
||||
Assert.Equal("Delegation token quota exceeded for service account.", context.ErrorDescription);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateClientCredentials_RejectsAdvisoryReadWithoutAocVerify()
|
||||
{
|
||||
@@ -193,6 +323,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -227,6 +359,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -265,6 +399,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -304,6 +440,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -336,6 +474,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -370,6 +510,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -403,6 +545,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -437,6 +581,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -471,6 +617,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -503,6 +651,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -538,6 +688,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -575,6 +727,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -610,6 +764,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -646,6 +802,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -682,6 +840,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -717,6 +877,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -751,6 +913,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -787,6 +951,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -822,6 +988,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -857,6 +1025,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -894,6 +1064,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -931,6 +1103,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -971,6 +1145,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1014,6 +1190,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1050,6 +1228,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1084,6 +1264,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1120,6 +1302,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1156,6 +1340,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1196,6 +1382,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1234,6 +1422,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1268,6 +1458,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1302,6 +1494,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1349,6 +1543,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1384,6 +1580,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1418,6 +1616,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1452,6 +1652,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1494,6 +1696,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1529,6 +1733,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1562,6 +1768,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1596,6 +1804,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1631,6 +1841,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1665,6 +1877,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1700,6 +1914,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1733,6 +1949,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1767,6 +1985,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1802,6 +2022,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -1837,6 +2059,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
sink,
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -2094,6 +2318,8 @@ public class ClientCredentialsHandlersTests
|
||||
var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
|
||||
var auditSink = new TestAuthEventSink();
|
||||
var metadataAccessor = new TestRateLimiterMetadataAccessor();
|
||||
var serviceAccountStore = new TestServiceAccountStore();
|
||||
var tokenStore = new TestTokenStore();
|
||||
var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() };
|
||||
httpContextAccessor.HttpContext!.Connection.ClientCertificate = certificate;
|
||||
|
||||
@@ -2105,6 +2331,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
auditSink,
|
||||
metadataAccessor,
|
||||
serviceAccountStore,
|
||||
tokenStore,
|
||||
TimeProvider.System,
|
||||
validator,
|
||||
httpContextAccessor,
|
||||
@@ -2152,6 +2380,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
validator,
|
||||
httpContextAccessor,
|
||||
@@ -2192,6 +2422,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -2238,6 +2470,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
new TestAuthEventSink(),
|
||||
new TestRateLimiterMetadataAccessor(),
|
||||
new TestServiceAccountStore(),
|
||||
new TestTokenStore(),
|
||||
TimeProvider.System,
|
||||
certificateValidator,
|
||||
httpContextAccessor,
|
||||
@@ -2272,6 +2506,7 @@ public class ClientCredentialsHandlersTests
|
||||
var sessionAccessor = new NullMongoSessionAccessor();
|
||||
var authSink = new TestAuthEventSink();
|
||||
var metadataAccessor = new TestRateLimiterMetadataAccessor();
|
||||
var serviceAccountStore = new TestServiceAccountStore();
|
||||
var options = TestHelpers.CreateAuthorityOptions();
|
||||
var validateHandler = new ValidateClientCredentialsHandler(
|
||||
new TestClientStore(clientDocument),
|
||||
@@ -2279,6 +2514,8 @@ public class ClientCredentialsHandlersTests
|
||||
TestActivitySource,
|
||||
authSink,
|
||||
metadataAccessor,
|
||||
serviceAccountStore,
|
||||
tokenStore,
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
@@ -2335,6 +2572,88 @@ public class ClientCredentialsHandlersTests
|
||||
Assert.Equal("tenant-alpha", persisted.Tenant);
|
||||
Assert.Equal(new[] { "jobs:trigger" }, persisted.Scope);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task HandleClientCredentials_PersistsServiceAccountMetadata()
|
||||
{
|
||||
var clientDocument = CreateClient(
|
||||
secret: "s3cr3t!",
|
||||
allowedGrantTypes: "client_credentials",
|
||||
allowedScopes: "jobs:read",
|
||||
tenant: "tenant-alpha");
|
||||
|
||||
var serviceAccount = new AuthorityServiceAccountDocument
|
||||
{
|
||||
AccountId = "svc-ops",
|
||||
Tenant = "tenant-alpha",
|
||||
AllowedScopes = new List<string> { "jobs:read" },
|
||||
AuthorizedClients = new List<string> { clientDocument.ClientId }
|
||||
};
|
||||
|
||||
var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
|
||||
var tokenStore = new TestTokenStore();
|
||||
var sessionAccessor = new NullMongoSessionAccessor();
|
||||
var authSink = new TestAuthEventSink();
|
||||
var metadataAccessor = new TestRateLimiterMetadataAccessor();
|
||||
var serviceAccountStore = new TestServiceAccountStore(serviceAccount);
|
||||
var options = TestHelpers.CreateAuthorityOptions(opts =>
|
||||
{
|
||||
opts.Delegation.Quotas.MaxActiveTokens = 5;
|
||||
});
|
||||
|
||||
var validateHandler = new ValidateClientCredentialsHandler(
|
||||
new TestClientStore(clientDocument),
|
||||
registry,
|
||||
TestActivitySource,
|
||||
authSink,
|
||||
metadataAccessor,
|
||||
serviceAccountStore,
|
||||
tokenStore,
|
||||
TimeProvider.System,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
options,
|
||||
NullLogger<ValidateClientCredentialsHandler>.Instance);
|
||||
|
||||
var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
|
||||
transaction.Options.AccessTokenLifetime = TimeSpan.FromMinutes(10);
|
||||
transaction.Request.SetParameter(AuthorityOpenIddictConstants.ServiceAccountParameterName, "svc-ops");
|
||||
|
||||
var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
|
||||
await validateHandler.HandleAsync(validateContext);
|
||||
Assert.False(validateContext.IsRejected);
|
||||
|
||||
var handleHandler = new HandleClientCredentialsHandler(
|
||||
registry,
|
||||
tokenStore,
|
||||
sessionAccessor,
|
||||
metadataAccessor,
|
||||
TimeProvider.System,
|
||||
TestActivitySource,
|
||||
NullLogger<HandleClientCredentialsHandler>.Instance);
|
||||
var persistHandler = new PersistTokensHandler(tokenStore, sessionAccessor, TimeProvider.System, TestActivitySource, NullLogger<PersistTokensHandler>.Instance);
|
||||
|
||||
var handleContext = new OpenIddictServerEvents.HandleTokenRequestContext(transaction);
|
||||
await handleHandler.HandleAsync(handleContext);
|
||||
Assert.True(handleContext.IsRequestHandled);
|
||||
|
||||
var signInContext = new OpenIddictServerEvents.ProcessSignInContext(transaction)
|
||||
{
|
||||
Principal = handleContext.Principal,
|
||||
AccessTokenPrincipal = handleContext.Principal
|
||||
};
|
||||
|
||||
await persistHandler.HandleAsync(signInContext);
|
||||
|
||||
var inserted = tokenStore.Inserted;
|
||||
Assert.NotNull(inserted);
|
||||
Assert.Equal("svc-ops", inserted!.ServiceAccountId);
|
||||
Assert.Equal("service_account", inserted.TokenKind);
|
||||
Assert.NotNull(inserted.ActorChain);
|
||||
Assert.Contains(clientDocument.ClientId, inserted.ActorChain!);
|
||||
Assert.Equal("tenant-alpha", inserted.Tenant);
|
||||
Assert.Contains("jobs:read", inserted.Scope);
|
||||
}
|
||||
}
|
||||
|
||||
public class TokenValidationHandlersTests
|
||||
@@ -2953,6 +3272,65 @@ internal sealed class TestClientStore : IAuthorityClientStore
|
||||
=> ValueTask.FromResult(clients.Remove(clientId));
|
||||
}
|
||||
|
||||
internal sealed class TestServiceAccountStore : IAuthorityServiceAccountStore
|
||||
{
|
||||
private readonly Dictionary<string, AuthorityServiceAccountDocument> accounts = new(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
public TestServiceAccountStore(params AuthorityServiceAccountDocument[] documents)
|
||||
{
|
||||
foreach (var document in documents)
|
||||
{
|
||||
accounts[NormalizeKey(document.AccountId)] = document;
|
||||
}
|
||||
}
|
||||
|
||||
public ValueTask<AuthorityServiceAccountDocument?> FindByAccountIdAsync(string accountId, CancellationToken cancellationToken, IClientSessionHandle? session = null)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(accountId))
|
||||
{
|
||||
return ValueTask.FromResult<AuthorityServiceAccountDocument?>(null);
|
||||
}
|
||||
|
||||
accounts.TryGetValue(NormalizeKey(accountId), out var document);
|
||||
return ValueTask.FromResult(document);
|
||||
}
|
||||
|
||||
public ValueTask<IReadOnlyList<AuthorityServiceAccountDocument>> ListByTenantAsync(string tenant, CancellationToken cancellationToken, IClientSessionHandle? session = null)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
return ValueTask.FromResult<IReadOnlyList<AuthorityServiceAccountDocument>>(Array.Empty<AuthorityServiceAccountDocument>());
|
||||
}
|
||||
|
||||
var normalizedTenant = tenant.Trim().ToLowerInvariant();
|
||||
var results = accounts.Values
|
||||
.Where(account => string.Equals(account.Tenant, normalizedTenant, StringComparison.OrdinalIgnoreCase))
|
||||
.ToList();
|
||||
|
||||
return ValueTask.FromResult<IReadOnlyList<AuthorityServiceAccountDocument>>(results);
|
||||
}
|
||||
|
||||
public ValueTask UpsertAsync(AuthorityServiceAccountDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(document);
|
||||
accounts[NormalizeKey(document.AccountId)] = document;
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
public ValueTask<bool> DeleteAsync(string accountId, CancellationToken cancellationToken, IClientSessionHandle? session = null)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(accountId))
|
||||
{
|
||||
return ValueTask.FromResult(false);
|
||||
}
|
||||
|
||||
return ValueTask.FromResult(accounts.Remove(NormalizeKey(accountId)));
|
||||
}
|
||||
|
||||
private static string NormalizeKey(string value)
|
||||
=> string.IsNullOrWhiteSpace(value) ? string.Empty : value.Trim().ToLowerInvariant();
|
||||
}
|
||||
|
||||
internal sealed class TestTokenStore : IAuthorityTokenStore
|
||||
{
|
||||
public AuthorityTokenDocument? Inserted { get; set; }
|
||||
@@ -3001,6 +3379,47 @@ internal sealed class TestTokenStore : IAuthorityTokenStore
|
||||
return ValueTask.FromResult<IReadOnlyList<AuthorityTokenDocument>>(Array.Empty<AuthorityTokenDocument>());
|
||||
}
|
||||
|
||||
public ValueTask<long> CountActiveDelegationTokensAsync(string tenant, string? serviceAccountId, CancellationToken cancellationToken, IClientSessionHandle? session = null)
|
||||
{
|
||||
if (Inserted is null)
|
||||
{
|
||||
return ValueTask.FromResult(0L);
|
||||
}
|
||||
|
||||
var tenantMatches = string.Equals(Inserted.Tenant, tenant, StringComparison.OrdinalIgnoreCase);
|
||||
var accountMatches = string.IsNullOrWhiteSpace(serviceAccountId) ||
|
||||
string.Equals(Inserted.ServiceAccountId, serviceAccountId, StringComparison.OrdinalIgnoreCase);
|
||||
var active = string.Equals(Inserted.Status, "valid", StringComparison.OrdinalIgnoreCase) &&
|
||||
(!Inserted.ExpiresAt.HasValue || Inserted.ExpiresAt.Value > DateTimeOffset.UtcNow) &&
|
||||
!string.IsNullOrWhiteSpace(Inserted.ServiceAccountId) &&
|
||||
string.Equals(Inserted.TokenKind, AuthorityTokenKinds.ServiceAccount, StringComparison.OrdinalIgnoreCase);
|
||||
|
||||
return ValueTask.FromResult(tenantMatches && accountMatches && active ? 1L : 0L);
|
||||
}
|
||||
|
||||
public ValueTask<IReadOnlyList<AuthorityTokenDocument>> ListActiveDelegationTokensAsync(string tenant, string? serviceAccountId, CancellationToken cancellationToken, IClientSessionHandle? session = null)
|
||||
{
|
||||
if (Inserted is null)
|
||||
{
|
||||
return ValueTask.FromResult<IReadOnlyList<AuthorityTokenDocument>>(Array.Empty<AuthorityTokenDocument>());
|
||||
}
|
||||
|
||||
var tenantMatches = string.Equals(Inserted.Tenant, tenant, StringComparison.OrdinalIgnoreCase);
|
||||
var accountMatches = string.IsNullOrWhiteSpace(serviceAccountId) ||
|
||||
string.Equals(Inserted.ServiceAccountId, serviceAccountId, StringComparison.OrdinalIgnoreCase);
|
||||
var active = string.Equals(Inserted.Status, "valid", StringComparison.OrdinalIgnoreCase) &&
|
||||
(!Inserted.ExpiresAt.HasValue || Inserted.ExpiresAt.Value > DateTimeOffset.UtcNow) &&
|
||||
!string.IsNullOrWhiteSpace(Inserted.ServiceAccountId) &&
|
||||
string.Equals(Inserted.TokenKind, AuthorityTokenKinds.ServiceAccount, StringComparison.OrdinalIgnoreCase);
|
||||
|
||||
if (tenantMatches && accountMatches && active)
|
||||
{
|
||||
return ValueTask.FromResult<IReadOnlyList<AuthorityTokenDocument>>(new[] { Inserted });
|
||||
}
|
||||
|
||||
return ValueTask.FromResult<IReadOnlyList<AuthorityTokenDocument>>(Array.Empty<AuthorityTokenDocument>());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
internal sealed class TestClaimsEnricher : IClaimsEnricher
|
||||
|
||||
@@ -47,8 +47,9 @@ public sealed class TokenPersistenceIntegrationTests
|
||||
|
||||
await using var provider = await BuildMongoProviderAsync(clock);
|
||||
|
||||
var clientStore = provider.GetRequiredService<IAuthorityClientStore>();
|
||||
var tokenStore = provider.GetRequiredService<IAuthorityTokenStore>();
|
||||
var clientStore = provider.GetRequiredService<IAuthorityClientStore>();
|
||||
var tokenStore = provider.GetRequiredService<IAuthorityTokenStore>();
|
||||
var serviceAccountStore = provider.GetRequiredService<IAuthorityServiceAccountStore>();
|
||||
|
||||
var clientDocument = TestHelpers.CreateClient(
|
||||
secret: "s3cr3t!",
|
||||
@@ -67,7 +68,19 @@ public sealed class TokenPersistenceIntegrationTests
|
||||
await using var scope = provider.CreateAsyncScope();
|
||||
var sessionAccessor = scope.ServiceProvider.GetRequiredService<IAuthorityMongoSessionAccessor>();
|
||||
var options = TestHelpers.CreateAuthorityOptions();
|
||||
var validateHandler = new ValidateClientCredentialsHandler(clientStore, registry, TestActivitySource, authSink, metadataAccessor, clock, new NoopCertificateValidator(), new HttpContextAccessor(), options, NullLogger<ValidateClientCredentialsHandler>.Instance);
|
||||
var validateHandler = new ValidateClientCredentialsHandler(
|
||||
clientStore,
|
||||
registry,
|
||||
TestActivitySource,
|
||||
authSink,
|
||||
metadataAccessor,
|
||||
serviceAccountStore,
|
||||
tokenStore,
|
||||
clock,
|
||||
new NoopCertificateValidator(),
|
||||
new HttpContextAccessor(),
|
||||
options,
|
||||
NullLogger<ValidateClientCredentialsHandler>.Instance);
|
||||
var handleHandler = new HandleClientCredentialsHandler(registry, tokenStore, sessionAccessor, metadataAccessor, clock, TestActivitySource, NullLogger<HandleClientCredentialsHandler>.Instance);
|
||||
var persistHandler = new PersistTokensHandler(tokenStore, sessionAccessor, clock, TestActivitySource, NullLogger<PersistTokensHandler>.Instance);
|
||||
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
namespace StellaOps.Authority.OpenIddict;
|
||||
|
||||
internal static class AuthorityOpenIddictConstants
|
||||
{
|
||||
internal const string ProviderParameterName = "authority_provider";
|
||||
internal const string ProviderTransactionProperty = "authority:identity_provider";
|
||||
internal const string ClientTransactionProperty = "authority:client";
|
||||
internal const string ClientProviderTransactionProperty = "authority:client_provider";
|
||||
internal const string ClientGrantedScopesProperty = "authority:client_granted_scopes";
|
||||
internal const string TokenTransactionProperty = "authority:token";
|
||||
internal const string AuditCorrelationProperty = "authority:audit_correlation_id";
|
||||
namespace StellaOps.Authority.OpenIddict;
|
||||
|
||||
internal static class AuthorityOpenIddictConstants
|
||||
{
|
||||
internal const string ProviderParameterName = "authority_provider";
|
||||
internal const string ProviderTransactionProperty = "authority:identity_provider";
|
||||
internal const string ClientTransactionProperty = "authority:client";
|
||||
internal const string ClientProviderTransactionProperty = "authority:client_provider";
|
||||
internal const string ClientGrantedScopesProperty = "authority:client_granted_scopes";
|
||||
internal const string TokenTransactionProperty = "authority:token";
|
||||
internal const string AuditCorrelationProperty = "authority:audit_correlation_id";
|
||||
internal const string AuditClientIdProperty = "authority:audit_client_id";
|
||||
internal const string AuditProviderProperty = "authority:audit_provider";
|
||||
internal const string AuditConfidentialProperty = "authority:audit_confidential";
|
||||
@@ -46,14 +46,9 @@ internal static class AuthorityOpenIddictConstants
|
||||
internal const string BackfillTicketProperty = "authority:backfill_ticket";
|
||||
internal const string BackfillReasonParameterName = "backfill_reason";
|
||||
internal const string BackfillTicketParameterName = "backfill_ticket";
|
||||
internal const string PolicyReasonProperty = "authority:policy_reason";
|
||||
internal const string PolicyTicketProperty = "authority:policy_ticket";
|
||||
internal const string PolicyDigestProperty = "authority:policy_digest";
|
||||
internal const string PolicyOperationProperty = "authority:policy_operation";
|
||||
internal const string PolicyAuditPropertiesProperty = "authority:policy_audit_properties";
|
||||
internal const string PolicyReasonParameterName = "policy_reason";
|
||||
internal const string PolicyTicketParameterName = "policy_ticket";
|
||||
internal const string PolicyDigestParameterName = "policy_digest";
|
||||
internal const string PolicyOperationPublishValue = "publish";
|
||||
internal const string PolicyOperationPromoteValue = "promote";
|
||||
internal const string ServiceAccountParameterName = "service_account";
|
||||
internal const string DelegationActorParameterName = "delegation_actor";
|
||||
internal const string ServiceAccountProperty = "authority:service_account";
|
||||
internal const string TokenKindProperty = "authority:token_kind";
|
||||
internal const string ActorChainProperty = "authority:actor_chain";
|
||||
}
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
namespace StellaOps.Authority.OpenIddict;
|
||||
|
||||
internal static class AuthorityTokenKinds
|
||||
{
|
||||
internal const string ServiceAccount = "service_account";
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -31,9 +31,10 @@ using StellaOps.Authority.Notifications.Ack;
|
||||
using StellaOps.Authority.Plugins.Abstractions;
|
||||
using StellaOps.Authority.Plugins;
|
||||
using StellaOps.Authority.Bootstrap;
|
||||
using StellaOps.Authority.Storage.Mongo.Extensions;
|
||||
using StellaOps.Authority.Storage.Mongo.Initialization;
|
||||
using StellaOps.Authority.Storage.Mongo.Stores;
|
||||
using StellaOps.Authority.Storage.Mongo.Extensions;
|
||||
using StellaOps.Authority.Storage.Mongo.Initialization;
|
||||
using StellaOps.Authority.Storage.Mongo.Stores;
|
||||
using StellaOps.Authority.Storage.Mongo.Sessions;
|
||||
using StellaOps.Authority.RateLimiting;
|
||||
using StellaOps.Configuration;
|
||||
using StellaOps.Plugin.DependencyInjection;
|
||||
@@ -51,7 +52,6 @@ using StellaOps.Authority.Signing;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Cryptography.Kms;
|
||||
using StellaOps.Authority.Storage.Mongo.Documents;
|
||||
using StellaOps.Authority.Storage.Mongo.Stores;
|
||||
using StellaOps.Authority.Security;
|
||||
using StellaOps.Authority.OpenApi;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
@@ -1201,11 +1201,11 @@ if (authorityOptions.Bootstrap.Enabled)
|
||||
}
|
||||
});
|
||||
|
||||
bootstrapGroup.MapGet("/revocations/export", async (
|
||||
AuthorityRevocationExportService exportService,
|
||||
CancellationToken cancellationToken) =>
|
||||
{
|
||||
var package = await exportService.ExportAsync(cancellationToken).ConfigureAwait(false);
|
||||
bootstrapGroup.MapGet("/revocations/export", async (
|
||||
AuthorityRevocationExportService exportService,
|
||||
CancellationToken cancellationToken) =>
|
||||
{
|
||||
var package = await exportService.ExportAsync(cancellationToken).ConfigureAwait(false);
|
||||
var build = package.Bundle;
|
||||
|
||||
var response = new RevocationExportResponse
|
||||
@@ -1232,14 +1232,272 @@ if (authorityOptions.Bootstrap.Enabled)
|
||||
}
|
||||
};
|
||||
|
||||
return Results.Ok(response);
|
||||
});
|
||||
|
||||
bootstrapGroup.MapPost("/signing/rotate", (
|
||||
SigningRotationRequest? request,
|
||||
AuthoritySigningKeyManager signingManager,
|
||||
ILogger<AuthoritySigningKeyManager> signingLogger) =>
|
||||
{
|
||||
return Results.Ok(response);
|
||||
});
|
||||
|
||||
bootstrapGroup.MapGet("/service-accounts", async (
|
||||
string? tenant,
|
||||
IAuthorityServiceAccountStore accountStore,
|
||||
CancellationToken cancellationToken) =>
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(tenant))
|
||||
{
|
||||
return Results.BadRequest(new { error = "invalid_request", message = "Query parameter 'tenant' is required." });
|
||||
}
|
||||
|
||||
var documents = await accountStore.ListByTenantAsync(tenant, cancellationToken).ConfigureAwait(false);
|
||||
if (documents.Count == 0)
|
||||
{
|
||||
return Results.Ok(Array.Empty<ServiceAccountResponse>());
|
||||
}
|
||||
|
||||
var response = documents
|
||||
.OrderBy(account => account.AccountId, StringComparer.Ordinal)
|
||||
.Select(MapServiceAccount)
|
||||
.ToArray();
|
||||
|
||||
return Results.Ok(response);
|
||||
});
|
||||
|
||||
bootstrapGroup.MapGet("/service-accounts/{accountId}/tokens", async (
|
||||
string accountId,
|
||||
IAuthorityServiceAccountStore accountStore,
|
||||
IAuthorityTokenStore tokenStore,
|
||||
IAuthorityMongoSessionAccessor sessionAccessor,
|
||||
CancellationToken cancellationToken) =>
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(accountId))
|
||||
{
|
||||
return Results.BadRequest(new { error = "invalid_request", message = "Account identifier is required." });
|
||||
}
|
||||
|
||||
var document = await accountStore.FindByAccountIdAsync(accountId, cancellationToken).ConfigureAwait(false);
|
||||
if (document is null)
|
||||
{
|
||||
return Results.NotFound();
|
||||
}
|
||||
|
||||
var session = await sessionAccessor.GetSessionAsync(cancellationToken).ConfigureAwait(false);
|
||||
var tokens = await tokenStore.ListActiveDelegationTokensAsync(document.Tenant, document.AccountId, cancellationToken, session).ConfigureAwait(false);
|
||||
|
||||
var response = tokens
|
||||
.Select(MapDelegatedToken)
|
||||
.ToArray();
|
||||
|
||||
return Results.Ok(response);
|
||||
});
|
||||
|
||||
bootstrapGroup.MapPost("/service-accounts/{accountId}/revocations", async (
|
||||
string accountId,
|
||||
ServiceAccountRevokeRequest? request,
|
||||
HttpContext httpContext,
|
||||
IAuthorityServiceAccountStore accountStore,
|
||||
IAuthorityTokenStore tokenStore,
|
||||
IAuthorityMongoSessionAccessor sessionAccessor,
|
||||
IAuthEventSink auditSink,
|
||||
TimeProvider timeProvider,
|
||||
CancellationToken cancellationToken) =>
|
||||
{
|
||||
if (request is null)
|
||||
{
|
||||
return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." });
|
||||
}
|
||||
|
||||
var document = await accountStore.FindByAccountIdAsync(accountId, cancellationToken).ConfigureAwait(false);
|
||||
if (document is null)
|
||||
{
|
||||
return Results.NotFound();
|
||||
}
|
||||
|
||||
var session = await sessionAccessor.GetSessionAsync(cancellationToken).ConfigureAwait(false);
|
||||
var now = timeProvider.GetUtcNow();
|
||||
|
||||
var targetTokens = new List<AuthorityTokenDocument>();
|
||||
if (!string.IsNullOrWhiteSpace(request.TokenId))
|
||||
{
|
||||
var token = await tokenStore.FindByTokenIdAsync(request.TokenId.Trim(), cancellationToken, session).ConfigureAwait(false);
|
||||
if (token is not null &&
|
||||
string.Equals(token.ServiceAccountId, document.AccountId, StringComparison.OrdinalIgnoreCase) &&
|
||||
string.Equals(token.TokenKind, "service_account", StringComparison.OrdinalIgnoreCase) &&
|
||||
string.Equals(token.Tenant, document.Tenant, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
targetTokens.Add(token);
|
||||
}
|
||||
else
|
||||
{
|
||||
return Results.NotFound(new { error = "not_found", message = "Delegated token not found for service account." });
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
var active = await tokenStore.ListActiveDelegationTokensAsync(document.Tenant, document.AccountId, cancellationToken, session).ConfigureAwait(false);
|
||||
targetTokens.AddRange(active);
|
||||
}
|
||||
|
||||
if (targetTokens.Count == 0)
|
||||
{
|
||||
await auditSink.WriteAsync(new AuthEventRecord
|
||||
{
|
||||
EventType = "authority.delegation.revoked",
|
||||
OccurredAt = now,
|
||||
Outcome = AuthEventOutcome.Failure,
|
||||
Reason = request.Reason ?? "no_active_tokens",
|
||||
CorrelationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture),
|
||||
Subject = new AuthEventSubject
|
||||
{
|
||||
SubjectId = ClassifiedString.Public(document.AccountId),
|
||||
Realm = ClassifiedString.Public(document.Tenant)
|
||||
},
|
||||
Tenant = ClassifiedString.Public(document.Tenant),
|
||||
Properties = new[]
|
||||
{
|
||||
new AuthEventProperty { Name = "delegation.service_account", Value = ClassifiedString.Public(document.AccountId) },
|
||||
new AuthEventProperty { Name = "delegation.revoked_count", Value = ClassifiedString.Public("0") }
|
||||
}
|
||||
}, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
return Results.Ok(new ServiceAccountRevokeResponse(0, Array.Empty<string>()));
|
||||
}
|
||||
|
||||
var revokedTokens = new List<string>(targetTokens.Count);
|
||||
foreach (var token in targetTokens)
|
||||
{
|
||||
if (string.Equals(token.Status, "revoked", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
["delegation.service_account"] = document.AccountId
|
||||
};
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(request.Reason))
|
||||
{
|
||||
metadata["delegation.reason"] = request.Reason;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(request.TokenId))
|
||||
{
|
||||
metadata["delegation.token"] = request.TokenId;
|
||||
}
|
||||
|
||||
await tokenStore.UpdateStatusAsync(
|
||||
token.TokenId,
|
||||
"revoked",
|
||||
now,
|
||||
string.IsNullOrWhiteSpace(request.Reason) ? "delegation_revoked" : request.Reason,
|
||||
request.ReasonDescription,
|
||||
metadata,
|
||||
cancellationToken,
|
||||
session).ConfigureAwait(false);
|
||||
|
||||
revokedTokens.Add(token.TokenId);
|
||||
}
|
||||
|
||||
var orderedRevokedTokens = revokedTokens
|
||||
.OrderBy(tokenId => tokenId, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
var properties = new List<AuthEventProperty>
|
||||
{
|
||||
new() { Name = "delegation.service_account", Value = ClassifiedString.Public(document.AccountId) },
|
||||
new() { Name = "delegation.revoked_count", Value = ClassifiedString.Public(orderedRevokedTokens.Length.ToString(CultureInfo.InvariantCulture)) }
|
||||
};
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(request.Reason))
|
||||
{
|
||||
properties.Add(new AuthEventProperty { Name = "delegation.reason", Value = ClassifiedString.Public(request.Reason) });
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(request.ReasonDescription))
|
||||
{
|
||||
properties.Add(new AuthEventProperty { Name = "delegation.reason_description", Value = ClassifiedString.Public(request.ReasonDescription) });
|
||||
}
|
||||
|
||||
for (var index = 0; index < orderedRevokedTokens.Length; index++)
|
||||
{
|
||||
properties.Add(new AuthEventProperty
|
||||
{
|
||||
Name = $"delegation.revoked_token[{index}]",
|
||||
Value = ClassifiedString.Public(orderedRevokedTokens[index])
|
||||
});
|
||||
}
|
||||
|
||||
await auditSink.WriteAsync(new AuthEventRecord
|
||||
{
|
||||
EventType = "authority.delegation.revoked",
|
||||
OccurredAt = now,
|
||||
Outcome = orderedRevokedTokens.Length > 0 ? AuthEventOutcome.Success : AuthEventOutcome.Failure,
|
||||
Reason = request.Reason,
|
||||
CorrelationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture),
|
||||
Subject = new AuthEventSubject
|
||||
{
|
||||
SubjectId = ClassifiedString.Public(document.AccountId),
|
||||
Realm = ClassifiedString.Public(document.Tenant)
|
||||
},
|
||||
Tenant = ClassifiedString.Public(document.Tenant),
|
||||
Properties = properties
|
||||
}, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
return Results.Ok(new ServiceAccountRevokeResponse(orderedRevokedTokens.Length, orderedRevokedTokens));
|
||||
});
|
||||
|
||||
static ServiceAccountResponse MapServiceAccount(AuthorityServiceAccountDocument document)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(document);
|
||||
|
||||
var scopes = document.AllowedScopes is { Count: > 0 }
|
||||
? document.AllowedScopes.OrderBy(scope => scope, StringComparer.Ordinal).ToArray()
|
||||
: Array.Empty<string>();
|
||||
|
||||
var clients = document.AuthorizedClients is { Count: > 0 }
|
||||
? document.AuthorizedClients.OrderBy(client => client, StringComparer.Ordinal).ToArray()
|
||||
: Array.Empty<string>();
|
||||
|
||||
return new ServiceAccountResponse(
|
||||
document.AccountId,
|
||||
document.Tenant,
|
||||
document.DisplayName,
|
||||
document.Description,
|
||||
document.Enabled,
|
||||
scopes,
|
||||
clients);
|
||||
}
|
||||
|
||||
static ServiceAccountTokenResponse MapDelegatedToken(AuthorityTokenDocument document)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(document);
|
||||
|
||||
var scopes = document.Scope is { Count: > 0 }
|
||||
? document.Scope.OrderBy(scope => scope, StringComparer.Ordinal).ToArray()
|
||||
: Array.Empty<string>();
|
||||
|
||||
var actors = document.ActorChain is { Count: > 0 }
|
||||
? document.ActorChain
|
||||
.Where(actor => !string.IsNullOrWhiteSpace(actor))
|
||||
.Select(actor => actor.Trim())
|
||||
.Where(actor => actor.Length > 0)
|
||||
.OrderBy(actor => actor, StringComparer.Ordinal)
|
||||
.ToArray()
|
||||
: Array.Empty<string>();
|
||||
|
||||
return new ServiceAccountTokenResponse(
|
||||
document.TokenId,
|
||||
document.ClientId,
|
||||
document.Status,
|
||||
document.CreatedAt,
|
||||
document.ExpiresAt,
|
||||
document.SenderConstraint,
|
||||
scopes,
|
||||
actors);
|
||||
}
|
||||
|
||||
bootstrapGroup.MapPost("/signing/rotate", (
|
||||
SigningRotationRequest? request,
|
||||
AuthoritySigningKeyManager signingManager,
|
||||
ILogger<AuthoritySigningKeyManager> signingLogger) =>
|
||||
{
|
||||
if (request is null)
|
||||
{
|
||||
signingLogger.LogWarning("Signing rotation request payload missing.");
|
||||
@@ -2457,16 +2715,16 @@ app.MapGet("/jwks", (AuthorityJwksService jwksService, HttpContext context) =>
|
||||
.WithName("JsonWebKeySet");
|
||||
|
||||
// Ensure signing key manager initialises key material on startup.
|
||||
app.Services.GetRequiredService<AuthorityAckTokenKeyManager>();
|
||||
app.Services.GetRequiredService<AuthoritySigningKeyManager>();
|
||||
|
||||
app.Run();
|
||||
|
||||
static PluginHostOptions BuildPluginHostOptions(StellaOpsAuthorityOptions options, string basePath)
|
||||
{
|
||||
var pluginDirectory = options.PluginDirectories.FirstOrDefault();
|
||||
var hostOptions = new PluginHostOptions
|
||||
{
|
||||
app.Services.GetRequiredService<AuthorityAckTokenKeyManager>();
|
||||
app.Services.GetRequiredService<AuthoritySigningKeyManager>();
|
||||
|
||||
app.Run();
|
||||
|
||||
static PluginHostOptions BuildPluginHostOptions(StellaOpsAuthorityOptions options, string basePath)
|
||||
{
|
||||
var pluginDirectory = options.PluginDirectories.FirstOrDefault();
|
||||
var hostOptions = new PluginHostOptions
|
||||
{
|
||||
BaseDirectory = basePath,
|
||||
PluginsDirectory = string.IsNullOrWhiteSpace(pluginDirectory)
|
||||
? "StellaOps.Authority.PluginBinaries"
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace StellaOps.Authority;
|
||||
|
||||
internal sealed record ServiceAccountResponse(
|
||||
string AccountId,
|
||||
string Tenant,
|
||||
string? DisplayName,
|
||||
string? Description,
|
||||
bool Enabled,
|
||||
IReadOnlyList<string> AllowedScopes,
|
||||
IReadOnlyList<string> AuthorizedClients);
|
||||
|
||||
internal sealed record ServiceAccountTokenResponse(
|
||||
string TokenId,
|
||||
string? ClientId,
|
||||
string Status,
|
||||
DateTimeOffset CreatedAt,
|
||||
DateTimeOffset? ExpiresAt,
|
||||
string? SenderConstraint,
|
||||
IReadOnlyList<string> Scopes,
|
||||
IReadOnlyList<string> Actors);
|
||||
|
||||
internal sealed record ServiceAccountRevokeRequest(
|
||||
string? TokenId,
|
||||
string? Reason,
|
||||
string? ReasonDescription);
|
||||
|
||||
internal sealed record ServiceAccountRevokeResponse(int RevokedCount, IReadOnlyList<string> TokenIds);
|
||||
@@ -130,6 +130,7 @@
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-28: Tidied advisory raw idempotency migration to avoid LINQ-on-`BsonValue` (explicit array copy) while continuing duplicate guardrail validation; scoped scanner/policy token call sites updated to honor new metadata parameter.
|
||||
| AUTH-TEN-49-001 | DOING (2025-11-02) | Authority Core & Security Guild | AUTH-TEN-47-001 | Implement service accounts & delegation tokens (`act` chain), per-tenant quotas, audit stream of auth decisions, and revocation APIs. | Service tokens minted with scopes/TTL; delegation logged; quotas configurable; audit stream live; docs updated. |
|
||||
> 2025-11-02: Added Mongo service-account store, seeded options/collection initializers, token persistence metadata (`tokenKind`, `serviceAccountId`, `actorChain`), and docs/config samples. Introduced quota checks + tests covering service account issuance and persistence.
|
||||
|
||||
## Observability & Forensics (Epic 15)
|
||||
|
||||
|
||||
@@ -6,6 +6,6 @@
|
||||
| ISSUER-30-003 | DOING | Issuer Directory Guild, Policy Guild | ISSUER-30-001 | Provide trust weight APIs and tenant overrides with validation (+/- bounds) and audit trails. | Trust overrides persisted; policy integration confirmed; tests cover overrides. |
|
||||
| ISSUER-30-004 | DONE (2025-11-01) | Issuer Directory Guild, VEX Lens Guild | ISSUER-30-001..003 | Integrate with VEX Lens and Excitator signature verification (client SDK, caching, retries). | Lens/Excitator resolve issuer metadata via SDK; integration tests cover network failures. |
|
||||
| ISSUER-30-005 | DONE (2025-11-01) | Issuer Directory Guild, Observability Guild | ISSUER-30-001..004 | Instrument metrics/logs (issuer changes, key rotation, verification failures) and dashboards/alerts. | Telemetry live; alerts configured; docs updated. |
|
||||
| ISSUER-30-006 | TODO | Issuer Directory Guild, DevOps Guild | ISSUER-30-001..005 | Provide deployment manifests, backup/restore, secure secret storage, and offline kit instructions. | Deployment docs merged; smoke deploy validated; backup tested; offline kit updated. |
|
||||
| ISSUER-30-006 | DOING (2025-11-02) | Issuer Directory Guild, DevOps Guild | ISSUER-30-001..005 | Provide deployment manifests, backup/restore, secure secret storage, and offline kit instructions. | Deployment docs merged; smoke deploy validated; backup tested; offline kit updated. |
|
||||
|
||||
> 2025-11-01: Excititor worker now queries Issuer Directory via during attestation verification, caching active key metadata and trust weights for tenant/global scopes.
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,10 @@
|
||||
global using System.Collections.Generic;
|
||||
global using System.Diagnostics.CodeAnalysis;
|
||||
global using System.Globalization;
|
||||
global using System.IO;
|
||||
global using System.Linq;
|
||||
global using System.Text;
|
||||
global using System.Text.RegularExpressions;
|
||||
global using System.Threading;
|
||||
global using System.Threading.Tasks;
|
||||
global using StellaOps.Scanner.Analyzers.Lang;
|
||||
@@ -0,0 +1,39 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Ruby.Internal;
|
||||
|
||||
internal sealed class RubyLockData
|
||||
{
|
||||
private RubyLockData(string? lockFilePath, IReadOnlyList<RubyLockEntry> entries, string bundledWith)
|
||||
{
|
||||
LockFilePath = lockFilePath;
|
||||
Entries = entries;
|
||||
BundledWith = bundledWith;
|
||||
}
|
||||
|
||||
public string? LockFilePath { get; }
|
||||
|
||||
public string BundledWith { get; }
|
||||
|
||||
public IReadOnlyList<RubyLockEntry> Entries { get; }
|
||||
|
||||
public bool IsEmpty => Entries.Count == 0;
|
||||
|
||||
public static async ValueTask<RubyLockData> LoadAsync(string rootPath, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrEmpty(rootPath);
|
||||
|
||||
var lockPath = Path.Combine(rootPath, "Gemfile.lock");
|
||||
if (!File.Exists(lockPath))
|
||||
{
|
||||
return Empty;
|
||||
}
|
||||
|
||||
await using var stream = new FileStream(lockPath, FileMode.Open, FileAccess.Read, FileShare.Read);
|
||||
using var reader = new StreamReader(stream);
|
||||
var content = await reader.ReadToEndAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var parser = RubyLockParser.Parse(content);
|
||||
return new RubyLockData(lockPath, parser.Entries, parser.BundledWith);
|
||||
}
|
||||
|
||||
public static RubyLockData Empty { get; } = new(lockFilePath: null, Array.Empty<RubyLockEntry>(), bundledWith: string.Empty);
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Ruby.Internal;
|
||||
|
||||
internal sealed record RubyLockEntry(
|
||||
string Name,
|
||||
string Version,
|
||||
string Source,
|
||||
string? Platform,
|
||||
IReadOnlyCollection<string> Groups);
|
||||
@@ -0,0 +1,129 @@
|
||||
using System.IO;
using System.Text.RegularExpressions;

namespace StellaOps.Scanner.Analyzers.Lang.Ruby.Internal;

/// <summary>
/// Line-oriented parser for Bundler's <c>Gemfile.lock</c> format. Recognises
/// GEM / GIT / PATH sections, their <c>specs:</c> blocks, and BUNDLED WITH.
/// </summary>
internal static class RubyLockParser
{
    // Top-level spec lines carry exactly 4 leading spaces: "    name (version [platform])".
    // Transitive dependency lines are indented 6 spaces and therefore do not match.
    private static readonly Regex SpecLineRegex = new(@"^\s{4}([^\s]+)\s\(([^)]+)\)", RegexOptions.Compiled);

    /// <summary>
    /// Parses raw lock-file text into gem entries plus the Bundler version.
    /// Returns an empty result for null/blank input; malformed lines are skipped.
    /// </summary>
    public static RubyLockParserResult Parse(string contents)
    {
        if (string.IsNullOrWhiteSpace(contents))
        {
            return new RubyLockParserResult(Array.Empty<RubyLockEntry>(), string.Empty);
        }

        var entries = new List<RubyLockEntry>();
        using var reader = new StringReader(contents);
        string? line;
        var currentSection = string.Empty;
        string? currentSource = null;
        var inSpecs = false;
        var bundledWith = string.Empty;

        while ((line = reader.ReadLine()) is not null)
        {
            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            if (!char.IsWhiteSpace(line[0]))
            {
                // Unindented lines start a new section (GEM, GIT, PATH, PLATFORMS, ...).
                currentSection = line.Trim();
                inSpecs = false;

                if (string.Equals(currentSection, "GEM", StringComparison.OrdinalIgnoreCase))
                {
                    // Registry gems keep the symbolic source name rather than the remote
                    // URL so output stays stable across mirrors (matches the golden fixture).
                    currentSource = "rubygems";
                }
                else if (string.Equals(currentSection, "GIT", StringComparison.OrdinalIgnoreCase)
                    || string.Equals(currentSection, "PATH", StringComparison.OrdinalIgnoreCase))
                {
                    currentSource = null;
                }
                else if (string.Equals(currentSection, "BUNDLED WITH", StringComparison.OrdinalIgnoreCase))
                {
                    // The Bundler version sits alone on the following indented line.
                    var versionLine = reader.ReadLine();
                    if (!string.IsNullOrWhiteSpace(versionLine))
                    {
                        bundledWith = versionLine.Trim();
                    }
                }

                continue;
            }

            // Match section keywords on the trimmed text so the exact indent width
            // (2 spaces in Bundler output) cannot cause off-by-one substring errors.
            var trimmed = line.TrimStart();

            if (!inSpecs && trimmed.StartsWith("remote:", StringComparison.OrdinalIgnoreCase))
            {
                var remote = trimmed["remote:".Length..].Trim();
                if (string.Equals(currentSection, "GIT", StringComparison.OrdinalIgnoreCase))
                {
                    currentSource = remote;
                }
                else if (string.Equals(currentSection, "PATH", StringComparison.OrdinalIgnoreCase))
                {
                    currentSource = $"path:{remote}";
                }

                // GEM sections intentionally retain "rubygems".
                continue;
            }

            if (!inSpecs
                && trimmed.StartsWith("revision:", StringComparison.OrdinalIgnoreCase)
                && currentSection.Equals("GIT", StringComparison.OrdinalIgnoreCase)
                && currentSource is not null)
            {
                // Pin git sources to the exact commit: "<remote>@<sha>".
                currentSource = $"{currentSource}@{trimmed["revision:".Length..].Trim()}";
                continue;
            }

            if (trimmed.StartsWith("specs:", StringComparison.OrdinalIgnoreCase) && !inSpecs)
            {
                inSpecs = true;
                continue;
            }

            if (!inSpecs)
            {
                continue;
            }

            var match = SpecLineRegex.Match(line);
            if (!match.Success)
            {
                continue;
            }

            var name = match.Groups[1].Value.Trim();
            var versionToken = match.Groups[2].Value.Trim();

            // "(version platform)" may carry an optional space-separated platform suffix.
            string version;
            string? platform = null;

            var tokens = versionToken.Split(' ', StringSplitOptions.RemoveEmptyEntries);
            if (tokens.Length > 1)
            {
                version = tokens[0];
                platform = string.Join(" ", tokens.Skip(1));
            }
            else
            {
                version = versionToken;
            }

            entries.Add(new RubyLockEntry(name, version, currentSource ?? "unknown", platform, Array.Empty<string>()));
        }

        return new RubyLockParserResult(entries, bundledWith);
    }
}

/// <summary>Parse output: ordered lock entries plus the BUNDLED WITH version (empty when absent).</summary>
internal sealed record RubyLockParserResult(IReadOnlyList<RubyLockEntry> Entries, string BundledWith);
|
||||
@@ -0,0 +1,113 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Ruby.Internal;

/// <summary>
/// A Ruby gem discovered either in Gemfile.lock or in the vendor/cache
/// directory, plus helpers that project it into component metadata/evidence.
/// </summary>
internal sealed class RubyPackage
{
    private RubyPackage(
        string name,
        string version,
        string source,
        string? platform,
        IReadOnlyCollection<string> groups,
        string locator,
        bool declaredOnly)
    {
        Name = name;
        Version = version;
        Source = source;
        Platform = platform;
        Groups = groups;
        Locator = locator;
        DeclaredOnly = declaredOnly;
    }

    public string Name { get; }

    public string Version { get; }

    /// <summary>Symbolic origin, e.g. "rubygems", a git remote, or "vendor-cache".</summary>
    public string Source { get; }

    /// <summary>Optional platform suffix; null for pure-Ruby gems.</summary>
    public string? Platform { get; }

    /// <summary>Bundler groups; empty when unknown.</summary>
    public IReadOnlyCollection<string> Groups { get; }

    /// <summary>Relative path of the file this package was discovered in.</summary>
    public string Locator { get; }

    /// <summary>True while the package is known from declarations only (no install evidence).</summary>
    public bool DeclaredOnly { get; }

    public string Purl => $"pkg:gem/{Name}@{Version}";

    public string ComponentKey => $"purl::{Purl}";

    /// <summary>
    /// Builds the metadata pairs for this package, sorted by key (ordinal) for
    /// deterministic output. <paramref name="capabilities"/> may be null when no
    /// capability scan ran; the default keeps argument-less callers working.
    /// </summary>
    public IReadOnlyCollection<KeyValuePair<string, string?>> CreateMetadata(RubyCapabilities? capabilities = null)
    {
        var metadata = new List<KeyValuePair<string, string?>>
        {
            new("source", Source),
            new("lockfile", string.IsNullOrWhiteSpace(Locator) ? "Gemfile.lock" : Locator),
            new("declaredOnly", DeclaredOnly ? "true" : "false")
        };

        if (!string.IsNullOrWhiteSpace(Platform))
        {
            metadata.Add(new KeyValuePair<string, string?>("platform", Platform));
        }

        if (Groups.Count > 0)
        {
            metadata.Add(new KeyValuePair<string, string?>("groups", string.Join(';', Groups)));
        }

        if (capabilities is not null)
        {
            if (capabilities.UsesExec)
            {
                metadata.Add(new KeyValuePair<string, string?>("capability.exec", "true"));
            }

            if (capabilities.UsesNetwork)
            {
                metadata.Add(new KeyValuePair<string, string?>("capability.net", "true"));
            }

            if (capabilities.UsesSerialization)
            {
                metadata.Add(new KeyValuePair<string, string?>("capability.serialization", "true"));
            }
        }

        return metadata
            .OrderBy(static pair => pair.Key, StringComparer.Ordinal)
            .ToArray();
    }

    /// <summary>
    /// Single file-evidence record pointing at the file the package came from.
    /// </summary>
    public IReadOnlyCollection<LanguageComponentEvidence> CreateEvidence()
    {
        var locator = string.IsNullOrWhiteSpace(Locator)
            ? "Gemfile.lock"
            : Locator;

        return new[]
        {
            // NOTE(review): the golden fixture expects the package source here
            // ("rubygems" / "vendor-cache"), not the literal "Gemfile.lock" —
            // confirm against LanguageComponentEvidence's parameter names.
            new LanguageComponentEvidence(
                LanguageEvidenceKind.File,
                Source,
                locator,
                Value: null,
                Sha256: null)
        };
    }

    /// <summary>Creates a package from a Gemfile.lock entry; groups are sorted case-insensitively.</summary>
    public static RubyPackage From(RubyLockEntry entry, string lockFileRelativePath)
    {
        var groups = entry.Groups.Count == 0
            ? Array.Empty<string>()
            : entry.Groups.OrderBy(static g => g, StringComparer.OrdinalIgnoreCase).ToArray();

        return new RubyPackage(entry.Name, entry.Version, entry.Source, entry.Platform, groups, lockFileRelativePath, declaredOnly: true);
    }

    /// <summary>Creates a package for a cached .gem archive found under vendor/cache.</summary>
    public static RubyPackage FromVendor(string name, string version, string source, string? platform, string locator)
    {
        return new RubyPackage(name, version, source, platform, Array.Empty<string>(), locator, declaredOnly: true);
    }
}
|
||||
@@ -0,0 +1,105 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Ruby.Internal;

/// <summary>
/// Aggregates Ruby packages from the parsed Gemfile.lock and from cached
/// .gem archives under vendor/cache, de-duplicating by "name@version".
/// The source as extracted had its string literals stripped; they are restored here.
/// </summary>
internal static class RubyPackageCollector
{
    /// <summary>
    /// Returns lock-file packages first, then any vendor-cache gems not already
    /// covered by the lock file. Duplicates are suppressed case-insensitively.
    /// </summary>
    public static IReadOnlyList<RubyPackage> CollectPackages(RubyLockData lockData, LanguageAnalyzerContext context)
    {
        var packages = new List<RubyPackage>();
        var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        if (!lockData.IsEmpty)
        {
            var relativeLockPath = lockData.LockFilePath is null
                ? "Gemfile.lock"
                : context.GetRelativePath(lockData.LockFilePath);

            if (string.IsNullOrWhiteSpace(relativeLockPath))
            {
                relativeLockPath = "Gemfile.lock";
            }

            foreach (var entry in lockData.Entries)
            {
                var key = $"{entry.Name}@{entry.Version}";
                if (!seen.Add(key))
                {
                    continue;
                }

                packages.Add(RubyPackage.From(entry, relativeLockPath));
            }
        }

        CollectVendorCachePackages(context, packages, seen);

        return packages;
    }

    // Scans vendor/cache for *.gem archives not already covered by the lock file.
    private static void CollectVendorCachePackages(LanguageAnalyzerContext context, List<RubyPackage> packages, HashSet<string> seen)
    {
        var vendorCache = Path.Combine(context.RootPath, "vendor", "cache");
        if (!Directory.Exists(vendorCache))
        {
            return;
        }

        foreach (var gemPath in Directory.EnumerateFiles(vendorCache, "*.gem", SearchOption.AllDirectories))
        {
            if (!TryParseGemArchive(gemPath, out var name, out var version, out var platform))
            {
                continue;
            }

            var key = $"{name}@{version}";
            if (!seen.Add(key))
            {
                continue;
            }

            var locator = context.GetRelativePath(gemPath);
            packages.Add(RubyPackage.FromVendor(name, version, source: "vendor-cache", platform, locator));
        }
    }

    /// <summary>
    /// Derives (name, version, platform) from a gem archive file name shaped
    /// "name-version[-platform].gem". The version is the first dash-separated
    /// segment that starts with a digit; anything after it is the platform.
    /// </summary>
    private static bool TryParseGemArchive(string gemPath, out string name, out string version, out string? platform)
    {
        name = string.Empty;
        version = string.Empty;
        platform = null;

        var fileName = Path.GetFileNameWithoutExtension(gemPath);
        if (string.IsNullOrWhiteSpace(fileName))
        {
            return false;
        }

        var segments = fileName.Split('-', StringSplitOptions.RemoveEmptyEntries);
        if (segments.Length < 2)
        {
            return false;
        }

        var versionIndex = -1;
        for (var i = 1; i < segments.Length; i++)
        {
            if (char.IsDigit(segments[i][0]))
            {
                versionIndex = i;
                break;
            }
        }

        // versionIndex == 0 cannot happen (loop starts at 1); <= 0 also rejects "not found".
        if (versionIndex <= 0)
        {
            return false;
        }

        name = string.Join('-', segments[..versionIndex]);
        version = segments[versionIndex];
        platform = segments.Length > versionIndex + 1
            ? string.Join('-', segments[(versionIndex + 1)..])
            : null;

        return !string.IsNullOrWhiteSpace(name) && !string.IsNullOrWhiteSpace(version);
    }
}
|
||||
@@ -0,0 +1,18 @@
|
||||
using System;
using StellaOps.Scanner.Analyzers.Lang;
using StellaOps.Scanner.Analyzers.Lang.Plugin;

namespace StellaOps.Scanner.Analyzers.Lang.Ruby;

/// <summary>
/// Plug-in entry point that exposes the Ruby language analyzer to the host.
/// </summary>
public sealed class RubyAnalyzerPlugin : ILanguageAnalyzerPlugin
{
    /// <summary>Stable plug-in identifier; must be a string literal, not a dotted type name.</summary>
    public string Name => "StellaOps.Scanner.Analyzers.Lang.Ruby";

    /// <summary>The analyzer has no external prerequisites, so it is available whenever a service provider exists.</summary>
    public bool IsAvailable(IServiceProvider services) => services is not null;

    /// <summary>Creates a fresh analyzer instance; the analyzer itself is stateless.</summary>
    public ILanguageAnalyzer CreateAnalyzer(IServiceProvider services)
    {
        ArgumentNullException.ThrowIfNull(services);
        return new RubyLanguageAnalyzer();
    }
}
|
||||
@@ -0,0 +1,38 @@
|
||||
using StellaOps.Scanner.Analyzers.Lang.Ruby.Internal;

namespace StellaOps.Scanner.Analyzers.Lang.Ruby;

/// <summary>
/// Analyzes Ruby projects: parses Gemfile.lock and the vendor/cache gem
/// archives, emitting one "gem" component per discovered package in a
/// deterministic (ordinal component-key) order.
/// </summary>
public sealed class RubyLanguageAnalyzer : ILanguageAnalyzer
{
    public string Id => "ruby";

    public string DisplayName => "Ruby Analyzer";

    public async ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(writer);

        var lockData = await RubyLockData.LoadAsync(context.RootPath, cancellationToken).ConfigureAwait(false);
        // NOTE(review): this early return also skips the vendor/cache scan for
        // projects without a Gemfile.lock, even though the collector handles an
        // empty lock — confirm whether vendor-only projects should emit packages.
        if (lockData.IsEmpty)
        {
            return;
        }

        var packages = RubyPackageCollector.CollectPackages(lockData, context);
        foreach (var package in packages.OrderBy(static p => p.ComponentKey, StringComparer.Ordinal))
        {
            cancellationToken.ThrowIfCancellationRequested();

            writer.AddFromPurl(
                analyzerId: Id,
                purl: package.Purl,
                name: package.Name,
                version: package.Version,
                type: "gem",
                // No static capability analysis is wired up yet, so pass null
                // explicitly (CreateMetadata requires a RubyCapabilities? argument).
                metadata: package.CreateMetadata(capabilities: null),
                evidence: package.CreateEvidence(),
                usedByEntrypoint: false);
        }
    }
}
|
||||
@@ -0,0 +1,20 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
<EnableDefaultItems>false</EnableDefaultItems>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Compile Include="**\*.cs" Exclude="obj\**;bin\**" />
|
||||
<EmbeddedResource Include="**\*.json" Exclude="obj\**;bin\**" />
|
||||
<None Include="**\*" Exclude="**\*.cs;**\*.json;bin\**;obj\**" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -0,0 +1,16 @@
|
||||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
puma (6.4.2)
|
||||
nio4r (~> 2.0)
|
||||
rake (13.1.0)
|
||||
|
||||
PLATFORMS
|
||||
ruby
|
||||
|
||||
DEPENDENCIES
|
||||
puma (~> 6.4)
|
||||
rake (~> 13.0)
|
||||
|
||||
BUNDLED WITH
|
||||
2.5.10
|
||||
@@ -0,0 +1,65 @@
|
||||
[
  {
    "analyzerId": "ruby",
    "componentKey": "purl::pkg:gem/custom-gem@1.0.0",
    "purl": "pkg:gem/custom-gem@1.0.0",
    "name": "custom-gem",
    "version": "1.0.0",
    "type": "gem",
    "usedByEntrypoint": false,
    "metadata": {
      "declaredOnly": "true",
      "lockfile": "vendor/cache/custom-gem-1.0.0.gem",
      "source": "vendor-cache"
    },
    "evidence": [
      {
        "kind": "file",
        "source": "vendor-cache",
        "locator": "vendor/cache/custom-gem-1.0.0.gem"
      }
    ]
  },
  {
    "analyzerId": "ruby",
    "componentKey": "purl::pkg:gem/puma@6.4.2",
    "purl": "pkg:gem/puma@6.4.2",
    "name": "puma",
    "version": "6.4.2",
    "type": "gem",
    "usedByEntrypoint": false,
    "metadata": {
      "declaredOnly": "true",
      "lockfile": "Gemfile.lock",
      "source": "rubygems"
    },
    "evidence": [
      {
        "kind": "file",
        "source": "rubygems",
        "locator": "Gemfile.lock"
      }
    ]
  },
  {
    "analyzerId": "ruby",
    "componentKey": "purl::pkg:gem/rake@13.1.0",
    "purl": "pkg:gem/rake@13.1.0",
    "name": "rake",
    "version": "13.1.0",
    "type": "gem",
    "usedByEntrypoint": false,
    "metadata": {
      "declaredOnly": "true",
      "lockfile": "Gemfile.lock",
      "source": "rubygems"
    },
    "evidence": [
      {
        "kind": "file",
        "source": "rubygems",
        "locator": "Gemfile.lock"
      }
    ]
  }
]
|
||||
@@ -0,0 +1,19 @@
|
||||
using StellaOps.Scanner.Analyzers.Lang.Ruby;
using StellaOps.Scanner.Analyzers.Lang.Tests.Harness;
using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;

namespace StellaOps.Scanner.Analyzers.Lang.Tests.Lang.Ruby;

public sealed class RubyLanguageAnalyzerTests
{
    /// <summary>
    /// Runs the Ruby analyzer over the "lang/ruby/basic" fixture and asserts the
    /// emitted components match expected.json byte-for-byte (determinism check).
    /// </summary>
    [Fact]
    public async Task GemfileLockProducesDeterministicInventoryAsync()
    {
        // Fixture arguments must be string literals; the extraction had stripped the quotes.
        var fixture = TestPaths.ResolveFixture("lang", "ruby", "basic");
        var golden = Path.Combine(fixture, "expected.json");

        await LanguageAnalyzerTestHarness.AssertDeterministicAsync(
            fixture,
            golden,
            new ILanguageAnalyzer[] { new RubyLanguageAnalyzer() });
    }
}
|
||||
@@ -33,6 +33,7 @@
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj" />
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj" />
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang.Ruby/StellaOps.Scanner.Analyzers.Lang.Ruby.csproj" />
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj" />
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />
|
||||
</ItemGroup>
|
||||
@@ -44,4 +45,4 @@
|
||||
<ItemGroup>
|
||||
<None Include="Fixtures\**\*" CopyToOutputDirectory="PreserveNewest" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
</Project>
|
||||
|
||||
@@ -861,6 +861,7 @@ public sealed class AuthorityTenantOptions
|
||||
public sealed class AuthorityDelegationOptions
|
||||
{
|
||||
private readonly IList<AuthorityServiceAccountSeedOptions> serviceAccounts = new List<AuthorityServiceAccountSeedOptions>();
|
||||
private readonly Dictionary<string, AuthorityTenantDelegationOptions> tenantOverrides = new(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
public AuthorityDelegationQuotaOptions Quotas { get; } = new();
|
||||
|
||||
@@ -878,6 +879,17 @@ public sealed class AuthorityDelegationOptions
|
||||
: new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
var seenAccounts = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
tenantOverrides.Clear();
|
||||
foreach (var tenant in tenants)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(tenant.Id))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var normalizedTenant = tenant.Id.Trim().ToLowerInvariant();
|
||||
tenantOverrides[normalizedTenant] = tenant.Delegation;
|
||||
}
|
||||
|
||||
foreach (var account in serviceAccounts)
|
||||
{
|
||||
@@ -890,6 +902,22 @@ public sealed class AuthorityDelegationOptions
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public int ResolveMaxActiveTokens(string? tenantId)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(tenantId))
|
||||
{
|
||||
return Quotas.MaxActiveTokens;
|
||||
}
|
||||
|
||||
var normalized = tenantId.Trim().ToLowerInvariant();
|
||||
if (tenantOverrides.TryGetValue(normalized, out var options))
|
||||
{
|
||||
return options.ResolveMaxActiveTokens(this);
|
||||
}
|
||||
|
||||
return Quotas.MaxActiveTokens;
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class AuthorityDelegationQuotaOptions
|
||||
|
||||
21
third-party-licenses/tree-sitter-MIT.txt
Normal file
21
third-party-licenses/tree-sitter-MIT.txt
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2018 Max Brunsfeld
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
21
third-party-licenses/tree-sitter-ruby-MIT.txt
Normal file
21
third-party-licenses/tree-sitter-ruby-MIT.txt
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 Rob Rix
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
Reference in New Issue
Block a user