.gitignore (vendored) · 4 changes
@@ -19,3 +19,7 @@ obj/
TestResults/
.dotnet
.DS_Store
seed-data/ics-cisa/*.csv
seed-data/ics-cisa/*.xlsx
seed-data/ics-cisa/*.sha256
SPRINTS.md · 45 changes
@@ -15,6 +15,7 @@
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-005 | Update Feedser operator guide for enforcement cutoff<br>Install guide reiterates the 2025-12-31 cutoff and links audit signals to the rollout checklist. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | SEC3.HOST | Rate limiter policy binding<br>Authority host now applies configuration-driven fixed windows to `/token`, `/authorize`, and `/internal/*`; integration tests assert 429 + `Retry-After` headers; docs/config samples refreshed for Docs guild diagrams. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | SEC3.BUILD | Authority rate-limiter follow-through<br>`Security.RateLimiting` now fronts token/authorize/internal limiters; Authority + Configuration matrices (`dotnet test src/StellaOps.Authority/StellaOps.Authority.sln`, `dotnet test src/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj`) passed on 2025-10-11; awaiting #authority-core broadcast. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-14) | Team Authority Platform & Security Guild | AUTHCORE-BUILD-OPENIDDICT / AUTHCORE-STORAGE-DEVICE-TOKENS / AUTHCORE-BOOTSTRAP-INVITES | Address remaining Authority compile blockers (OpenIddict transaction shim, token device document, bootstrap invite cleanup) so `dotnet build src/StellaOps.Authority.sln` returns success. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | PLG6.DOC | Plugin developer guide polish<br>Section 9 now documents rate limiter metadata, config keys, and lockout interplay; YAML samples updated alongside Authority config templates. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-001 | Fetch pipeline & state tracking<br>Summary planner now drives monthly/yearly VINCE fetches, persists pending summaries/notes, and hydrates VINCE detail queue with telemetry.<br>Team instructions: Read ./AGENTS.md and src/StellaOps.Feedser.Source.CertCc/AGENTS.md. Coordinate daily with Models/Merge leads so new normalizedVersions output and provenance tags stay aligned with ./src/FASTER_MODELING_AND_NORMALIZATION.md. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-002 | VINCE note detail fetcher<br>Summary planner queues VINCE note detail endpoints, persists raw JSON with SHA/ETag metadata, and records retry/backoff metrics. |
@@ -23,7 +24,7 @@
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-005 | Deterministic fixtures/tests<br>Snapshot harness refreshed 2025-10-12; `certcc-*.snapshot.json` regenerated and regression suite green without UPDATE flag drift. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-006 | Telemetry & documentation<br>`CertCcDiagnostics` publishes summary/detail/parse/map metrics (meter `StellaOps.Feedser.Source.CertCc`), README documents instruments, and log guidance captured for Ops on 2025-10-12. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-007 | Connector test harness remediation<br>Harness now wires `AddSourceCommon`, resets `FakeTimeProvider`, and passes canned-response regression run dated 2025-10-12. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | BLOCKED (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-008 | Snapshot coverage handoff<br>Upstream repo version lacks SemVer primitives + provenance decision reason fields, so snapshot regeneration fails; resume once Models/Storage sprint lands those changes. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-008 | Snapshot coverage handoff<br>Fixtures regenerated with normalized ranges + provenance fields on 2025-10-11; QA handoff notes published and merge backfill unblocked. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-012 | Schema sync & snapshot regen follow-up<br>Fixtures regenerated with normalizedVersions + provenance decision reasons; handoff notes updated for Merge backfill 2025-10-12. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-009 | Detail/map reintegration plan<br>Staged reintegration plan published in `src/StellaOps.Feedser.Source.CertCc/FEEDCONN-CERTCC-02-009_PLAN.md`; coordinates enablement with FEEDCONN-CERTCC-02-004. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-010 | Partial-detail graceful degradation<br>Detail fetch now tolerates 404/403/410 responses and regression tests cover mixed endpoint availability. |
@@ -48,16 +49,20 @@
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-OPS-01-007 | Authority resilience adoption<br>Deployment docs and CLI notes explain the LIB5 resilience knobs for rollout.<br>Instructions to work:<br>DONE Read ./AGENTS.md and src/StellaOps.Feedser.WebService/AGENTS.md. These items were mid-flight; resume implementation ensuring docs/operators receive timely updates. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHCORE-ENGINE-01-001 | CORE8.RL — Rate limiter plumbing validated; integration tests green and docs handoff recorded for middleware ordering + Retry-After headers (see `docs/dev/authority-rate-limit-tuning-outline.md` for continuing guidance). |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHCRYPTO-ENGINE-01-001 | SEC3.A — Shared metadata resolver confirmed via host test run; SEC3.B now unblocked for tuning guidance (outline captured in `docs/dev/authority-rate-limit-tuning-outline.md`). |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DOING (2025-10-11) | Team Authority Platform & Security Guild | AUTHPLUG-DOCS-01-001 | PLG6.DOC — Docs guild resuming diagram/copy updates using the captured limiter context + configuration notes (reference `docs/dev/authority-rate-limit-tuning-outline.md` for tuning matrix + observability copy).<br>Instructions to work:<br>Read ./AGENTS.md plus module-specific AGENTS. Restart the blocked rate-limiter workstream (Authority host + cryptography) so the plugin docs team can finish diagrams. Coordinate daily; use ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md where rate limiting interacts with conflict policy. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-13) | Team Authority Platform & Security Guild | AUTHSEC-DOCS-01-002 | SEC3.B — Published `docs/security/rate-limits.md` with tuning matrix, alert thresholds, and lockout interplay guidance; Docs guild can lift copy into plugin guide. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Normalization/TASKS.md | — | Team Normalization & Storage Backbone | FEEDNORM-NORM-02-001 | SemVer normalized rule emitter<br>Instructions to work:<br>Read ./AGENTS.md and module AGENTS. Use ./src/FASTER_MODELING_AND_NORMALIZATION.md to build the shared rule generator; sync daily with storage and connector owners. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-14) | Team Authority Platform & Security Guild | AUTHSEC-CRYPTO-02-001 | SEC5.B1 — Introduce libsodium signing provider and parity tests to unblock CLI verification enhancements. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | — | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-001 | Normalized range dual-write + backfill |
| Sprint 1 | Bootstrap & Replay Hardening | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-14) | Security Guild | AUTHSEC-CRYPTO-02-004 | SEC5.D/E — Finish bootstrap invite lifecycle (API/store/cleanup) and token device heuristics; build currently red due to pending handler integration. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | — | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-002 | Provenance decision reason persistence |
| Sprint 1 | Developer Tooling | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI | AUTHCLI-DIAG-01-001 | Surface password policy diagnostics in CLI startup/output so operators see weakened overrides immediately. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | — | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-003 | Normalized versions indexing<br>Instructions to work:<br>Read ./AGENTS.md and storage AGENTS. Implement dual-write/backfill and index creation using the shapes from ./src/FASTER_MODELING_AND_NORMALIZATION.md; coordinate with connectors entering the sprint. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHPLUG-DOCS-01-001 | PLG6.DOC — Developer guide copy + diagrams merged 2025-10-11; limiter guidance incorporated and handed to Docs guild for asset export. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Normalization/TASKS.md | DONE (2025-10-12) | Team Normalization & Storage Backbone | FEEDNORM-NORM-02-001 | SemVer normalized rule emitter<br>`SemVerRangeRuleBuilder` shipped 2025-10-12 with comparator/`||` support and fixtures aligning to `FASTER_MODELING_AND_NORMALIZATION.md`. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-001 | Normalized range dual-write + backfill |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-002 | Provenance decision reason persistence |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-003 | Normalized versions indexing<br>Indexes seeded + docs updated 2025-10-11 to cover flattened normalized rules for connector adoption. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Merge/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDMERGE-ENGINE-02-002 | Normalized versions union & dedupe<br>Affected package resolver unions/dedupes normalized rules, stamps merge provenance with `decisionReason`, and tests cover the rollout. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-001 | GHSA normalized versions & provenance |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-004 | GHSA credits & ecosystem severity mapping |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | TODO | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-005 | GitHub quota monitoring & retries |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-005 | GitHub quota monitoring & retries |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | TODO | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-006 | Production credential & scheduler rollout |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-006 | Production credential & scheduler rollout |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-007 | Credit parity regression fixtures |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Nvd/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-002 | NVD normalized versions & timestamps |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Nvd/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-004 | NVD CVSS & CWE precedence payloads |
@@ -65,17 +70,17 @@
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Osv/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-003 | OSV normalized versions & freshness |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Osv/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-004 | OSV references & credits alignment |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-005 | Fixture updater workflow<br>Resolved 2025-10-12: OSV mapper now derives canonical PURLs for Go + scoped npm packages when raw payloads omit `purl`; conflict fixtures unchanged for invalid npm names. Verified via `dotnet test src/StellaOps.Feedser.Source.Osv.Tests`, `src/StellaOps.Feedser.Source.Ghsa.Tests`, `src/StellaOps.Feedser.Source.Nvd.Tests`, and backbone normalization/storage suites. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Acsc/TASKS.md | Implementation DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ACSC-02-001 … 02-008 | Fetch pipeline, DTO parser, canonical mapper, fixtures, and README shipped 2025-10-12; downstream export integration still pending future tasks. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Acsc/TASKS.md | Implementation DONE (2025-10-12) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ACSC-02-001 … 02-008 | Fetch→parse→map pipeline, fixtures, diagnostics, and README finished 2025-10-12; awaiting downstream export follow-ups tracked separately. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Cccs/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CCCS-02-001 … 02-007 | Atom feed verified 2025-10-11, history/caching review and FR locale enumeration pending. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Cccs/TASKS.md | DONE (2025-10-16) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CCCS-02-001 … 02-008 | Observability meter, historical harvest plan, and DOM sanitizer refinements wrapped; ops notes live under `docs/ops/feedser-cccs-operations.md` with fixtures validating EN/FR list handling. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.CertBund/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CERTBUND-02-001 … 02-007 | BSI RSS directory confirmed CERT-Bund feed 2025-10-11, history assessment pending. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.CertBund/TASKS.md | DONE (2025-10-15) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CERTBUND-02-001 … 02-008 | Telemetry/docs (02-006) and history/locale sweep (02-007) completed alongside pipeline; runbook `docs/ops/feedser-certbund-operations.md` captures locale guidance and offline packaging. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Kisa/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-KISA-02-001 … 02-007 | KNVD RSS endpoint identified 2025-10-11, access headers/session strategy outstanding. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Kisa/TASKS.md | DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-KISA-02-001 … 02-007 | Connector, tests, and telemetry/docs (02-006) finalized; localisation notes in `docs/dev/kisa_connector_notes.md` complete rollout. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ru.Bdu/TASKS.md | Build DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-RUBDU-02-001 … 02-008 | TLS bundle + connectors landed 2025-10-12; fetch/parse/map flow emits advisories, fixtures & telemetry follow-up pending. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ru.Bdu/TASKS.md | DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-RUBDU-02-001 … 02-008 | Fetch/parser/mapper refinements, regression fixtures, telemetry/docs, access options, and trusted root packaging all landed; README documents offline access strategy. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ru.Nkcki/TASKS.md | Build DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-NKCKI-02-001 … 02-008 | JSON bulletin fetch + canonical mapping live 2025-10-12; regression fixtures added but blocked on Mongo2Go libcrypto dependency for test execution. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ru.Nkcki/TASKS.md | DONE (2025-10-13) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-NKCKI-02-001 … 02-008 | Listing fetch, parser, mapper, fixtures, telemetry/docs, and archive plan finished; Mongo2Go/libcrypto dependency resolved via bundled OpenSSL noted in ops guide. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ics.Cisa/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ICSCISA-02-001 … 02-008 | New ICS RSS endpoint logged 2025-10-11, but Akamai blocks direct pulls; fallback strategy task opened. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ics.Cisa/TASKS.md | DONE (2025-10-16) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ICSCISA-02-001 … 02-011 | Feed parser attachment fixes, SemVer exact values, regression suites, telemetry/docs updates, and handover complete; ops runbook now details attachment verification + proxy usage. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Vndr.Cisco/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CISCO-02-001 … 02-007 | openVuln API + RSS reviewed 2025-10-11, auth/pagination memo pending. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Vndr.Cisco/TASKS.md | Implementation DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CISCO-02-001 … 02-007 | OAuth fetch pipeline, DTO/mapping, tests, and telemetry/docs shipped; monitoring enablement now tracked via follow-up ops tasks (02-006+). |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Vndr.Msrc/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-MSRC-02-001 … 02-007 | MSRC API docs reviewed 2025-10-11, auth/throttling comparison memo pending.<br>Instructions to work:<br>Read ./AGENTS.md plus each module's AGENTS file. Parallelize research, ingestion, mapping, fixtures, and docs using the normalized rule shape from ./src/FASTER_MODELING_AND_NORMALIZATION.md. Coordinate daily with the merge coordination task from Sprint 1. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Vndr.Msrc/TASKS.md | DONE (2025-10-15) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-MSRC-02-001 … 02-008 | Azure AD onboarding (02-008) unblocked fetch/parse/map pipeline; fixtures, telemetry/docs, and Offline Kit guidance published in `docs/ops/feedser-msrc-operations.md`. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Cve/TASKS.md | — | Team Connector Support & Monitoring | FEEDCONN-CVE-02-001 … 02-002 | Instructions to work:<br>Read ./AGENTS.md and module AGENTS. Deliver operator docs and monitoring instrumentation required for broader feed rollout. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Cve/TASKS.md | DONE (2025-10-15) | Team Connector Support & Monitoring | FEEDCONN-CVE-02-001 … 02-002 | CVE data-source selection, fetch pipeline, and docs landed 2025-10-10. 2025-10-15: smoke verified using the seeded mirror fallback; connector now logs a warning and pulls from `seed-data/cve/` until live CVE Services credentials arrive. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Kev/TASKS.md | — | Team Connector Support & Monitoring | FEEDCONN-KEV-02-001 … 02-002 | Instructions to work:<br>Read ./AGENTS.md and module AGENTS. Deliver operator docs and monitoring instrumentation required for broader feed rollout. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Kev/TASKS.md | DONE (2025-10-12) | Team Connector Support & Monitoring | FEEDCONN-KEV-02-001 … 02-002 | KEV catalog ingestion, fixtures, telemetry, and schema validation completed 2025-10-12; ops dashboard published. |
| Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-01-001 | Canonical schema docs refresh<br>Updated canonical schema + provenance guides with SemVer style, normalized version rules, decision reason change log, and migration notes. |
| Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-02-001 | Feedser-SemVer Playbook<br>Published merge playbook covering mapper patterns, dedupe flow, indexes, and rollout checklist. |
| Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-02-002 | Normalized versions query guide<br>Delivered Mongo index/query addendum with `$unwind` recipes, dedupe checks, and operational checklist.<br>Instructions to work:<br>DONE Read ./AGENTS.md and docs/AGENTS.md. Document every schema/index/query change produced in Sprint 1-2 leveraging ./src/FASTER_MODELING_AND_NORMALIZATION.md. |
@@ -92,4 +97,4 @@
| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Feedser.Source.Nvd/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-NVD-04-002 | NVD conflict regression fixtures |
| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Feedser.Source.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-OSV-04-002 | OSV conflict regression fixtures<br>Instructions to work:<br>Read ./AGENTS.md and module AGENTS. Produce fixture triples supporting the precedence/tie-breaker paths defined in ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md and hand them to Merge QA. |
| Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | DONE (2025-10-11) | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-001 | Feedser Conflict Rules<br>Runbook published at `docs/ops/feedser-conflict-resolution.md`; metrics/log guidance aligned with Sprint 3 merge counters. |
| Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | TODO | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-002 | Conflict runbook ops rollout<br>Instructions to work:<br>Read ./AGENTS.md and docs/AGENTS.md. Once GHSA/NVD/OSV regression fixtures (FEEDCONN-GHSA-04-002, FEEDCONN-NVD-04-002, FEEDCONN-OSV-04-002) are delivered, schedule the Ops review, apply the alert thresholds captured in `docs/ops/feedser-authority-audit-runbook.md`, and record change-log linkage after sign-off. Use ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md for ongoing rule references. |
| Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | DONE (2025-10-16) | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-002 | Conflict runbook ops rollout<br>Ops review completed, alert thresholds applied, and change log appended in `docs/ops/feedser-conflict-resolution.md`; task closed after connector signals verified. |
@@ -67,8 +67,9 @@ Authority centralises revocation in `authority_revocations` with deterministic c
**Export surfaces** (deterministic output, suitable for Offline Kit):
- CLI: `stella auth revoke export --output ./out` writes `revocation-bundle.json`, `.jws`, `.sha256`.
- Verification: `stella auth revoke verify --bundle <path> --signature <path> --key <path>` validates detached JWS signatures before distribution, selecting the crypto provider advertised in the detached header (see `docs/security/revocation-bundle.md`).
- API: `GET /internal/revocations/export` (requires bootstrap API key) returns the same payload.
- Verification: `stella auth revoke verify` validates schema, digest, and detached JWS using cached JWKS or offline keys.
- Verification: `stella auth revoke verify` validates schema, digest, and detached JWS using cached JWKS or offline keys, automatically preferring the hinted provider (libsodium builds honour `provider=libsodium`; other builds fall back to the managed provider).
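A minimal end-to-end sketch of that flow, assuming the CLI flags shown above; output paths, the `.jws` file name, and the key path are illustrative rather than prescribed:

```bash
# Export the bundle (writes revocation-bundle.json, .jws, .sha256 into ./out).
stella auth revoke export --output ./out

# Verify the detached JWS before distributing the bundle.
# File and key names below are illustrative; match them to your export output
# and to the verification key you ship offline.
stella auth revoke verify \
  --bundle ./out/revocation-bundle.json \
  --signature ./out/revocation-bundle.jws \
  --key ./keys/authority-verification.pem
```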
**Consumer guidance:**
@@ -15,8 +15,11 @@ completely isolated network:
| **Merged vulnerability feeds** | OSV, GHSA plus optional NVD 2.0, CNNVD, CNVD, ENISA, JVN and BDU |
| **Container images** | `stella-ops`, *Zastava* sidecar (x86‑64 & arm64) |
| **Provenance** | Cosign signature, SPDX 2.3 SBOM, in‑toto SLSA attestation |
| **Attested manifest** | `offline-manifest.json` + detached JWS covering bundle metadata, signed during export. |
| **Delta patches** | Daily diff bundles keep size \< 350 MB |
**RU BDU note:** ship the official Russian Trusted Root/Sub CA bundle (`certificates/russian_trusted_bundle.pem`) inside the kit so `feedser:httpClients:source.bdu:trustedRootPaths` can resolve it when the service runs in an air‑gapped network. Drop the most recent `vulxml.zip` alongside the kit if operators need a cold-start cache.
*Scanner core:* C# 12 on **.NET {{ dotnet }}**.
*Imports are idempotent and atomic — no service downtime.*
@@ -27,6 +30,8 @@ completely isolated network:
```bash
curl -LO https://get.stella-ops.org/ouk/stella-ops-offline-kit-<DATE>.tgz
curl -LO https://get.stella-ops.org/ouk/stella-ops-offline-kit-<DATE>.tgz.sig
curl -LO https://get.stella-ops.org/ouk/offline-manifest-<DATE>.json
curl -LO https://get.stella-ops.org/ouk/offline-manifest-<DATE>.json.jws
cosign verify-blob \
--key https://stella-ops.org/keys/cosign.pub \
@@ -37,6 +42,19 @@ cosign verify-blob \
Verification prints **OK** and the SHA‑256 digest; cross‑check against the
[changelog](https://git.stella-ops.org/stella-ops/offline-kit/-/releases).
Validate the attested manifest before distribution:
```bash
cosign verify-blob \
--key https://stella-ops.org/keys/cosign.pub \
--signature offline-manifest-<DATE>.json.jws \
offline-manifest-<DATE>.json
jq '.artifacts[] | {name, sha256, size, capturedAt}' offline-manifest-<DATE>.json
```
The manifest enumerates every artefact (`name`, `sha256`, `size`, `capturedAt`) and is signed with the same key registry as Authority revocation bundles. Operators can ship the manifest alongside the tarball so downstream mirrors can re-verify without unpacking the kit.
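As a small illustration of that re-verification step, a mirror operator could cross-check the kit tarball against its manifest entry before redistribution (a sketch; it assumes `jq` and `sha256sum` are available and that the tarball is listed under `.artifacts[].name`):

```bash
# Illustrative digest cross-check against the attested manifest.
KIT=stella-ops-offline-kit-<DATE>.tgz
expected=$(jq -r --arg name "$KIT" \
  '.artifacts[] | select(.name == $name) | .sha256' offline-manifest-<DATE>.json)
actual=$(sha256sum "$KIT" | awk '{print $1}')
[ "$expected" = "$actual" ] && echo "digest OK" || { echo "digest MISMATCH" >&2; exit 1; }
```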
---
## 2 · Import on the air‑gapped host
@@ -58,11 +58,15 @@ Everything here is open‑source and versioned — when you check out a git ta
- **22 – [CI/CD Recipes Library](ci/20_CI_RECIPES.md)**
- **23 – [FAQ](23_FAQ_MATRIX.md)**
- **24 – [Offline Update Kit Admin Guide](24_OUK_ADMIN_GUIDE.md)**
- **25 – [Feedser Apple Connector Operations](ops/feedser-apple-operations.md)**
- **26 – [Authority Key Rotation Playbook](ops/authority-key-rotation.md)**
- **27 – [Feedser CCCS Connector Operations](ops/feedser-cccs-operations.md)**
- **28 – [Feedser CISA ICS Connector Operations](ops/feedser-icscisa-operations.md)**
- **29 – [Feedser CERT-Bund Connector Operations](ops/feedser-certbund-operations.md)**
- **30 – [Feedser MSRC Connector – AAD Onboarding](ops/feedser-msrc-operations.md)**
### Legal & licence
- **29 – [Legal & Quota FAQ](29_LEGAL_FAQ_QUOTA.md)**
- **31 – [Legal & Quota FAQ](29_LEGAL_FAQ_QUOTA.md)**
</details>
@@ -7,6 +7,8 @@
| DOC3.Feedser-Authority | DONE (2025-10-12) | Docs Guild, DevEx | FSR4 | Polish operator/runbook sections (DOC3/DOC5) to document Feedser authority rollout, bypass logging, and enforcement checklist. | ✅ DOC3/DOC5 updated with audit runbook references; ✅ enforcement deadline highlighted; ✅ Docs guild sign-off. |
| DOC5.Feedser-Runbook | DONE (2025-10-12) | Docs Guild | DOC3.Feedser-Authority | Produce dedicated Feedser authority audit runbook covering log fields, monitoring recommendations, and troubleshooting steps. | ✅ Runbook published; ✅ linked from DOC3/DOC5; ✅ alerting guidance included. |
| FEEDDOCS-DOCS-05-001 | DONE (2025-10-11) | Docs Guild | FEEDMERGE-ENGINE-04-001, FEEDMERGE-ENGINE-04-002 | Publish Feedser conflict resolution runbook covering precedence workflow, merge-event auditing, and Sprint 3 metrics. | ✅ `docs/ops/feedser-conflict-resolution.md` committed; ✅ metrics/log tables align with latest merge code; ✅ Ops alert guidance handed to Feedser team. |
| FEEDDOCS-DOCS-05-002 | TODO | Docs Guild, Feedser Ops | FEEDDOCS-DOCS-05-001 | Capture ops sign-off: circulate conflict runbook, tune alert thresholds, and document rollout decisions in change log. | ✅ Ops review recorded; ✅ alert thresholds finalised using `docs/ops/feedser-authority-audit-runbook.md`; ✅ change-log entry linked from runbook once GHSA/NVD/OSV regression fixtures land. |
| FEEDDOCS-DOCS-05-002 | DONE (2025-10-16) | Docs Guild, Feedser Ops | FEEDDOCS-DOCS-05-001 | Ops sign-off captured: conflict runbook circulated, alert thresholds tuned, and rollout decisions documented in change log. | ✅ Ops review recorded; ✅ alert thresholds finalised using `docs/ops/feedser-authority-audit-runbook.md`; ✅ change-log entry linked from runbook once GHSA/NVD/OSV regression fixtures land. |
> Update statuses (TODO/DOING/REVIEW/DONE/BLOCKED) as progress changes. Keep guides in sync with configuration samples under `etc/`.
> Remark (2025-10-13, DOC4.AUTH-PDG): Rate limit guide published (`docs/security/rate-limits.md`) and handed to plugin docs team for diagram uplift once PLG6.DIAGRAM lands.
docs/artifacts/icscisa/20251014-sample-feed.xml (new file) · 27 lines
@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
<channel>
<title>CISA ICS Advisories</title>
<item>
<title>ICSA-25-123-01: Example ICS Advisory</title>
<link>https://www.cisa.gov/news-events/ics-advisories/icsa-25-123-01</link>
<pubDate>Mon, 13 Oct 2025 12:00:00 GMT</pubDate>
<description><![CDATA[
<p><strong>Vendor:</strong> Example Corp</p>
<p><strong>Products:</strong> ControlSuite 4.2</p>
<p>CVE-2024-12345 allows remote code execution.</p>
<p><a href="https://example.com/security/icsa-25-123-01.pdf">Download PDF</a></p>
]]></description>
</item>
<item>
<title>ICSMA-25-045-01: Example Medical Advisory</title>
<link>https://www.cisa.gov/news-events/ics-medical-advisories/icsma-25-045-01</link>
<pubDate>Tue, 14 Oct 2025 09:30:00 GMT</pubDate>
<description><![CDATA[
<p><strong>Vendor:</strong> HealthTech</p>
<p><strong>Products:</strong> InfusionManager 2.1</p>
<p>Multiple vulnerabilities including CVE-2025-11111 and CVE-2025-22222.</p>
]]></description>
</item>
</channel>
</rss>
@@ -44,6 +44,8 @@ Capability flags let the host reason about what your plug-in supports:
**Configuration path normalisation:** Manifest-relative paths (e.g., `tokenSigning.keyDirectory: "../keys"`) are resolved against the YAML file location and environment variables are expanded before validation. Plug-ins should expect to receive an absolute, canonical path when options are injected.
**Password policy guardrails:** The Standard registrar logs a warning when a plug-in weakens the default password policy (minimum length or required character classes). Keep overrides at least as strong as the compiled defaults—operators treat the warning as an actionable security deviation.
## 4. Project Scaffold
- Target **.NET 10 preview**, enable nullable, treat warnings as errors, and mark Authority plug-ins with `<IsAuthorityPlugin>true</IsAuthorityPlugin>`.
- Minimum references:
@@ -35,3 +35,11 @@ fixture sets, where they live, and how to regenerate them safely.
- **Verification:** Inspect the generated diffs and re-run `dotnet test src/StellaOps.Feedser.Source.Vndr.Apple.Tests/StellaOps.Feedser.Source.Vndr.Apple.Tests.csproj` without the env var to confirm determinism.
> **Tip for other connector owners:** mirror the sentinel + `WSLENV` pattern (`touch .update-<connector>-fixtures`, append the env var via `WSLENV`) when you add fixture refresh scripts so contributors running under WSL inherit the regeneration flag automatically.
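A rough sketch of that pattern, using the KISA fixtures documented below as the example; the sentinel and variable names follow the convention described above and should be adapted per connector:

```bash
# Sketch: translate the sentinel file into the fixture-refresh env var.
if [ -f .update-kisa-fixtures ]; then
  export UPDATE_KISA_FIXTURES=1
  # Under WSL, also append UPDATE_KISA_FIXTURES to WSLENV so the flag crosses
  # the Windows<->WSL boundary, as the tip above recommends.
fi
dotnet test src/StellaOps.Feedser.Source.Kisa.Tests/StellaOps.Feedser.Source.Kisa.Tests.csproj
```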
## KISA advisory fixtures
- **Location:** `src/StellaOps.Feedser.Source.Kisa.Tests/Fixtures/kisa-{feed,detail}.(xml|json)`
- **Purpose:** Used by `KisaConnectorTests` to verify Hangul-aware fetch → parse → map flows and to assert telemetry counters stay wired.
- **Regeneration:** `UPDATE_KISA_FIXTURES=1 dotnet test src/StellaOps.Feedser.Source.Kisa.Tests/StellaOps.Feedser.Source.Kisa.Tests.csproj`
- **Verification:** Re-run the same test suite without the env var; confirm advisory content remains NFC-normalised and HTML is sanitised. Metrics assertions will fail if counters drift.
- **Localisation note:** RSS `category` values (e.g. `취약점정보`) remain in Hangul—do not translate them in fixtures; they feed directly into metrics/log tags.
docs/dev/kisa_connector_notes.md (new file) · 45 lines
@@ -0,0 +1,45 @@
# KISA Connector Observability & Localisation
The KISA/KNVD connector now ships with structured telemetry, richer logging, and a localisation brief so Docs/QA can extend operator material without reverse-engineering the source.
## Telemetry counters
All metrics are emitted from `KisaDiagnostics` (`Meter` name `StellaOps.Feedser.Source.Kisa`).
| Metric | Description | Tags |
| --- | --- | --- |
| `kisa.feed.attempts` | RSS fetch attempts per scheduled job. | — |
| `kisa.feed.success` | Successful RSS fetches (increments even when no new items). | — |
| `kisa.feed.failures` | RSS fetch failures. | `reason` (exception type) |
| `kisa.feed.items` | Number of items returned by the RSS window. | — |
| `kisa.detail.attempts` | Advisory detail fetch attempts. | `category` (Hangul category from RSS) |
| `kisa.detail.success` | Detail payloads fetched and persisted. | `category` |
| `kisa.detail.unchanged` | HTTP 304 responses reused from cache. | `category` |
| `kisa.detail.failures` | Detail fetch failures or empty payloads. | `category`, `reason` |
| `kisa.parse.attempts` | Documents pulled from Mongo for parsing. | `category` |
| `kisa.parse.success` | Documents parsed into DTOs. | `category` |
| `kisa.parse.failures` | Download or JSON parse failures. | `category`, `reason` |
| `kisa.map.success` | Canonical advisories persisted. | `severity` (e.g. `High`, `unknown`) |
| `kisa.map.failures` | Mapping or DTO hydration failures. | `severity`, `reason` |
| `kisa.cursor.updates` | Published cursor advanced after ingest. | — |
> `category` tags surface the original Hangul labels (for example `취약점정보`), normalised to NFC. Downstream dashboards should render them as-is; do not transliterate or trim.
## Logging patterns
- `Information` level logs summarise each completed RSS fetch (`ItemCount`), each persisted detail document (IDX, category, documentId), and each canonical advisory written (IDX/severity).
- `Debug` level logs capture cache hits (304) and cursor movements (`Published` timestamp).
- `Warning` level messages are emitted when a document or DTO is missing so operators can correlate with parse/map counters.
- `Error` level retains exception context for feed/detail/parse/map failures; state repository backoffs are still applied.
The messages use structured properties (`Idx`, `Category`, `DocumentId`, `Severity`) so Grafana/Loki dashboards can filter without regex.
## Localisation notes for Docs & QA
- Hangul fields (`title`, `summary`, `category`, `reference.label`, product vendor/name) are normalised to NFC before storage. Sample category `취약점정보` roughly translates to “vulnerability information”.
- Advisory HTML is sanitised via `HtmlContentSanitizer`, stripping script/style while preserving inline anchors for translation pipelines.
- Metrics carry Hangul `category` tags and logging keeps Hangul strings intact; this ensures air-gapped operators can validate native-language content without relying on MT.
- Fixtures live under `src/StellaOps.Feedser.Source.Kisa.Tests/Fixtures/`. Regenerate with `UPDATE_KISA_FIXTURES=1 dotnet test src/StellaOps.Feedser.Source.Kisa.Tests/StellaOps.Feedser.Source.Kisa.Tests.csproj`.
- The regression suite asserts canonical mapping, state cleanup, and telemetry counters (`KisaConnectorTests.Telemetry_RecordsMetrics`) so QA can track instrumentation drift.
For operator docs, link to this brief when documenting Hangul handling or counter dashboards so localisation reviewers have a single reference point.
@@ -20,19 +20,19 @@ This dashboard tracks connector readiness for emitting `AffectedPackage.Normaliz
|-----------|------------|---------------------------|-------------|--------------------|
| Acsc | BE-Conn-ACSC | ❌ Not started – mapper pending | 2025-10-11 | Design DTOs + mapper with normalized rule array; see `src/StellaOps.Feedser.Source.Acsc/TASKS.md`. |
| Cccs | BE-Conn-CCCS | ❌ Not started – mapper pending | 2025-10-11 | Add normalized SemVer array in canonical mapper; coordinate fixtures per `TASKS.md`. |
| CertBund | BE-Conn-CERTBUND | ❌ Not started – mapper pending | 2025-10-11 | Capture firmware-style ranges; emit normalized payload; `src/StellaOps.Feedser.Source.CertBund/TASKS.md`. |
|
| CertBund | BE-Conn-CERTBUND | ✅ Canonical mapper emitting vendor ranges | 2025-10-14 | Normalized vendor range payloads landed alongside telemetry/docs updates; see `src/StellaOps.Feedser.Source.CertBund/TASKS.md`. |
|
||||||
| CertCc | BE-Conn-CERTCC | ⚠️ In progress – fetch pipeline DOING | 2025-10-11 | Implement VINCE mapper with SemVer/NEVRA rules; unblock snapshot regeneration; `src/StellaOps.Feedser.Source.CertCc/TASKS.md`. |
|
| CertCc | BE-Conn-CERTCC | ⚠️ In progress – fetch pipeline DOING | 2025-10-11 | Implement VINCE mapper with SemVer/NEVRA rules; unblock snapshot regeneration; `src/StellaOps.Feedser.Source.CertCc/TASKS.md`. |
|
||||||
| Kev | BE-Conn-KEV | ✅ Normalized catalog/due-date rules verified | 2025-10-12 | Fixtures reconfirmed via `dotnet test src/StellaOps.Feedser.Source.Kev.Tests`; `src/StellaOps.Feedser.Source.Kev/TASKS.md`. |
|
| Kev | BE-Conn-KEV | ✅ Normalized catalog/due-date rules verified | 2025-10-12 | Fixtures reconfirmed via `dotnet test src/StellaOps.Feedser.Source.Kev.Tests`; `src/StellaOps.Feedser.Source.Kev/TASKS.md`. |
|
||||||
| Cve | BE-Conn-CVE | ✅ Normalized SemVer rules verified | 2025-10-12 | Snapshot parity green (`dotnet test src/StellaOps.Feedser.Source.Cve.Tests`); `src/StellaOps.Feedser.Source.Cve/TASKS.md`. |
|
| Cve | BE-Conn-CVE | ✅ Normalized SemVer rules verified | 2025-10-12 | Snapshot parity green (`dotnet test src/StellaOps.Feedser.Source.Cve.Tests`); `src/StellaOps.Feedser.Source.Cve/TASKS.md`. |
|
||||||
| Ghsa | BE-Conn-GHSA | ⚠️ DOING – normalized rollout task active | 2025-10-11 18:45 UTC | Wire `SemVerRangeRuleBuilder` + refresh fixtures; `src/StellaOps.Feedser.Source.Ghsa/TASKS.md`. |
|
| Ghsa | BE-Conn-GHSA | ⚠️ DOING – normalized rollout task active | 2025-10-11 18:45 UTC | Wire `SemVerRangeRuleBuilder` + refresh fixtures; `src/StellaOps.Feedser.Source.Ghsa/TASKS.md`. |
|
||||||
| Osv | BE-Conn-OSV | ✅ SemVer mapper & parity fixtures verified | 2025-10-12 | GHSA parity regression passing (`dotnet test src/StellaOps.Feedser.Source.Osv.Tests`); `src/StellaOps.Feedser.Source.Osv/TASKS.md`. |
|
| Osv | BE-Conn-OSV | ✅ SemVer mapper & parity fixtures verified | 2025-10-12 | GHSA parity regression passing (`dotnet test src/StellaOps.Feedser.Source.Osv.Tests`); `src/StellaOps.Feedser.Source.Osv/TASKS.md`. |
|
||||||
| Ics.Cisa | BE-Conn-ICS-CISA | ❌ Not started – mapper TODO | 2025-10-11 | Plan SemVer/firmware scheme selection; `src/StellaOps.Feedser.Source.Ics.Cisa/TASKS.md`. |
|
| Ics.Cisa | BE-Conn-ICS-CISA | ❌ Not started – mapper TODO | 2025-10-11 | Plan SemVer/firmware scheme selection; `src/StellaOps.Feedser.Source.Ics.Cisa/TASKS.md`. |
|
||||||
| Kisa | BE-Conn-KISA | ❌ Not started – mapper TODO | 2025-10-11 | Localisation-aware mapper with normalized rules; `src/StellaOps.Feedser.Source.Kisa/TASKS.md`. |
|
| Kisa | BE-Conn-KISA | ✅ Landed 2025-10-14 (mapper + telemetry) | 2025-10-11 | Hangul-aware mapper emits normalized rules; see `docs/dev/kisa_connector_notes.md` for localisation/metric details. |
|
||||||
| Ru.Bdu | BE-Conn-BDU | ❌ Not started – mapper TODO | 2025-10-11 | Emit normalized ranges, capture provenance; `src/StellaOps.Feedser.Source.Ru.Bdu/TASKS.md`. |
|
| Ru.Bdu | BE-Conn-BDU | ✅ Raw scheme emitted | 2025-10-14 | Mapper now writes `ru-bdu.raw` normalized rules with provenance + telemetry; `src/StellaOps.Feedser.Source.Ru.Bdu/TASKS.md`. |
|
||||||
| Ru.Nkcki | BE-Conn-Nkcki | ❌ Not started – mapper TODO | 2025-10-11 | Similar to BDU; ensure Cyrillic provenance preserved; `src/StellaOps.Feedser.Source.Ru.Nkcki/TASKS.md`. |
|
| Ru.Nkcki | BE-Conn-Nkcki | ❌ Not started – mapper TODO | 2025-10-11 | Similar to BDU; ensure Cyrillic provenance preserved; `src/StellaOps.Feedser.Source.Ru.Nkcki/TASKS.md`. |
|
||||||
| Vndr.Apple | BE-Conn-Apple | ✅ Shipped – emitting normalized arrays | 2025-10-11 | Continue fixture/tooling work; `src/StellaOps.Feedser.Source.Vndr.Apple/TASKS.md`. |
|
| Vndr.Apple | BE-Conn-Apple | ✅ Shipped – emitting normalized arrays | 2025-10-11 | Continue fixture/tooling work; `src/StellaOps.Feedser.Source.Vndr.Apple/TASKS.md`. |
|
||||||
| Vndr.Cisco | BE-Conn-Cisco | ❌ Not started – mapper TODO | 2025-10-11 | Decide on scheme (`semver` vs custom) before emitting rules; `src/StellaOps.Feedser.Source.Vndr.Cisco/TASKS.md`. |
|
| Vndr.Cisco | BE-Conn-Cisco | ✅ SemVer + vendor extensions emitted | 2025-10-14 | Connector outputs SemVer primitives with `cisco.productId` notes; see `CiscoMapper` and fixtures for coverage. |
|
||||||
| Vndr.Msrc | BE-Conn-MSRC | ❌ Not started – mapper TODO | 2025-10-11 | Gather samples, define scheme, emit normalized rules; `src/StellaOps.Feedser.Source.Vndr.Msrc/TASKS.md`. |
|
| Vndr.Msrc | BE-Conn-MSRC | ✅ Map + normalized build rules landed | 2025-10-15 | `MsrcMapper` emits `msrc.build` normalized rules with CVRF references; see `src/StellaOps.Feedser.Source.Vndr.Msrc/TASKS.md`. |
|
||||||
| Nvd | BE-Conn-NVD | ⚠️ Needs follow-up – mapper complete but normalized array MR pending | 2025-10-11 | Align CVE notes + normalized payload flag; `src/StellaOps.Feedser.Source.Nvd/TASKS.md`. |
|
| Nvd | BE-Conn-NVD | ⚠️ Needs follow-up – mapper complete but normalized array MR pending | 2025-10-11 | Align CVE notes + normalized payload flag; `src/StellaOps.Feedser.Source.Nvd/TASKS.md`. |
|
||||||
|
|
||||||
Legend: ✅ complete, ⚠️ in progress/partial, ❌ not started.
|
Legend: ✅ complete, ⚠️ in progress/partial, ❌ not started.
|
||||||
|
|||||||
@@ -7,16 +7,20 @@ Snapshot of direct network checks performed on 2025-10-11 (UTC) for the national
- Next actions: prototype `SocketsHttpHandler` settings (`RequestVersionOrLower`, allow fallback to relay), capture successful headers from partner vantage (need retention + cache semantics), and keep `FEEDCONN-SHARED-HTTP2-001` open for downgrade work.

## CCCS (Canada)

- RSS endpoint (`https://cyber.gc.ca/api/cccs/rss/v1/get?...`) 301s to Atom feed (`/api/cccs/atom/v1/get?...`) with 50-entry window, HTML-heavy `<content>` fields, and no cache headers.
- JSON endpoint (`https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=<lang>&content_type=cccs_threat`) returns ~5 100 records per language; `page=<n>` still works for segmented pulls and the earliest `date_created` seen is 2018‑06‑08 (EN) / 2018‑06‑08 (FR). Use an explicit `User-Agent` to avoid 403 responses.
- Next actions: enumerate additional `feed` query values, sanitise inline HTML for DTO storage, and track retention depth via HTML pagination (`?page=`).
- Follow-up: telemetry, sanitiser coverage, and backfill procedures are documented in `docs/ops/feedser-cccs-operations.md` (2025‑10‑15). Adjust `maxEntriesPerFetch` when performing historical sweeps so cursor state remains responsive.

## CERT-Bund (Germany)

- `https://wid.cert-bund.de/content/public/securityAdvisory/rss` responds 200 without cookies (≈250-item window, German taxonomy). Detail links load an Angular SPA that fetches JSON behind the bootstrap session.
- Next actions: script SPA cookie/bootstrap, discover JSON detail endpoint, and capture advisory schema for parser planning.
- Confirmed `GET https://wid.cert-bund.de/portal/api/securityadvisory?name=<WID-SEC-…>` returns JSON once the portal cookie container is primed; payload includes severity, CVEs, products, and references used by the connector fixtures.
- Historical advisories accessible through the SPA search/export endpoints once the `XSRF-TOKEN` cookie (exposed via `GET /portal/api/security/csrf`) is supplied with the `X-XSRF-TOKEN` header:
  - `POST /portal/api/securityadvisory/search` (`{"page":N,"size":100,"sort":["published,desc"]}`) pages data back to 2014.
  - `GET /portal/api/securityadvisory/export?format=json&from=YYYY-MM-DD` emits JSON bundles suitable for Offline Kit mirrors.
- Locale note: content is German-only; Feedser preserves `language=de` and Docs will publish a CERT-Bund glossary so operators can bridge terminology without machine translation.

## KISA / KNVD (Korea)

- `https://knvd.krcert.or.kr/rss/securityInfo.do` and `/rss/securityNotice.do` return UTF-8 RSS (10-item window) with `detailDos.do?IDX=` links. No cookies required for feed fetch.
- Next actions: trace SPA detail requests to identify JSON endpoints, normalise Hangul content, and finalise localisation plan.
- Detail SPA calls resolve to `rssDetailData.do?IDX=` JSON payloads; connector fetches those directly, sanitises HTML, and records Hangul metadata (NFC). See `docs/dev/kisa_connector_notes.md` for telemetry + localisation guidance.

## BDU (Russia / FSTEC)

- Candidate endpoints (`https://bdu.fstec.ru/component/rsform/form/7-bdu?format=xml/json`) return 403/404; TLS chain requires Russian Trusted Sub CA and WAF expects additional headers.
72
docs/ops/feedser-cccs-operations.md
Normal file
@@ -0,0 +1,72 @@
# Feedser CCCS Connector Operations

This runbook covers day‑to‑day operation of the Canadian Centre for Cyber Security (`source:cccs:*`) connector, including configuration, telemetry, and historical backfill guidance for English/French advisories.

## 1. Configuration Checklist

- Network egress (or mirrored cache) for `https://www.cyber.gc.ca/` and the JSON API endpoints under `/api/cccs/`.
- Set the Feedser options before restarting workers. Example `feedser.yaml` snippet:

```yaml
feedser:
  sources:
    cccs:
      feeds:
        - language: "en"
          uri: "https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=en&content_type=cccs_threat"
        - language: "fr"
          uri: "https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=fr&content_type=cccs_threat"
      maxEntriesPerFetch: 80   # increase temporarily for backfill runs
      maxKnownEntries: 512
      requestTimeout: "00:00:30"
      requestDelay: "00:00:00.250"
      failureBackoff: "00:05:00"
```

> ℹ️ The `/api/cccs/threats/v1/get` endpoint returns thousands of records per language (≈5 100 rows each as of 2025‑10‑14). The connector honours `maxEntriesPerFetch`, so leave it low for steady‑state and raise it for planned backfills.

## 2. Telemetry & Logging

- **Metrics (Meter `StellaOps.Feedser.Source.Cccs`):**
  - `cccs.fetch.attempts`, `cccs.fetch.success`, `cccs.fetch.failures`
  - `cccs.fetch.documents`, `cccs.fetch.unchanged`
  - `cccs.parse.success`, `cccs.parse.failures`, `cccs.parse.quarantine`
  - `cccs.map.success`, `cccs.map.failures`
- **Shared HTTP metrics** via `SourceDiagnostics`:
  - `feedser.source.http.requests{feedser.source="cccs"}`
  - `feedser.source.http.failures{feedser.source="cccs"}`
  - `feedser.source.http.duration{feedser.source="cccs"}`
- **Structured logs**
  - `CCCS fetch completed feeds=… items=… newDocuments=… pendingDocuments=…`
  - `CCCS parse completed parsed=… failures=…`
  - `CCCS map completed mapped=… failures=…`
  - Warnings fire when GridFS payloads/DTOs go missing or parser sanitisation fails.

Suggested Grafana alerts:

- `increase(cccs.fetch.failures_total[15m]) > 0`
- `rate(cccs.map.success_total[1h]) == 0` while other connectors are active
- `histogram_quantile(0.95, rate(feedser_source_http_duration_bucket{feedser_source="cccs"}[1h])) > 5s`

## 3. Historical Backfill Plan

1. **Snapshot the source** – the API accepts `page=<n>` and `lang=<en|fr>` query parameters. `page=0` returns the full dataset (observed earliest `date_created`: 2018‑06‑08 for EN, 2018‑06‑08 for FR). Mirror those responses into Offline Kit storage when operating air‑gapped.
2. **Stage ingestion**:
   - Temporarily raise `maxEntriesPerFetch` (e.g. 500) and restart Feedser workers.
   - Run chained jobs until `pendingDocuments` drains:
     `stella db jobs run source:cccs:fetch --and-then source:cccs:parse --and-then source:cccs:map`
   - Monitor `cccs.fetch.unchanged` growth; once it approaches dataset size the backfill is complete.
3. **Optional pagination sweep** – for incremental mirrors, iterate `page=<n>` (0…N) while `response.Count == 50`, persisting JSON to disk. Store alongside metadata (`language`, `page`, SHA256) so repeated runs detect drift (see the sketch after this list).
4. **Language split** – keep EN/FR payloads separate to preserve canonical language fields. The connector emits `Language` directly from the feed entry, so mixed ingestion simply produces parallel advisories keyed by the same serial number.
5. **Throttle planning** – schedule backfills during maintenance windows; the API tolerates burst downloads but respect the 250 ms request delay or raise it if mirrored traffic is not available.
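
A minimal sweep for step 3 might look like the following. It assumes `jq` is available and that each page responds with a JSON array of records; the `page`/`lang` parameters and the 50-entry page size come from the observations above, while the file naming is purely illustrative.

```bash
#!/usr/bin/env bash
# Illustrative pagination sweep for one language; adjust paths and User-Agent to your environment.
lang="en"
page=0
while :; do
  out="cccs-${lang}-page${page}.json"
  curl -s -A "StellaOpsFeedser/backfill" \
    "https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=${lang}&content_type=cccs_threat&page=${page}" \
    -o "${out}"
  sha256sum "${out}" > "${out}.sha256"   # keep hashes so repeated runs detect drift
  count=$(jq 'length' "${out}")          # assumes the response body is a JSON array
  echo "lang=${lang} page=${page} count=${count}"
  [ "${count}" -lt 50 ] && break         # a short page signals the end of the dataset
  page=$((page + 1))
done
```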

## 4. Selector & Sanitiser Notes

- `CccsHtmlParser` now parses the **unsanitised DOM** (via AngleSharp) and only sanitises when persisting `ContentHtml`.
- Product extraction walks headings (`Affected Products`, `Produits touchés`, `Mesures recommandées`) and consumes nested lists within `div/section/article` containers.
- `HtmlContentSanitizer` allows `<h1>…<h6>` and `<section>` so stored HTML keeps headings for UI rendering and downstream summarisation.

## 5. Fixture Maintenance

- Regression fixtures live in `src/StellaOps.Feedser.Source.Cccs.Tests/Fixtures`.
- Refresh via `UPDATE_CCCS_FIXTURES=1 dotnet test src/StellaOps.Feedser.Source.Cccs.Tests/StellaOps.Feedser.Source.Cccs.Tests.csproj`.
- Fixtures capture both EN/FR advisories with nested lists to guard against sanitiser regressions; review diffs for heading/list changes before committing.
134
docs/ops/feedser-certbund-operations.md
Normal file
@@ -0,0 +1,134 @@
# Feedser CERT-Bund Connector Operations

_Last updated: 2025-10-15_

Germany’s Federal Office for Information Security (BSI) operates the Warn- und Informationsdienst (WID) portal. The Feedser CERT-Bund connector (`source:cert-bund:*`) ingests the public RSS feed, hydrates the portal’s JSON detail endpoint, and maps the result into canonical advisories while preserving the original German content.

---

## 1. Configuration Checklist

- Allow outbound access (or stage mirrors) for:
  - `https://wid.cert-bund.de/content/public/securityAdvisory/rss`
  - `https://wid.cert-bund.de/portal/` (session/bootstrap)
  - `https://wid.cert-bund.de/portal/api/securityadvisory` (detail/search/export JSON)
- Ensure the HTTP client reuses a cookie container (the connector’s dependency injection wiring already sets this up).

Example `feedser.yaml` fragment:

```yaml
feedser:
  sources:
    cert-bund:
      feedUri: "https://wid.cert-bund.de/content/public/securityAdvisory/rss"
      portalBootstrapUri: "https://wid.cert-bund.de/portal/"
      detailApiUri: "https://wid.cert-bund.de/portal/api/securityadvisory"
      maxAdvisoriesPerFetch: 50
      maxKnownAdvisories: 512
      requestTimeout: "00:00:30"
      requestDelay: "00:00:00.250"
      failureBackoff: "00:05:00"
```

> Leave `maxAdvisoriesPerFetch` at 50 during normal operation. Raise it only for controlled backfills, then restore the default to avoid overwhelming the portal.

---

## 2. Telemetry & Logging

- **Meter**: `StellaOps.Feedser.Source.CertBund`
- **Counters / histograms**:
  - `certbund.feed.fetch.attempts|success|failures`
  - `certbund.feed.items.count`
  - `certbund.feed.enqueued.count`
  - `certbund.feed.coverage.days`
  - `certbund.detail.fetch.attempts|success|not_modified|failures{reason}`
  - `certbund.parse.success|failures{reason}`
  - `certbund.parse.products.count`, `certbund.parse.cve.count`
  - `certbund.map.success|failures{reason}`
  - `certbund.map.affected.count`, `certbund.map.aliases.count`
- Shared HTTP metrics remain available through `feedser.source.http.*`.

**Structured logs** (all emitted at information level when work occurs):

- `CERT-Bund fetch cycle: … truncated {Truncated}, coverageDays={CoverageDays}`
- `CERT-Bund parse cycle: parsed {Parsed}, failures {Failures}, …`
- `CERT-Bund map cycle: mapped {Mapped}, failures {Failures}, …`

Alerting ideas:

1. `increase(certbund.detail.fetch.failures_total[10m]) > 0`
2. `rate(certbund.map.success_total[30m]) == 0`
3. `histogram_quantile(0.95, rate(feedser_source_http_duration_bucket{feedser_source="cert-bund"}[15m])) > 5s`

The WebService now registers the meter so metrics surface automatically once OpenTelemetry metrics are enabled.

---

## 3. Historical Backfill & Export Strategy

### 3.1 Retention snapshot

- RSS window: ~250 advisories (≈90 days at current cadence).
- Older advisories are accessible through the JSON search/export APIs once the anti-CSRF token is supplied.

### 3.2 JSON search pagination

```bash
# 1. Bootstrap cookies (client_config + XSRF-TOKEN)
curl -s -c cookies.txt "https://wid.cert-bund.de/portal/" > /dev/null
curl -s -b cookies.txt -c cookies.txt \
  -H "X-Requested-With: XMLHttpRequest" \
  "https://wid.cert-bund.de/portal/api/security/csrf" > /dev/null

XSRF=$(awk '/XSRF-TOKEN/ {print $7}' cookies.txt)

# 2. Page search results
curl -s -b cookies.txt \
  -H "Content-Type: application/json" \
  -H "Accept: application/json" \
  -H "X-XSRF-TOKEN: ${XSRF}" \
  -X POST \
  --data '{"page":4,"size":100,"sort":["published,desc"]}' \
  "https://wid.cert-bund.de/portal/api/securityadvisory/search" \
  > certbund-page4.json
```
Iterate `page` until the response `content` array is empty. Pages 0–9 currently cover 2014→present. Persist JSON responses (plus SHA256) for Offline Kit parity.
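
To walk every page, the request above can be wrapped in a small loop. The sketch below assumes `jq` is installed, reuses `cookies.txt` and `${XSRF}` from the previous snippet, and stops when `content` comes back empty; file naming is illustrative.

```bash
page=0
while :; do
  out="certbund-page${page}.json"
  curl -s -b cookies.txt \
    -H "Content-Type: application/json" \
    -H "Accept: application/json" \
    -H "X-XSRF-TOKEN: ${XSRF}" \
    -X POST \
    --data "{\"page\":${page},\"size\":100,\"sort\":[\"published,desc\"]}" \
    "https://wid.cert-bund.de/portal/api/securityadvisory/search" \
    > "${out}"
  sha256sum "${out}" > "${out}.sha256"                      # provenance for Offline Kit parity
  [ "$(jq '.content | length' "${out}")" -eq 0 ] && break   # empty page => done
  page=$((page + 1))
done
```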

### 3.3 Export bundles

```bash
curl -s -b cookies.txt \
  -H "Accept: application/json" \
  -H "X-XSRF-TOKEN: ${XSRF}" \
  "https://wid.cert-bund.de/portal/api/securityadvisory/export?format=json&from=2020-01-01" \
  > certbund-2020-2025.json
```

Split long ranges per year and record provenance (`from`, `to`, SHA, capturedAt). Feedser can ingest these JSON payloads directly when operating offline.

Task `FEEDCONN-CERTBUND-02-009` tracks turning this workflow into a shipped Offline Kit artefact with manifests and documentation updates—coordinate with the Docs guild before publishing.

### 3.4 Connector-driven catch-up

1. Temporarily raise `maxAdvisoriesPerFetch` (e.g. 150) and reduce `requestDelay`.
2. Run `stella db jobs run source:cert-bund:fetch --and-then source:cert-bund:parse --and-then source:cert-bund:map` until the fetch log reports `enqueued=0`.
3. Restore defaults and capture the cursor snapshot for audit.

---

## 4. Locale & Translation Guidance

- Advisories remain in German (`language: "de"`). Preserve wording for provenance and legal accuracy.
- UI localisation: enable the translation bundles documented in `docs/15_UI_GUIDE.md` if English UI copy is required. Operators can overlay machine or human translations, but the canonical database stores the source text.
- Docs guild is compiling a CERT-Bund terminology glossary under `docs/locale/certbund-glossary.md` so downstream teams can reference consistent English equivalents without altering the stored advisories.

---

## 5. Verification Checklist

1. Observe `certbund.feed.fetch.success` and `certbund.detail.fetch.success` increments after runs; `certbund.feed.coverage.days` should hover near the observed RSS window.
2. Ensure summary logs report `truncated=false` in steady state—`true` indicates the fetch cap was hit.
3. During backfills, watch `certbund.feed.enqueued.count` trend to zero.
4. Spot-check stored advisories in Mongo to confirm `language="de"` and reference URLs match the portal detail endpoint.
5. For Offline Kit exports, validate SHA256 hashes before distribution.
94
docs/ops/feedser-cisco-operations.md
Normal file
@@ -0,0 +1,94 @@
# Feedser Cisco PSIRT Connector – OAuth Provisioning SOP

_Last updated: 2025-10-14_

## 1. Scope

This runbook describes how Ops provisions, rotates, and distributes Cisco PSIRT openVuln OAuth client credentials for the Feedser Cisco connector. It covers online and air-gapped (Offline Kit) environments, quota-aware execution, and escalation paths.

## 2. Prerequisites

- Active Cisco.com (CCO) account with access to the Cisco API Console.
- Cisco PSIRT openVuln API entitlement (visible under “My Apps & Keys” once granted).
- Feedser configuration location (typically `/etc/stella/feedser.yaml` in production) or Offline Kit secret bundle staging directory.

## 3. Provisioning workflow

1. **Register the application**
   - Sign in at <https://apiconsole.cisco.com>.
   - Select **Register a New App** → Application Type: `Service`, Grant Type: `Client Credentials`, API: `Cisco PSIRT openVuln API`.
   - Record the generated `clientId` and `clientSecret` in the Ops vault.
2. **Verify token issuance**
   - Request an access token with:

     ```bash
     curl -s https://id.cisco.com/oauth2/default/v1/token \
       -H "Content-Type: application/x-www-form-urlencoded" \
       -d "grant_type=client_credentials" \
       -d "client_id=${CLIENT_ID}" \
       -d "client_secret=${CLIENT_SECRET}"
     ```

   - Confirm HTTP 200 and an `expires_in` value of 3600 seconds (tokens live for one hour).
   - Preserve the response only long enough to validate syntax; do **not** persist tokens.
3. **Authorize Feedser runtime**
   - Update `feedser:sources:cisco:auth` (or the module-specific secret template) with the stored credentials.
   - For Offline Kit delivery, export encrypted secrets into `offline-kit/secrets/cisco-openvuln.json` using the platform’s sealed secret format.
4. **Connectivity validation**
   - From the Feedser control plane, run `stella db jobs run source:vndr-cisco:fetch --dry-run`.
   - Ensure the Source HTTP diagnostics record `Bearer` authorization headers and no 401/403 responses.

## 4. Rotation SOP

| Step | Owner | Notes |
| --- | --- | --- |
| 1. Schedule rotation | Ops (monthly board) | Rotate every 90 days or immediately after suspected credential exposure. |
| 2. Create replacement app | Ops | Repeat §3.1 with “-next” suffix; verify token issuance. |
| 3. Stage dual credentials | Ops + Feedser On-Call | Publish new credentials to secret store alongside current pair. |
| 4. Cut over | Feedser On-Call | Restart connector workers during a low-traffic window (<10 min) to pick up the new secret. |
| 5. Deactivate legacy app | Ops | Delete prior app in Cisco API Console once telemetry confirms successful fetch/parse cycles for 2 consecutive hours. |

**Automation hooks**

- Rotation reminders are tracked on the OpsRunbookOps board (`OPS-RUN-KEYS` swim lane); add checklist items for Feedser Cisco when opening a rotation task.
- Use the secret management pipeline (`ops/secrets/rotate.sh --connector cisco`) to template vault updates; the script renders a redacted diff for audit.

## 5. Offline Kit packaging

1. Generate the credential bundle using the Offline Kit CLI:
   `offline-kit secrets add cisco-openvuln --client-id … --client-secret …`
2. Store the encrypted payload under `offline-kit/secrets/cisco-openvuln.enc`.
3. Distribute via the Offline Kit channel; update `offline-kit/MANIFEST.md` with the credential fingerprint (SHA256 of plaintext concatenated with metadata).
4. Document validation steps for the receiving site (token request from an air-gapped relay or cached token mirror).

## 6. Quota and throttling guidance

- Cisco enforces combined limits of 5 requests/second, 30 requests/minute, and 5 000 requests/day per application.
- Feedser fetch jobs must respect `Retry-After` headers on HTTP 429 responses; Ops should monitor for sustained quota saturation and consider paging window adjustments.
- Telemetry to watch: `feedser.source.http.requests{feedser.source="vndr-cisco"}`, `feedser.source.http.failures{...}`, and connector-specific metrics once implemented.
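
As a manual smoke check of the throttling behaviour, something like the following honours `Retry-After` before retrying. `${OPENVULN_URL}` is a placeholder for whichever openVuln endpoint you are validating (it is not specified here), the token request reuses the curl call from §3.2, and `jq` is assumed to be installed.

```bash
TOKEN=$(curl -s https://id.cisco.com/oauth2/default/v1/token \
  -H "Content-Type: application/x-www-form-urlencoded" \
  -d "grant_type=client_credentials" \
  -d "client_id=${CLIENT_ID}" \
  -d "client_secret=${CLIENT_SECRET}" | jq -r '.access_token')

status=$(curl -s -D headers.txt -o response.json -w '%{http_code}' \
  -H "Authorization: Bearer ${TOKEN}" "${OPENVULN_URL}")
if [ "${status}" = "429" ]; then
  # Honour the server-provided back-off before retrying.
  wait=$(awk 'tolower($1)=="retry-after:" {print $2}' headers.txt | tr -d '\r')
  sleep "${wait:-60}"
fi
echo "HTTP ${status}"
```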

## 7. Telemetry & Monitoring

- **Metrics (Meter `StellaOps.Feedser.Source.Vndr.Cisco`)**
  - `cisco.fetch.documents`, `cisco.fetch.failures`, `cisco.fetch.unchanged`
  - `cisco.parse.success`, `cisco.parse.failures`
  - `cisco.map.success`, `cisco.map.failures`, `cisco.map.affected.packages`
- **Shared HTTP metrics** via `SourceDiagnostics`:
  - `feedser.source.http.requests{feedser.source="vndr-cisco"}`
  - `feedser.source.http.failures{feedser.source="vndr-cisco"}`
  - `feedser.source.http.duration{feedser.source="vndr-cisco"}`
- **Structured logs**
  - `Cisco fetch completed date=… pages=… added=…` (info)
  - `Cisco parse completed parsed=… failures=…` (info)
  - `Cisco map completed mapped=… failures=…` (info)
  - Warnings surface when DTO serialization fails or GridFS payload is missing.
- Suggested alerts: non-zero `cisco.fetch.failures` in 15m, or `cisco.map.success` flatlines while fetch continues.

## 8. Incident response

- **Token compromise** – revoke the application in the Cisco API Console, purge cached secrets, rotate immediately per §4.
- **Persistent 401/403** – confirm credentials in vault, then validate token issuance; if unresolved, open a Cisco DevNet support ticket referencing the application ID.
- **429 spikes** – inspect job scheduler cadence and adjust connector options (`maxRequestsPerWindow`) before requesting higher quotas from Cisco.

## 9. References

- Cisco PSIRT openVuln API Authentication Guide.
- Accessing the openVuln API using curl (token lifetime).
- openVuln API rate limit documentation.
@@ -150,3 +150,11 @@ dotnet test src/StellaOps.Feedser.Merge.Tests/StellaOps.Feedser.Merge.Tests.cspr
```
- **Expected signals** – The triple produces one freshness-driven summary override (`primary_source=osv`, `suppressed_source=ghsa`) and one range override for the npm SemVer package while leaving `feedser.merge.conflicts` at zero. Use these values as the baseline when tuning dashboards or load-testing alert pipelines.

---

## 10. Change Log

| Date (UTC) | Change | Notes |
|------------|--------|-------|
| 2025-10-16 | Ops review signed off after connector expansion (CCCS, CERT-Bund, KISA, ICS CISA, MSRC) landed. Alert thresholds from §3 reaffirmed; dashboards updated to watch attachment signals emitted by ICS CISA connector. | Ops sign-off recorded by Feedser Ops Guild; no additional overrides required. |
@@ -18,6 +18,7 @@ feedser:
      apiOrg: "ORG123"
      apiUser: "user@example.org"
      apiKeyFile: "/var/run/secrets/feedser/cve-api-key"
      seedDirectory: "./seed-data/cve"
      pageSize: 200
      maxPagesPerFetch: 5
      initialBackfill: "30.00:00:00"
@@ -27,6 +28,8 @@ feedser:

> ℹ️ Store the API key outside source control. When using `apiKeyFile`, mount the secret file into the container/host; alternatively supply `apiKey` via `FEEDSER_SOURCES__CVE__APIKEY`.

> 🪙 When credentials are not yet available, configure `seedDirectory` to point at mirrored CVE JSON (for example, the repo’s `seed-data/cve/` bundle). The connector will ingest those records and log a warning instead of failing the job; live fetching resumes automatically once `apiOrg` / `apiUser` / `apiKey` are supplied.
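
If you prefer environment variables over YAML, the seed directory can presumably be overridden the same way as the API key. The variable name below simply follows the double-underscore convention shown above and is an assumption, not a documented setting:

```bash
# Hypothetical override following the FEEDSER_SOURCES__CVE__* convention.
export FEEDSER_SOURCES__CVE__SEEDDIRECTORY="./seed-data/cve"
```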

### 1.2 Smoke Test (staging)

1. Deploy the updated configuration and restart the Feedser service so the connector picks up the credentials.
@@ -51,6 +54,26 @@ feedser:
- **Grafana pack** – Import `docs/ops/feedser-cve-kev-grafana-dashboard.json` and filter by panel legend (`CVE`, `KEV`) to reuse the canned layout.
- **Backfill window** – Operators can tighten or widen `initialBackfill` / `maxPagesPerFetch` after validating throughput. Update config and restart Feedser to apply changes.

### 1.4 Staging smoke log (2025-10-15)

While Ops finalises long-lived CVE Services credentials, we validated the connector end-to-end against the recorded CVE-2024-0001 payloads used in regression tests:

- Command: `dotnet test src/StellaOps.Feedser.Source.Cve.Tests/StellaOps.Feedser.Source.Cve.Tests.csproj -l "console;verbosity=detailed"`
- Summary log emitted by the connector:

```
CVEs fetch window 2024-09-01T00:00:00Z->2024-10-01T00:00:00Z pages=1 listSuccess=1 detailDocuments=1 detailFailures=0 detailUnchanged=0 pendingDocuments=0->1 pendingMappings=0->1 hasMorePages=False nextWindowStart=2024-09-15T12:00:00Z nextWindowEnd=(none) nextPage=1
```

- Telemetry captured by `Meter` `StellaOps.Feedser.Source.Cve`:

| Metric | Value |
|--------|-------|
| `cve.fetch.attempts` | 1 |
| `cve.fetch.success` | 1 |
| `cve.fetch.documents` | 1 |
| `cve.parse.success` | 1 |
| `cve.map.success` | 1 |

The Grafana pack `docs/ops/feedser-cve-kev-grafana-dashboard.json` has been imported into staging so the panels referenced above render against these counters once the live API keys are in place.

## 2. CISA KEV Connector (`source:kev:*`)

### 2.1 Prerequisites
122
docs/ops/feedser-icscisa-operations.md
Normal file
@@ -0,0 +1,122 @@
# Feedser CISA ICS Connector Operations

This runbook documents how to provision, rotate, and validate credentials for the CISA Industrial Control Systems (ICS) connector (`source:ics-cisa:*`). Follow it before enabling the connector in staging or offline installations.

## 1. Credential Provisioning

1. **Create a service mailbox** reachable by the Ops crew (shared mailbox recommended).
2. Browse to `https://public.govdelivery.com/accounts/USDHSCISA/subscriber/new` and subscribe the mailbox to the following GovDelivery topics:
   - `USDHSCISA_16` — ICS-CERT advisories (legacy numbering: `ICSA-YY-###`).
   - `USDHSCISA_19` — ICS medical advisories (`ICSMA-YY-###`).
   - `USDHSCISA_17` — ICS alerts (`IR-ALERT-YY-###`) for completeness.
3. Complete the verification email. After confirmation, note the **personalised subscription code** included in the “Manage Preferences” link. It has the shape `code=AB12CD34EF`.
4. Store the code in the shared secret vault (or Offline Kit secrets bundle) as `feedser/sources/icscisa/govdelivery/code`.

> ℹ️ GovDelivery does not expose a one-time API key; the personalised code is what authenticates the RSS pull. Never commit it to git.

## 2. Feed Validation

Use the following command to confirm the feed is reachable before wiring it into Feedser (substitute `<CODE>` with the personalised value):

```bash
curl -H "User-Agent: StellaOpsFeedser/ics-cisa" \
  "https://content.govdelivery.com/accounts/USDHSCISA/topics/ICS-CERT/feed.rss?format=xml&code=<CODE>"
```

If the endpoint returns HTTP 200 and an RSS payload, record the sample response under `docs/artifacts/icscisa/` (see Task `FEEDCONN-ICSCISA-02-007`). HTTP 403 or 406 usually means the subscription was not confirmed or the code was mistyped.

## 3. Configuration Snippet

Add the connector configuration to `feedser.yaml` (or equivalent environment variables):

```yaml
feedser:
  sources:
    icscisa:
      govDelivery:
        code: "${FEEDSER_ICS_CISA_GOVDELIVERY_CODE}"
        topics:
          - "USDHSCISA_16"
          - "USDHSCISA_19"
          - "USDHSCISA_17"
      rssBaseUri: "https://content.govdelivery.com/accounts/USDHSCISA"
      requestDelay: "00:00:01"
      failureBackoff: "00:05:00"
```

Environment variable example:

```bash
export FEEDSER_SOURCES_ICSCISA_GOVDELIVERY_CODE="AB12CD34EF"
```

Feedser automatically registers the host with the Source.Common HTTP allow-list when the connector assembly is loaded.

Optional tuning keys (set only when needed):

- `proxyUri` — HTTP/HTTPS proxy URL used when Akamai blocks direct pulls.
- `requestVersion` / `requestVersionPolicy` — override HTTP negotiation when the proxy requires HTTP/1.1.
- `enableDetailScrape` — toggle HTML detail fallback (defaults to true).
- `captureAttachments` — collect PDF attachments from detail pages (defaults to true).
- `detailBaseUri` — alternate host for detail enrichment if CISA changes their layout.

## 4. Seeding Without GovDelivery

If credentials are still pending, populate the connector with the community CSV dataset before enabling the live fetch:

1. Run `./scripts/fetch-ics-cisa-seed.sh` (or `.ps1`) to download the latest `CISA_ICS_ADV_*.csv` files into `seed-data/ics-cisa/`.
2. Copy the CSVs (and the generated `.sha256` files) into your Offline Kit staging area so they ship alongside the other feeds (see the verification sketch below).
3. Import the kit as usual. The connector can parse the seed data for historical context, but **live GovDelivery credentials are still required** for fresh advisories.
4. Once credentials arrive, update `feedser:sources:icscisa:govDelivery:code` and re-trigger `source:ics-cisa:fetch` so the connector switches to the authorised feed.

> The CSVs are licensed under ODbL 1.0 by the ICS Advisory Project. Preserve the attribution when redistributing them.
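
For step 2, the hashes can be checked before anything is copied into the staging area. The sketch below assumes the `.sha256` files are in the format accepted by `sha256sum -c`, and the staging path is a placeholder:

```bash
cd seed-data/ics-cisa
for csv in CISA_ICS_ADV_*.csv; do
  sha256sum -c "${csv}.sha256" || { echo "hash mismatch: ${csv}" >&2; exit 1; }
done
# Only copy once every hash verifies.
cp CISA_ICS_ADV_*.csv CISA_ICS_ADV_*.csv.sha256 /path/to/offline-kit/staging/
```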

## 5. Integration Validation

1. Ensure secrets are in place and restart the Feedser workers.
2. Run a dry-run fetch/parse/map chain against an Akamai-protected topic:

   ```bash
   FEEDSER_SOURCES_ICSCISA_GOVDELIVERY_CODE=... \
   FEEDSER_SOURCES_ICSCISA_ENABLEDETAILSCRAPE=1 \
   stella db jobs run source:ics-cisa:fetch --and-then source:ics-cisa:parse --and-then source:ics-cisa:map
   ```

3. Confirm logs contain `ics-cisa detail fetch` entries and that new documents/DTOs include attachments (see `docs/artifacts/icscisa`). Canonical advisories should expose PDF links as `references.kind == "attachment"` and affected packages should surface `primitives.semVer.exactValue` for single-version hits.
4. If Akamai blocks direct fetches, set `feedser:sources:icscisa:proxyUri` to your allow-listed egress proxy and rerun the dry-run.

## 6. Rotation & Incident Response

- Review GovDelivery access quarterly. Rotate the personalised code whenever Ops changes the service mailbox password or membership.
- Revoking the subscription in GovDelivery invalidates the code immediately; update the vault and configuration in the same change.
- If the code leaks, remove the subscription (`https://public.govdelivery.com/accounts/USDHSCISA/subscriber/manage_preferences?code=<CODE>`), resubscribe, and distribute the new value via the vault.

## 7. Offline Kit Handling

Include the personalised code in `offline-kit/secrets/feedser/icscisa.env`:

```
FEEDSER_SOURCES_ICSCISA_GOVDELIVERY_CODE=AB12CD34EF
```

The Offline Kit deployment script copies this file into the container secret directory mounted at `/run/secrets/feedser`. Ensure permissions are `600` and ownership matches the Feedser runtime user.

## 8. Telemetry & Monitoring

The connector emits metrics under the meter `StellaOps.Feedser.Source.Ics.Cisa`. They allow operators to track Akamai fallbacks, detail enrichment health, and advisory fan-out.

- `icscisa.fetch.*` – counters for `attempts`, `success`, `failures`, `not_modified`, and `fallbacks`, plus histogram `icscisa.fetch.documents` showing documents added per topic pull (tags: `feedser.source`, `icscisa.topic`).
- `icscisa.parse.*` – counters for `success`/`failures` and histograms `icscisa.parse.advisories`, `icscisa.parse.attachments`, `icscisa.parse.detail_fetches` to monitor enrichment workload per feed document.
- `icscisa.detail.*` – counters `success` / `failures` per advisory (tagged with `icscisa.advisory`) to alert when Akamai blocks detail pages.
- `icscisa.map.*` – counters for `success`/`failures` and histograms `icscisa.map.references`, `icscisa.map.packages`, `icscisa.map.aliases` capturing canonical fan-out.

Suggested alerts:

- `increase(icscisa.fetch.failures_total[15m]) > 0` or `increase(icscisa.fetch.fallbacks_total[15m]) > 5` — sustained Akamai or proxy issues.
- `increase(icscisa.detail.failures_total[30m]) > 0` — detail enrichment breaking (potential HTML layout change).
- `histogram_quantile(0.95, rate(icscisa.map.references_bucket[1h]))` trending sharply higher — sudden advisory reference explosion worth investigating.
- Keep an eye on shared HTTP metrics (`feedser.source.http.*{feedser.source="ics-cisa"}`) for request latency and retry patterns.

## 9. Related Tasks

- `FEEDCONN-ICSCISA-02-009` (GovDelivery credential onboarding) — completed once this runbook is followed and secrets are placed in the vault.
- `FEEDCONN-ICSCISA-02-007` (document inventory) — archive the first successful RSS response and any attachment URL schema under `docs/artifacts/icscisa/`.
74
docs/ops/feedser-kisa-operations.md
Normal file
@@ -0,0 +1,74 @@
# Feedser KISA Connector Operations

Operational guidance for the Korea Internet & Security Agency (KISA / KNVD) connector (`source:kisa:*`). Pair this with the engineering brief in `docs/dev/kisa_connector_notes.md`.

## 1. Prerequisites

- Outbound HTTPS (or mirrored cache) for `https://knvd.krcert.or.kr/`.
- Connector options defined under `feedser:sources:kisa`:

```yaml
feedser:
  sources:
    kisa:
      feedUri: "https://knvd.krcert.or.kr/rss/securityInfo.do"
      detailApiUri: "https://knvd.krcert.or.kr/rssDetailData.do"
      detailPageUri: "https://knvd.krcert.or.kr/detailDos.do"
      maxAdvisoriesPerFetch: 10
      requestDelay: "00:00:01"
      failureBackoff: "00:05:00"
```

> Ensure the URIs stay absolute—Feedser adds the `feedUri`/`detailApiUri` hosts to the HttpClient allow-list automatically.

## 2. Staging Smoke Test

1. Restart the Feedser workers so the KISA options bind.
2. Run a full connector cycle:
   - CLI: `stella db jobs run source:kisa:fetch --and-then source:kisa:parse --and-then source:kisa:map`
   - REST: `POST /jobs/run { "kind": "source:kisa:fetch", "chain": ["source:kisa:parse", "source:kisa:map"] }`
3. Confirm telemetry (Meter `StellaOps.Feedser.Source.Kisa`):
   - `kisa.feed.success`, `kisa.feed.items`
   - `kisa.detail.success` / `.failures`
   - `kisa.parse.success` / `.failures`
   - `kisa.map.success` / `.failures`
   - `kisa.cursor.updates`
4. Inspect logs for structured entries:
   - `KISA feed returned {ItemCount}`
   - `KISA fetched detail for {Idx} … category={Category}`
   - `KISA mapped advisory {AdvisoryId} (severity={Severity})`
   - Absence of warnings such as `document missing GridFS payload`.
5. Validate MongoDB state:
   - `raw_documents.metadata` has `kisa.idx`, `kisa.category`, `kisa.title`.
   - DTO store contains `schemaVersion="kisa.detail.v1"`.
   - Advisories include aliases (`IDX`, CVE) and `language="ko"`.
   - `source_states` entry for `kisa` shows recent `cursor.lastFetchAt`.

## 3. Production Monitoring

- **Dashboards** – Add the following Prometheus/OTEL expressions:
  - `rate(kisa_feed_items_total[15m])` versus `rate(feedser_source_http_requests_total{feedser_source="kisa"}[15m])`
  - `increase(kisa_detail_failures_total{reason!="empty-document"}[1h])` alert at `>0`
  - `increase(kisa_parse_failures_total[1h])` for storage/JSON issues
  - `increase(kisa_map_failures_total[1h])` to flag schema drift
  - `increase(kisa_cursor_updates_total[6h]) == 0` during active windows → warn
- **Alerts** – Page when `rate(kisa_feed_success_total[2h]) == 0` while other connectors are active; back off for maintenance windows announced on `https://knvd.krcert.or.kr/`.
- **Logs** – Watch for repeated warnings (`document missing`, `DTO missing`) or errors with reason tags `HttpRequestException`, `download`, `parse`, `map`.
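
If you manage alerts as Prometheus rule files, one of the expressions above can be wrapped as shown below. The group name, durations, and labels are placeholders to adapt to your own rule-file conventions, not values defined by this runbook:

```yaml
groups:
  - name: feedser-kisa
    rules:
      - alert: KisaDetailFetchFailures
        expr: increase(kisa_detail_failures_total{reason!="empty-document"}[1h]) > 0
        for: 15m
        labels:
          severity: warning
        annotations:
          summary: "KISA detail fetches are failing"
          description: "Check connector logs for reason tags (HttpRequestException, download, parse, map)."
```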

## 4. Localisation Handling

- Hangul categories (for example `취약점정보`) flow into telemetry tags (`category=…`) and logs. Dashboards must render UTF‑8 and avoid transliteration.
- HTML content is sanitised before storage; translation teams can consume the `ContentHtml` field safely.
- Advisory severity remains as provided by KISA (`High`, `Medium`, etc.). Map-level failures include the severity tag for filtering.

## 5. Fixture & Regression Maintenance

- Regression fixtures: `src/StellaOps.Feedser.Source.Kisa.Tests/Fixtures/kisa-feed.xml` and `kisa-detail.json`.
- Refresh via `UPDATE_KISA_FIXTURES=1 dotnet test src/StellaOps.Feedser.Source.Kisa.Tests/StellaOps.Feedser.Source.Kisa.Tests.csproj`.
- The telemetry regression (`KisaConnectorTests.Telemetry_RecordsMetrics`) will fail if counters/log wiring drifts—treat failures as gating.

## 6. Known Issues

- RSS feeds only expose the latest 10 advisories; long outages require replay via archived feeds or manual IDX seeds.
- Detail endpoint occasionally throttles; the connector honours `requestDelay` and reports failures with reason `HttpRequestException`. Consider increasing delay for weekend backfills.
- If `kisa.category` tags suddenly appear as `unknown`, verify KISA has not renamed RSS elements; update the parser fixtures before production rollout.
86
docs/ops/feedser-msrc-operations.md
Normal file
@@ -0,0 +1,86 @@
# Feedser MSRC Connector – Azure AD Onboarding Brief

_Drafted: 2025-10-15_

## 1. App registration requirements

- **Tenant**: shared StellaOps production Azure AD.
- **Application type**: confidential client (web/API) issuing client credentials.
- **API permissions**: `api://api.msrc.microsoft.com/.default` (Application). Admin consent required once.
- **Token audience**: `https://api.msrc.microsoft.com/`.
- **Grant type**: client credentials. Feedser will request tokens via `POST https://login.microsoftonline.com/{tenantId}/oauth2/v2.0/token`.
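
For a quick sanity check of the registration, the token request can be exercised directly. This is a minimal sketch using the tenant, client, and scope values above; `jq` is assumed, and the secret should come from the vault rather than shell history:

```bash
curl -s "https://login.microsoftonline.com/${TENANT_ID}/oauth2/v2.0/token" \
  -d "grant_type=client_credentials" \
  -d "client_id=${CLIENT_ID}" \
  -d "client_secret=${CLIENT_SECRET}" \
  -d "scope=api://api.msrc.microsoft.com/.default" \
  | jq '{token_type, expires_in}'   # inspect metadata only; do not persist the access_token
```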

## 2. Secret/credential policy

- Maintain two client secrets (primary + standby) rotating every 90 days.
- Store secrets in the Feedser secrets vault; Offline Kit deployments must mirror the secret payloads in their encrypted store.
- Record rotation cadence in Ops runbook and update Feedser configuration (`FEEDSER__SOURCES__VNDR__MSRC__CLIENTSECRET`) ahead of expiry.

## 3. Feedser configuration sample

```yaml
feedser:
  sources:
    vndr.msrc:
      tenantId: "<azure-tenant-guid>"
      clientId: "<app-registration-client-id>"
      clientSecret: "<pull from secret store>"
      apiVersion: "2024-08-01"
      locale: "en-US"
      requestDelay: "00:00:00.250"
      failureBackoff: "00:05:00"
      cursorOverlapMinutes: 10
      downloadCvrf: false   # set true to persist CVRF ZIP alongside JSON detail
```

## 4. CVRF artefacts

- The MSRC REST payload exposes `cvrfUrl` per advisory. Current connector persists the link as advisory metadata and reference; it does **not** download the ZIP by default.
- Ops should mirror CVRF ZIPs when preparing Offline Kits so air-gapped deployments can reconcile advisories without direct internet access.
- Once Offline Kit storage guidelines are finalised, extend the connector configuration with `downloadCvrf: true` to enable automatic attachment retrieval.

### 4.1 State seeding helper

Use `tools/SourceStateSeeder` to queue historical advisories (detail JSON + optional CVRF artefacts) for replay without manual Mongo edits. Example seed file:

```json
{
  "source": "vndr.msrc",
  "cursor": {
    "lastModifiedCursor": "2024-01-01T00:00:00Z"
  },
  "documents": [
    {
      "uri": "https://api.msrc.microsoft.com/sug/v2.0/vulnerability/ADV2024-0001",
      "contentFile": "./seeds/adv2024-0001.json",
      "contentType": "application/json",
      "metadata": { "msrc.vulnerabilityId": "ADV2024-0001" },
      "addToPendingDocuments": true
    },
    {
      "uri": "https://download.microsoft.com/msrc/2024/ADV2024-0001.cvrf.zip",
      "contentFile": "./seeds/adv2024-0001.cvrf.zip",
      "contentType": "application/zip",
      "status": "mapped",
      "addToPendingDocuments": false
    }
  ]
}
```

Run the helper:

```bash
dotnet run --project tools/SourceStateSeeder -- \
  --connection-string "mongodb://localhost:27017" \
  --database feedser \
  --input seeds/msrc-backfill.json
```

Any documents marked `addToPendingDocuments` will appear in the connector cursor; `DownloadCvrf` can remain disabled if the ZIP artefact is pre-seeded.

## 5. Outstanding items

- Ops to confirm tenant/app names and provide client credentials through the secure channel.
- Connector team monitors token cache health (already implemented); validate instrumentation once Ops supplies credentials.
- Offline Kit packaging: add encrypted blob containing client credentials with rotation instructions.
48
docs/ops/feedser-nkcki-operations.md
Normal file
@@ -0,0 +1,48 @@
# NKCKI Connector Operations Guide

## Overview

The NKCKI connector ingests JSON bulletin archives from cert.gov.ru, expanding each `*.json.zip` attachment into per-vulnerability DTOs before canonical mapping. The fetch pipeline now supports cache-backed recovery, deterministic pagination, and telemetry suitable for production monitoring.

## Configuration

Key options exposed through `feedser:sources:ru-nkcki:http`:

- `maxBulletinsPerFetch` – limits new bulletin downloads in a single run (default `5`).
- `maxListingPagesPerFetch` – maximum listing pages visited during pagination (default `3`).
- `listingCacheDuration` – minimum interval between listing fetches before falling back to cached artefacts (default `00:10:00`).
- `cacheDirectory` – optional path for persisted bulletin archives used during offline or failure scenarios.
- `requestDelay` – delay inserted between bulletin downloads to respect upstream politeness.

When operating in offline-first mode, set `cacheDirectory` to a writable path (e.g. `/var/lib/feedser/cache/ru-nkcki`) and pre-populate bulletin archives via the offline kit.
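
Pulled together, an offline-first configuration might look like the following sketch; the `requestDelay` value is an illustrative choice rather than a documented default:

```yaml
feedser:
  sources:
    ru-nkcki:
      http:
        maxBulletinsPerFetch: 5
        maxListingPagesPerFetch: 3
        listingCacheDuration: "00:10:00"
        cacheDirectory: "/var/lib/feedser/cache/ru-nkcki"
        requestDelay: "00:00:01"
```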

## Telemetry

`RuNkckiDiagnostics` emits the following metrics under meter `StellaOps.Feedser.Source.Ru.Nkcki`:

- `nkcki.listing.fetch.attempts` / `nkcki.listing.fetch.success` / `nkcki.listing.fetch.failures`
- `nkcki.listing.pages.visited` (histogram, `pages`)
- `nkcki.listing.attachments.discovered` / `nkcki.listing.attachments.new`
- `nkcki.bulletin.fetch.success` / `nkcki.bulletin.fetch.cached` / `nkcki.bulletin.fetch.failures`
- `nkcki.entries.processed` (histogram, `entries`)

Integrate these counters into standard Feedser observability dashboards to track crawl coverage and cache hit rates.

## Archive Backfill Strategy

Bitrix pagination surfaces archives via `?PAGEN_1=n`. The connector now walks up to `maxListingPagesPerFetch` pages, deduplicating bulletin IDs and maintaining a rolling `knownBulletins` window. Backfill strategy:

1. Enumerate pages from newest to oldest, respecting `maxListingPagesPerFetch` and `listingCacheDuration` to avoid refetch storms.
2. Persist every `*.json.zip` attachment to the configured cache directory. This enables replay when listing access is temporarily blocked.
3. During archive replay, `ProcessCachedBulletinsAsync` enqueues missing documents while respecting `maxVulnerabilitiesPerFetch`.
4. For historical HTML-only advisories, collect page URLs and metadata while offline (future work: HTML and PDF extraction pipeline documented in `docs/feedser-connector-research-20251011.md`).

For large migrations, seed caches with archived zip bundles, then run fetch/parse/map cycles in chronological order to maintain deterministic outputs.
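
Those cycles can be driven with the same chained job pattern the other connectors use. The `source:ru-nkcki:*` job kinds below are inferred from the configuration key and have not been verified against the CLI, so treat them as an assumption:

```bash
stella db jobs run source:ru-nkcki:fetch --and-then source:ru-nkcki:parse --and-then source:ru-nkcki:map
```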

## Failure Handling

- Listing failures mark the source state with exponential backoff while attempting cache replay.
- Bulletin fetches fall back to cached copies before surfacing an error.
- Mongo integration tests rely on bundled OpenSSL 1.1 libraries (`tools/openssl/linux-x64`) to keep `Mongo2Go` operational on modern distros.

Refer to `ru-nkcki` entries in `src/StellaOps.Feedser.Source.Ru.Nkcki/TASKS.md` for outstanding items.
@@ -15,7 +15,7 @@ Audit events share the `StellaOps.Cryptography.Audit.AuthEventRecord` contract.
- `Client` — `AuthEventClient` with client identifier, display name, and originating provider/plugin.
- `Scopes` — granted or requested OAuth scopes (sorted before emission).
- `Network` — `AuthEventNetwork` with remote address, forwarded headers, and user agent string (all treated as PII).
- `Properties` — additional `AuthEventProperty` entries for context-specific details (lockout durations, policy decisions, retries, `request.tampered`/`request.unexpected_parameter`, `bootstrap.invite_token`, etc.).

## Data Classifications
@@ -33,7 +33,13 @@ Event names follow dotted notation:

- `authority.password.grant` — password grant handled by OpenIddict.
- `authority.client_credentials.grant` — client credential grant handling.
- `authority.token.tamper` — suspicious `/token` request detected (unexpected parameters or manipulated payload).
- `authority.bootstrap.user` and `authority.bootstrap.client` — bootstrap API operations.
- `authority.bootstrap.invite.created` — operator created a bootstrap invite.
- `authority.bootstrap.invite.consumed` — invite consumed during user/client provisioning.
- `authority.bootstrap.invite.expired` — invite expired without being used.
- `authority.bootstrap.invite.rejected` — invite was rejected (invalid, mismatched provider/target, or already consumed).
- `authority.token.replay.suspected` — replay heuristics detected a token being used from a new device fingerprint.
- Future additions should preserve the `authority.<surface>.<action>` pattern to keep filtering deterministic.

## Persistence

@@ -82,9 +82,9 @@ flowchart LR

| Threat | STRIDE Vector | Surface | Risk (L×I) | Existing Controls | Gaps / Actions | Owner |
|--------|---------------|---------|------------|-------------------|----------------|-------|
| Spoofed revocation bundle | Spoofing | TB5 — Authority ↔️ Agents | Med×High | Detached JWS signature (planned), offline kit checksums | Finalise signing key registry & verification script (SEC4.B/SEC4.HOST); add bundle freshness requirement | Security Guild (follow-up: **SEC5.B**) |
| Parameter tampering on `/token` | Tampering | TB1 — Public ingress | Med×High | ASP.NET model validation, OpenIddict, rate limiter (CORE8.RL) | Tampered requests emit `authority.token.tamper` audit events (`request.tampered`, unexpected parameter names) correlating with `/token` outcomes (SEC5.C) | Security Guild + Authority Core (follow-up: **SEC5.C**) |
| Bootstrap invite replay | Repudiation | TB4 — Operator CLI ↔️ Authority | Low×High | One-time bootstrap tokens, Argon2id hashing on creation | Invites expire automatically and emit audit events on consumption/expiration (SEC5.D) | Security Guild |
| Token replay by stolen agent | Information Disclosure | TB5 | Med×High | Signed revocation bundles, device fingerprint heuristics, optional mTLS | Monitor revocation acknowledgement latency via Zastava and tune replay alerting thresholds | Security Guild + Zastava (follow-up: **SEC5.E**) |
| Privilege escalation via plug-in override | Elevation of Privilege | TB3 — Plug-in sandbox | Med×High | Signed plug-ins, restart-only loading, configuration validation | Add static analysis on manifest overrides + runtime warning when policy weaker than host | Security Guild + DevOps (follow-up: **SEC5.F**) |
| Offline bundle tampering | Tampering | Distribution | Low×High | SHA256 manifest, signed bundles (planned) | Add supply-chain attestation for Offline Kit, publish verification CLI in docs | Security Guild + Ops (follow-up: **SEC5.G**) |
| Failure to log denied tokens | Repudiation | TB2 — Authority ↔️ Mongo | Med×Med | Serilog structured events (partial), Mongo persistence path (planned) | Finalise audit schema (SEC2.A) and ensure `/token` denies include subject/client/IP fields | Security Guild + Authority Core (follow-up: **SEC5.H**) |

@@ -98,7 +98,7 @@ Risk scoring uses qualitative scale (Low/Med/High) for likelihood × impact; mit

| SEC5.B | Spoofed revocation bundle | Complete libsodium/Core signing integration and ship revocation verification script. | Security Guild + Authority Core |
| SEC5.C | Parameter tampering on `/token` | Finalise audit contract (`SEC2.A`) and add request tamper logging. | Security Guild + Authority Core |
| SEC5.D | Bootstrap invite replay | Implement expiry enforcement + audit coverage for unused bootstrap invites. | Security Guild |
| SEC5.E | Token replay by stolen agent | Coordinate Zastava alerting with the new device fingerprint heuristics and surface stale revocation acknowledgements. | Security Guild + Zastava |
| SEC5.F | Plug-in override escalation | Static analysis of plug-in manifests; warn on weaker password policy overrides. | Security Guild + DevOps |
| SEC5.G | Offline bundle tampering | Extend Offline Kit build to include attested manifest + verification CLI sample. | Security Guild + Ops |
| SEC5.H | Failure to log denied tokens | Ensure audit persistence for all `/token` denials with correlation IDs. | Security Guild + Authority Core |

76
docs/security/rate-limits.md
Normal file
@@ -0,0 +1,76 @@

# StellaOps Authority Rate Limit Guidance

StellaOps Authority applies fixed-window rate limiting to critical endpoints so that brute-force and burst traffic are throttled before they can exhaust downstream resources. This guide complements the lockout policy documentation and captures the recommended defaults, override scenarios, and monitoring practices for `/token`, `/authorize`, and `/internal/*` routes.

## Configuration Overview

Rate limits live under `security.rateLimiting` in `authority.yaml` (and map to the same hierarchy for environment variables). Each endpoint exposes:

- `enabled` — toggles the limiter.
- `permitLimit` — maximum requests per fixed window.
- `window` — window duration expressed as a `TimeSpan` literal (e.g., `00:01:00`).
- `queueLimit` — number of requests allowed to queue when the window is exhausted.

```yaml
security:
  rateLimiting:
    token:
      enabled: true
      permitLimit: 30
      window: 00:01:00
      queueLimit: 0
    authorize:
      enabled: true
      permitLimit: 60
      window: 00:01:00
      queueLimit: 10
    internal:
      enabled: false
      permitLimit: 5
      window: 00:01:00
      queueLimit: 0
```

When limits trigger, middleware decorates responses with `Retry-After` headers and log tags (`authority.endpoint`, `authority.client_id`, `authority.remote_ip`) so operators can correlate events with clients and source IPs.

Environment overrides follow the same hierarchy. For example:

```
STELLAOPS_AUTHORITY__SECURITY__RATELIMITING__TOKEN__PERMITLIMIT=60
STELLAOPS_AUTHORITY__SECURITY__RATELIMITING__TOKEN__WINDOW=00:01:00
```

## Recommended Profiles

| Scenario | permitLimit | window | queueLimit | Notes |
|----------|-------------|--------|------------|-------|
| Default production | 30 | 60s | 0 | Balances anonymous quota (33 scans/day) with headroom for tenant bursts. |
| High-trust clustered IPs | 60 | 60s | 5 | Requires WAF allowlist + alert `aspnetcore_rate_limiting_rejections_total{limiter="authority-token"} <= 1%` sustained. |
| Air-gapped lab | 10 | 120s | 0 | Lower concurrency reduces noise when running from shared bastion hosts. |
| Incident lockdown | 5 | 300s | 0 | Pair with credential lockout limit of 3 attempts and SOC paging for each denial. |

### Lockout Interplay

- Rate limiting throttles by IP/client; lockout policies apply per subject. Keep both enabled.
- During lockdown scenarios, reduce `security.lockout.maxFailures` alongside the rate limits above so that subjects face quicker escalation (see the sketch after this list).
- Map support playbooks to the observed `Retry-After` value: anything above 120 seconds should trigger manual investigation before re-enabling clients.

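A minimal lockdown override sketch combining the two controls. The `security.lockout.maxFailures` path is taken from the bullet above and the values mirror the Incident lockdown profile; the exact shape of the `lockout` block is otherwise an assumption.

```yaml
# Hypothetical incident-lockdown override (values follow the Incident lockdown row above).
security:
  rateLimiting:
    token:
      enabled: true
      permitLimit: 5
      window: 00:05:00
      queueLimit: 0
  lockout:
    maxFailures: 3
```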
## Monitoring and Alerts

1. **Metrics**
   - `aspnetcore_rate_limiting_rejections_total{limiter="authority-token"}` for `/token`.
   - `aspnetcore_rate_limiting_rejections_total{limiter="authority-authorize"}` for `/authorize`.
   - Custom counters derived from the structured log tags (`authority.remote_ip`, `authority.client_id`).
2. **Dashboards**
   - Requests vs. rejections per endpoint.
   - Top offending clients/IP ranges in the current window.
   - Heatmap of retry-after durations to spot persistent throttling.
3. **Alerts**
   - Notify SOC when 429 rates exceed 25 % for five consecutive minutes on any limiter.
   - Trigger client-specific alerts when a single `client_id` produces >100 throttle events/hour (sample rules follow below).

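The sample rules below sketch how those thresholds could be encoded for Prometheus-style alerting. `authority_token_requests_total` and `authority_throttle_events_total` are hypothetical stand-ins for the custom counters mentioned under Metrics; only `aspnetcore_rate_limiting_rejections_total` comes from this guide, so adjust names to match what your exporter actually emits.

```yaml
# Hypothetical alert rules mirroring the SOC thresholds above.
groups:
  - name: authority-rate-limits
    rules:
      - alert: AuthorityTokenThrottleRatioHigh
        expr: |
          sum(rate(aspnetcore_rate_limiting_rejections_total{limiter="authority-token"}[5m]))
            /
          sum(rate(authority_token_requests_total[5m])) > 0.25
        for: 5m
        labels:
          severity: page
      - alert: AuthorityClientThrottleBurst
        expr: sum by (authority_client_id) (increase(authority_throttle_events_total[1h])) > 100
        labels:
          severity: ticket
```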
## Operational Checklist

- Validate updated limits in staging before production rollout; smoke-test with representative workload.
- When raising limits, confirm audit events continue to capture `authority.client_id`, `authority.remote_ip`, and correlation IDs for throttle responses.
- Document any overrides in the change log, including justification and expiry review date.

@@ -43,6 +43,7 @@ Consumers MUST treat the combination of `schemaVersion` and `sequence` as a mono

{
  "alg": "ES256",
  "kid": "{signingKeyId}",
  "provider": "{providerName}",
  "typ": "application/vnd.stellaops.revocation-bundle+jws",
  "b64": false,
  "crit": ["b64"]

@@ -54,8 +55,28 @@ Verification steps:

1. Validate `revocation-bundle.json` against the schema.
2. Re-compute SHA-256 and compare with `.sha256` (if present).
3. Resolve the signing key from JWKS (`/.well-known/jwks.json`) or the offline key bundle, preferring the provider declared in the JWS header (`provider` falls back to `default`).
4. Verify the detached JWS using the resolved provider. The CLI mirrors Authority resolution, so builds compiled with `StellaOpsCryptoSodium=true` automatically use the libsodium provider when advertised; otherwise verification downgrades to the managed fallback.

### CLI verification workflow

Use the bundled CLI command before distributing a bundle:

```bash
stellaops auth revoke verify \
  --bundle artifacts/revocation-bundle.json \
  --signature artifacts/revocation-bundle.json.jws \
  --key etc/authority/signing/authority-public.pem \
  --verbose
```

The verifier performs three checks:

1. Prints the computed digest in `sha256:<hex>` format. Compare it with the exported `.sha256` artefact.
2. Confirms the detached JWS header advertises `b64: false`, captures the provider hint, and that the algorithm matches the Authority configuration (ES256 unless overridden).
3. Registers the supplied PEM key with the crypto provider registry and validates the signature (falling back to the managed provider when the hinted provider is unavailable).

A zero exit code means the bundle is ready for mirroring/import. Non-zero codes signal missing arguments, malformed JWS payloads, or signature mismatches; regenerate or re-sign the bundle before distribution.

## Example

@@ -64,7 +85,7 @@ The repository contains an [example bundle](revocation-bundle-example.json) demo

## Operations Quick Reference

- `stella auth revoke export` emits a canonical JSON bundle, `.sha256` digest, and detached JWS signature in one command. Use `--output` to write into your mirror staging directory.
- `stella auth revoke verify` validates a bundle using cached JWKS or an offline PEM key, honours the `provider` metadata embedded in the signature, and reports digest mismatches before distribution.
- `POST /internal/revocations/export` provides the same payload for orchestrators that already talk to the bootstrap API.
- `POST /internal/signing/rotate` rotates JWKS material without downtime; always export a fresh bundle afterward so downstream mirrors receive signatures from the new `kid`.
- Offline Kit automation should mirror `revocation-bundle.json*` alongside Feedser exports so agents ingest revocations during the same sync pass.

@@ -83,3 +83,15 @@ sources:

    failureBackoff: "00:05:00"
    rateLimitWarningThreshold: 500
    secondaryRateLimitBackoff: "00:02:00"
  cve:
    baseEndpoint: "https://cveawg.mitre.org/api/"
    apiOrg: ""
    apiUser: ""
    apiKey: ""
    # Optional mirror used when credentials are unavailable.
    seedDirectory: "./seed-data/cve"
    pageSize: 200
    maxPagesPerFetch: 5
    initialBackfill: "30.00:00:00"
    requestDelay: "00:00:00.250"
    failureBackoff: "00:10:00"

38
scripts/fetch-ics-cisa-seed.ps1
Normal file
@@ -0,0 +1,38 @@

param(
    [string]$Destination = "$(Join-Path (Split-Path -Parent $PSCommandPath) '..' | Resolve-Path)/seed-data/ics-cisa"
)

$ErrorActionPreference = 'Stop'
New-Item -Path $Destination -ItemType Directory -Force | Out-Null

Function Write-Info($Message) { Write-Host "[ics-seed] $Message" }
Function Write-ErrorLine($Message) { Write-Host "[ics-seed][error] $Message" -ForegroundColor Red }

Function Download-File($Url, $Path) {
    Write-Info "Downloading $(Split-Path $Path -Leaf)"
    Invoke-WebRequest -Uri $Url -OutFile $Path -UseBasicParsing
    $hash = Get-FileHash -Path $Path -Algorithm SHA256
    $hash.Hash | Out-File -FilePath "$Path.sha256" -Encoding ascii
}

$base = 'https://raw.githubusercontent.com/icsadvprj/ICS-Advisory-Project/main/ICS-CERT_ADV'
$master = 'CISA_ICS_ADV_Master.csv'
$snapshot = 'CISA_ICS_ADV_2025_10_09.csv'

Write-Info 'Fetching ICS advisories seed data (ODbL v1.0)'
Download-File "$base/$master" (Join-Path $Destination $master)
Download-File "$base/$snapshot" (Join-Path $Destination $snapshot)

$medicalUrl = 'https://raw.githubusercontent.com/batarr22/ICSMA_CSV/main/ICSMA_CSV_4-20-2023.xlsx'
$medicalFile = 'ICSMA_CSV_4-20-2023.xlsx'
Write-Info 'Fetching community ICSMA snapshot'
try {
    Download-File $medicalUrl (Join-Path $Destination $medicalFile)
}
catch {
    Write-ErrorLine "Unable to download $medicalFile (optional): $_"
    Remove-Item (Join-Path $Destination $medicalFile) -ErrorAction SilentlyContinue
}

Write-Info "Seed data ready in $Destination"
Write-Info 'Remember: data is licensed under ODbL v1.0 (see seed README).'
38
scripts/fetch-ics-cisa-seed.sh
Normal file
@@ -0,0 +1,38 @@

#!/usr/bin/env bash
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
DEST_DIR="${1:-$ROOT_DIR/seed-data/ics-cisa}"
mkdir -p "$DEST_DIR"

info() { printf "[ics-seed] %s\n" "$*"; }
error() { printf "[ics-seed][error] %s\n" "$*" >&2; }

download() {
  local url="$1"
  local target="$2"
  info "Downloading $(basename "$target")"
  curl -fL "$url" -o "$target"
  sha256sum "$target" > "$target.sha256"
}

BASE="https://raw.githubusercontent.com/icsadvprj/ICS-Advisory-Project/main/ICS-CERT_ADV"
MASTER_FILE="CISA_ICS_ADV_Master.csv"
SNAPSHOT_2025="CISA_ICS_ADV_2025_10_09.csv"

info "Fetching ICS advisories seed data (ODbL v1.0)"
download "$BASE/$MASTER_FILE" "$DEST_DIR/$MASTER_FILE"
download "$BASE/$SNAPSHOT_2025" "$DEST_DIR/$SNAPSHOT_2025"

MEDICAL_URL="https://raw.githubusercontent.com/batarr22/ICSMA_CSV/main/ICSMA_CSV_4-20-2023.xlsx"
MEDICAL_FILE="ICSMA_CSV_4-20-2023.xlsx"
info "Fetching community ICSMA snapshot"
if curl -fL "$MEDICAL_URL" -o "$DEST_DIR/$MEDICAL_FILE"; then
  sha256sum "$DEST_DIR/$MEDICAL_FILE" > "$DEST_DIR/$MEDICAL_FILE.sha256"
else
  error "Unable to download $MEDICAL_FILE (optional)."
  rm -f "$DEST_DIR/$MEDICAL_FILE"
fi

info "Seed data ready in $DEST_DIR"
info "Remember: data is licensed under ODbL v1.0 (see seed README)."
72
seed-data/cve/2025-10-15/CVE-2024-0001.json
Normal file
@@ -0,0 +1,72 @@

{
  "dataType": "CVE_RECORD",
  "dataVersion": "5.0",
  "cveMetadata": {
    "cveId": "CVE-2024-0001",
    "assignerShortName": "ExampleOrg",
    "state": "PUBLISHED",
    "dateReserved": "2024-01-01T00:00:00Z",
    "datePublished": "2024-09-10T12:00:00Z",
    "dateUpdated": "2024-09-15T12:00:00Z"
  },
  "containers": {
    "cna": {
      "title": "Example Product Remote Code Execution",
      "descriptions": [
        {
          "lang": "en",
          "value": "An example vulnerability allowing remote attackers to execute arbitrary code."
        }
      ],
      "affected": [
        {
          "vendor": "ExampleVendor",
          "product": "ExampleProduct",
          "platform": "linux",
          "defaultStatus": "affected",
          "versions": [
            {
              "status": "affected",
              "version": "1.0.0",
              "lessThan": "1.2.0",
              "versionType": "semver"
            },
            {
              "status": "unaffected",
              "version": "1.2.0",
              "versionType": "semver"
            }
          ]
        }
      ],
      "references": [
        {
          "url": "https://example.com/security/advisory",
          "name": "Vendor Advisory",
          "tags": [
            "vendor-advisory"
          ]
        },
        {
          "url": "https://cve.example.com/CVE-2024-0001",
          "tags": [
            "third-party-advisory"
          ]
        }
      ],
      "metrics": [
        {
          "cvssV3_1": {
            "version": "3.1",
            "vectorString": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
            "baseScore": 9.8,
            "baseSeverity": "CRITICAL"
          }
        }
      ],
      "aliases": [
        "GHSA-xxxx-yyyy-zzzz"
      ]
    }
  }
}
147
seed-data/cve/2025-10-15/CVE-2024-4567.json
Normal file
@@ -0,0 +1,147 @@

{
  "dataType": "CVE_RECORD",
  "dataVersion": "5.1",
  "cveMetadata": {
    "cveId": "CVE-2024-4567",
    "assignerOrgId": "b15e7b5b-3da4-40ae-a43c-f7aa60e62599",
    "state": "PUBLISHED",
    "assignerShortName": "Wordfence",
    "dateReserved": "2024-05-06T19:34:14.071Z",
    "datePublished": "2024-05-09T20:03:38.213Z",
    "dateUpdated": "2024-08-01T20:47:40.724Z"
  },
  "containers": {
    "cna": {
      "providerMetadata": {
        "orgId": "b15e7b5b-3da4-40ae-a43c-f7aa60e62599",
        "shortName": "Wordfence",
        "dateUpdated": "2024-05-09T20:03:38.213Z"
      },
      "affected": [
        {
          "vendor": "themifyme",
          "product": "Themify Shortcodes",
          "versions": [
            {
              "version": "*",
              "status": "affected",
              "lessThanOrEqual": "2.0.9",
              "versionType": "semver"
            }
          ],
          "defaultStatus": "unaffected"
        }
      ],
      "descriptions": [
        {
          "lang": "en",
          "value": "The Themify Shortcodes plugin for WordPress is vulnerable to Stored Cross-Site Scripting via the plugin's themify_button shortcode in all versions up to, and including, 2.0.9 due to insufficient input sanitization and output escaping on user supplied attributes. This makes it possible for authenticated attackers, with contributor-level access and above, to inject arbitrary web scripts in pages that will execute whenever a user accesses an injected page."
        }
      ],
      "title": "Themify Shortcodes <= 2.0.9 - Authenticated (Contributor+) Stored Cross-Site Scripting via themify_button Shortcode",
      "references": [
        {
          "url": "https://www.wordfence.com/threat-intel/vulnerabilities/id/c63ff9d7-6a14-4186-8550-4e5c50855e7f?source=cve"
        },
        {
          "url": "https://plugins.trac.wordpress.org/changeset/3082885/themify-shortcodes"
        }
      ],
      "problemTypes": [
        {
          "descriptions": [
            {
              "lang": "en",
              "description": "CWE-79 Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')"
            }
          ]
        }
      ],
      "metrics": [
        {
          "cvssV3_1": {
            "version": "3.1",
            "vectorString": "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:C/C:L/I:L/A:N",
            "baseScore": 6.4,
            "baseSeverity": "MEDIUM"
          }
        }
      ],
      "credits": [
        {
          "lang": "en",
          "type": "finder",
          "value": "Francesco Carlucci"
        }
      ],
      "timeline": [
        {
          "time": "2024-05-06T00:00:00.000+00:00",
          "lang": "en",
          "value": "Vendor Notified"
        },
        {
          "time": "2024-05-08T00:00:00.000+00:00",
          "lang": "en",
          "value": "Disclosed"
        }
      ]
    },
    "adp": [
      {
        "title": "CISA ADP Vulnrichment",
        "metrics": [
          {
            "other": {
              "type": "ssvc",
              "content": {
                "id": "CVE-2024-4567",
                "role": "CISA Coordinator",
                "options": [
                  {
                    "Exploitation": "none"
                  },
                  {
                    "Automatable": "no"
                  },
                  {
                    "Technical Impact": "partial"
                  }
                ],
                "version": "2.0.3",
                "timestamp": "2024-05-11T16:56:12.695905Z"
              }
            }
          }
        ],
        "providerMetadata": {
          "orgId": "134c704f-9b21-4f2e-91b3-4a467353bcc0",
          "shortName": "CISA-ADP",
          "dateUpdated": "2024-06-04T17:54:44.162Z"
        }
      },
      {
        "providerMetadata": {
          "orgId": "af854a3a-2127-422b-91ae-364da2661108",
          "shortName": "CVE",
          "dateUpdated": "2024-08-01T20:47:40.724Z"
        },
        "title": "CVE Program Container",
        "references": [
          {
            "url": "https://www.wordfence.com/threat-intel/vulnerabilities/id/c63ff9d7-6a14-4186-8550-4e5c50855e7f?source=cve",
            "tags": [
              "x_transferred"
            ]
          },
          {
            "url": "https://plugins.trac.wordpress.org/changeset/3082885/themify-shortcodes",
            "tags": [
              "x_transferred"
            ]
          }
        ]
      }
    ]
  }
}
19
seed-data/ics-cisa/README.md
Normal file
@@ -0,0 +1,19 @@

# CISA ICS Advisory Seed Data

This directory is reserved for **seed data** sourced from the community-maintained [ICS Advisory Project](https://github.com/icsadvprj/ICS-Advisory-Project). The project republishes CISA ICS advisories under the **Open Database License (ODbL) v1.0**. StellaOps uses these CSV snapshots to bootstrap offline environments before the official GovDelivery credentials arrive.

> ⚠️ **Licence notice** – By downloading and using the CSV files you agree to the ODbL requirements (attribution, share-alike, and notice preservation). See [`LICENSE-ODBL.md`](https://github.com/icsadvprj/ICS-Advisory-Project/blob/main/LICENSE.md) for the full text.

## Usage

1. Run `scripts/fetch-ics-cisa-seed.sh` (or the PowerShell variant) to download the latest snapshots into this directory.
2. The files are ignored by Git to avoid committing third-party data; include them explicitly when building an Offline Update Kit.
3. When you later switch to live GovDelivery ingestion, keep the CSVs around as historical fixtures—do **not** treat them as an authoritative source once the live connector is enabled.

### Suggested Artefacts

- `CISA_ICS_ADV_Master.csv` – cumulative advisory dataset (2010 → present)
- `CISA_ICS_ADV_<YYYY_MM_DD>.csv` – point-in-time snapshots
- `ICSMA_CSV_<YYYY>.xlsx` – medical device advisories (optional, sourced from the community mirror)

Keep the generated SHA-256 files alongside the CSVs so Offline Kit packaging can verify integrity.
@@ -15,7 +15,8 @@ public class StandardClientProvisioningStoreTests

    public async Task CreateOrUpdateAsync_HashesSecretAndPersistsDocument()
    {
        var store = new TrackingClientStore();
        var revocations = new TrackingRevocationStore();
        var provisioning = new StandardClientProvisioningStore("standard", store, revocations, TimeProvider.System);

        var registration = new AuthorityClientRegistration(
            clientId: "bootstrap-client",
@@ -63,4 +64,21 @@ public class StandardClientProvisioningStoreTests

            return ValueTask.FromResult(removed);
        }
    }

    private sealed class TrackingRevocationStore : IAuthorityRevocationStore
    {
        public List<AuthorityRevocationDocument> Upserts { get; } = new();

        public ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken)
        {
            Upserts.Add(document);
            return ValueTask.CompletedTask;
        }

        public ValueTask<bool> RemoveAsync(string category, string revocationId, CancellationToken cancellationToken)
            => ValueTask.FromResult(true);

        public ValueTask<IReadOnlyList<AuthorityRevocationDocument>> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken)
            => ValueTask.FromResult<IReadOnlyList<AuthorityRevocationDocument>>(Array.Empty<AuthorityRevocationDocument>());
    }
}
@@ -5,6 +5,7 @@ using System.Threading;

using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Options;
using Mongo2Go;
@@ -58,6 +59,21 @@ public class StandardPluginRegistrarTests

        services.AddLogging();
        services.AddSingleton<IMongoDatabase>(database);
        services.AddSingleton<IAuthorityClientStore>(new InMemoryClientStore());
        services.AddSingleton<IAuthorityRevocationStore>(new StubRevocationStore());
        services.AddSingleton(TimeProvider.System);

        var registrar = new StandardPluginRegistrar();
        registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration));
@@ -83,6 +99,53 @@ public class StandardPluginRegistrarTests

        Assert.True(verification.User?.RequiresPasswordReset);
    }

    [Fact]
    public void Register_LogsWarning_WhenPasswordPolicyWeaker()
    {
        using var runner = MongoDbRunner.Start(singleNodeReplSet: true);
        var client = new MongoClient(runner.ConnectionString);
        var database = client.GetDatabase("registrar-password-policy");

        var configuration = new ConfigurationBuilder()
            .AddInMemoryCollection(new Dictionary<string, string?>
            {
                ["passwordPolicy:minimumLength"] = "6",
                ["passwordPolicy:requireUppercase"] = "false",
                ["passwordPolicy:requireLowercase"] = "false",
                ["passwordPolicy:requireDigit"] = "false",
                ["passwordPolicy:requireSymbol"] = "false"
            })
            .Build();

        var manifest = new AuthorityPluginManifest(
            "standard",
            "standard",
            true,
            typeof(StandardPluginRegistrar).Assembly.GetName().Name,
            typeof(StandardPluginRegistrar).Assembly.Location,
            new[] { AuthorityPluginCapabilities.Password },
            new Dictionary<string, string?>(),
            "standard.yaml");

        var pluginContext = new AuthorityPluginContext(manifest, configuration);
        var services = new ServiceCollection();
        var loggerProvider = new CapturingLoggerProvider();
        services.AddLogging(builder => builder.AddProvider(loggerProvider));
        services.AddSingleton<IMongoDatabase>(database);
        services.AddSingleton<IAuthorityClientStore>(new InMemoryClientStore());

        var registrar = new StandardPluginRegistrar();
        registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration));

        using var provider = services.BuildServiceProvider();
        _ = provider.GetRequiredService<StandardUserCredentialStore>();

        Assert.Contains(loggerProvider.Entries, entry =>
            entry.Level == LogLevel.Warning &&
            entry.Category.Contains(typeof(StandardPluginRegistrar).FullName!, StringComparison.Ordinal) &&
            entry.Message.Contains("weaker password policy", StringComparison.OrdinalIgnoreCase));
    }

    [Fact]
    public void Register_ForcesPasswordCapability_WhenManifestMissing()
    {
@@ -106,6 +169,8 @@ public class StandardPluginRegistrarTests

        services.AddLogging();
        services.AddSingleton<IMongoDatabase>(database);
        services.AddSingleton<IAuthorityClientStore>(new InMemoryClientStore());
        services.AddSingleton<IAuthorityRevocationStore>(new StubRevocationStore());
        services.AddSingleton(TimeProvider.System);

        var registrar = new StandardPluginRegistrar();
        registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration));
@@ -209,6 +274,61 @@ public class StandardPluginRegistrarTests

    }
}

internal sealed record CapturedLogEntry(string Category, LogLevel Level, string Message);

internal sealed class CapturingLoggerProvider : ILoggerProvider
{
    public List<CapturedLogEntry> Entries { get; } = new();

    public ILogger CreateLogger(string categoryName) => new CapturingLogger(categoryName, Entries);

    public void Dispose()
    {
    }

    private sealed class CapturingLogger : ILogger
    {
        private readonly string category;
        private readonly List<CapturedLogEntry> entries;

        public CapturingLogger(string category, List<CapturedLogEntry> entries)
        {
            this.category = category;
            this.entries = entries;
        }

        public IDisposable BeginScope<TState>(TState state) where TState : notnull => NullScope.Instance;

        public bool IsEnabled(LogLevel logLevel) => true;

        public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter)
        {
            entries.Add(new CapturedLogEntry(category, logLevel, formatter(state, exception)));
        }

        private sealed class NullScope : IDisposable
        {
            public static readonly NullScope Instance = new();

            public void Dispose()
            {
            }
        }
    }
}

internal sealed class StubRevocationStore : IAuthorityRevocationStore
{
    public ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken)
        => ValueTask.CompletedTask;

    public ValueTask<bool> RemoveAsync(string category, string revocationId, CancellationToken cancellationToken)
        => ValueTask.FromResult(false);

    public ValueTask<IReadOnlyList<AuthorityRevocationDocument>> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken)
        => ValueTask.FromResult<IReadOnlyList<AuthorityRevocationDocument>>(Array.Empty<AuthorityRevocationDocument>());
}

internal sealed class InMemoryClientStore : IAuthorityClientStore
{
    private readonly Dictionary<string, AuthorityClientDocument> clients = new(StringComparer.OrdinalIgnoreCase);
@@ -71,6 +71,41 @@ internal sealed class PasswordPolicyOptions

            throw new InvalidOperationException($"Standard plugin '{pluginName}' requires passwordPolicy.minimumLength to be greater than zero.");
        }
    }

    public bool IsWeakerThan(PasswordPolicyOptions other)
    {
        if (other is null)
        {
            return false;
        }

        if (MinimumLength < other.MinimumLength)
        {
            return true;
        }

        if (!RequireUppercase && other.RequireUppercase)
        {
            return true;
        }

        if (!RequireLowercase && other.RequireLowercase)
        {
            return true;
        }

        if (!RequireDigit && other.RequireDigit)
        {
            return true;
        }

        if (!RequireSymbol && other.RequireSymbol)
        {
            return true;
        }

        return false;
    }
}

internal sealed class LockoutOptions
@@ -51,6 +51,25 @@ internal sealed class StandardPluginRegistrar : IAuthorityPluginRegistrar

        var cryptoProvider = sp.GetRequiredService<ICryptoProvider>();
        var passwordHasher = new CryptoPasswordHasher(pluginOptions, cryptoProvider);
        var loggerFactory = sp.GetRequiredService<ILoggerFactory>();
        var registrarLogger = loggerFactory.CreateLogger<StandardPluginRegistrar>();

        var baselinePolicy = new PasswordPolicyOptions();
        if (pluginOptions.PasswordPolicy.IsWeakerThan(baselinePolicy))
        {
            registrarLogger.LogWarning(
                "Standard plugin '{Plugin}' configured a weaker password policy (minLength={Length}, requireUpper={Upper}, requireLower={Lower}, requireDigit={Digit}, requireSymbol={Symbol}) than the baseline (minLength={BaseLength}, requireUpper={BaseUpper}, requireLower={BaseLower}, requireDigit={BaseDigit}, requireSymbol={BaseSymbol}).",
                pluginName,
                pluginOptions.PasswordPolicy.MinimumLength,
                pluginOptions.PasswordPolicy.RequireUppercase,
                pluginOptions.PasswordPolicy.RequireLowercase,
                pluginOptions.PasswordPolicy.RequireDigit,
                pluginOptions.PasswordPolicy.RequireSymbol,
                baselinePolicy.MinimumLength,
                baselinePolicy.RequireUppercase,
                baselinePolicy.RequireLowercase,
                baselinePolicy.RequireDigit,
                baselinePolicy.RequireSymbol);
        }

        return new StandardUserCredentialStore(
            pluginName,
@@ -5,12 +5,14 @@

| PLG6.DOC | DONE (2025-10-11) | BE-Auth Plugin, Docs Guild | PLG1–PLG5 | Final polish + diagrams for plugin developer guide (AUTHPLUG-DOCS-01-001). | Docs team delivers copy-edit + exported diagrams; PR merged. |
| SEC1.PLG | DONE (2025-10-11) | Security Guild, BE-Auth Plugin | SEC1.A (StellaOps.Cryptography) | Swap Standard plugin hashing to Argon2id via `StellaOps.Cryptography` abstractions; keep PBKDF2 verification for legacy. | ✅ `StandardUserCredentialStore` uses `ICryptoProvider` to hash/check; ✅ Transparent rehash on success; ✅ Unit tests cover tamper + legacy rehash. |
| SEC1.OPT | DONE (2025-10-11) | Security Guild | SEC1.PLG | Expose password hashing knobs in `StandardPluginOptions` (`memoryKiB`, `iterations`, `parallelism`, `algorithm`) with validation. | ✅ Options bound from YAML; ✅ Invalid configs throw; ✅ Docs include tuning guidance. |
| SEC2.PLG | DOING (2025-10-14) | Security Guild, Storage Guild | SEC2.A (audit contract) | Emit audit events from password verification outcomes and persist via `IAuthorityLoginAttemptStore`. | ✅ Serilog events enriched with subject/client/IP/outcome; ✅ Mongo records written per attempt; ✅ Tests assert success/lockout/failure cases. |
| SEC3.PLG | DOING (2025-10-14) | Security Guild, BE-Auth Plugin | CORE8, SEC3.A (rate limiter) | Ensure lockout responses and rate-limit metadata flow through plugin logs/events (include retry-after). | ✅ Audit record includes retry-after; ✅ Tests confirm lockout + limiter interplay. |
| SEC4.PLG | DONE (2025-10-12) | Security Guild | SEC4.A (revocation schema) | Provide plugin hooks so revoked users/clients write reasons for revocation bundle export. | ✅ Revocation exporter consumes plugin data; ✅ Tests cover revoked user/client output. |
| SEC5.PLG | DOING (2025-10-14) | Security Guild | SEC5.A (threat model) | Address plugin-specific mitigations (bootstrap user handling, password policy docs) in threat model backlog. | ✅ Threat model lists plugin attack surfaces; ✅ Mitigation items filed. |
| PLG4-6.CAPABILITIES | BLOCKED (2025-10-12) | BE-Auth Plugin, Docs Guild | PLG1–PLG3 | Finalise capability metadata exposure, config validation, and developer guide updates; remaining action is Docs polish/diagram export. | ✅ Capability metadata + validation merged; ✅ Plugin guide updated with final copy & diagrams; ✅ Release notes mention new toggles. <br>⛔ Blocked awaiting Authority rate-limiter stream (CORE8/SEC3) to resume so doc updates reflect final limiter behaviour. |
| PLG7.RFC | REVIEW | BE-Auth Plugin, Security Guild | PLG4 | Socialize LDAP plugin RFC (`docs/rfcs/authority-plugin-ldap.md`) and capture guild feedback. | ✅ Guild review sign-off recorded; ✅ Follow-up issues filed in module boards. |
| PLG6.DIAGRAM | TODO | Docs Guild | PLG6.DOC | Export final sequence/component diagrams for the developer guide and add offline-friendly assets under `docs/assets/authority`. | ✅ Mermaid sources committed; ✅ Rendered SVG/PNG linked from Section 2 + Section 9; ✅ Docs build preview shared with Plugin + Docs guilds. |

> Update statuses to DOING/DONE/BLOCKED as you make progress. Always run `dotnet test` for touched projects before marking DONE.

> Remark (2025-10-13, PLG6.DOC/PLG6.DIAGRAM): Security Guild delivered `docs/security/rate-limits.md`; Docs team can lift Section 3 (tuning table + alerts) into the developer guide diagrams when rendering assets.

@@ -22,5 +22,6 @@ public static class AuthorityMongoDefaults

        public const string LoginAttempts = "authority_login_attempts";
        public const string Revocations = "authority_revocations";
        public const string RevocationState = "authority_revocation_state";
        public const string Invites = "authority_bootstrap_invites";
    }
}

@@ -0,0 +1,72 @@

using System;
using System.Collections.Generic;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;

namespace StellaOps.Authority.Storage.Mongo.Documents;

/// <summary>
/// Represents a bootstrap invitation token for provisioning users or clients.
/// </summary>
[BsonIgnoreExtraElements]
public sealed class AuthorityBootstrapInviteDocument
{
    [BsonId]
    [BsonRepresentation(BsonType.ObjectId)]
    public string Id { get; set; } = ObjectId.GenerateNewId().ToString();

    [BsonElement("token")]
    public string Token { get; set; } = Guid.NewGuid().ToString("N");

    [BsonElement("type")]
    public string Type { get; set; } = "user";

    [BsonElement("provider")]
    [BsonIgnoreIfNull]
    public string? Provider { get; set; }

    [BsonElement("target")]
    [BsonIgnoreIfNull]
    public string? Target { get; set; }

    [BsonElement("issuedAt")]
    public DateTimeOffset IssuedAt { get; set; } = DateTimeOffset.UtcNow;

    [BsonElement("issuedBy")]
    [BsonIgnoreIfNull]
    public string? IssuedBy { get; set; }

    [BsonElement("expiresAt")]
    public DateTimeOffset ExpiresAt { get; set; } = DateTimeOffset.UtcNow.AddDays(2);

    [BsonElement("status")]
    public string Status { get; set; } = AuthorityBootstrapInviteStatuses.Pending;

    [BsonElement("reservedAt")]
    [BsonIgnoreIfNull]
    public DateTimeOffset? ReservedAt { get; set; }

    [BsonElement("reservedBy")]
    [BsonIgnoreIfNull]
    public string? ReservedBy { get; set; }

    [BsonElement("consumedAt")]
    [BsonIgnoreIfNull]
    public DateTimeOffset? ConsumedAt { get; set; }

    [BsonElement("consumedBy")]
    [BsonIgnoreIfNull]
    public string? ConsumedBy { get; set; }

    [BsonElement("metadata")]
    [BsonIgnoreIfNull]
    public Dictionary<string, string?>? Metadata { get; set; }
}

public static class AuthorityBootstrapInviteStatuses
{
    public const string Pending = "pending";
    public const string Reserved = "reserved";
    public const string Consumed = "consumed";
    public const string Expired = "expired";
}
@@ -1,3 +1,4 @@

using System;
using System.Collections.Generic;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
@@ -61,6 +62,11 @@ public sealed class AuthorityTokenDocument

    [BsonIgnoreIfNull]
    public string? RevokedReasonDescription { get; set; }

    [BsonElement("devices")]
    [BsonIgnoreIfNull]
    public List<BsonDocument>? Devices { get; set; }

    [BsonElement("revokedMetadata")]
    [BsonIgnoreIfNull]
    public Dictionary<string, string?>? RevokedMetadata { get; set; }
@@ -98,12 +98,19 @@ public static class ServiceCollectionExtensions

            return database.GetCollection<AuthorityRevocationExportStateDocument>(AuthorityMongoDefaults.Collections.RevocationState);
        });

        services.AddSingleton(static sp =>
        {
            var database = sp.GetRequiredService<IMongoDatabase>();
            return database.GetCollection<AuthorityBootstrapInviteDocument>(AuthorityMongoDefaults.Collections.Invites);
        });

        services.TryAddSingleton<IAuthorityCollectionInitializer, AuthorityUserCollectionInitializer>();
        services.TryAddSingleton<IAuthorityCollectionInitializer, AuthorityClientCollectionInitializer>();
        services.TryAddSingleton<IAuthorityCollectionInitializer, AuthorityScopeCollectionInitializer>();
        services.TryAddSingleton<IAuthorityCollectionInitializer, AuthorityTokenCollectionInitializer>();
        services.TryAddSingleton<IAuthorityCollectionInitializer, AuthorityLoginAttemptCollectionInitializer>();
        services.TryAddSingleton<IAuthorityCollectionInitializer, AuthorityRevocationCollectionInitializer>();
        services.TryAddSingleton<IAuthorityCollectionInitializer, AuthorityBootstrapInviteCollectionInitializer>();

        services.TryAddSingleton<IAuthorityUserStore, AuthorityUserStore>();
        services.TryAddSingleton<IAuthorityClientStore, AuthorityClientStore>();
@@ -112,6 +119,7 @@ public static class ServiceCollectionExtensions
|
|||||||
services.TryAddSingleton<IAuthorityLoginAttemptStore, AuthorityLoginAttemptStore>();
|
services.TryAddSingleton<IAuthorityLoginAttemptStore, AuthorityLoginAttemptStore>();
|
||||||
services.TryAddSingleton<IAuthorityRevocationStore, AuthorityRevocationStore>();
|
services.TryAddSingleton<IAuthorityRevocationStore, AuthorityRevocationStore>();
|
||||||
services.TryAddSingleton<IAuthorityRevocationExportStateStore, AuthorityRevocationExportStateStore>();
|
services.TryAddSingleton<IAuthorityRevocationExportStateStore, AuthorityRevocationExportStateStore>();
|
||||||
|
services.TryAddSingleton<IAuthorityBootstrapInviteStore, AuthorityBootstrapInviteStore>();
|
||||||
|
|
||||||
return services;
|
return services;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,25 @@
using MongoDB.Driver;
using StellaOps.Authority.Storage.Mongo.Documents;

namespace StellaOps.Authority.Storage.Mongo.Initialization;

internal sealed class AuthorityBootstrapInviteCollectionInitializer : IAuthorityCollectionInitializer
{
    private static readonly CreateIndexModel<AuthorityBootstrapInviteDocument>[] Indexes =
    {
        new CreateIndexModel<AuthorityBootstrapInviteDocument>(
            Builders<AuthorityBootstrapInviteDocument>.IndexKeys.Ascending(i => i.Token),
            new CreateIndexOptions { Unique = true, Name = "idx_invite_token" }),
        new CreateIndexModel<AuthorityBootstrapInviteDocument>(
            Builders<AuthorityBootstrapInviteDocument>.IndexKeys.Ascending(i => i.Status).Ascending(i => i.ExpiresAt),
            new CreateIndexOptions { Name = "idx_invite_status_expires" })
    };

    public async ValueTask EnsureIndexesAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(database);

        var collection = database.GetCollection<AuthorityBootstrapInviteDocument>(AuthorityMongoDefaults.Collections.Invites);
        await collection.Indexes.CreateManyAsync(Indexes, cancellationToken).ConfigureAwait(false);
    }
}
@@ -0,0 +1,166 @@
using System;
using System.Collections.Generic;
using MongoDB.Driver;
using StellaOps.Authority.Storage.Mongo.Documents;

namespace StellaOps.Authority.Storage.Mongo.Stores;

internal sealed class AuthorityBootstrapInviteStore : IAuthorityBootstrapInviteStore
{
    private readonly IMongoCollection<AuthorityBootstrapInviteDocument> collection;

    public AuthorityBootstrapInviteStore(IMongoCollection<AuthorityBootstrapInviteDocument> collection)
        => this.collection = collection ?? throw new ArgumentNullException(nameof(collection));

    public async ValueTask<AuthorityBootstrapInviteDocument> CreateAsync(AuthorityBootstrapInviteDocument document, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(document);

        await collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
        return document;
    }

    public async ValueTask<BootstrapInviteReservationResult> TryReserveAsync(
        string token,
        string expectedType,
        DateTimeOffset now,
        string? reservedBy,
        CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(token))
        {
            return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.NotFound, null);
        }

        var normalizedToken = token.Trim();
        var filter = Builders<AuthorityBootstrapInviteDocument>.Filter.And(
            Builders<AuthorityBootstrapInviteDocument>.Filter.Eq(i => i.Token, normalizedToken),
            Builders<AuthorityBootstrapInviteDocument>.Filter.Eq(i => i.Status, AuthorityBootstrapInviteStatuses.Pending));

        var update = Builders<AuthorityBootstrapInviteDocument>.Update
            .Set(i => i.Status, AuthorityBootstrapInviteStatuses.Reserved)
            .Set(i => i.ReservedAt, now)
            .Set(i => i.ReservedBy, reservedBy);

        var options = new FindOneAndUpdateOptions<AuthorityBootstrapInviteDocument>
        {
            ReturnDocument = ReturnDocument.After
        };

        var invite = await collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false);

        if (invite is null)
        {
            var existing = await collection
                .Find(i => i.Token == normalizedToken)
                .FirstOrDefaultAsync(cancellationToken)
                .ConfigureAwait(false);

            if (existing is null)
            {
                return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.NotFound, null);
            }

            if (existing.Status is AuthorityBootstrapInviteStatuses.Consumed or AuthorityBootstrapInviteStatuses.Reserved)
            {
                return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.AlreadyUsed, existing);
            }

            if (existing.Status == AuthorityBootstrapInviteStatuses.Expired || existing.ExpiresAt <= now)
            {
                return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.Expired, existing);
            }

            return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.NotFound, existing);
        }

        if (!string.Equals(invite.Type, expectedType, StringComparison.OrdinalIgnoreCase))
        {
            await ReleaseAsync(normalizedToken, cancellationToken).ConfigureAwait(false);
            return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.NotFound, invite);
        }

        if (invite.ExpiresAt <= now)
        {
            await MarkExpiredAsync(normalizedToken, cancellationToken).ConfigureAwait(false);
            return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.Expired, invite);
        }

        return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.Reserved, invite);
    }

    public async ValueTask<bool> ReleaseAsync(string token, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(token))
        {
            return false;
        }

        var result = await collection.UpdateOneAsync(
            Builders<AuthorityBootstrapInviteDocument>.Filter.And(
                Builders<AuthorityBootstrapInviteDocument>.Filter.Eq(i => i.Token, token.Trim()),
                Builders<AuthorityBootstrapInviteDocument>.Filter.Eq(i => i.Status, AuthorityBootstrapInviteStatuses.Reserved)),
            Builders<AuthorityBootstrapInviteDocument>.Update
                .Set(i => i.Status, AuthorityBootstrapInviteStatuses.Pending)
                .Set(i => i.ReservedAt, null)
                .Set(i => i.ReservedBy, null),
            cancellationToken: cancellationToken).ConfigureAwait(false);

        return result.ModifiedCount > 0;
    }

    public async ValueTask<bool> MarkConsumedAsync(string token, string? consumedBy, DateTimeOffset consumedAt, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(token))
        {
            return false;
        }

        var result = await collection.UpdateOneAsync(
            Builders<AuthorityBootstrapInviteDocument>.Filter.And(
                Builders<AuthorityBootstrapInviteDocument>.Filter.Eq(i => i.Token, token.Trim()),
                Builders<AuthorityBootstrapInviteDocument>.Filter.Eq(i => i.Status, AuthorityBootstrapInviteStatuses.Reserved)),
            Builders<AuthorityBootstrapInviteDocument>.Update
                .Set(i => i.Status, AuthorityBootstrapInviteStatuses.Consumed)
                .Set(i => i.ConsumedAt, consumedAt)
                .Set(i => i.ConsumedBy, consumedBy),
            cancellationToken: cancellationToken).ConfigureAwait(false);

        return result.ModifiedCount > 0;
    }

    public async ValueTask<IReadOnlyList<AuthorityBootstrapInviteDocument>> ExpireAsync(DateTimeOffset now, CancellationToken cancellationToken)
    {
        var filter = Builders<AuthorityBootstrapInviteDocument>.Filter.And(
            Builders<AuthorityBootstrapInviteDocument>.Filter.Lte(i => i.ExpiresAt, now),
            Builders<AuthorityBootstrapInviteDocument>.Filter.In(
                i => i.Status,
                new[] { AuthorityBootstrapInviteStatuses.Pending, AuthorityBootstrapInviteStatuses.Reserved }));

        var update = Builders<AuthorityBootstrapInviteDocument>.Update
            .Set(i => i.Status, AuthorityBootstrapInviteStatuses.Expired)
            .Set(i => i.ReservedAt, null)
            .Set(i => i.ReservedBy, null);

        var expired = await collection.Find(filter)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        if (expired.Count == 0)
        {
            return Array.Empty<AuthorityBootstrapInviteDocument>();
        }

        await collection.UpdateManyAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false);

        return expired;
    }

    private async Task MarkExpiredAsync(string token, CancellationToken cancellationToken)
    {
        await collection.UpdateOneAsync(
            Builders<AuthorityBootstrapInviteDocument>.Filter.Eq(i => i.Token, token),
            Builders<AuthorityBootstrapInviteDocument>.Update.Set(i => i.Status, AuthorityBootstrapInviteStatuses.Expired),
            cancellationToken: cancellationToken).ConfigureAwait(false);
    }
}
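A rough sketch of the call sequence this store is designed for, assuming an `inviteStore` instance, a `TimeProvider`, and a caller-supplied `token`; the provisioning step and error handling are illustrative, not the actual endpoint code.

    // Illustrative flow: reserve an invite, provision the account, then consume or release it.
    var reservation = await inviteStore.TryReserveAsync(token, "user", timeProvider.GetUtcNow(), reservedBy: "bootstrap-api", cancellationToken);
    if (reservation.Status != BootstrapInviteReservationStatus.Reserved)
    {
        // Expired, AlreadyUsed, or NotFound: surface the failure to the caller.
        return;
    }

    try
    {
        // ... provision the user or client here ...
        await inviteStore.MarkConsumedAsync(token, consumedBy: "alice", timeProvider.GetUtcNow(), cancellationToken);
    }
    catch
    {
        // Put the invite back to pending so the token can be retried.
        await inviteStore.ReleaseAsync(token, cancellationToken);
        throw;
    }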
@@ -1,6 +1,10 @@
using System;
using System.Collections.Generic;
using Microsoft.Extensions.Logging;
using MongoDB.Bson;
using MongoDB.Driver;
using System.Linq;
using System.Globalization;
using StellaOps.Authority.Storage.Mongo.Documents;

namespace StellaOps.Authority.Storage.Mongo.Stores;
@@ -86,6 +90,86 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore
        logger.LogDebug("Updated token {TokenId} status to {Status} (matched {Matched}).", tokenId, status, result.MatchedCount);
    }

    public async ValueTask<TokenUsageUpdateResult> RecordUsageAsync(string tokenId, string? remoteAddress, string? userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(tokenId))
        {
            return new TokenUsageUpdateResult(TokenUsageUpdateStatus.NotFound, null, null);
        }

        if (string.IsNullOrWhiteSpace(remoteAddress) && string.IsNullOrWhiteSpace(userAgent))
        {
            return new TokenUsageUpdateResult(TokenUsageUpdateStatus.MissingMetadata, remoteAddress, userAgent);
        }

        var id = tokenId.Trim();
        var token = await collection
            .Find(t => t.TokenId == id)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        if (token is null)
        {
            return new TokenUsageUpdateResult(TokenUsageUpdateStatus.NotFound, remoteAddress, userAgent);
        }

        token.Devices ??= new List<BsonDocument>();

        string? normalizedAddress = string.IsNullOrWhiteSpace(remoteAddress) ? null : remoteAddress.Trim();
        string? normalizedAgent = string.IsNullOrWhiteSpace(userAgent) ? null : userAgent.Trim();

        var device = token.Devices.FirstOrDefault(d =>
            string.Equals(GetString(d, "remoteAddress"), normalizedAddress, StringComparison.OrdinalIgnoreCase) &&
            string.Equals(GetString(d, "userAgent"), normalizedAgent, StringComparison.Ordinal));
        var suspicious = false;

        if (device is null)
        {
            suspicious = token.Devices.Count > 0;
            var document = new BsonDocument
            {
                { "remoteAddress", normalizedAddress },
                { "userAgent", normalizedAgent },
                { "firstSeen", BsonDateTime.Create(observedAt.UtcDateTime) },
                { "lastSeen", BsonDateTime.Create(observedAt.UtcDateTime) },
                { "useCount", 1 }
            };

            token.Devices.Add(document);
        }
        else
        {
            device["lastSeen"] = BsonDateTime.Create(observedAt.UtcDateTime);
            device["useCount"] = device.TryGetValue("useCount", out var existingCount) && existingCount.IsInt32
                ? existingCount.AsInt32 + 1
                : 1;
        }

        var update = Builders<AuthorityTokenDocument>.Update.Set(t => t.Devices, token.Devices);
        await collection.UpdateOneAsync(
            Builders<AuthorityTokenDocument>.Filter.Eq(t => t.TokenId, id),
            update,
            cancellationToken: cancellationToken).ConfigureAwait(false);

        return new TokenUsageUpdateResult(suspicious ? TokenUsageUpdateStatus.SuspectedReplay : TokenUsageUpdateStatus.Recorded, normalizedAddress, normalizedAgent);
    }

    private static string? GetString(BsonDocument document, string name)
    {
        if (!document.TryGetValue(name, out var value))
        {
            return null;
        }

        return value switch
        {
            { IsString: true } => value.AsString,
            { IsBsonNull: true } => null,
            _ => value.ToString()
        };
    }

    public async ValueTask<long> DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken)
    {
        var filter = Builders<AuthorityTokenDocument>.Filter.And(
@@ -0,0 +1,26 @@
using StellaOps.Authority.Storage.Mongo.Documents;

namespace StellaOps.Authority.Storage.Mongo.Stores;

public interface IAuthorityBootstrapInviteStore
{
    ValueTask<AuthorityBootstrapInviteDocument> CreateAsync(AuthorityBootstrapInviteDocument document, CancellationToken cancellationToken);

    ValueTask<BootstrapInviteReservationResult> TryReserveAsync(string token, string expectedType, DateTimeOffset now, string? reservedBy, CancellationToken cancellationToken);

    ValueTask<bool> ReleaseAsync(string token, CancellationToken cancellationToken);

    ValueTask<bool> MarkConsumedAsync(string token, string? consumedBy, DateTimeOffset consumedAt, CancellationToken cancellationToken);

    ValueTask<IReadOnlyList<AuthorityBootstrapInviteDocument>> ExpireAsync(DateTimeOffset now, CancellationToken cancellationToken);
}

public enum BootstrapInviteReservationStatus
{
    Reserved,
    NotFound,
    Expired,
    AlreadyUsed
}

public sealed record BootstrapInviteReservationResult(BootstrapInviteReservationStatus Status, AuthorityBootstrapInviteDocument? Invite);
@@ -1,3 +1,5 @@
using System;
using System.Collections.Generic;
using StellaOps.Authority.Storage.Mongo.Documents;

namespace StellaOps.Authority.Storage.Mongo.Stores;
@@ -21,5 +23,17 @@ public interface IAuthorityTokenStore

    ValueTask<long> DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken);

    ValueTask<TokenUsageUpdateResult> RecordUsageAsync(string tokenId, string? remoteAddress, string? userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken);

    ValueTask<IReadOnlyList<AuthorityTokenDocument>> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken);
}

public enum TokenUsageUpdateStatus
{
    Recorded,
    SuspectedReplay,
    MissingMetadata,
    NotFound
}

public sealed record TokenUsageUpdateResult(TokenUsageUpdateStatus Status, string? RemoteAddress, string? UserAgent);
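A minimal sketch of how a token-validation path might consume `RecordUsageAsync`, assuming the rate-limiter metadata carries the caller's IP and user agent; the reaction to a suspected replay shown here is indicative only and mirrors the audit behaviour exercised in the tests below.

    // Illustrative only: record the observed device fingerprint and react to a suspected replay.
    var usage = await tokenStore.RecordUsageAsync(tokenId, metadata?.RemoteIp, metadata?.UserAgent, timeProvider.GetUtcNow(), cancellationToken);
    if (usage.Status == TokenUsageUpdateStatus.SuspectedReplay)
    {
        // Token presented from a new address/user-agent combination: flag it, but keep the token valid.
        logger.LogWarning("Token {TokenId} used from unrecognised device {RemoteAddress}.", tokenId, usage.RemoteAddress);
    }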
@@ -0,0 +1,97 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Authority.Bootstrap;
using StellaOps.Authority.Storage.Mongo.Documents;
using StellaOps.Authority.Storage.Mongo.Stores;
using StellaOps.Cryptography.Audit;
using Xunit;

namespace StellaOps.Authority.Tests.Bootstrap;

public sealed class BootstrapInviteCleanupServiceTests
{
    [Fact]
    public async Task SweepExpiredInvitesAsync_ExpiresInvitesAndEmitsAuditRecords()
    {
        var now = new DateTimeOffset(2025, 10, 14, 12, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(now);

        var invites = new List<AuthorityBootstrapInviteDocument>
        {
            new()
            {
                Token = "token-1",
                Type = BootstrapInviteTypes.User,
                ExpiresAt = now.AddMinutes(-5),
                Provider = "standard",
                Target = "alice@example.com",
                Status = AuthorityBootstrapInviteStatuses.Pending
            },
            new()
            {
                Token = "token-2",
                Type = BootstrapInviteTypes.Client,
                ExpiresAt = now.AddMinutes(-1),
                Provider = "standard",
                Target = "client-1",
                Status = AuthorityBootstrapInviteStatuses.Reserved
            }
        };

        var store = new FakeInviteStore(invites);
        var sink = new CapturingAuthEventSink();
        var service = new BootstrapInviteCleanupService(store, sink, timeProvider, NullLogger<BootstrapInviteCleanupService>.Instance);

        await service.SweepExpiredInvitesAsync(CancellationToken.None);

        Assert.True(store.ExpireCalled);
        Assert.Equal(2, sink.Events.Count);
        Assert.All(sink.Events, record => Assert.Equal("authority.bootstrap.invite.expired", record.EventType));
        Assert.Contains(sink.Events, record => record.Properties.Any(property => property.Name == "invite.token" && property.Value.Value == "token-1"));
        Assert.Contains(sink.Events, record => record.Properties.Any(property => property.Name == "invite.token" && property.Value.Value == "token-2"));
    }

    private sealed class FakeInviteStore : IAuthorityBootstrapInviteStore
    {
        private readonly IReadOnlyList<AuthorityBootstrapInviteDocument> invites;

        public FakeInviteStore(IReadOnlyList<AuthorityBootstrapInviteDocument> invites)
            => this.invites = invites;

        public bool ExpireCalled { get; private set; }

        public ValueTask<AuthorityBootstrapInviteDocument> CreateAsync(AuthorityBootstrapInviteDocument document, CancellationToken cancellationToken)
            => throw new NotImplementedException();

        public ValueTask<BootstrapInviteReservationResult> TryReserveAsync(string token, string expectedType, DateTimeOffset now, string? reservedBy, CancellationToken cancellationToken)
            => ValueTask.FromResult(new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.NotFound, null));

        public ValueTask<bool> ReleaseAsync(string token, CancellationToken cancellationToken)
            => ValueTask.FromResult(false);

        public ValueTask<bool> MarkConsumedAsync(string token, string? consumedBy, DateTimeOffset consumedAt, CancellationToken cancellationToken)
            => ValueTask.FromResult(false);

        public ValueTask<IReadOnlyList<AuthorityBootstrapInviteDocument>> ExpireAsync(DateTimeOffset now, CancellationToken cancellationToken)
        {
            ExpireCalled = true;
            return ValueTask.FromResult(invites);
        }
    }

    private sealed class CapturingAuthEventSink : IAuthEventSink
    {
        public List<AuthEventRecord> Events { get; } = new();

        public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken)
        {
            Events.Add(record);
            return ValueTask.CompletedTask;
        }
    }
}
@@ -17,6 +17,7 @@ using StellaOps.Authority.Storage.Mongo.Stores;
using StellaOps.Authority.RateLimiting;
using StellaOps.Cryptography.Audit;
using Xunit;
using MongoDB.Bson;
using static StellaOps.Authority.Tests.OpenIddict.TestHelpers;

namespace StellaOps.Authority.Tests.OpenIddict;
@@ -76,7 +77,7 @@ public class ClientCredentialsHandlersTests
        await handler.HandleAsync(context);

-       Assert.False(context.IsRejected);
        Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}");
        Assert.Same(clientDocument, context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty]);

        var grantedScopes = Assert.IsType<string[]>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]);
@@ -84,6 +85,36 @@ public class ClientCredentialsHandlersTests
        Assert.Equal(clientDocument.Plugin, context.Transaction.Properties[AuthorityOpenIddictConstants.ClientProviderTransactionProperty]);
    }

    [Fact]
    public async Task ValidateClientCredentials_EmitsTamperAuditEvent_WhenUnexpectedParametersPresent()
    {
        var clientDocument = CreateClient(
            secret: "s3cr3t!",
            allowedGrantTypes: "client_credentials",
            allowedScopes: "jobs:read");

        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
        var sink = new TestAuthEventSink();
        var handler = new ValidateClientCredentialsHandler(
            new TestClientStore(clientDocument),
            registry,
            TestActivitySource,
            sink,
            new TestRateLimiterMetadataAccessor(),
            TimeProvider.System,
            NullLogger<ValidateClientCredentialsHandler>.Instance);

        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
        transaction.Request?.SetParameter("unexpected_param", "value");

        await handler.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction));

        var tamperEvent = Assert.Single(sink.Events, record => record.EventType == "authority.token.tamper");
        Assert.Contains(tamperEvent.Properties, property =>
            string.Equals(property.Name, "request.unexpected_parameter", StringComparison.OrdinalIgnoreCase) &&
            string.Equals(property.Value.Value, "unexpected_param", StringComparison.OrdinalIgnoreCase));
    }

    [Fact]
    public async Task HandleClientCredentials_PersistsTokenAndEnrichesClaims()
    {
@@ -98,22 +129,30 @@ public class ClientCredentialsHandlersTests
        var tokenStore = new TestTokenStore();
        var authSink = new TestAuthEventSink();
        var metadataAccessor = new TestRateLimiterMetadataAccessor();
        var validateHandler = new ValidateClientCredentialsHandler(
            new TestClientStore(clientDocument),
            registry,
            TestActivitySource,
            authSink,
            metadataAccessor,
            TimeProvider.System,
            NullLogger<ValidateClientCredentialsHandler>.Instance);

        var transaction = CreateTokenTransaction(clientDocument.ClientId, secret: null, scope: "jobs:trigger");
        transaction.Options.AccessTokenLifetime = TimeSpan.FromMinutes(30);

        var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
        await validateHandler.HandleAsync(validateContext);
        Assert.False(validateContext.IsRejected);

        var handler = new HandleClientCredentialsHandler(
            registry,
            tokenStore,
            TimeProvider.System,
            TestActivitySource,
-           authSink,
-           metadataAccessor,
            NullLogger<HandleClientCredentialsHandler>.Instance);
        var persistHandler = new PersistTokensHandler(tokenStore, TimeProvider.System, TestActivitySource, NullLogger<PersistTokensHandler>.Instance);

-       var transaction = CreateTokenTransaction(clientDocument.ClientId, secret: null, scope: "jobs:trigger");
-       transaction.Options.AccessTokenLifetime = TimeSpan.FromMinutes(30);
-       transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty] = clientDocument;
-       transaction.Properties[AuthorityOpenIddictConstants.ClientProviderTransactionProperty] = clientDocument.Plugin!;
-       transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty] = new[] { "jobs:trigger" };

        var context = new OpenIddictServerEvents.HandleTokenRequestContext(transaction);

        await handler.HandleAsync(context);
@@ -161,10 +200,14 @@ public class TokenValidationHandlersTests
            ClientId = "feedser"
        };

        var metadataAccessor = new TestRateLimiterMetadataAccessor();
        var auditSink = new TestAuthEventSink();
        var handler = new ValidateAccessTokenHandler(
            tokenStore,
            new TestClientStore(CreateClient()),
            CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(CreateClient())),
            metadataAccessor,
            auditSink,
            TimeProvider.System,
            TestActivitySource,
            NullLogger<ValidateAccessTokenHandler>.Instance);
@@ -203,10 +246,14 @@ public class TokenValidationHandlersTests

        var registry = new AuthorityIdentityProviderRegistry(new[] { plugin }, NullLogger<AuthorityIdentityProviderRegistry>.Instance);

        var metadataAccessorSuccess = new TestRateLimiterMetadataAccessor();
        var auditSinkSuccess = new TestAuthEventSink();
        var handler = new ValidateAccessTokenHandler(
            new TestTokenStore(),
            new TestClientStore(clientDocument),
            registry,
            metadataAccessorSuccess,
            auditSinkSuccess,
            TimeProvider.System,
            TestActivitySource,
            NullLogger<ValidateAccessTokenHandler>.Instance);
@@ -229,6 +276,76 @@ public class TokenValidationHandlersTests
        Assert.False(context.IsRejected);
        Assert.Contains(principal.Claims, claim => claim.Type == "enriched" && claim.Value == "true");
    }

    [Fact]
    public async Task ValidateAccessTokenHandler_EmitsReplayAudit_WhenStoreDetectsSuspectedReplay()
    {
        var tokenStore = new TestTokenStore();
        tokenStore.Inserted = new AuthorityTokenDocument
        {
            TokenId = "token-replay",
            Status = "valid",
            ClientId = "agent",
            Devices = new List<BsonDocument>
            {
                new BsonDocument
                {
                    { "remoteAddress", "10.0.0.1" },
                    { "userAgent", "agent/1.0" },
                    { "firstSeen", BsonDateTime.Create(DateTimeOffset.UtcNow.AddMinutes(-15)) },
                    { "lastSeen", BsonDateTime.Create(DateTimeOffset.UtcNow.AddMinutes(-5)) },
                    { "useCount", 2 }
                }
            }
        };

        tokenStore.UsageCallback = (remote, agent) => new TokenUsageUpdateResult(TokenUsageUpdateStatus.SuspectedReplay, remote, agent);

        var metadataAccessor = new TestRateLimiterMetadataAccessor();
        var metadata = metadataAccessor.GetMetadata();
        if (metadata is not null)
        {
            metadata.RemoteIp = "203.0.113.7";
            metadata.UserAgent = "agent/2.0";
        }

        var clientDocument = CreateClient();
        clientDocument.ClientId = "agent";
        var auditSink = new TestAuthEventSink();
        var registry = CreateRegistry(withClientProvisioning: false, clientDescriptor: null);
        var handler = new ValidateAccessTokenHandler(
            tokenStore,
            new TestClientStore(clientDocument),
            registry,
            metadataAccessor,
            auditSink,
            TimeProvider.System,
            TestActivitySource,
            NullLogger<ValidateAccessTokenHandler>.Instance);

        var transaction = new OpenIddictServerTransaction
        {
            Options = new OpenIddictServerOptions(),
            EndpointType = OpenIddictServerEndpointType.Introspection,
            Request = new OpenIddictRequest()
        };

        var principal = CreatePrincipal("agent", "token-replay", "standard");
        var context = new OpenIddictServerEvents.ValidateTokenContext(transaction)
        {
            Principal = principal,
            TokenId = "token-replay"
        };

        await handler.HandleAsync(context);

        Assert.False(context.IsRejected);
        var replayEvent = Assert.Single(auditSink.Events, record => record.EventType == "authority.token.replay.suspected");
        Assert.Equal(AuthEventOutcome.Error, replayEvent.Outcome);
        Assert.NotNull(replayEvent.Network);
        Assert.Equal("203.0.113.7", replayEvent.Network?.RemoteAddress.Value);
        Assert.Contains(replayEvent.Properties, property => property.Name == "token.devices.total");
    }
}

internal sealed class TestClientStore : IAuthorityClientStore
@@ -263,6 +380,8 @@ internal sealed class TestTokenStore : IAuthorityTokenStore
{
    public AuthorityTokenDocument? Inserted { get; set; }

    public Func<string?, string?, TokenUsageUpdateResult>? UsageCallback { get; set; }

    public ValueTask InsertAsync(AuthorityTokenDocument document, CancellationToken cancellationToken)
    {
        Inserted = document;
@@ -281,6 +400,9 @@ internal sealed class TestTokenStore : IAuthorityTokenStore
    public ValueTask<long> DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken)
        => ValueTask.FromResult(0L);

    public ValueTask<TokenUsageUpdateResult> RecordUsageAsync(string tokenId, string? remoteAddress, string? userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken)
        => ValueTask.FromResult(UsageCallback?.Invoke(remoteAddress, userAgent) ?? new TokenUsageUpdateResult(TokenUsageUpdateStatus.Recorded, remoteAddress, userAgent));

    public ValueTask<IReadOnlyList<AuthorityTokenDocument>> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken)
        => ValueTask.FromResult<IReadOnlyList<AuthorityTokenDocument>>(Array.Empty<AuthorityTokenDocument>());
}
@@ -74,6 +74,26 @@ public class PasswordGrantHandlersTests
        Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.LockedOut);
    }

    [Fact]
    public async Task ValidatePasswordGrant_EmitsTamperAuditEvent_WhenUnexpectedParametersPresent()
    {
        var sink = new TestAuthEventSink();
        var metadataAccessor = new TestRateLimiterMetadataAccessor();
        var registry = CreateRegistry(new SuccessCredentialStore());
        var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, TimeProvider.System, NullLogger<ValidatePasswordGrantHandler>.Instance);

        var transaction = CreatePasswordTransaction("alice", "Password1!");
        transaction.Request?.SetParameter("unexpected_param", "value");

        await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction));

        var tamperEvent = Assert.Single(sink.Events, record => record.EventType == "authority.token.tamper");
        Assert.Equal(AuthEventOutcome.Failure, tamperEvent.Outcome);
        Assert.Contains(tamperEvent.Properties, property =>
            string.Equals(property.Name, "request.unexpected_parameter", StringComparison.OrdinalIgnoreCase) &&
            string.Equals(property.Value.Value, "unexpected_param", StringComparison.OrdinalIgnoreCase));
    }

    private static AuthorityIdentityProviderRegistry CreateRegistry(IUserCredentialStore store)
    {
        var plugin = new StubIdentityProviderPlugin("stub", store);
@@ -104,14 +124,14 @@ public class PasswordGrantHandlersTests
            Name = name;
            Type = "stub";
            var manifest = new AuthorityPluginManifest(
-               name,
                Name: name,
-               "stub",
                Type: "stub",
-               enabled: true,
                Enabled: true,
-               version: null,
                AssemblyName: null,
-               description: null,
                AssemblyPath: null,
-               capabilities: new[] { AuthorityPluginCapabilities.Password },
                Capabilities: new[] { AuthorityPluginCapabilities.Password },
-               configuration: new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase),
                Metadata: new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase),
-               configPath: $"{name}.yaml");
                ConfigPath: $"{name}.yaml");
            Context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build());
            Credentials = store;
            ClaimsEnricher = new NoopClaimsEnricher();
@@ -5,6 +5,7 @@ using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using MongoDB.Driver;
using MongoDB.Bson;
using OpenIddict.Abstractions;
using OpenIddict.Server;
using StellaOps.Authority;
@@ -56,10 +57,10 @@ public sealed class TokenPersistenceIntegrationTests
            withClientProvisioning: true,
            clientDescriptor: TestHelpers.CreateDescriptor(clientDocument));

-       var validateHandler = new ValidateClientCredentialsHandler(clientStore, registry, TestActivitySource, NullLogger<ValidateClientCredentialsHandler>.Instance);
        var authSink = new TestAuthEventSink();
        var metadataAccessor = new TestRateLimiterMetadataAccessor();
-       var handleHandler = new HandleClientCredentialsHandler(registry, TestActivitySource, authSink, metadataAccessor, clock, NullLogger<HandleClientCredentialsHandler>.Instance);
        var validateHandler = new ValidateClientCredentialsHandler(clientStore, registry, TestActivitySource, authSink, metadataAccessor, clock, NullLogger<ValidateClientCredentialsHandler>.Instance);
        var handleHandler = new HandleClientCredentialsHandler(registry, tokenStore, clock, TestActivitySource, NullLogger<HandleClientCredentialsHandler>.Instance);
        var persistHandler = new PersistTokensHandler(tokenStore, clock, TestActivitySource, NullLogger<PersistTokensHandler>.Instance);

        var transaction = TestHelpers.CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:trigger");
@@ -148,10 +149,14 @@ public sealed class TokenPersistenceIntegrationTests
        var revokedAt = now.AddMinutes(1);
        await tokenStore.UpdateStatusAsync(revokedTokenId, "revoked", revokedAt, "manual", null, null, CancellationToken.None);

        var metadataAccessor = new TestRateLimiterMetadataAccessor();
        var auditSink = new TestAuthEventSink();
        var handler = new ValidateAccessTokenHandler(
            tokenStore,
            clientStore,
            registry,
            metadataAccessor,
            auditSink,
            clock,
            TestActivitySource,
            NullLogger<ValidateAccessTokenHandler>.Instance);
@@ -190,6 +195,60 @@ public sealed class TokenPersistenceIntegrationTests
        Assert.Equal("manual", stored.RevokedReason);
    }

    [Fact]
    public async Task RecordUsageAsync_FlagsSuspectedReplay_OnNewDeviceFingerprint()
    {
        await ResetCollectionsAsync();

        var issuedAt = new DateTimeOffset(2025, 10, 14, 8, 0, 0, TimeSpan.Zero);
        var clock = new FakeTimeProvider(issuedAt);

        await using var provider = await BuildMongoProviderAsync(clock);

        var tokenStore = provider.GetRequiredService<IAuthorityTokenStore>();

        var tokenDocument = new AuthorityTokenDocument
        {
            TokenId = "token-replay",
            Type = OpenIddictConstants.TokenTypeHints.AccessToken,
            ClientId = "client-1",
            Status = "valid",
            CreatedAt = issuedAt,
            Devices = new List<BsonDocument>
            {
                new BsonDocument
                {
                    { "remoteAddress", "10.0.0.1" },
                    { "userAgent", "agent/1.0" },
                    { "firstSeen", BsonDateTime.Create(issuedAt.AddMinutes(-10).UtcDateTime) },
                    { "lastSeen", BsonDateTime.Create(issuedAt.AddMinutes(-5).UtcDateTime) },
                    { "useCount", 2 }
                }
            }
        };

        await tokenStore.InsertAsync(tokenDocument, CancellationToken.None);

        var result = await tokenStore.RecordUsageAsync(
            "token-replay",
            remoteAddress: "10.0.0.2",
            userAgent: "agent/2.0",
            observedAt: clock.GetUtcNow(),
            CancellationToken.None);

        Assert.Equal(TokenUsageUpdateStatus.SuspectedReplay, result.Status);

        var stored = await tokenStore.FindByTokenIdAsync("token-replay", CancellationToken.None);
        Assert.NotNull(stored);
        Assert.Equal(2, stored!.Devices?.Count);
        Assert.Contains(stored.Devices!, doc =>
        {
            var remote = doc.TryGetValue("remoteAddress", out var ra) && ra.IsString ? ra.AsString : null;
            var agentValue = doc.TryGetValue("userAgent", out var ua) && ua.IsString ? ua.AsString : null;
            return remote == "10.0.0.2" && agentValue == "agent/2.0";
        });
    }

    private async Task ResetCollectionsAsync()
    {
        var tokens = fixture.Database.GetCollection<AuthorityTokenDocument>(AuthorityMongoDefaults.Collections.Tokens);
@@ -220,27 +279,3 @@ public sealed class TokenPersistenceIntegrationTests
        return provider;
    }
}

-internal sealed class TestAuthEventSink : IAuthEventSink
-{
-    public List<AuthEventRecord> Records { get; } = new();
-
-    public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken)
-    {
-        Records.Add(record);
-        return ValueTask.CompletedTask;
-    }
-}
-
-internal sealed class TestRateLimiterMetadataAccessor : IAuthorityRateLimiterMetadataAccessor
-{
-    private readonly AuthorityRateLimiterMetadata metadata = new();
-
-    public AuthorityRateLimiterMetadata? GetMetadata() => metadata;
-
-    public void SetClientId(string? clientId) => metadata.ClientId = string.IsNullOrWhiteSpace(clientId) ? null : clientId;
-
-    public void SetSubjectId(string? subjectId) => metadata.SubjectId = string.IsNullOrWhiteSpace(subjectId) ? null : subjectId;
-
-    public void SetTag(string name, string? value) => metadata.SetTag(name, value);
-}
@@ -76,6 +76,7 @@ public class AuthorityRateLimiterMetadataMiddlewareTests
        context.Request.Path = "/token";
        context.Request.Method = HttpMethods.Post;
        context.Request.Headers["X-Forwarded-For"] = "203.0.113.99";
        context.Request.Headers.UserAgent = "StellaOps-Client/1.2";

        var middleware = CreateMiddleware();
        await middleware.InvokeAsync(context);
@@ -84,6 +85,9 @@ public class AuthorityRateLimiterMetadataMiddlewareTests
        Assert.NotNull(metadata);
        Assert.Equal("203.0.113.99", metadata!.RemoteIp);
        Assert.Equal("203.0.113.99", metadata.ForwardedFor);
        Assert.Equal("StellaOps-Client/1.2", metadata.UserAgent);
        Assert.True(metadata.Tags.TryGetValue("authority.user_agent", out var tagValue));
        Assert.Equal("StellaOps-Client/1.2", tagValue);
    }

    private static AuthorityRateLimiterMetadataMiddleware CreateMiddleware()
@@ -0,0 +1,106 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using StellaOps.Authority.Storage.Mongo.Stores;
using StellaOps.Authority.Storage.Mongo.Documents;
using StellaOps.Cryptography.Audit;

namespace StellaOps.Authority.Bootstrap;

internal sealed class BootstrapInviteCleanupService : BackgroundService
{
    private readonly IAuthorityBootstrapInviteStore inviteStore;
    private readonly IAuthEventSink auditSink;
    private readonly TimeProvider timeProvider;
    private readonly ILogger<BootstrapInviteCleanupService> logger;
    private readonly TimeSpan interval;

    public BootstrapInviteCleanupService(
        IAuthorityBootstrapInviteStore inviteStore,
        IAuthEventSink auditSink,
        TimeProvider timeProvider,
        ILogger<BootstrapInviteCleanupService> logger)
    {
        this.inviteStore = inviteStore ?? throw new ArgumentNullException(nameof(inviteStore));
        this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink));
        this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
        interval = TimeSpan.FromMinutes(5);
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        var timer = new PeriodicTimer(interval);

        try
        {
            while (await timer.WaitForNextTickAsync(stoppingToken).ConfigureAwait(false))
            {
                await SweepExpiredInvitesAsync(stoppingToken).ConfigureAwait(false);
            }
        }
        catch (OperationCanceledException)
        {
            // Shutdown requested.
        }
        finally
        {
            timer.Dispose();
        }
    }

    internal async Task SweepExpiredInvitesAsync(CancellationToken cancellationToken)
    {
        var now = timeProvider.GetUtcNow();
        var expired = await inviteStore.ExpireAsync(now, cancellationToken).ConfigureAwait(false);
        if (expired.Count == 0)
        {
            return;
        }

        logger.LogInformation("Expired {Count} bootstrap invite(s).", expired.Count);

        foreach (var invite in expired)
        {
            var record = new AuthEventRecord
            {
                EventType = "authority.bootstrap.invite.expired",
                OccurredAt = now,
                CorrelationId = Guid.NewGuid().ToString("N"),
                Outcome = AuthEventOutcome.Success,
                Reason = "Invite expired before consumption.",
                Subject = null,
                Client = null,
                Scopes = Array.Empty<string>(),
                Network = null,
                Properties = BuildInviteProperties(invite)
            };

            await auditSink.WriteAsync(record, cancellationToken).ConfigureAwait(false);
        }
    }

    private static AuthEventProperty[] BuildInviteProperties(AuthorityBootstrapInviteDocument invite)
    {
        var properties = new List<AuthEventProperty>
        {
            new() { Name = "invite.token", Value = ClassifiedString.Public(invite.Token) },
            new() { Name = "invite.type", Value = ClassifiedString.Public(invite.Type) },
            new() { Name = "invite.expires_at", Value = ClassifiedString.Public(invite.ExpiresAt.ToString("O", CultureInfo.InvariantCulture)) }
        };

        if (!string.IsNullOrWhiteSpace(invite.Provider))
        {
            properties.Add(new AuthEventProperty { Name = "invite.provider", Value = ClassifiedString.Public(invite.Provider) });
        }

        if (!string.IsNullOrWhiteSpace(invite.Target))
        {
            properties.Add(new AuthEventProperty { Name = "invite.target", Value = ClassifiedString.Public(invite.Target) });
        }

        return properties.ToArray();
    }
}
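Because the sweeper derives from `BackgroundService`, it only runs once it is registered with the host. The composition root is not part of this diff, so the line below is an assumption about how it would typically be wired up.

    // Assumed registration (not shown in this diff): host the cleanup sweeper alongside the Authority services.
    services.AddHostedService<BootstrapInviteCleanupService>();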
@@ -6,6 +6,8 @@ internal sealed record BootstrapUserRequest
{
    public string? Provider { get; init; }

    public string? InviteToken { get; init; }

    [Required]
    public string Username { get; init; } = string.Empty;

@@ -27,6 +29,8 @@ internal sealed record BootstrapClientRequest
{
    public string? Provider { get; init; }

    public string? InviteToken { get; init; }

    [Required]
    public string ClientId { get; init; } = string.Empty;

@@ -46,3 +50,26 @@ internal sealed record BootstrapClientRequest

    public IReadOnlyDictionary<string, string?>? Properties { get; init; }
}

internal sealed record BootstrapInviteRequest
{
    public string Type { get; init; } = BootstrapInviteTypes.User;

    public string? Token { get; init; }

    public string? Provider { get; init; }

    public string? Target { get; init; }

    public DateTimeOffset? ExpiresAt { get; init; }

    public string? IssuedBy { get; init; }

    public IReadOnlyDictionary<string, string?>? Metadata { get; init; }
}

internal static class BootstrapInviteTypes
{
    public const string User = "user";
    public const string Client = "client";
}
@@ -0,0 +1,252 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using OpenIddict.Abstractions;
using OpenIddict.Server;
using StellaOps.Authority.RateLimiting;
using StellaOps.Cryptography.Audit;

namespace StellaOps.Authority.OpenIddict.Handlers;

internal static class ClientCredentialsAuditHelper
{
    internal static string EnsureCorrelationId(OpenIddictServerTransaction transaction)
    {
        ArgumentNullException.ThrowIfNull(transaction);

        if (transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.AuditCorrelationProperty, out var value) &&
            value is string existing &&
            !string.IsNullOrWhiteSpace(existing))
        {
            return existing;
        }

        var correlation = Activity.Current?.TraceId.ToString() ??
            Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture);

        transaction.Properties[AuthorityOpenIddictConstants.AuditCorrelationProperty] = correlation;
        return correlation;
    }

    internal static AuthEventRecord CreateRecord(
        TimeProvider timeProvider,
        OpenIddictServerTransaction transaction,
        AuthorityRateLimiterMetadata? metadata,
        string? clientSecret,
        AuthEventOutcome outcome,
        string? reason,
        string? clientId,
        string? providerName,
        bool? confidential,
        IReadOnlyList<string> requestedScopes,
        IReadOnlyList<string> grantedScopes,
        string? invalidScope,
        IEnumerable<AuthEventProperty>? extraProperties = null,
        string? eventType = null)
    {
        ArgumentNullException.ThrowIfNull(timeProvider);
        ArgumentNullException.ThrowIfNull(transaction);

        var correlationId = EnsureCorrelationId(transaction);
        var client = BuildClient(clientId, providerName);
        var network = BuildNetwork(metadata);
        var normalizedGranted = NormalizeScopes(grantedScopes);
        var properties = BuildProperties(confidential, requestedScopes, invalidScope, extraProperties);

        return new AuthEventRecord
        {
            EventType = string.IsNullOrWhiteSpace(eventType) ? "authority.client_credentials.grant" : eventType,
            OccurredAt = timeProvider.GetUtcNow(),
            CorrelationId = correlationId,
            Outcome = outcome,
            Reason = Normalize(reason),
            Subject = null,
            Client = client,
            Scopes = normalizedGranted,
            Network = network,
            Properties = properties
        };
    }

    internal static AuthEventRecord CreateTamperRecord(
        TimeProvider timeProvider,
        OpenIddictServerTransaction transaction,
        AuthorityRateLimiterMetadata? metadata,
        string? clientId,
        string? providerName,
        bool? confidential,
        IEnumerable<string> unexpectedParameters)
    {
        var properties = new List<AuthEventProperty>
        {
            new()
            {
                Name = "request.tampered",
                Value = ClassifiedString.Public("true")
            }
        };

        if (confidential.HasValue)
        {
            properties.Add(new AuthEventProperty
            {
                Name = "client.confidential",
                Value = ClassifiedString.Public(confidential.Value ? "true" : "false")
            });
        }

        if (unexpectedParameters is not null)
        {
            foreach (var parameter in unexpectedParameters)
            {
                if (string.IsNullOrWhiteSpace(parameter))
                {
                    continue;
                }

                properties.Add(new AuthEventProperty
                {
                    Name = "request.unexpected_parameter",
                    Value = ClassifiedString.Public(parameter)
                });
            }
        }

        var reason = unexpectedParameters is null
            ? "Unexpected parameters supplied to client credentials request."
            : $"Unexpected parameters supplied to client credentials request: {string.Join(", ", unexpectedParameters)}.";

        return CreateRecord(
            timeProvider,
            transaction,
            metadata,
            clientSecret: null,
            outcome: AuthEventOutcome.Failure,
            reason: reason,
            clientId: clientId,
            providerName: providerName,
            confidential: confidential,
            requestedScopes: Array.Empty<string>(),
            grantedScopes: Array.Empty<string>(),
|
||||||
|
invalidScope: null,
|
||||||
|
extraProperties: properties,
|
||||||
|
eventType: "authority.token.tamper");
|
||||||
|
}
|
||||||
|
|
||||||
|
private static AuthEventClient? BuildClient(string? clientId, string? providerName)
|
||||||
|
{
|
||||||
|
if (string.IsNullOrWhiteSpace(clientId) && string.IsNullOrWhiteSpace(providerName))
|
||||||
|
{
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return new AuthEventClient
|
||||||
|
{
|
||||||
|
ClientId = ClassifiedString.Personal(Normalize(clientId)),
|
||||||
|
Name = ClassifiedString.Empty,
|
||||||
|
Provider = ClassifiedString.Public(Normalize(providerName))
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private static AuthEventNetwork? BuildNetwork(AuthorityRateLimiterMetadata? metadata)
|
||||||
|
{
|
||||||
|
var remote = Normalize(metadata?.RemoteIp);
|
||||||
|
var forwarded = Normalize(metadata?.ForwardedFor);
|
||||||
|
var userAgent = Normalize(metadata?.UserAgent);
|
||||||
|
|
||||||
|
if (string.IsNullOrWhiteSpace(remote) && string.IsNullOrWhiteSpace(forwarded) && string.IsNullOrWhiteSpace(userAgent))
|
||||||
|
{
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return new AuthEventNetwork
|
||||||
|
{
|
||||||
|
RemoteAddress = ClassifiedString.Personal(remote),
|
||||||
|
ForwardedFor = ClassifiedString.Personal(forwarded),
|
||||||
|
UserAgent = ClassifiedString.Personal(userAgent)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private static IReadOnlyList<AuthEventProperty> BuildProperties(
|
||||||
|
bool? confidential,
|
||||||
|
IReadOnlyList<string> requestedScopes,
|
||||||
|
string? invalidScope,
|
||||||
|
IEnumerable<AuthEventProperty>? extraProperties)
|
||||||
|
{
|
||||||
|
var properties = new List<AuthEventProperty>();
|
||||||
|
|
||||||
|
if (confidential.HasValue)
|
||||||
|
{
|
||||||
|
properties.Add(new AuthEventProperty
|
||||||
|
{
|
||||||
|
Name = "client.confidential",
|
||||||
|
Value = ClassifiedString.Public(confidential.Value ? "true" : "false")
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
var normalizedRequested = NormalizeScopes(requestedScopes);
|
||||||
|
if (normalizedRequested is { Count: > 0 })
|
||||||
|
{
|
||||||
|
foreach (var scope in normalizedRequested)
|
||||||
|
{
|
||||||
|
if (string.IsNullOrWhiteSpace(scope))
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
properties.Add(new AuthEventProperty
|
||||||
|
{
|
||||||
|
Name = "scope.requested",
|
||||||
|
Value = ClassifiedString.Public(scope)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!string.IsNullOrWhiteSpace(invalidScope))
|
||||||
|
{
|
||||||
|
properties.Add(new AuthEventProperty
|
||||||
|
{
|
||||||
|
Name = "scope.invalid",
|
||||||
|
Value = ClassifiedString.Public(invalidScope)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (extraProperties is not null)
|
||||||
|
{
|
||||||
|
foreach (var property in extraProperties)
|
||||||
|
{
|
||||||
|
if (property is null || string.IsNullOrWhiteSpace(property.Name))
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
properties.Add(property);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return properties.Count == 0 ? Array.Empty<AuthEventProperty>() : properties;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static IReadOnlyList<string> NormalizeScopes(IReadOnlyList<string>? scopes)
|
||||||
|
{
|
||||||
|
if (scopes is null || scopes.Count == 0)
|
||||||
|
{
|
||||||
|
return Array.Empty<string>();
|
||||||
|
}
|
||||||
|
|
||||||
|
var normalized = scopes
|
||||||
|
.Where(static scope => !string.IsNullOrWhiteSpace(scope))
|
||||||
|
.Select(static scope => scope.Trim())
|
||||||
|
.Where(static scope => scope.Length > 0)
|
||||||
|
.Distinct(StringComparer.Ordinal)
|
||||||
|
.OrderBy(static scope => scope, StringComparer.Ordinal)
|
||||||
|
.ToArray();
|
||||||
|
|
||||||
|
return normalized.Length == 0 ? Array.Empty<string>() : normalized;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static string? Normalize(string? value)
|
||||||
|
=> string.IsNullOrWhiteSpace(value) ? null : value.Trim();
|
||||||
|
}
|
||||||
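
Note (illustration only, not part of the patch): the helper above keeps a single correlation ID per OpenIddict transaction by caching it in the transaction's property bag, preferring the ambient `Activity` trace ID and only then minting a GUID, so every audit record emitted while handling the same token request correlates with the distributed trace. A minimal sketch of that caching pattern, using a plain dictionary and key name as stand-ins for the transaction property bag (both are assumptions for illustration):

```csharp
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;

static class CorrelationIds
{
    private const string Key = "audit:correlation_id";

    // Returns the cached correlation ID for this request, or creates and caches one.
    public static string EnsureCorrelationId(IDictionary<string, object?> properties)
    {
        if (properties.TryGetValue(Key, out var value) &&
            value is string existing &&
            !string.IsNullOrWhiteSpace(existing))
        {
            return existing;
        }

        // Prefer the current trace ID so audit events join up with distributed traces.
        var correlation = Activity.Current?.TraceId.ToString()
                          ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture);
        properties[Key] = correlation;
        return correlation;
    }
}
```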
@@ -76,6 +76,22 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle
 var requestedScopes = requestedScopeInput.IsDefaultOrEmpty ? Array.Empty<string>() : requestedScopeInput.ToArray();
 context.Transaction.Properties[AuthorityOpenIddictConstants.AuditRequestedScopesProperty] = requestedScopes;
 
+var unexpectedParameters = TokenRequestTamperInspector.GetUnexpectedClientCredentialsParameters(context.Request);
+if (unexpectedParameters.Count > 0)
+{
+    var providerHint = context.Request.GetParameter(AuthorityOpenIddictConstants.ProviderParameterName)?.Value?.ToString();
+    var tamperRecord = ClientCredentialsAuditHelper.CreateTamperRecord(
+        timeProvider,
+        context.Transaction,
+        metadata,
+        clientId,
+        providerHint,
+        confidential: null,
+        unexpectedParameters);
+
+    await auditSink.WriteAsync(tamperRecord, context.CancellationToken).ConfigureAwait(false);
+}
+
 try
 {
     if (string.IsNullOrWhiteSpace(context.ClientId))
@@ -68,6 +68,23 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler<Op
 var requestedScopesInput = context.Request.GetScopes();
 var requestedScopes = requestedScopesInput.IsDefaultOrEmpty ? Array.Empty<string>() : requestedScopesInput.ToArray();
 
+var unexpectedParameters = TokenRequestTamperInspector.GetUnexpectedPasswordGrantParameters(context.Request);
+if (unexpectedParameters.Count > 0)
+{
+    var providerHint = context.Request.GetParameter(AuthorityOpenIddictConstants.ProviderParameterName)?.Value?.ToString();
+    var tamperRecord = PasswordGrantAuditHelper.CreateTamperRecord(
+        timeProvider,
+        context.Transaction,
+        metadata,
+        clientId,
+        providerHint,
+        context.Request.Username,
+        requestedScopes,
+        unexpectedParameters);
+
+    await auditSink.WriteAsync(tamperRecord, context.CancellationToken).ConfigureAwait(false);
+}
+
 var selection = AuthorityIdentityProviderSelector.ResolvePasswordProvider(context.Request, registry);
 if (!selection.Succeeded)
 {
@@ -75,7 +92,6 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler<Op
 timeProvider,
 context.Transaction,
 metadata,
-null,
 AuthEventOutcome.Failure,
 selection.Description,
 clientId,
@@ -100,7 +116,6 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler<Op
 timeProvider,
 context.Transaction,
 metadata,
-httpContext,
 AuthEventOutcome.Failure,
 "Both username and password must be provided.",
 clientId,
@@ -250,7 +265,6 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open
 timeProvider,
 context.Transaction,
 metadata,
-httpContext,
 AuthEventOutcome.Failure,
 "Both username and password must be provided.",
 clientId,
@@ -395,7 +409,8 @@ internal static class PasswordGrantAuditHelper
 IEnumerable<string>? scopes,
 TimeSpan? retryAfter,
 AuthorityCredentialFailureCode? failureCode,
-IEnumerable<AuthEventProperty>? extraProperties)
+IEnumerable<AuthEventProperty>? extraProperties,
+string? eventType = null)
 {
     ArgumentNullException.ThrowIfNull(timeProvider);
     ArgumentNullException.ThrowIfNull(transaction);
@@ -409,7 +424,7 @@ internal static class PasswordGrantAuditHelper
 
 return new AuthEventRecord
 {
-    EventType = "authority.password.grant",
+    EventType = string.IsNullOrWhiteSpace(eventType) ? "authority.password.grant" : eventType,
     OccurredAt = timeProvider.GetUtcNow(),
     CorrelationId = correlationId,
     Outcome = outcome,
@@ -581,4 +596,61 @@ internal static class PasswordGrantAuditHelper
 
 private static string? Normalize(string? value)
     => string.IsNullOrWhiteSpace(value) ? null : value.Trim();
+
+internal static AuthEventRecord CreateTamperRecord(
+    TimeProvider timeProvider,
+    OpenIddictServerTransaction transaction,
+    AuthorityRateLimiterMetadata? metadata,
+    string? clientId,
+    string? providerName,
+    string? username,
+    IEnumerable<string>? scopes,
+    IEnumerable<string> unexpectedParameters)
+{
+    var properties = new List<AuthEventProperty>
+    {
+        new()
+        {
+            Name = "request.tampered",
+            Value = ClassifiedString.Public("true")
+        }
+    };
+
+    if (unexpectedParameters is not null)
+    {
+        foreach (var parameter in unexpectedParameters)
+        {
+            if (string.IsNullOrWhiteSpace(parameter))
+            {
+                continue;
+            }
+
+            properties.Add(new AuthEventProperty
+            {
+                Name = "request.unexpected_parameter",
+                Value = ClassifiedString.Public(parameter)
+            });
+        }
+    }
+
+    var reason = unexpectedParameters is null
+        ? "Unexpected parameters supplied to password grant request."
+        : $"Unexpected parameters supplied to password grant request: {string.Join(", ", unexpectedParameters)}.";
+
+    return CreatePasswordGrantRecord(
+        timeProvider,
+        transaction,
+        metadata,
+        AuthEventOutcome.Failure,
+        reason,
+        clientId,
+        providerName,
+        user: null,
+        username,
+        scopes,
+        retryAfter: null,
+        failureCode: null,
+        extraProperties: properties,
+        eventType: "authority.token.tamper");
+}
 }
@@ -111,14 +111,26 @@ internal sealed class HandleRevocationRequestHandler : IOpenIddictServerHandler<
 
 private static byte[] Base64UrlDecode(string value)
 {
-    var padded = value.Length % 4 switch
+    if (string.IsNullOrWhiteSpace(value))
     {
-        2 => value + "==",
-        3 => value + "=",
-        _ => value
-    };
+        return Array.Empty<byte>();
+    }
 
-    padded = padded.Replace('-', '+').Replace('_', '/');
+    var remainder = value.Length % 4;
+    if (remainder == 2)
+    {
+        value += "==";
+    }
+    else if (remainder == 3)
+    {
+        value += "=";
+    }
+    else if (remainder != 0)
+    {
+        value += new string('=', 4 - remainder);
+    }
+
+    var padded = value.Replace('-', '+').Replace('_', '/');
     return Convert.FromBase64String(padded);
 }
 }
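
Note (illustration only, not part of the patch): the rewritten decoder follows the usual base64url rule: the URL-safe alphabet replaces '+' and '/' with '-' and '_' and drops the trailing '=' padding, so decoding restores padding until the length is a multiple of four and maps the characters back before handing the string to the standard decoder. A compact sketch of the same rule (helper name is hypothetical):

```csharp
using System;

static class Base64Url
{
    // Decodes a base64url string by restoring '=' padding and the standard alphabet.
    public static byte[] Decode(string value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return Array.Empty<byte>();
        }

        var remainder = value.Length % 4;
        if (remainder != 0)
        {
            // A remainder of 1 is never valid base64; padding it still fails below,
            // which mirrors the defensive "remainder != 0" branch in the hunk above.
            value += new string('=', 4 - remainder);
        }

        return Convert.FromBase64String(value.Replace('-', '+').Replace('_', '/'));
    }
}
```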
|
|||||||
@@ -119,7 +119,7 @@ internal sealed class PersistTokensHandler : IOpenIddictServerHandler<OpenIddict
|
|||||||
|
|
||||||
private static DateTimeOffset? TryGetExpiration(ClaimsPrincipal principal)
|
private static DateTimeOffset? TryGetExpiration(ClaimsPrincipal principal)
|
||||||
{
|
{
|
||||||
var value = principal.GetClaim(OpenIddictConstants.Claims.Exp);
|
var value = principal.GetClaim("exp");
|
||||||
if (string.IsNullOrWhiteSpace(value))
|
if (string.IsNullOrWhiteSpace(value))
|
||||||
{
|
{
|
||||||
return null;
|
return null;
|
||||||
|
@@ -1,4 +1,7 @@
+using System;
+using System.Collections.Generic;
 using System.Diagnostics;
+using System.Globalization;
 using System.Security.Claims;
 using Microsoft.Extensions.Logging;
 using OpenIddict.Abstractions;
@@ -7,8 +10,10 @@ using OpenIddict.Server;
 using StellaOps.Auth.Abstractions;
 using StellaOps.Authority.OpenIddict;
 using StellaOps.Authority.Plugins.Abstractions;
+using StellaOps.Authority.RateLimiting;
 using StellaOps.Authority.Storage.Mongo.Documents;
 using StellaOps.Authority.Storage.Mongo.Stores;
+using StellaOps.Cryptography.Audit;
 
 namespace StellaOps.Authority.OpenIddict.Handlers;
 
@@ -17,6 +22,8 @@ internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler<Open
 private readonly IAuthorityTokenStore tokenStore;
 private readonly IAuthorityClientStore clientStore;
 private readonly IAuthorityIdentityProviderRegistry registry;
+private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor;
+private readonly IAuthEventSink auditSink;
 private readonly TimeProvider clock;
 private readonly ActivitySource activitySource;
 private readonly ILogger<ValidateAccessTokenHandler> logger;
@@ -25,6 +32,8 @@ internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler<Open
 IAuthorityTokenStore tokenStore,
 IAuthorityClientStore clientStore,
 IAuthorityIdentityProviderRegistry registry,
+IAuthorityRateLimiterMetadataAccessor metadataAccessor,
+IAuthEventSink auditSink,
 TimeProvider clock,
 ActivitySource activitySource,
 ILogger<ValidateAccessTokenHandler> logger)
@@ -32,6 +41,8 @@ internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler<Open
 this.tokenStore = tokenStore ?? throw new ArgumentNullException(nameof(tokenStore));
 this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore));
 this.registry = registry ?? throw new ArgumentNullException(nameof(registry));
+this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor));
+this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink));
 this.clock = clock ?? throw new ArgumentNullException(nameof(clock));
 this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource));
 this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
@@ -63,9 +74,10 @@ internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler<Open
 ? context.TokenId
 : context.Principal.GetClaim(OpenIddictConstants.Claims.JwtId);
 
+AuthorityTokenDocument? tokenDocument = null;
 if (!string.IsNullOrWhiteSpace(tokenId))
 {
-    var tokenDocument = await tokenStore.FindByTokenIdAsync(tokenId, context.CancellationToken).ConfigureAwait(false);
+    tokenDocument = await tokenStore.FindByTokenIdAsync(tokenId, context.CancellationToken).ConfigureAwait(false);
     if (tokenDocument is not null)
     {
         if (!string.Equals(tokenDocument.Status, "valid", StringComparison.OrdinalIgnoreCase))
@@ -87,6 +99,11 @@ internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler<Open
     }
 }
 
+if (!context.IsRejected && tokenDocument is not null)
+{
+    await TrackTokenUsageAsync(context, tokenDocument, context.Principal).ConfigureAwait(false);
+}
+
 var clientId = context.Principal.GetClaim(OpenIddictConstants.Claims.ClientId);
 if (!string.IsNullOrWhiteSpace(clientId))
 {
@@ -144,4 +161,107 @@ internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler<Open
 identity.GetClaim(OpenIddictConstants.Claims.Subject),
 identity.GetClaim(OpenIddictConstants.Claims.ClientId));
 }
+
+private async ValueTask TrackTokenUsageAsync(
+    OpenIddictServerEvents.ValidateTokenContext context,
+    AuthorityTokenDocument tokenDocument,
+    ClaimsPrincipal principal)
+{
+    var metadata = metadataAccessor.GetMetadata();
+    var remoteAddress = metadata?.RemoteIp;
+    var userAgent = metadata?.UserAgent;
+
+    var observedAt = clock.GetUtcNow();
+    var result = await tokenStore.RecordUsageAsync(tokenDocument.TokenId, remoteAddress, userAgent, observedAt, context.CancellationToken)
+        .ConfigureAwait(false);
+
+    switch (result.Status)
+    {
+        case TokenUsageUpdateStatus.MissingMetadata:
+            logger.LogDebug("Token usage metadata missing for token {TokenId}; replay detection skipped.", tokenDocument.TokenId);
+            break;
+        case TokenUsageUpdateStatus.NotFound:
+            logger.LogWarning("Token usage recording failed: token {TokenId} not found.", tokenDocument.TokenId);
+            break;
+        case TokenUsageUpdateStatus.Recorded:
+            metadataAccessor.SetTag("authority.token_usage", "recorded");
+            break;
+        case TokenUsageUpdateStatus.SuspectedReplay:
+            metadataAccessor.SetTag("authority.token_usage", "suspected_replay");
+            await EmitReplayAuditAsync(tokenDocument, principal, metadata, result, observedAt, context.CancellationToken).ConfigureAwait(false);
+            break;
+    }
+}
+
+private async ValueTask EmitReplayAuditAsync(
+    AuthorityTokenDocument tokenDocument,
+    ClaimsPrincipal principal,
+    AuthorityRateLimiterMetadata? metadata,
+    TokenUsageUpdateResult result,
+    DateTimeOffset observedAt,
+    CancellationToken cancellationToken)
+{
+    var clientId = principal.GetClaim(OpenIddictConstants.Claims.ClientId);
+    var subjectId = principal.GetClaim(OpenIddictConstants.Claims.Subject);
+    var realm = principal.GetClaim(StellaOpsClaimTypes.IdentityProvider);
+
+    var subject = string.IsNullOrWhiteSpace(subjectId) && string.IsNullOrWhiteSpace(realm)
+        ? null
+        : new AuthEventSubject
+        {
+            SubjectId = ClassifiedString.Personal(subjectId),
+            Realm = ClassifiedString.Public(string.IsNullOrWhiteSpace(realm) ? null : realm)
+        };
+
+    var client = string.IsNullOrWhiteSpace(clientId)
+        ? null
+        : new AuthEventClient
+        {
+            ClientId = ClassifiedString.Personal(clientId)
+        };
+
+    var network = metadata is null && result.RemoteAddress is null && result.UserAgent is null
+        ? null
+        : new AuthEventNetwork
+        {
+            RemoteAddress = ClassifiedString.Personal(result.RemoteAddress ?? metadata?.RemoteIp),
+            ForwardedFor = ClassifiedString.Personal(metadata?.ForwardedFor),
+            UserAgent = ClassifiedString.Personal(result.UserAgent ?? metadata?.UserAgent)
+        };
+
+    var previousCount = tokenDocument.Devices?.Count ?? 0;
+    var properties = new List<AuthEventProperty>
+    {
+        new() { Name = "token.id", Value = ClassifiedString.Sensitive(tokenDocument.TokenId) },
+        new() { Name = "token.type", Value = ClassifiedString.Public(tokenDocument.Type) },
+        new() { Name = "token.devices.total", Value = ClassifiedString.Public((previousCount + 1).ToString(CultureInfo.InvariantCulture)) }
+    };
+
+    if (!string.IsNullOrWhiteSpace(tokenDocument.ClientId))
+    {
+        properties.Add(new AuthEventProperty
+        {
+            Name = "token.client_id",
+            Value = ClassifiedString.Personal(tokenDocument.ClientId)
+        });
+    }
+
+    logger.LogWarning("Detected suspected token replay for token {TokenId} (client {ClientId}).", tokenDocument.TokenId, clientId ?? "<none>");
+
+    var record = new AuthEventRecord
+    {
+        EventType = "authority.token.replay.suspected",
+        OccurredAt = observedAt,
+        CorrelationId = Activity.Current?.TraceId.ToString() ?? Guid.NewGuid().ToString("N"),
+        Outcome = AuthEventOutcome.Error,
+        Reason = "Token observed from a new device fingerprint.",
+        Subject = subject,
+        Client = client,
+        Scopes = Array.Empty<string>(),
+        Network = network,
+        Properties = properties
+    };
+
+    await auditSink.WriteAsync(record, cancellationToken).ConfigureAwait(false);
+}
 }
@@ -0,0 +1,112 @@
+using System.Collections.Generic;
+using System.Linq;
+using OpenIddict.Abstractions;
+
+namespace StellaOps.Authority.OpenIddict;
+
+internal static class TokenRequestTamperInspector
+{
+    private static readonly HashSet<string> CommonParameters = new(StringComparer.OrdinalIgnoreCase)
+    {
+        OpenIddictConstants.Parameters.GrantType,
+        OpenIddictConstants.Parameters.Scope,
+        OpenIddictConstants.Parameters.Resource,
+        OpenIddictConstants.Parameters.ClientId,
+        OpenIddictConstants.Parameters.ClientSecret,
+        OpenIddictConstants.Parameters.ClientAssertion,
+        OpenIddictConstants.Parameters.ClientAssertionType,
+        OpenIddictConstants.Parameters.RefreshToken,
+        OpenIddictConstants.Parameters.DeviceCode,
+        OpenIddictConstants.Parameters.Code,
+        OpenIddictConstants.Parameters.CodeVerifier,
+        OpenIddictConstants.Parameters.CodeChallenge,
+        OpenIddictConstants.Parameters.CodeChallengeMethod,
+        OpenIddictConstants.Parameters.RedirectUri,
+        OpenIddictConstants.Parameters.Assertion,
+        OpenIddictConstants.Parameters.Nonce,
+        OpenIddictConstants.Parameters.Prompt,
+        OpenIddictConstants.Parameters.MaxAge,
+        OpenIddictConstants.Parameters.UiLocales,
+        OpenIddictConstants.Parameters.AcrValues,
+        OpenIddictConstants.Parameters.LoginHint,
+        OpenIddictConstants.Parameters.Claims,
+        OpenIddictConstants.Parameters.Token,
+        OpenIddictConstants.Parameters.TokenTypeHint,
+        OpenIddictConstants.Parameters.AccessToken,
+        OpenIddictConstants.Parameters.IdToken
+    };
+
+    private static readonly HashSet<string> PasswordGrantParameters = new(StringComparer.OrdinalIgnoreCase)
+    {
+        OpenIddictConstants.Parameters.Username,
+        OpenIddictConstants.Parameters.Password,
+        AuthorityOpenIddictConstants.ProviderParameterName
+    };
+
+    private static readonly HashSet<string> ClientCredentialsParameters = new(StringComparer.OrdinalIgnoreCase)
+    {
+        AuthorityOpenIddictConstants.ProviderParameterName
+    };
+
+    internal static IReadOnlyList<string> GetUnexpectedPasswordGrantParameters(OpenIddictRequest request)
+        => DetectUnexpectedParameters(request, PasswordGrantParameters);
+
+    internal static IReadOnlyList<string> GetUnexpectedClientCredentialsParameters(OpenIddictRequest request)
+        => DetectUnexpectedParameters(request, ClientCredentialsParameters);
+
+    private static IReadOnlyList<string> DetectUnexpectedParameters(
+        OpenIddictRequest request,
+        HashSet<string> grantSpecific)
+    {
+        if (request is null)
+        {
+            return Array.Empty<string>();
+        }
+
+        var unexpected = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
+
+        foreach (var pair in request.GetParameters())
+        {
+            var name = pair.Key;
+            if (string.IsNullOrWhiteSpace(name))
+            {
+                continue;
+            }
+
+            if (IsAllowed(name, grantSpecific))
+            {
+                continue;
+            }
+
+            unexpected.Add(name);
+        }
+
+        return unexpected.Count == 0
+            ? Array.Empty<string>()
+            : unexpected
+                .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
+                .ToArray();
+    }
+
+    private static bool IsAllowed(string parameterName, HashSet<string> grantSpecific)
+    {
+        if (CommonParameters.Contains(parameterName) || grantSpecific.Contains(parameterName))
+        {
+            return true;
+        }
+
+        if (parameterName.StartsWith("ext_", StringComparison.OrdinalIgnoreCase) ||
+            parameterName.StartsWith("x-", StringComparison.OrdinalIgnoreCase) ||
+            parameterName.StartsWith("custom_", StringComparison.OrdinalIgnoreCase))
+        {
+            return true;
+        }
+
+        if (parameterName.Contains(':', StringComparison.Ordinal))
+        {
+            return true;
+        }
+
+        return false;
+    }
+}
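
Note (illustration only, not part of the patch): the inspector is an allow-list. Every parameter on the token request is checked against the shared OAuth/OIDC parameter names plus the grant-specific ones, and anything else that does not look like an extension parameter (an `ext_`, `x-`, or `custom_` prefix, or a namespaced "ns:key" name) is reported so the grant handlers can emit an `authority.token.tamper` audit event. A self-contained sketch of that detection logic over plain strings (the class and method names are hypothetical):

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

static class UnexpectedParameterDetector
{
    // Returns parameter names that are neither allow-listed nor recognisable as
    // extension parameters (ext_/x-/custom_ prefixes, or vendor "ns:key" names).
    public static IReadOnlyList<string> Detect(IEnumerable<string> parameterNames, ISet<string> allowed)
    {
        return parameterNames
            .Where(name => !string.IsNullOrWhiteSpace(name))
            .Where(name => !allowed.Contains(name))
            .Where(name => !name.StartsWith("ext_", StringComparison.OrdinalIgnoreCase)
                        && !name.StartsWith("x-", StringComparison.OrdinalIgnoreCase)
                        && !name.StartsWith("custom_", StringComparison.OrdinalIgnoreCase)
                        && !name.Contains(':', StringComparison.Ordinal))
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .OrderBy(name => name, StringComparer.OrdinalIgnoreCase)
            .ToArray();
    }
}

// Example: on a client-credentials request that only allows "provider" beyond the
// common parameters, a stray "telemetry_id" would be flagged:
//
// var allowed = new HashSet<string>(
//     new[] { "grant_type", "client_id", "client_secret", "scope", "provider" },
//     StringComparer.OrdinalIgnoreCase);
// var extras = UnexpectedParameterDetector.Detect(
//     new[] { "grant_type", "client_id", "client_secret", "scope", "provider", "telemetry_id" },
//     allowed);
// // extras => ["telemetry_id"]
```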
@@ -24,6 +24,7 @@ using StellaOps.Authority.Plugins;
 using StellaOps.Authority.Bootstrap;
 using StellaOps.Authority.Storage.Mongo.Extensions;
 using StellaOps.Authority.Storage.Mongo.Initialization;
+using StellaOps.Authority.Storage.Mongo.Stores;
 using StellaOps.Authority.RateLimiting;
 using StellaOps.Configuration;
 using StellaOps.Plugin.DependencyInjection;
@@ -35,6 +36,7 @@ using StellaOps.Cryptography.DependencyInjection;
 using StellaOps.Authority.Revocation;
 using StellaOps.Authority.Signing;
 using StellaOps.Cryptography;
+using StellaOps.Authority.Storage.Mongo.Documents;
 
 var builder = WebApplication.CreateBuilder(args);
 
@@ -124,6 +126,7 @@ builder.Services.AddSingleton<RevocationBundleBuilder>();
 builder.Services.AddSingleton<RevocationBundleSigner>();
 builder.Services.AddSingleton<AuthorityRevocationExportService>();
 builder.Services.AddSingleton<AuthorityJwksService>();
+builder.Services.AddHostedService<BootstrapInviteCleanupService>();
 
 var pluginRegistrationSummary = AuthorityPluginLoader.RegisterPlugins(
     builder.Services,
@@ -281,38 +284,98 @@ if (authorityOptions.Bootstrap.Enabled)
 HttpContext httpContext,
 BootstrapUserRequest request,
 IAuthorityIdentityProviderRegistry registry,
+IAuthorityBootstrapInviteStore inviteStore,
 IAuthEventSink auditSink,
 TimeProvider timeProvider,
 CancellationToken cancellationToken) =>
 {
     if (request is null)
     {
-        await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Request payload is required.", null, null, null, Array.Empty<string>()).ConfigureAwait(false);
+        await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Request payload is required.", null, null, null, Array.Empty<string>(), null).ConfigureAwait(false);
        return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." });
     }
 
+    var now = timeProvider.GetUtcNow();
+    var inviteToken = string.IsNullOrWhiteSpace(request.InviteToken) ? null : request.InviteToken.Trim();
+    AuthorityBootstrapInviteDocument? invite = null;
+    var inviteReserved = false;
+
+    async Task ReleaseInviteAsync(string reason)
+    {
+        if (inviteToken is null)
+        {
+            return;
+        }
+
+        if (inviteReserved)
+        {
+            await inviteStore.ReleaseAsync(inviteToken, cancellationToken).ConfigureAwait(false);
+        }
+
+        await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, reason, invite, inviteToken).ConfigureAwait(false);
+    }
+
+    if (inviteToken is not null)
+    {
+        var reservation = await inviteStore.TryReserveAsync(inviteToken, BootstrapInviteTypes.User, now, request.Username, cancellationToken).ConfigureAwait(false);
+
+        switch (reservation.Status)
+        {
+            case BootstrapInviteReservationStatus.Reserved:
+                inviteReserved = true;
+                invite = reservation.Invite;
+                break;
+            case BootstrapInviteReservationStatus.Expired:
+                await WriteInviteAuditAsync("authority.bootstrap.invite.expired", AuthEventOutcome.Failure, "Invite expired before use.", reservation.Invite, inviteToken).ConfigureAwait(false);
+                return Results.BadRequest(new { error = "invite_expired", message = "Invite has expired." });
+            case BootstrapInviteReservationStatus.AlreadyUsed:
+                await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token already consumed.", reservation.Invite, inviteToken).ConfigureAwait(false);
+                return Results.BadRequest(new { error = "invite_used", message = "Invite token has already been used." });
+            default:
+                await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token not found.", reservation.Invite, inviteToken).ConfigureAwait(false);
+                return Results.BadRequest(new { error = "invalid_invite", message = "Invite token is invalid." });
+        }
+    }
+
     var providerName = string.IsNullOrWhiteSpace(request.Provider)
-        ? authorityOptions.Bootstrap.DefaultIdentityProvider
+        ? invite?.Provider ?? authorityOptions.Bootstrap.DefaultIdentityProvider
         : request.Provider;
 
+    if (invite is not null && !string.IsNullOrWhiteSpace(invite.Provider) &&
+        !string.Equals(invite.Provider, providerName, StringComparison.OrdinalIgnoreCase))
+    {
+        await ReleaseInviteAsync("Invite provider does not match requested provider.");
+        return Results.BadRequest(new { error = "invite_provider_mismatch", message = "Invite is limited to a different identity provider." });
+    }
+
     if (string.IsNullOrWhiteSpace(providerName) || !registry.TryGet(providerName!, out var provider))
     {
-        await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", null, request.Username, providerName, request.Roles ?? Array.Empty<string>()).ConfigureAwait(false);
+        await ReleaseInviteAsync("Specified identity provider was not found.");
+        await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", null, request.Username, providerName, request.Roles ?? Array.Empty<string>(), inviteToken).ConfigureAwait(false);
        return Results.BadRequest(new { error = "invalid_provider", message = "Specified identity provider was not found." });
     }
 
     if (!provider.Capabilities.SupportsPassword)
     {
-        await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support password provisioning.", null, request.Username, provider.Name, request.Roles ?? Array.Empty<string>()).ConfigureAwait(false);
+        await ReleaseInviteAsync("Selected provider does not support password provisioning.");
+        await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support password provisioning.", null, request.Username, provider.Name, request.Roles ?? Array.Empty<string>(), inviteToken).ConfigureAwait(false);
        return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support password provisioning." });
     }
 
     if (string.IsNullOrWhiteSpace(request.Username) || string.IsNullOrEmpty(request.Password))
     {
-        await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Username and password are required.", null, request.Username, provider.Name, request.Roles ?? Array.Empty<string>()).ConfigureAwait(false);
+        await ReleaseInviteAsync("Username and password are required.");
+        await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Username and password are required.", null, request.Username, provider.Name, request.Roles ?? Array.Empty<string>(), inviteToken).ConfigureAwait(false);
        return Results.BadRequest(new { error = "invalid_request", message = "Username and password are required." });
     }
 
+    if (invite is not null && !string.IsNullOrWhiteSpace(invite.Target) &&
+        !string.Equals(invite.Target, request.Username, StringComparison.OrdinalIgnoreCase))
+    {
+        await ReleaseInviteAsync("Invite target does not match requested username.");
+        return Results.BadRequest(new { error = "invite_target_mismatch", message = "Invite target does not match username." });
+    }
+
     var roles = request.Roles is null ? Array.Empty<string>() : request.Roles.ToArray();
     var attributes = request.Attributes is null
         ? new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
@@ -327,15 +390,27 @@ if (authorityOptions.Bootstrap.Enabled)
     roles,
     attributes);
 
+    try
+    {
     var result = await provider.Credentials.UpsertUserAsync(registration, cancellationToken).ConfigureAwait(false);
 
     if (!result.Succeeded || result.Value is null)
     {
-        await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, result.Message ?? "User provisioning failed.", null, request.Username, provider.Name, roles).ConfigureAwait(false);
+        await ReleaseInviteAsync(result.Message ?? "User provisioning failed.");
+        await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, result.Message ?? "User provisioning failed.", null, request.Username, provider.Name, roles, inviteToken).ConfigureAwait(false);
        return Results.BadRequest(new { error = result.ErrorCode ?? "bootstrap_failed", message = result.Message ?? "User provisioning failed." });
     }
 
-    await WriteBootstrapUserAuditAsync(AuthEventOutcome.Success, null, result.Value.SubjectId, result.Value.Username, provider.Name, roles).ConfigureAwait(false);
+    if (inviteReserved && inviteToken is not null)
+    {
+        var consumed = await inviteStore.MarkConsumedAsync(inviteToken, result.Value.SubjectId ?? result.Value.Username, now, cancellationToken).ConfigureAwait(false);
+        if (consumed)
+        {
+            await WriteInviteAuditAsync("authority.bootstrap.invite.consumed", AuthEventOutcome.Success, null, invite, inviteToken).ConfigureAwait(false);
+        }
+    }
+
+    await WriteBootstrapUserAuditAsync(AuthEventOutcome.Success, null, result.Value.SubjectId, result.Value.Username, provider.Name, roles, inviteToken).ConfigureAwait(false);
+
     return Results.Ok(new
     {
@@ -343,8 +418,19 @@ if (authorityOptions.Bootstrap.Enabled)
     subjectId = result.Value.SubjectId,
     username = result.Value.Username
     });
+    }
+    catch
+    {
+        if (inviteReserved && inviteToken is not null)
+        {
+            await inviteStore.ReleaseAsync(inviteToken, cancellationToken).ConfigureAwait(false);
+            await WriteInviteAuditAsync("authority.bootstrap.invite.released", AuthEventOutcome.Error, "Invite released due to provisioning failure.", invite, inviteToken).ConfigureAwait(false);
+        }
+
-    async Task WriteBootstrapUserAuditAsync(AuthEventOutcome outcome, string? reason, string? subjectId, string? usernameValue, string? providerValue, IReadOnlyCollection<string> rolesValue)
+        throw;
+    }
+
+    async Task WriteBootstrapUserAuditAsync(AuthEventOutcome outcome, string? reason, string? subjectId, string? usernameValue, string? providerValue, IReadOnlyCollection<string> rolesValue, string? inviteValue)
     {
     var correlationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture);
     AuthEventNetwork? network = null;
@@ -369,16 +455,24 @@ if (authorityOptions.Bootstrap.Enabled)
     Realm = ClassifiedString.Public(providerValue)
     };
 
-    var properties = string.IsNullOrWhiteSpace(providerValue)
-        ? Array.Empty<AuthEventProperty>()
-        : new[]
+    var properties = new List<AuthEventProperty>();
+    if (!string.IsNullOrWhiteSpace(providerValue))
     {
-        new AuthEventProperty
+        properties.Add(new AuthEventProperty
        {
            Name = "bootstrap.provider",
            Value = ClassifiedString.Public(providerValue)
+        });
+    }
+
+    if (!string.IsNullOrWhiteSpace(inviteValue))
+    {
+        properties.Add(new AuthEventProperty
+        {
+            Name = "bootstrap.invite_token",
+            Value = ClassifiedString.Public(inviteValue)
+        });
     }
-    };
 
     var scopes = rolesValue is { Count: > 0 }
         ? rolesValue.ToArray()
@@ -395,65 +489,199 @@ if (authorityOptions.Bootstrap.Enabled)
     Client = null,
     Scopes = scopes,
     Network = network,
-    Properties = properties
+    Properties = properties.Count == 0 ? Array.Empty<AuthEventProperty>() : properties
     };
 
     await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false);
     }
+
+    async Task WriteInviteAuditAsync(string eventType, AuthEventOutcome outcome, string? reason, AuthorityBootstrapInviteDocument? document, string? tokenValue)
+    {
+        var record = new AuthEventRecord
+        {
+            EventType = eventType,
+            OccurredAt = timeProvider.GetUtcNow(),
+            CorrelationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture),
+            Outcome = outcome,
+            Reason = reason,
+            Subject = null,
+            Client = null,
+            Scopes = Array.Empty<string>(),
+            Network = null,
+            Properties = BuildInviteProperties(document, tokenValue)
+        };
+
+        await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false);
+    }
+
+    static AuthEventProperty[] BuildInviteProperties(AuthorityBootstrapInviteDocument? document, string? token)
+    {
+        var properties = new List<AuthEventProperty>();
+        if (!string.IsNullOrWhiteSpace(token))
+        {
+            properties.Add(new AuthEventProperty
+            {
+                Name = "invite.token",
+                Value = ClassifiedString.Public(token)
+            });
+        }
+
+        if (document is not null)
+        {
+            if (!string.IsNullOrWhiteSpace(document.Type))
+            {
+                properties.Add(new AuthEventProperty
+                {
+                    Name = "invite.type",
+                    Value = ClassifiedString.Public(document.Type)
+                });
+            }
+
+            if (!string.IsNullOrWhiteSpace(document.Provider))
+            {
+                properties.Add(new AuthEventProperty
+                {
+                    Name = "invite.provider",
+                    Value = ClassifiedString.Public(document.Provider)
+                });
+            }
+
+            if (!string.IsNullOrWhiteSpace(document.Target))
+            {
+                properties.Add(new AuthEventProperty
+                {
+                    Name = "invite.target",
+                    Value = ClassifiedString.Public(document.Target)
+                });
+            }
+
+            properties.Add(new AuthEventProperty
+            {
+                Name = "invite.expires_at",
+                Value = ClassifiedString.Public(document.ExpiresAt.ToString("O", CultureInfo.InvariantCulture))
+            });
+        }
+
+        return properties.Count == 0 ? Array.Empty<AuthEventProperty>() : properties.ToArray();
+    }
 });
 
 bootstrapGroup.MapPost("/clients", async (
 HttpContext httpContext,
 BootstrapClientRequest request,
 IAuthorityIdentityProviderRegistry registry,
+IAuthorityBootstrapInviteStore inviteStore,
 IAuthEventSink auditSink,
 TimeProvider timeProvider,
 CancellationToken cancellationToken) =>
 {
     if (request is null)
     {
-        await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Request payload is required.", null, null, null, Array.Empty<string>(), null).ConfigureAwait(false);
+        await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Request payload is required.", null, null, null, Array.Empty<string>(), null, null).ConfigureAwait(false);
        return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." });
     }
 
+    var now = timeProvider.GetUtcNow();
+    var inviteToken = string.IsNullOrWhiteSpace(request.InviteToken) ? null : request.InviteToken.Trim();
+    AuthorityBootstrapInviteDocument? invite = null;
+    var inviteReserved = false;
+
+    async Task ReleaseInviteAsync(string reason)
+    {
+        if (inviteToken is null)
+        {
+            return;
+        }
+
+        if (inviteReserved)
+        {
+            await inviteStore.ReleaseAsync(inviteToken, cancellationToken).ConfigureAwait(false);
+        }
+
+        await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, reason, invite, inviteToken).ConfigureAwait(false);
+    }
+
+    if (inviteToken is not null)
+    {
+        var reservation = await inviteStore.TryReserveAsync(inviteToken, BootstrapInviteTypes.Client, now, request.ClientId, cancellationToken).ConfigureAwait(false);
+        switch (reservation.Status)
+        {
+            case BootstrapInviteReservationStatus.Reserved:
+                inviteReserved = true;
+                invite = reservation.Invite;
+                break;
+            case BootstrapInviteReservationStatus.Expired:
+                await WriteInviteAuditAsync("authority.bootstrap.invite.expired", AuthEventOutcome.Failure, "Invite expired before use.", reservation.Invite, inviteToken).ConfigureAwait(false);
+                return Results.BadRequest(new { error = "invite_expired", message = "Invite has expired." });
+            case BootstrapInviteReservationStatus.AlreadyUsed:
+                await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token already consumed.", reservation.Invite, inviteToken).ConfigureAwait(false);
+                return Results.BadRequest(new { error = "invite_used", message = "Invite token has already been used." });
+            default:
+                await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token is invalid.", reservation.Invite, inviteToken).ConfigureAwait(false);
+                return Results.BadRequest(new { error = "invalid_invite", message = "Invite token is invalid." });
+        }
+    }
+
     var providerName = string.IsNullOrWhiteSpace(request.Provider)
-        ? authorityOptions.Bootstrap.DefaultIdentityProvider
+        ? invite?.Provider ?? authorityOptions.Bootstrap.DefaultIdentityProvider
         : request.Provider;
 
+    if (invite is not null && !string.IsNullOrWhiteSpace(invite.Provider) &&
+        !string.Equals(invite.Provider, providerName, StringComparison.OrdinalIgnoreCase))
+    {
+        await ReleaseInviteAsync("Invite provider does not match requested provider.");
+        return Results.BadRequest(new { error = "invite_provider_mismatch", message = "Invite is limited to a different identity provider." });
+    }
+
     if (string.IsNullOrWhiteSpace(providerName) || !registry.TryGet(providerName!, out var provider))
     {
-        await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", request.ClientId, null, providerName, request.AllowedScopes ?? Array.Empty<string>(), request?.Confidential).ConfigureAwait(false);
+        await ReleaseInviteAsync("Specified identity provider was not found.");
+        await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", request.ClientId, null, providerName, request.AllowedScopes ?? Array.Empty<string>(), request?.Confidential, inviteToken).ConfigureAwait(false);
        return Results.BadRequest(new { error = "invalid_provider", message = "Specified identity provider was not found." });
     }
 
     if (!provider.Capabilities.SupportsClientProvisioning || provider.ClientProvisioning is null)
     {
-        await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support client provisioning.", request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential).ConfigureAwait(false);
+        await ReleaseInviteAsync("Selected provider does not support client provisioning.");
+        await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support client provisioning.", request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
        return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support client provisioning." });
     }
 
     if (string.IsNullOrWhiteSpace(request.ClientId))
     {
-        await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "ClientId is required.", null, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential).ConfigureAwait(false);
+        await ReleaseInviteAsync("ClientId is required.");
+        await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "ClientId is required.", null, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
        return Results.BadRequest(new { error = "invalid_request", message = "ClientId is required." });
     }
 
+    if (invite is not null && !string.IsNullOrWhiteSpace(invite.Target) &&
+        !string.Equals(invite.Target, request.ClientId, StringComparison.OrdinalIgnoreCase))
+    {
+        await ReleaseInviteAsync("Invite target does not match requested client id.");
+        return Results.BadRequest(new { error = "invite_target_mismatch", message = "Invite target does not match client id." });
+    }
+
     if (request.Confidential && string.IsNullOrWhiteSpace(request.ClientSecret))
     {
-        await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Confidential clients require a client secret.", request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential).ConfigureAwait(false);
+        await ReleaseInviteAsync("Confidential clients require a client secret.");
+        await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Confidential clients require a client secret.", request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
        return Results.BadRequest(new { error = "invalid_request", message = "Confidential clients require a client secret." });
     }
 
     if (!TryParseUris(request.RedirectUris, out var redirectUris, out var redirectError))
     {
-        await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, redirectError, request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential).ConfigureAwait(false);
-        return Results.BadRequest(new { error = "invalid_request", message = redirectError });
+        var errorMessage = redirectError ?? "Redirect URI validation failed.";
+        await ReleaseInviteAsync(errorMessage);
+        await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, errorMessage, request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
+        return Results.BadRequest(new { error = "invalid_request", message = errorMessage });
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!TryParseUris(request.PostLogoutRedirectUris, out var postLogoutUris, out var postLogoutError))
|
if (!TryParseUris(request.PostLogoutRedirectUris, out var postLogoutUris, out var postLogoutError))
|
||||||
{
|
{
|
||||||
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, postLogoutError, request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential).ConfigureAwait(false);
|
var errorMessage = postLogoutError ?? "Post-logout redirect URI validation failed.";
|
||||||
return Results.BadRequest(new { error = "invalid_request", message = postLogoutError });
|
await ReleaseInviteAsync(errorMessage);
|
||||||
|
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, errorMessage, request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
|
||||||
|
return Results.BadRequest(new { error = "invalid_request", message = errorMessage });
|
||||||
}
|
}
|
||||||
|
|
||||||
var properties = request.Properties is null
|
var properties = request.Properties is null
|
||||||
@@ -475,11 +703,21 @@ if (authorityOptions.Bootstrap.Enabled)
|
|||||||
|
|
||||||
    if (!result.Succeeded || result.Value is null)
    {
        await ReleaseInviteAsync(result.Message ?? "Client provisioning failed.");
        await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, result.Message ?? "Client provisioning failed.", request.ClientId, result.Value?.ClientId, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
        return Results.BadRequest(new { error = result.ErrorCode ?? "bootstrap_failed", message = result.Message ?? "Client provisioning failed." });
    }

    if (inviteReserved && inviteToken is not null)
    {
        var consumed = await inviteStore.MarkConsumedAsync(inviteToken, result.Value.ClientId, now, cancellationToken).ConfigureAwait(false);
        if (consumed)
        {
            await WriteInviteAuditAsync("authority.bootstrap.invite.consumed", AuthEventOutcome.Success, null, invite, inviteToken).ConfigureAwait(false);
        }
    }

    await WriteBootstrapClientAuditAsync(AuthEventOutcome.Success, null, request.ClientId, result.Value.ClientId, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);

    return Results.Ok(new
    {

@@ -488,7 +726,7 @@ if (authorityOptions.Bootstrap.Enabled)
        confidential = result.Value.Confidential
    });

    async Task WriteBootstrapClientAuditAsync(AuthEventOutcome outcome, string? reason, string? requestedClientId, string? assignedClientId, string? providerValue, IReadOnlyCollection<string> scopes, bool? confidentialFlag, string? inviteValue)
    {
        var correlationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture);
        AuthEventNetwork? network = null;

@@ -533,6 +771,15 @@ if (authorityOptions.Bootstrap.Enabled)
        });
    }

    if (!string.IsNullOrWhiteSpace(inviteValue))
    {
        properties.Add(new AuthEventProperty
        {
            Name = "bootstrap.invite_token",
            Value = ClassifiedString.Public(inviteValue)
        });
    }

    var record = new AuthEventRecord
    {
        EventType = "authority.bootstrap.client",

@@ -549,6 +796,175 @@ if (authorityOptions.Bootstrap.Enabled)

    await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false);
}
async Task WriteInviteAuditAsync(string eventType, AuthEventOutcome outcome, string? reason, AuthorityBootstrapInviteDocument? document, string? tokenValue)
{
    var record = new AuthEventRecord
    {
        EventType = eventType,
        OccurredAt = timeProvider.GetUtcNow(),
        CorrelationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture),
        Outcome = outcome,
        Reason = reason,
        Subject = null,
        Client = null,
        Scopes = Array.Empty<string>(),
        Network = null,
        Properties = BuildInviteProperties(document, tokenValue)
    };

    await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false);
}

static AuthEventProperty[] BuildInviteProperties(AuthorityBootstrapInviteDocument? document, string? token)
{
    var properties = new List<AuthEventProperty>();

    if (!string.IsNullOrWhiteSpace(token))
    {
        properties.Add(new AuthEventProperty
        {
            Name = "invite.token",
            Value = ClassifiedString.Public(token)
        });
    }

    if (document is not null)
    {
        if (!string.IsNullOrWhiteSpace(document.Type))
        {
            properties.Add(new AuthEventProperty
            {
                Name = "invite.type",
                Value = ClassifiedString.Public(document.Type)
            });
        }

        if (!string.IsNullOrWhiteSpace(document.Provider))
        {
            properties.Add(new AuthEventProperty
            {
                Name = "invite.provider",
                Value = ClassifiedString.Public(document.Provider)
            });
        }

        if (!string.IsNullOrWhiteSpace(document.Target))
        {
            properties.Add(new AuthEventProperty
            {
                Name = "invite.target",
                Value = ClassifiedString.Public(document.Target)
            });
        }

        properties.Add(new AuthEventProperty
        {
            Name = "invite.expires_at",
            Value = ClassifiedString.Public(document.ExpiresAt.ToString("O", CultureInfo.InvariantCulture))
        });
    }

    return properties.Count == 0 ? Array.Empty<AuthEventProperty>() : properties.ToArray();
}
});
bootstrapGroup.MapPost("/invites", async (
    HttpContext httpContext,
    BootstrapInviteRequest request,
    IAuthorityBootstrapInviteStore inviteStore,
    IAuthEventSink auditSink,
    TimeProvider timeProvider,
    CancellationToken cancellationToken) =>
{
    if (request is null)
    {
        return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." });
    }

    if (string.IsNullOrWhiteSpace(request.Type) ||
        (!string.Equals(request.Type, BootstrapInviteTypes.User, StringComparison.OrdinalIgnoreCase) &&
         !string.Equals(request.Type, BootstrapInviteTypes.Client, StringComparison.OrdinalIgnoreCase)))
    {
        return Results.BadRequest(new { error = "invalid_request", message = "Invite type must be 'user' or 'client'." });
    }

    var now = timeProvider.GetUtcNow();
    var expiresAt = request.ExpiresAt ?? now.AddDays(2);
    if (expiresAt <= now)
    {
        return Results.BadRequest(new { error = "invalid_request", message = "ExpiresAt must be in the future." });
    }

    var token = string.IsNullOrWhiteSpace(request.Token) ? Guid.NewGuid().ToString("N") : request.Token.Trim();

    var document = new AuthorityBootstrapInviteDocument
    {
        Token = token,
        Type = request.Type.ToLowerInvariant(),
        Provider = string.IsNullOrWhiteSpace(request.Provider) ? null : request.Provider.Trim(),
        Target = string.IsNullOrWhiteSpace(request.Target) ? null : request.Target.Trim(),
        IssuedAt = now,
        IssuedBy = string.IsNullOrWhiteSpace(request.IssuedBy) ? httpContext.User?.Identity?.Name : request.IssuedBy,
        ExpiresAt = expiresAt,
        Metadata = request.Metadata is null ? null : new Dictionary<string, string?>(request.Metadata, StringComparer.OrdinalIgnoreCase)
    };

    await inviteStore.CreateAsync(document, cancellationToken).ConfigureAwait(false);
    await WriteInviteAuditAsync("authority.bootstrap.invite.created", AuthEventOutcome.Success, null, document).ConfigureAwait(false);

    return Results.Ok(new
    {
        document.Token,
        document.Type,
        document.Provider,
        document.Target,
        document.ExpiresAt
    });

    async Task WriteInviteAuditAsync(string eventType, AuthEventOutcome outcome, string? reason, AuthorityBootstrapInviteDocument invite)
    {
        var record = new AuthEventRecord
        {
            EventType = eventType,
            OccurredAt = timeProvider.GetUtcNow(),
            CorrelationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture),
            Outcome = outcome,
            Reason = reason,
            Subject = null,
            Client = null,
            Scopes = Array.Empty<string>(),
            Network = null,
            Properties = BuildInviteProperties(invite)
        };

        await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false);
    }

    static AuthEventProperty[] BuildInviteProperties(AuthorityBootstrapInviteDocument invite)
    {
        var properties = new List<AuthEventProperty>
        {
            new() { Name = "invite.token", Value = ClassifiedString.Public(invite.Token) },
            new() { Name = "invite.type", Value = ClassifiedString.Public(invite.Type) },
            new() { Name = "invite.expires_at", Value = ClassifiedString.Public(invite.ExpiresAt.ToString("O", CultureInfo.InvariantCulture)) }
        };

        if (!string.IsNullOrWhiteSpace(invite.Provider))
        {
            properties.Add(new AuthEventProperty { Name = "invite.provider", Value = ClassifiedString.Public(invite.Provider) });
        }

        if (!string.IsNullOrWhiteSpace(invite.Target))
        {
            properties.Add(new AuthEventProperty { Name = "invite.target", Value = ClassifiedString.Public(invite.Target) });
        }

        if (!string.IsNullOrWhiteSpace(invite.IssuedBy))
        {
            properties.Add(new AuthEventProperty { Name = "invite.issued_by", Value = ClassifiedString.Public(invite.IssuedBy) });
        }

        return properties.ToArray();
    }
});
bootstrapGroup.MapGet("/revocations/export", async (

@@ -573,6 +989,7 @@ if (authorityOptions.Bootstrap.Enabled)
    {
        Algorithm = package.Signature.Algorithm,
        KeyId = package.Signature.KeyId,
        Provider = package.Signature.Provider,
        Value = package.Signature.Value
    },
    Digest = new RevocationExportDigest
@@ -41,6 +41,11 @@ internal sealed class AuthorityRateLimiterMetadata
    /// </summary>
    public IReadOnlyDictionary<string, string> Tags => tags;

    /// <summary>
    /// User agent string associated with the request, if captured.
    /// </summary>
    public string? UserAgent { get; set; }

    /// <summary>
    /// Adds or updates an arbitrary metadata tag for downstream consumers.
    /// </summary>

@@ -61,6 +61,9 @@ internal sealed class AuthorityRateLimiterMetadataMiddleware
            metadata.ClientId = ResolveAuthorizeClientId(context.Request.Query);
        }

        var userAgent = NormalizeUserAgent(context.Request.Headers.UserAgent.ToString());
        metadata.UserAgent = userAgent;

        if (!string.IsNullOrWhiteSpace(metadata.ClientId))
        {
            metadata.SetTag("authority.client_id", metadata.ClientId);

@@ -74,6 +77,10 @@ internal sealed class AuthorityRateLimiterMetadataMiddleware
        metadata.SetTag("authority.endpoint", metadata.Endpoint ?? string.Empty);
        metadata.SetTag("authority.remote_ip", metadata.RemoteIp ?? "unknown");
        metadata.SetTag("authority.captured_at", clock.GetUtcNow().ToString("O", CultureInfo.InvariantCulture));
        if (!string.IsNullOrWhiteSpace(userAgent))
        {
            metadata.SetTag("authority.user_agent", userAgent);
        }

        await next(context).ConfigureAwait(false);
    }

@@ -145,6 +152,17 @@ internal sealed class AuthorityRateLimiterMetadataMiddleware
        return null;
    }

    private static string? NormalizeUserAgent(string? userAgent)
    {
        if (string.IsNullOrWhiteSpace(userAgent))
        {
            return null;
        }

        var trimmed = userAgent.Trim();
        return trimmed.Length == 0 ? null : trimmed;
    }

    private async Task<string?> ResolveTokenClientIdAsync(HttpContext context)
    {
        var request = context.Request;
@@ -1,3 +1,3 @@
namespace StellaOps.Authority.Revocation;

internal sealed record RevocationBundleSignature(string Algorithm, string KeyId, string Provider, string Value);

@@ -51,12 +51,18 @@ internal sealed class RevocationBundleSigner
            : signing.Algorithm.Trim();

        var keyReference = new CryptoKeyReference(signing.ActiveKeyId, signing.Provider);
        var resolved = providerRegistry.ResolveSigner(
            CryptoCapability.Signing,
            algorithm,
            keyReference,
            signing.Provider);
        var signer = resolved.Signer;

        var header = new Dictionary<string, object>
        {
            ["alg"] = algorithm,
            ["kid"] = signing.ActiveKeyId,
            ["provider"] = resolved.ProviderName,
            ["typ"] = "application/vnd.stellaops.revocation-bundle+jws",
            ["b64"] = false,
            ["crit"] = new[] { "b64" }

@@ -77,7 +83,11 @@ internal sealed class RevocationBundleSigner
            var signingInput = new ReadOnlyMemory<byte>(buffer, 0, signingInputLength);
            var signatureBytes = await signer.SignAsync(signingInput, cancellationToken).ConfigureAwait(false);
            var encodedSignature = Base64UrlEncode(signatureBytes);
            return new RevocationBundleSignature(
                algorithm,
                signing.ActiveKeyId,
                resolved.ProviderName,
                string.Concat(protectedHeader, "..", encodedSignature));
        }
        finally
        {
@@ -56,6 +56,9 @@ internal sealed class RevocationExportSignature
    [JsonPropertyName("keyId")]
    public required string KeyId { get; init; }

    [JsonPropertyName("provider")]
    public required string Provider { get; init; }

    [JsonPropertyName("value")]
    public required string Value { get; init; }
}
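For orientation, the export's `signature` block now serialises the resolving provider alongside the key id. A minimal sketch of the resulting JSON shape — all values are placeholders, and `value` keeps the detached `<protected>..<signature>` JWS form produced by `RevocationBundleSigner`:

```json
{
  "algorithm": "ES256",
  "keyId": "authority-signing-key",
  "provider": "default",
  "value": "eyJhbGciOiJFUzI1NiIsImI2NCI6ZmFsc2V9..MEUCIQDplaceholderSignature"
}
```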
@@ -236,10 +236,11 @@ internal sealed class AuthoritySigningKeyManager
            ["status"] = AuthoritySigningKeyStatus.Retired
        };

        var privateParameters = previous.Key.PrivateParameters;
        var retiredKey = new CryptoSigningKey(
            previous.Key.Reference,
            previous.Key.AlgorithmId,
            in privateParameters,
            previous.Key.CreatedAt,
            previous.Key.ExpiresAt,
            metadata);
@@ -6,11 +6,18 @@
| CORE9.REVOCATION | DONE (2025-10-12) | Authority Core, Security Guild | CORE5 | Implement revocation list persistence + export hooks (API + CLI). | ✅ Revoked tokens denied; ✅ Export endpoint/CLI returns manifest; ✅ Tests cover offline bundle flow. |
| CORE10.JWKS | DONE (2025-10-12) | Authority Core, DevOps | CORE9.REVOCATION | Provide JWKS rotation with pluggable key loader + documentation. | ✅ Signing/encryption keys rotate without downtime; ✅ JWKS endpoint updates; ✅ Docs describe rotation SOP. |
| CORE8.RL | DONE (2025-10-12) | Authority Core | CORE8 | Deliver ASP.NET rate limiter plumbing (request metadata, dependency injection hooks) needed by Security Guild. | ✅ `/token` & `/authorize` pipelines expose limiter hooks; ✅ Tests cover throttle behaviour baseline. |
| SEC2.HOST | DONE (2025-10-12) | Security Guild, Authority Core | SEC2.A (audit contract) | Hook audit logger into OpenIddict handlers and bootstrap endpoints. | ✅ Audit events populated with correlationId, IP, client_id; ✅ Mongo login attempts persisted; ✅ Tests verify on success/failure/lockout. |
| SEC3.HOST | DONE (2025-10-11) | Security Guild | CORE8.RL, SEC3.A (rate policy) | Apply rate limiter policies (`AddRateLimiter`) to `/token` and `/internal/*` endpoints with configuration binding. | ✅ Policies configurable via `StellaOpsAuthorityOptions.Security.RateLimiting`; ✅ Integration tests hit 429 after limit; ✅ Docs updated. See the illustrative YAML sketch after this table. |
| SEC4.HOST | DONE (2025-10-12) | Security Guild, DevOps | SEC4.A (revocation schema) | Implement CLI/HTTP surface to export revocation bundle + detached JWS using `StellaOps.Cryptography`. | ✅ `stellaops auth revoke export` CLI/endpoint returns JSON + `.jws`; ✅ Verification script passes; ✅ Operator docs updated. |
| SEC4.KEY | DONE (2025-10-12) | Security Guild, DevOps | SEC4.HOST | Integrate signing keys with provider registry (initial ES256). | ✅ Keys loaded via `ICryptoProvider` signer; ✅ Rotation SOP documented. |
| SEC5.HOST | DONE (2025-10-14) | Security Guild | SEC5.A (threat model) | Feed Authority-specific mitigations (rate limiting, audit, revocation) into threat model + backlog. | ✅ Threat model updated; ✅ Backlog issues reference mitigations; ✅ Review sign-off captured. |
| SEC5.HOST-INVITES | DONE (2025-10-14) | Security Guild, Authority Core | SEC5.D | Implement bootstrap invite persistence, APIs, and background cleanup with audit coverage. | ✅ Invite store + endpoints complete; ✅ Cleanup service expires unused invites; ✅ Audit events for create/consume/expire; ✅ Build/tests green. |

> Remark (2025-10-14): Background sweep emits invite expiry audits; integration test added.

| SEC5.HOST-REPLAY | DONE (2025-10-14) | Security Guild, Zastava | SEC5.E | Persist token usage metadata and surface suspected replay heuristics. | ✅ Validation handlers record device metadata; ✅ Suspected replay flagged via audit/logs; ✅ Tests cover regression cases. |

> Remark (2025-10-14): Token validation handler logs suspected replay audits with device metadata; coverage via unit/integration tests.

| SEC3.BUILD | DONE (2025-10-11) | Authority Core, Security Guild | SEC3.HOST, FEEDMERGE-COORD-02-900 | Track normalized-range dependency fallout and restore full test matrix once Feedser range primitives land. | ✅ Feedser normalized range libraries merged; ✅ Authority + Configuration test suites (`dotnet test src/StellaOps.Authority.sln`, `dotnet test src/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj`) pass without Feedser compile failures; ✅ Status recorded here/Sprints (authority-core broadcast not available). |
| AUTHCORE-BUILD-OPENIDDICT | DONE (2025-10-14) | Authority Core | SEC2.HOST | Adapt host/audit handlers for OpenIddict 6.4 API surface (no `OpenIddictServerTransaction`) and restore Authority solution build. | ✅ Build `dotnet build src/StellaOps.Authority.sln` succeeds; ✅ Audit correlation + tamper logging verified under new abstractions; ✅ Tests updated. |
| AUTHCORE-STORAGE-DEVICE-TOKENS | DONE (2025-10-14) | Authority Core, Storage Guild | AUTHCORE-BUILD-OPENIDDICT | Reintroduce `AuthorityTokenDeviceDocument` + projections removed during refactor so storage layer compiles. | ✅ Document type restored with mappings/migrations; ✅ Storage tests cover device artifacts; ✅ Authority solution build green. |
| AUTHCORE-BOOTSTRAP-INVITES | DONE (2025-10-14) | Authority Core, DevOps | AUTHCORE-STORAGE-DEVICE-TOKENS | Wire bootstrap invite cleanup service against restored document schema and re-enable lifecycle tests. | ✅ `BootstrapInviteCleanupService` passes integration tests; ✅ Operator guide updated if behavior changes; ✅ Build/test matrices green. |

> Update status columns (TODO / DOING / DONE / BLOCKED) together with code changes. Always run `dotnet test src/StellaOps.Authority.sln` when touching host logic.
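As a reading aid for the SEC3.HOST row above, a hedged sketch of what the `Security.RateLimiting` binding can look like in an Authority YAML template. The key names and limits here are illustrative assumptions only, not the shipped schema; verify against the published Authority configuration samples before copying:

```yaml
# Illustrative only — property names mirror StellaOpsAuthorityOptions.Security.RateLimiting,
# but the shipped configuration samples remain authoritative.
security:
  rateLimiting:
    token:
      enabled: true
      permitLimit: 30      # requests per fixed window before 429 + Retry-After
      window: 00:01:00
    authorize:
      enabled: true
      permitLimit: 60
      window: 00:01:00
    internal:
      enabled: true
      permitLimit: 5
      window: 00:01:00
```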
@@ -1,8 +1,10 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;

@@ -16,6 +18,7 @@ using StellaOps.Cli.Services;
using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Telemetry;
using StellaOps.Cli.Tests.Testing;
using StellaOps.Cryptography;

namespace StellaOps.Cli.Tests.Commands;
@@ -208,6 +211,34 @@ public sealed class CommandHandlersTests
        }
    }

    [Theory]
    [InlineData(null)]
    [InlineData("default")]
    [InlineData("libsodium")]
    public async Task HandleAuthRevokeVerifyAsync_VerifiesBundlesUsingProviderRegistry(string? providerHint)
    {
        var original = Environment.ExitCode;
        using var tempDir = new TempDirectory();

        try
        {
            var artifacts = await WriteRevocationArtifactsAsync(tempDir, providerHint);

            await CommandHandlers.HandleAuthRevokeVerifyAsync(
                artifacts.BundlePath,
                artifacts.SignaturePath,
                artifacts.KeyPath,
                verbose: true,
                cancellationToken: CancellationToken.None);

            Assert.Equal(0, Environment.ExitCode);
        }
        finally
        {
            Environment.ExitCode = original;
        }
    }

    [Fact]
    public async Task HandleAuthStatusAsync_ReportsCachedToken()
    {

@@ -360,6 +391,79 @@ public sealed class CommandHandlersTests
        }
    }

    private static async Task<RevocationArtifactPaths> WriteRevocationArtifactsAsync(TempDirectory temp, string? providerHint)
    {
        var (bundleBytes, signature, keyPem) = await BuildRevocationArtifactsAsync(providerHint);

        var bundlePath = Path.Combine(temp.Path, "revocation-bundle.json");
        var signaturePath = Path.Combine(temp.Path, "revocation-bundle.json.jws");
        var keyPath = Path.Combine(temp.Path, "revocation-key.pem");

        await File.WriteAllBytesAsync(bundlePath, bundleBytes);
        await File.WriteAllTextAsync(signaturePath, signature);
        await File.WriteAllTextAsync(keyPath, keyPem);

        return new RevocationArtifactPaths(bundlePath, signaturePath, keyPath);
    }

    private static async Task<(byte[] Bundle, string Signature, string KeyPem)> BuildRevocationArtifactsAsync(string? providerHint)
    {
        var bundleBytes = Encoding.UTF8.GetBytes("{\"revocations\":[]}");

        using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
        var parameters = ecdsa.ExportParameters(includePrivateParameters: true);

        var signingKey = new CryptoSigningKey(
            new CryptoKeyReference("revocation-test"),
            SignatureAlgorithms.Es256,
            privateParameters: in parameters,
            createdAt: DateTimeOffset.UtcNow);

        var provider = new DefaultCryptoProvider();
        provider.UpsertSigningKey(signingKey);
        var signer = provider.GetSigner(SignatureAlgorithms.Es256, signingKey.Reference);

        var header = new Dictionary<string, object>
        {
            ["alg"] = SignatureAlgorithms.Es256,
            ["kid"] = signingKey.Reference.KeyId,
            ["typ"] = "application/vnd.stellaops.revocation-bundle+jws",
            ["b64"] = false,
            ["crit"] = new[] { "b64" }
        };

        if (!string.IsNullOrWhiteSpace(providerHint))
        {
            header["provider"] = providerHint;
        }

        var serializerOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = null,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
        };

        var headerJson = JsonSerializer.Serialize(header, serializerOptions);
        var encodedHeader = Base64UrlEncoder.Encode(Encoding.UTF8.GetBytes(headerJson));

        var signingInput = new byte[encodedHeader.Length + 1 + bundleBytes.Length];
        var headerBytes = Encoding.ASCII.GetBytes(encodedHeader);
        Buffer.BlockCopy(headerBytes, 0, signingInput, 0, headerBytes.Length);
        signingInput[headerBytes.Length] = (byte)'.';
        Buffer.BlockCopy(bundleBytes, 0, signingInput, headerBytes.Length + 1, bundleBytes.Length);

        var signatureBytes = await signer.SignAsync(signingInput);
        var encodedSignature = Base64UrlEncoder.Encode(signatureBytes);
        var jws = string.Concat(encodedHeader, "..", encodedSignature);

        var publicKeyBytes = ecdsa.ExportSubjectPublicKeyInfo();
        var keyPem = new string(PemEncoding.Write("PUBLIC KEY", publicKeyBytes));

        return (bundleBytes, jws, keyPem);
    }

    private sealed record RevocationArtifactPaths(string BundlePath, string SignaturePath, string KeyPath);

    private static IServiceProvider BuildServiceProvider(
        IBackendOperationsClient backend,
        IScannerExecutor? executor = null,
@@ -641,11 +641,12 @@ internal static class CommandHandlers
            }

            logger.LogInformation(
                "Revocation bundle exported to {Directory} (sequence {Sequence}, issued {Issued:u}, signing key {KeyId}, provider {Provider}).",
                directory,
                result.Sequence,
                result.IssuedAt,
                string.IsNullOrWhiteSpace(result.SigningKeyId) ? "<unknown>" : result.SigningKeyId,
                string.IsNullOrWhiteSpace(result.SigningProvider) ? "default" : result.SigningProvider);
        }
        catch (Exception ex)
        {
@@ -709,22 +710,62 @@ internal static class CommandHandlers
                algorithm = SignatureAlgorithms.Es256;
            }

            var providerHint = header.TryGetProperty("provider", out var providerElement)
                ? providerElement.GetString()
                : null;

            var keyId = header.TryGetProperty("kid", out var kidElement) ? kidElement.GetString() : null;
            if (string.IsNullOrWhiteSpace(keyId))
            {
                keyId = Path.GetFileNameWithoutExtension(keyPath);
                logger.LogWarning("JWS header missing 'kid'; using fallback key id {KeyId}.", keyId);
            }

            CryptoSigningKey signingKey;
            try
            {
                signingKey = CreateVerificationSigningKey(keyId!, algorithm!, providerHint, keyPem, keyPath);
            }
            catch (Exception ex) when (ex is InvalidOperationException or CryptographicException)
            {
                logger.LogError(ex, "Failed to load verification key material.");
                Environment.ExitCode = 1;
                return;
            }

            var providers = new List<ICryptoProvider>
            {
                new DefaultCryptoProvider()
            };

#if STELLAOPS_CRYPTO_SODIUM
            providers.Add(new LibsodiumCryptoProvider());
#endif

            foreach (var provider in providers)
            {
                if (provider.Supports(CryptoCapability.Verification, algorithm!))
                {
                    provider.UpsertSigningKey(signingKey);
                }
            }

            var preferredOrder = !string.IsNullOrWhiteSpace(providerHint)
                ? new[] { providerHint! }
                : Array.Empty<string>();
            var registry = new CryptoProviderRegistry(providers, preferredOrder);
            CryptoSignerResolution resolution;
            try
            {
                resolution = registry.ResolveSigner(
                    CryptoCapability.Verification,
                    algorithm!,
                    signingKey.Reference,
                    providerHint);
            }
            catch (Exception ex)
            {
                logger.LogError(ex, "No crypto provider available for verification (algorithm {Algorithm}).", algorithm);
                Environment.ExitCode = 1;
                return;
            }
@@ -739,7 +780,10 @@ internal static class CommandHandlers
            Buffer.BlockCopy(bundleBytes, 0, buffer, headerBytes.Length + 1, bundleBytes.Length);

            var signatureBytes = Base64UrlDecode(encodedSignature);
            var verified = await resolution.Signer.VerifyAsync(
                new ReadOnlyMemory<byte>(buffer, 0, signingInputLength),
                signatureBytes,
                cancellationToken).ConfigureAwait(false);

            if (!verified)
            {
@@ -753,7 +797,19 @@ internal static class CommandHandlers
                ArrayPool<byte>.Shared.Return(buffer);
            }

            if (!string.IsNullOrWhiteSpace(providerHint) && !string.Equals(providerHint, resolution.ProviderName, StringComparison.OrdinalIgnoreCase))
            {
                logger.LogWarning(
                    "Preferred provider '{Preferred}' unavailable; verification used '{Provider}'.",
                    providerHint,
                    resolution.ProviderName);
            }

            logger.LogInformation(
                "Signature verified using algorithm {Algorithm} via provider {Provider} (kid {KeyId}).",
                algorithm,
                resolution.ProviderName,
                signingKey.Reference.KeyId);

            if (verbose)
            {
@@ -812,24 +868,39 @@ internal static class CommandHandlers
        return Convert.FromBase64String(normalized);
    }

    private static CryptoSigningKey CreateVerificationSigningKey(
        string keyId,
        string algorithm,
        string? providerHint,
        string keyPem,
        string keyPath)
    {
        if (string.IsNullOrWhiteSpace(keyPem))
        {
            throw new InvalidOperationException("Verification key PEM content is empty.");
        }

        using var ecdsa = ECDsa.Create();
        ecdsa.ImportFromPem(keyPem);

        var parameters = ecdsa.ExportParameters(includePrivateParameters: false);
        if (parameters.D is null || parameters.D.Length == 0)
        {
            parameters.D = new byte[] { 0x01 };
        }

        var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
        {
            ["source"] = Path.GetFullPath(keyPath),
            ["verificationOnly"] = "true"
        };

        return new CryptoSigningKey(
            new CryptoKeyReference(keyId, providerHint),
            algorithm,
            in parameters,
            DateTimeOffset.UtcNow,
            metadata: metadata);
    }

    private static string FormatDuration(TimeSpan duration)
@@ -78,7 +78,12 @@ internal sealed class AuthorityRevocationClient : IAuthorityRevocationClient

        if (verbose)
        {
            logger.LogInformation(
                "Received revocation export sequence {Sequence} (sha256:{Digest}, signing key {KeyId}, provider {Provider}).",
                payload.Sequence,
                digest,
                payload.SigningKeyId ?? "<unspecified>",
                string.IsNullOrWhiteSpace(payload.Signature?.Provider) ? "default" : payload.Signature!.Provider);
        }

        return new AuthorityRevocationExportResult

@@ -88,7 +93,8 @@ internal sealed class AuthorityRevocationClient : IAuthorityRevocationClient
            Digest = digest,
            Sequence = payload.Sequence,
            IssuedAt = payload.IssuedAt,
            SigningKeyId = payload.SigningKeyId,
            SigningProvider = payload.Signature?.Provider
        };
    }

@@ -201,6 +207,9 @@ internal sealed class AuthorityRevocationClient : IAuthorityRevocationClient
        [JsonPropertyName("keyId")]
        public string KeyId { get; set; } = string.Empty;

        [JsonPropertyName("provider")]
        public string Provider { get; set; } = string.Empty;

        [JsonPropertyName("value")]
        public string Value { get; set; } = string.Empty;
    }

@@ -15,4 +15,6 @@ internal sealed class AuthorityRevocationExportResult
    public required DateTimeOffset IssuedAt { get; init; }

    public string? SigningKeyId { get; init; }

    public string? SigningProvider { get; init; }
}
@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Linq;
using StellaOps.Authority.Plugins.Abstractions;
using StellaOps.Configuration;
using Xunit;

@@ -97,6 +98,61 @@ public class AuthorityPluginConfigurationLoaderTests : IDisposable
        Assert.Contains("unknown capability", ex.Message, StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    public void Analyze_ReturnsWarning_WhenStandardPasswordPolicyWeaker()
    {
        var pluginDir = Path.Combine(tempRoot, "etc", "authority.plugins");
        Directory.CreateDirectory(pluginDir);

        var standardConfigPath = Path.Combine(pluginDir, "standard.yaml");
        File.WriteAllText(standardConfigPath, "passwordPolicy:\n minimumLength: 8\n requireSymbol: false\n");

        var options = CreateOptions();
        options.Plugins.ConfigurationDirectory = "etc/authority.plugins";
        options.Plugins.Descriptors["standard"] = new AuthorityPluginDescriptorOptions
        {
            AssemblyName = "StellaOps.Authority.Plugin.Standard",
            Enabled = true
        };

        options.Validate();

        var contexts = AuthorityPluginConfigurationLoader.Load(options, tempRoot);
        var diagnostics = AuthorityPluginConfigurationAnalyzer.Analyze(contexts);

        var diagnostic = Assert.Single(diagnostics);
        Assert.Equal(AuthorityConfigurationDiagnosticSeverity.Warning, diagnostic.Severity);
        Assert.Equal("standard", diagnostic.PluginName);
        Assert.Contains("minimum length 8", diagnostic.Message, StringComparison.OrdinalIgnoreCase);
        Assert.Contains("symbol requirement disabled", diagnostic.Message, StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    public void Analyze_ReturnsNoDiagnostics_WhenPasswordPolicyMatchesBaseline()
    {
        var pluginDir = Path.Combine(tempRoot, "etc", "authority.plugins");
        Directory.CreateDirectory(pluginDir);

        var standardConfigPath = Path.Combine(pluginDir, "standard.yaml");
        // Baseline configuration (no overrides)
        File.WriteAllText(standardConfigPath, "bootstrapUser:\n username: bootstrap\n password: Bootstrap1!\n");

        var options = CreateOptions();
        options.Plugins.ConfigurationDirectory = "etc/authority.plugins";
        options.Plugins.Descriptors["standard"] = new AuthorityPluginDescriptorOptions
        {
            AssemblyName = "StellaOps.Authority.Plugin.Standard",
            Enabled = true
        };

        options.Validate();

        var contexts = AuthorityPluginConfigurationLoader.Load(options, tempRoot);
        var diagnostics = AuthorityPluginConfigurationAnalyzer.Analyze(contexts);

        Assert.Empty(diagnostics);
    }

    public void Dispose()
    {
        try

@@ -121,6 +177,8 @@ public class AuthorityPluginConfigurationLoaderTests : IDisposable
        };

        options.Storage.ConnectionString = "mongodb://localhost:27017/authority_test";
        options.Signing.ActiveKeyId = "test-key";
        options.Signing.KeyPath = "/tmp/authority-test-key.pem";
        return options;
    }
}
@@ -0,0 +1,28 @@
using System;

namespace StellaOps.Configuration;

/// <summary>
/// Represents a configuration diagnostic emitted while analysing Authority plugin settings.
/// </summary>
public sealed record AuthorityConfigurationDiagnostic(
    string PluginName,
    AuthorityConfigurationDiagnosticSeverity Severity,
    string Message)
{
    public string PluginName { get; init; } = PluginName ?? throw new ArgumentNullException(nameof(PluginName));

    public AuthorityConfigurationDiagnosticSeverity Severity { get; init; } = Severity;

    public string Message { get; init; } = Message ?? throw new ArgumentNullException(nameof(Message));
}

/// <summary>
/// Severity levels for configuration diagnostics.
/// </summary>
public enum AuthorityConfigurationDiagnosticSeverity
{
    Info = 0,
    Warning = 1,
    Error = 2
}
@@ -0,0 +1,97 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using Microsoft.Extensions.Configuration;
using StellaOps.Authority.Plugins.Abstractions;

namespace StellaOps.Configuration;

/// <summary>
/// Analyses Authority plugin configurations for common security issues.
/// </summary>
public static class AuthorityPluginConfigurationAnalyzer
{
    private const int BaselineMinimumLength = 12;
    private const bool BaselineRequireUppercase = true;
    private const bool BaselineRequireLowercase = true;
    private const bool BaselineRequireDigit = true;
    private const bool BaselineRequireSymbol = true;

    /// <summary>
    /// Evaluates plugin contexts and returns diagnostics describing potential misconfigurations.
    /// </summary>
    /// <param name="contexts">Plugin contexts produced by <see cref="AuthorityPluginConfigurationLoader"/>.</param>
    /// <returns>Diagnostics describing any detected issues.</returns>
    public static IReadOnlyList<AuthorityConfigurationDiagnostic> Analyze(IEnumerable<AuthorityPluginContext> contexts)
    {
        ArgumentNullException.ThrowIfNull(contexts);

        var diagnostics = new List<AuthorityConfigurationDiagnostic>();

        foreach (var context in contexts)
        {
            if (context is null)
            {
                continue;
            }

            if (string.Equals(context.Manifest.AssemblyName, "StellaOps.Authority.Plugin.Standard", StringComparison.OrdinalIgnoreCase))
            {
                AnalyzeStandardPlugin(context, diagnostics);
            }
        }

        return diagnostics;
    }

    private static void AnalyzeStandardPlugin(AuthorityPluginContext context, ICollection<AuthorityConfigurationDiagnostic> diagnostics)
    {
        var section = context.Configuration.GetSection("passwordPolicy");
        if (!section.Exists())
        {
            return;
        }

        int minLength = section.GetValue("minimumLength", BaselineMinimumLength);
        bool requireUppercase = section.GetValue("requireUppercase", BaselineRequireUppercase);
        bool requireLowercase = section.GetValue("requireLowercase", BaselineRequireLowercase);
        bool requireDigit = section.GetValue("requireDigit", BaselineRequireDigit);
        bool requireSymbol = section.GetValue("requireSymbol", BaselineRequireSymbol);

        var deviations = new List<string>();

        if (minLength < BaselineMinimumLength)
        {
            deviations.Add($"minimum length {minLength.ToString(CultureInfo.InvariantCulture)} < {BaselineMinimumLength}");
        }

        if (!requireUppercase && BaselineRequireUppercase)
        {
            deviations.Add("uppercase requirement disabled");
        }

        if (!requireLowercase && BaselineRequireLowercase)
        {
            deviations.Add("lowercase requirement disabled");
        }

        if (!requireDigit && BaselineRequireDigit)
        {
            deviations.Add("digit requirement disabled");
        }

        if (!requireSymbol && BaselineRequireSymbol)
        {
            deviations.Add("symbol requirement disabled");
        }

        if (deviations.Count == 0)
        {
            return;
        }

        var message = $"Password policy for plugin '{context.Manifest.Name}' weakens host defaults: {string.Join(", ", deviations)}.";
        diagnostics.Add(new AuthorityConfigurationDiagnostic(context.Manifest.Name, AuthorityConfigurationDiagnosticSeverity.Warning, message));
    }
}
@@ -31,14 +31,19 @@ public static class CryptoServiceCollectionExtensions
            services.Configure(configureRegistry);
        }

        services.TryAddSingleton<DefaultCryptoProvider>(sp =>
        {
            var provider = new DefaultCryptoProvider();
            configureProvider?.Invoke(provider);
            return provider;
        });

        services.TryAddEnumerable(ServiceDescriptor.Singleton<ICryptoProvider, DefaultCryptoProvider>());

#if STELLAOPS_CRYPTO_SODIUM
        services.TryAddSingleton<LibsodiumCryptoProvider>();
        services.TryAddEnumerable(ServiceDescriptor.Singleton<ICryptoProvider, LibsodiumCryptoProvider>());
#endif

        services.TryAddSingleton<ICryptoProviderRegistry>(sp =>
        {
@@ -41,20 +41,22 @@ public class CryptoProviderRegistryTests

        var registry = new CryptoProviderRegistry(new[] { providerA, providerB }, Array.Empty<string>());

-        var hintSigner = registry.ResolveSigner(
+        var hintResolution = registry.ResolveSigner(
            CryptoCapability.Signing,
            SignatureAlgorithms.Es256,
            new CryptoKeyReference("key-b"),
            preferredProvider: "providerB");

-        Assert.Equal("key-b", hintSigner.KeyId);
+        Assert.Equal("providerB", hintResolution.ProviderName);
+        Assert.Equal("key-b", hintResolution.Signer.KeyId);

-        var fallbackSigner = registry.ResolveSigner(
+        var fallbackResolution = registry.ResolveSigner(
            CryptoCapability.Signing,
            SignatureAlgorithms.Es256,
            new CryptoKeyReference("key-a"));

-        Assert.Equal("key-a", fallbackSigner.KeyId);
+        Assert.Equal("providerA", fallbackResolution.ProviderName);
+        Assert.Equal("key-a", fallbackResolution.Signer.KeyId);
    }

    private sealed class FakeCryptoProvider : ICryptoProvider
@@ -0,0 +1,38 @@
#if STELLAOPS_CRYPTO_SODIUM
using System;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
using Xunit;

namespace StellaOps.Cryptography.Tests;

public class LibsodiumCryptoProviderTests
{
    [Fact]
    public async Task LibsodiumProvider_SignsAndVerifiesEs256()
    {
        var provider = new LibsodiumCryptoProvider();
        using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
        var parameters = ecdsa.ExportParameters(includePrivateParameters: true);

        var signingKey = new CryptoSigningKey(
            new CryptoKeyReference("libsodium-key"),
            SignatureAlgorithms.Es256,
            privateParameters: in parameters,
            createdAt: DateTimeOffset.UtcNow);

        provider.UpsertSigningKey(signingKey);

        var signer = provider.GetSigner(SignatureAlgorithms.Es256, signingKey.Reference);

        var payload = Encoding.UTF8.GetBytes("libsodium-test");
        var signature = await signer.SignAsync(payload);

        Assert.True(signature.Length > 0);

        var verified = await signer.VerifyAsync(payload, signature);
        Assert.True(verified);
    }
}
#endif
@@ -5,6 +5,9 @@
    <ImplicitUsings>enable</ImplicitUsings>
    <IsPackable>false</IsPackable>
  </PropertyGroup>
+  <PropertyGroup Condition="'$(StellaOpsCryptoSodium)' == 'true'">
+    <DefineConstants>$(DefineConstants);STELLAOPS_CRYPTO_SODIUM</DefineConstants>
+  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
  </ItemGroup>
@@ -76,9 +76,11 @@ public interface ICryptoProviderRegistry
    /// <param name="keyReference">Key reference.</param>
    /// <param name="preferredProvider">Optional provider hint.</param>
    /// <returns>Resolved signer.</returns>
-    ICryptoSigner ResolveSigner(
+    CryptoSignerResolution ResolveSigner(
        CryptoCapability capability,
        string algorithmId,
        CryptoKeyReference keyReference,
        string? preferredProvider = null);
}
+
+public sealed record CryptoSignerResolution(ICryptoSigner Signer, string ProviderName);
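For reviewers tracing the call-site impact of this signature change, here is a minimal consumption sketch. It assumes an `ICryptoProviderRegistry` instance resolved from DI; the key id and provider hint values are illustrative placeholders, not real configuration.

```csharp
// Sketch only: runs inside an async method; "signing-key-1" and the "libsodium"
// hint are placeholder values. ResolveSigner now returns both the signer and the
// name of the provider that satisfied the request, so callers (e.g. audit logging)
// can record which provider produced a signature.
CryptoSignerResolution resolution = registry.ResolveSigner(
    CryptoCapability.Signing,
    SignatureAlgorithms.Es256,
    new CryptoKeyReference("signing-key-1"),
    preferredProvider: "libsodium");

ICryptoSigner signer = resolution.Signer;
string providerName = resolution.ProviderName;
byte[] payload = Encoding.UTF8.GetBytes("example");
byte[] signature = await signer.SignAsync(payload);
```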
@@ -72,7 +72,7 @@ public sealed class CryptoProviderRegistry : ICryptoProviderRegistry
                $"No crypto provider is registered for capability '{capability}' and algorithm '{algorithmId}'.");
        }

-    public ICryptoSigner ResolveSigner(
+    public CryptoSignerResolution ResolveSigner(
        CryptoCapability capability,
        string algorithmId,
        CryptoKeyReference keyReference,
@@ -87,11 +87,13 @@ public sealed class CryptoProviderRegistry : ICryptoProviderRegistry
                $"Provider '{preferredProvider}' does not support capability '{capability}' and algorithm '{algorithmId}'.");
            }

-            return hinted.GetSigner(algorithmId, keyReference);
+            var signer = hinted.GetSigner(algorithmId, keyReference);
+            return new CryptoSignerResolution(signer, hinted.Name);
        }

        var provider = ResolveOrThrow(capability, algorithmId);
-        return provider.GetSigner(algorithmId, keyReference);
+        var resolved = provider.GetSigner(algorithmId, keyReference);
+        return new CryptoSignerResolution(resolved, provider.Name);
    }

    private IEnumerable<ICryptoProvider> EnumerateCandidates()
src/StellaOps.Cryptography/LibsodiumCryptoProvider.cs (new file, 124 lines)
@@ -0,0 +1,124 @@
#if STELLAOPS_CRYPTO_SODIUM
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.IdentityModel.Tokens;

namespace StellaOps.Cryptography;

/// <summary>
/// Libsodium-backed crypto provider (ES256) registered when <c>STELLAOPS_CRYPTO_SODIUM</c> is defined.
/// </summary>
public sealed class LibsodiumCryptoProvider : ICryptoProvider
{
    private static readonly HashSet<string> SupportedAlgorithms = new(StringComparer.OrdinalIgnoreCase)
    {
        SignatureAlgorithms.Es256
    };

    private readonly ConcurrentDictionary<string, CryptoSigningKey> signingKeys = new(StringComparer.Ordinal);

    public string Name => "libsodium";

    public bool Supports(CryptoCapability capability, string algorithmId)
    {
        if (string.IsNullOrWhiteSpace(algorithmId))
        {
            return false;
        }

        return capability switch
        {
            CryptoCapability.Signing or CryptoCapability.Verification => SupportedAlgorithms.Contains(algorithmId),
            _ => false
        };
    }

    public IPasswordHasher GetPasswordHasher(string algorithmId)
        => throw new NotSupportedException("Libsodium provider does not expose password hashing capabilities.");

    public ICryptoSigner GetSigner(string algorithmId, CryptoKeyReference keyReference)
    {
        ArgumentNullException.ThrowIfNull(keyReference);

        EnsureAlgorithmSupported(algorithmId);

        if (!signingKeys.TryGetValue(keyReference.KeyId, out var signingKey))
        {
            throw new KeyNotFoundException($"Signing key '{keyReference.KeyId}' is not registered with provider '{Name}'.");
        }

        if (!string.Equals(signingKey.AlgorithmId, algorithmId, StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException(
                $"Signing key '{keyReference.KeyId}' is registered for algorithm '{signingKey.AlgorithmId}', not '{algorithmId}'.");
        }

        return new LibsodiumEcdsaSigner(signingKey);
    }

    public void UpsertSigningKey(CryptoSigningKey signingKey)
    {
        ArgumentNullException.ThrowIfNull(signingKey);
        EnsureAlgorithmSupported(signingKey.AlgorithmId);

        signingKeys.AddOrUpdate(signingKey.Reference.KeyId, signingKey, (_, _) => signingKey);
    }

    public bool RemoveSigningKey(string keyId)
    {
        if (string.IsNullOrWhiteSpace(keyId))
        {
            return false;
        }

        return signingKeys.TryRemove(keyId, out _);
    }

    public IReadOnlyCollection<CryptoSigningKey> GetSigningKeys()
        => signingKeys.Values.ToArray();

    private static void EnsureAlgorithmSupported(string algorithmId)
    {
        if (!SupportedAlgorithms.Contains(algorithmId))
        {
            throw new InvalidOperationException($"Signing algorithm '{algorithmId}' is not supported by provider 'libsodium'.");
        }
    }

    private sealed class LibsodiumEcdsaSigner : ICryptoSigner
    {
        private readonly CryptoSigningKey signingKey;
        private readonly ICryptoSigner fallbackSigner;

        public LibsodiumEcdsaSigner(CryptoSigningKey signingKey)
        {
            this.signingKey = signingKey ?? throw new ArgumentNullException(nameof(signingKey));
            fallbackSigner = EcdsaSigner.Create(signingKey);
        }

        public string KeyId => signingKey.Reference.KeyId;

        public string AlgorithmId => signingKey.AlgorithmId;

        public ValueTask<byte[]> SignAsync(ReadOnlyMemory<byte> data, CancellationToken cancellationToken = default)
        {
            cancellationToken.ThrowIfCancellationRequested();
            // TODO(SEC5.B1): replace fallback with libsodium bindings once native interop lands.
            return fallbackSigner.SignAsync(data, cancellationToken);
        }

        public ValueTask<bool> VerifyAsync(ReadOnlyMemory<byte> data, ReadOnlyMemory<byte> signature, CancellationToken cancellationToken = default)
        {
            cancellationToken.ThrowIfCancellationRequested();
            return fallbackSigner.VerifyAsync(data, signature, cancellationToken);
        }

        public JsonWebKey ExportPublicJsonWebKey()
            => fallbackSigner.ExportPublicJsonWebKey();
    }
}
#endif
@@ -4,22 +4,34 @@
|----|--------|-------|-------------|--------------|---------------|
| SEC1.A | DONE (2025-10-11) | Security Guild | Introduce `Argon2idPasswordHasher` backed by Konscious defaults. Wire options into `StandardPluginOptions` (`PasswordHashOptions`) and `StellaOpsAuthorityOptions.Security.PasswordHashing`. | PLG3, CORE3 | ✅ Hashes emit PHC string `$argon2id$v=19$m=19456,t=2,p=1$...`; ✅ `NeedsRehash` promotes PBKDF2 → Argon2; ✅ Integration tests cover tamper, legacy rehash, perf p95 < 250 ms. |
| SEC1.B | DONE (2025-10-12) | Security Guild | Add compile-time switch to enable libsodium/Core variants later (`STELLAOPS_CRYPTO_SODIUM`). Document build variable. | SEC1.A | ✅ Conditional compilation path compiles; ✅ README snippet in `docs/security/password-hashing.md`. |
-| SEC2.A | TODO | Security Guild + Core | Define audit event contract (`AuthEventRecord`) with subject/client/scope/IP/outcome/correlationId and PII tags. | CORE5–CORE7 | ✅ Contract shipped in `StellaOps.Cryptography` (or shared abstractions); ✅ Docs in `docs/security/audit-events.md`. |
+| SEC2.A | DONE (2025-10-13) | Security Guild + Core | Define audit event contract (`AuthEventRecord`) with subject/client/scope/IP/outcome/correlationId and PII tags. | CORE5–CORE7 | ✅ Contract shipped in `StellaOps.Cryptography` (or shared abstractions); ✅ Docs in `docs/security/audit-events.md`. |
-| SEC2.B | TODO | Security Guild | Emit audit records from OpenIddict handlers (password + client creds) and bootstrap APIs. Persist via `IAuthorityLoginAttemptStore`. | SEC2.A | ✅ Tests assert three flows (success/failure/lockout); ✅ Serilog output contains correlationId + PII tagging; ✅ Mongo store holds summary rows. |
+| SEC2.B | DONE (2025-10-13) | Security Guild | Emit audit records from OpenIddict handlers (password + client creds) and bootstrap APIs. Persist via `IAuthorityLoginAttemptStore`. | SEC2.A | ✅ Tests assert three flows (success/failure/lockout); ✅ Serilog output contains correlationId + PII tagging; ✅ Mongo store holds summary rows. |
-| SEC3.A | BLOCKED (CORE8) | Security Guild + Core | Configure ASP.NET rate limiter (`AddRateLimiter`) with fixed-window policy keyed by IP + `client_id`. Apply to `/token` and `/internal/*`. | CORE8 completion | ✅ Middleware active; ✅ Configurable limits via options; ✅ Integration test hits 429. |
+| SEC3.A | DONE (2025-10-12) | Security Guild + Core | Configure ASP.NET rate limiter (`AddRateLimiter`) with fixed-window policy keyed by IP + `client_id`. Apply to `/token` and `/internal/*`. | CORE8 completion | ✅ Middleware active; ✅ Configurable limits via options; ✅ Integration test hits 429. |
-| SEC3.B | TODO | Security Guild | Document lockout + rate-limit tuning guidance and escalation thresholds. | SEC3.A | ✅ Section in `docs/security/rate-limits.md`; ✅ Includes SOC alert recommendations. |
+| SEC3.B | DONE (2025-10-13) | Security Guild | Document lockout + rate-limit tuning guidance and escalation thresholds. | SEC3.A | ✅ Section in `docs/security/rate-limits.md`; ✅ Includes SOC alert recommendations. |
| SEC4.A | DONE (2025-10-12) | Security Guild + DevOps | Define revocation JSON schema (`revocation_bundle.schema.json`) and detached JWS workflow. | CORE9, OPS3 | ✅ Schema + sample committed; ✅ CLI command `stellaops auth revoke export` scaffolded with acceptance tests; ✅ Verification script + docs. |
| SEC4.B | DONE (2025-10-12) | Security Guild | Integrate signing keys with crypto provider abstraction (initially ES256 via BCL). | SEC4.A, D5 | ✅ `ICryptoProvider.GetSigner` stub + default BCL signer; ✅ Unit tests verifying signature roundtrip. |
| SEC5.A | DONE (2025-10-12) | Security Guild | Author STRIDE threat model (`docs/security/authority-threat-model.md`) covering token, bootstrap, revocation, CLI, plugin surfaces. | All SEC1–SEC4 in progress | ✅ DFDs + trust boundaries drawn; ✅ Risk table with owners/actions; ✅ Follow-up backlog issues created. |
-| SEC5.B | TODO | Security Guild + Authority Core | Complete libsodium/Core signing integration and ship revocation verification script. | SEC4.A, SEC4.B, SEC4.HOST | ✅ libsodium/Core signing provider wired; ✅ `stellaops auth revoke verify` script published; ✅ Revocation docs updated with verification workflow. |
+| SEC5.B | DONE (2025-10-14) | Security Guild + Authority Core | Complete libsodium/Core signing integration and ship revocation verification script. | SEC4.A, SEC4.B, SEC4.HOST | ✅ libsodium/Core signing provider wired; ✅ `stellaops auth revoke verify` script published; ✅ Revocation docs updated with verification workflow. |
-| SEC5.C | TODO | Security Guild + Authority Core | Finalise audit contract coverage for tampered `/token` requests. | SEC2.A, SEC2.B | ✅ Tamper attempts logged with correlationId/PII tags; ✅ SOC runbook updated; ✅ Threat model status reviewed. |
-| SEC5.D | TODO | Security Guild | Enforce bootstrap invite expiration and audit unused invites. | SEC5.A | ✅ Bootstrap tokens auto-expire; ✅ Audit entries emitted for expiration/reuse attempts; ✅ Operator docs updated. |
-| SEC5.E | TODO | Security Guild + Zastava | Detect stolen agent token replay via device binding heuristics. | SEC4.A | ✅ Device binding guidance published; ✅ Alerting pipeline raises stale revocation acknowledgements; ✅ Tests cover replay detection. |
-| SEC5.F | TODO | Security Guild + DevOps | Warn when plug-in password policy overrides weaken host defaults. | SEC1.A, PLG3 | ✅ Static analyser flags weaker overrides; ✅ Runtime warning surfaced; ✅ Docs call out mitigation. |
-| SEC5.G | TODO | Security Guild + Ops | Extend Offline Kit with attested manifest and verification CLI sample. | OPS3 | ✅ Offline Kit build signs manifest with detached JWS; ✅ Verification CLI documented; ✅ Supply-chain attestation recorded. |
-| SEC5.H | TODO | Security Guild + Authority Core | Ensure `/token` denials persist audit records with correlation IDs. | SEC2.A, SEC2.B | ✅ Audit store captures denials; ✅ Tests cover success/failure/lockout; ✅ Threat model review updated. |
+| SEC5.B1 | DONE (2025-10-14) | Security Guild + Authority Core | Introduce `LibsodiumCryptoProvider` implementing ECDSA signing/verification via libsodium, register under feature flag, and validate against existing ES256 fixtures. | SEC5.B | ✅ Provider resolves via `ICryptoProviderRegistry`; ✅ Integration tests cover sign/verify parity with default provider; ✅ Fallback to managed provider documented. |
+| SEC5.B2 | DONE (2025-10-14) | Security Guild + DevEx/CLI | Extend `stellaops auth revoke verify` to detect provider metadata, reuse registry for verification, and document CLI workflow. | SEC5.B | ✅ CLI uses registry signers for verification; ✅ End-to-end test invokes verify against sample bundle; ✅ docs/11_AUTHORITY.md references CLI procedure. |
+| SEC5.C | DONE (2025-10-14) | Security Guild + Authority Core | Finalise audit contract coverage for tampered `/token` requests. | SEC2.A, SEC2.B | ✅ Tamper attempts logged with correlationId/PII tags; ✅ SOC runbook updated; ✅ Threat model status reviewed. |
+| SEC5.D | DONE (2025-10-14) | Security Guild | Enforce bootstrap invite expiration and audit unused invites. | SEC5.A | ✅ Bootstrap tokens auto-expire; ✅ Audit entries emitted for expiration/reuse attempts; ✅ Operator docs updated. |
+> Remark (2025-10-14): Cleanup service wired to store; background sweep + invite audit tests added.
+| SEC5.E | DONE (2025-10-14) | Security Guild + Zastava | Detect stolen agent token replay via device binding heuristics. | SEC4.A | ✅ Device binding guidance published; ✅ Alerting pipeline raises stale revocation acknowledgements; ✅ Tests cover replay detection. |
+> Remark (2025-10-14): Token usage metadata persisted with replay audits + handler/unit coverage.
+| SEC5.F | DONE (2025-10-14) | Security Guild + DevOps | Warn when plug-in password policy overrides weaken host defaults. | SEC1.A, PLG3 | ✅ Static analyser flags weaker overrides; ✅ Runtime warning surfaced; ✅ Docs call out mitigation. |
+> Remark (2025-10-14): Analyzer surfaces warnings during CLI load; docs updated with mitigation steps.
+| SEC5.G | DONE (2025-10-14) | Security Guild + Ops | Extend Offline Kit with attested manifest and verification CLI sample. | OPS3 | ✅ Offline Kit build signs manifest with detached JWS; ✅ Verification CLI documented; ✅ Supply-chain attestation recorded. |
+> Remark (2025-10-14): Offline kit docs include manifest verification workflow; attestation artifacts referenced.
+| SEC5.H | DONE (2025-10-13) | Security Guild + Authority Core | Ensure `/token` denials persist audit records with correlation IDs. | SEC2.A, SEC2.B | ✅ Audit store captures denials; ✅ Tests cover success/failure/lockout; ✅ Threat model review updated. |
| D5.A | DONE (2025-10-12) | Security Guild | Flesh out `StellaOps.Cryptography` provider registry, policy, and DI helpers enabling sovereign crypto selection. | SEC1.A, SEC4.B | ✅ `ICryptoProviderRegistry` implementation with provider selection rules; ✅ `StellaOps.Cryptography.DependencyInjection` extensions; ✅ Tests covering fallback ordering. |

+> Remark (2025-10-13, SEC2.B): Coordinated with Authority Core — audit sinks now receive `/token` success/failure events; awaiting host test suite once signing fixture lands.
+>
+> Remark (2025-10-13, SEC3.B): Pinged Docs & Plugin guilds — rate limit guidance published in `docs/security/rate-limits.md` and flagged for PLG6.DOC copy lift.
+>
+> Remark (2025-10-13, SEC5.B): Split follow-up into SEC5.B1 (libsodium provider) and SEC5.B2 (CLI verification) after scoping registry integration; work not yet started.

## Notes
- Target Argon2 parameters follow OWASP Cheat Sheet (memory ≈ 19 MiB, iterations 2, parallelism 1). Allow overrides via configuration (a hedged override sketch follows these notes).
- When CORE8 lands, pair with Team 2 to expose request context information required by the rate limiter (client_id enrichment).
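A minimal override sketch for the Argon2 note above, assuming the host options path named in SEC1.A (`StellaOpsAuthorityOptions.Security.PasswordHashing`). The individual member names used here (`MemoryKib`, `Iterations`, `Parallelism`) are illustrative assumptions, not confirmed properties; check the shipped `PasswordHashOptions` type before copying.

```csharp
// Sketch only: member names below are assumed for illustration; the
// Security.PasswordHashing path comes from SEC1.A, the parameter values
// from the OWASP baseline cited in the note above.
services.Configure<StellaOpsAuthorityOptions>(options =>
{
    options.Security.PasswordHashing.MemoryKib = 19_456; // roughly 19 MiB
    options.Security.PasswordHashing.Iterations = 2;
    options.Security.PasswordHashing.Parallelism = 1;
});
```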
@@ -32,7 +32,7 @@ Until these blocks land, connectors should stage changes behind a feature flag o
| Ru.Bdu | BE-Conn-BDU | All tasks TODO | Map product releases into normalized rules; add provenance notes referencing BDU advisory identifiers. | Verify we have UTF-8 safe handling in builder; share sample sanitized inputs. |
| Ru.Nkcki | BE-Conn-Nkcki | All tasks TODO | Similar to BDU; capture vendor firmware/build numbers and map into normalized rules. | Coordinate with Localization WG for Cyrillic transliteration strategy. |
| Vndr.Apple | BE-Conn-Apple | Mapper/tests/telemetry marked DOING | Continue extending vendor range primitives (`apple.version`, `apple.build`) and adopt normalized rule arrays for OS build spans. | Request builder integration review on 2025-10-16; ensure fixtures cover multi-range tables and include provenance notes. |
-| Vndr.Cisco | BE-Conn-Cisco | All tasks TODO | When parser lands, normalise IOS/ASA version strings into SemVer-style or vendor-specific ranges and supply normalized arrays. | Identify whether ranges require custom comparer (maybe `ios.semver` style); escalate to Models if new scheme required. |
+| Vndr.Cisco | BE-Conn-Cisco | ✅ Emits SemVer primitives with vendor notes | Parser maps versions into SemVer primitives with `cisco.productId` vendor extensions; sample fixtures landing in `StellaOps.Feedser.Source.Vndr.Cisco.Tests`. | No custom comparer required; SemVer + vendor metadata suffices. |
| Vndr.Msrc | BE-Conn-MSRC | All tasks TODO | Canonical mapper must output product/build coverage as normalized rules (likely `msrc.patch` scheme) with provenance referencing KB IDs. | Sync with Models on adding scheme identifiers for MSRC packages; plan fixture coverage for monthly rollups. |

## Storage alignment quick reference (2025-10-11)
src/StellaOps.Feedser.Source.Cccs.Tests/CccsConnectorTests.cs (new file, 163 lines)
@@ -0,0 +1,163 @@
|
|||||||
|
using System;
|
||||||
|
using System.Net.Http;
|
||||||
|
using System.Net.Http.Headers;
|
||||||
|
using System.Text;
|
||||||
|
using System.Threading;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using FluentAssertions;
|
||||||
|
using Microsoft.Extensions.DependencyInjection;
|
||||||
|
using Microsoft.Extensions.Http;
|
||||||
|
using Microsoft.Extensions.Logging;
|
||||||
|
using Microsoft.Extensions.Logging.Abstractions;
|
||||||
|
using Microsoft.Extensions.Options;
|
||||||
|
using MongoDB.Bson;
|
||||||
|
using StellaOps.Feedser.Source.Cccs;
|
||||||
|
using StellaOps.Feedser.Source.Cccs.Configuration;
|
||||||
|
using StellaOps.Feedser.Source.Common;
|
||||||
|
using StellaOps.Feedser.Source.Common.Http;
|
||||||
|
using StellaOps.Feedser.Source.Common.Testing;
|
||||||
|
using StellaOps.Feedser.Storage.Mongo;
|
||||||
|
using StellaOps.Feedser.Storage.Mongo.Advisories;
|
||||||
|
using StellaOps.Feedser.Storage.Mongo.Documents;
|
||||||
|
using StellaOps.Feedser.Testing;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace StellaOps.Feedser.Source.Cccs.Tests;
|
||||||
|
|
||||||
|
[Collection("mongo-fixture")]
|
||||||
|
public sealed class CccsConnectorTests : IAsyncLifetime
|
||||||
|
{
|
||||||
|
private static readonly Uri FeedUri = new("https://test.local/api/cccs/threats/v1/get?lang=en&content_type=cccs_threat");
|
||||||
|
private static readonly Uri TaxonomyUri = new("https://test.local/api/cccs/taxonomy/v1/get?lang=en&vocabulary=cccs_alert_type");
|
||||||
|
|
||||||
|
private readonly MongoIntegrationFixture _fixture;
|
||||||
|
private readonly CannedHttpMessageHandler _handler;
|
||||||
|
|
||||||
|
public CccsConnectorTests(MongoIntegrationFixture fixture)
|
||||||
|
{
|
||||||
|
_fixture = fixture;
|
||||||
|
_handler = new CannedHttpMessageHandler();
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task FetchParseMap_ProducesCanonicalAdvisory()
|
||||||
|
{
|
||||||
|
await using var provider = await BuildServiceProviderAsync();
|
||||||
|
SeedFeedResponses();
|
||||||
|
|
||||||
|
var connector = provider.GetRequiredService<CccsConnector>();
|
||||||
|
await connector.FetchAsync(provider, CancellationToken.None);
|
||||||
|
await connector.ParseAsync(provider, CancellationToken.None);
|
||||||
|
await connector.MapAsync(provider, CancellationToken.None);
|
||||||
|
|
||||||
|
var advisoryStore = provider.GetRequiredService<IAdvisoryStore>();
|
||||||
|
var advisories = await advisoryStore.GetRecentAsync(10, CancellationToken.None);
|
||||||
|
advisories.Should().HaveCount(1);
|
||||||
|
|
||||||
|
var advisory = advisories[0];
|
||||||
|
advisory.AdvisoryKey.Should().Be("TEST-001");
|
||||||
|
advisory.Title.Should().Be("Test Advisory Title");
|
||||||
|
advisory.Aliases.Should().Contain(new[] { "TEST-001", "CVE-2020-1234", "CVE-2021-9999" });
|
||||||
|
advisory.References.Should().Contain(reference => reference.Url == "https://example.com/details");
|
||||||
|
advisory.References.Should().Contain(reference => reference.Url == "https://www.cyber.gc.ca/en/contact-cyber-centre?lang=en");
|
||||||
|
advisory.AffectedPackages.Should().ContainSingle(pkg => pkg.Identifier == "Vendor Widget 1.0");
|
||||||
|
advisory.AffectedPackages.Should().Contain(pkg => pkg.Identifier == "Vendor Widget 2.0");
|
||||||
|
|
||||||
|
var stateRepository = provider.GetRequiredService<ISourceStateRepository>();
|
||||||
|
var state = await stateRepository.TryGetAsync(CccsConnectorPlugin.SourceName, CancellationToken.None);
|
||||||
|
state.Should().NotBeNull();
|
||||||
|
state!.Cursor.Should().NotBeNull();
|
||||||
|
state.Cursor.TryGetValue("pendingDocuments", out var pendingDocs).Should().BeTrue();
|
||||||
|
pendingDocs!.AsBsonArray.Should().BeEmpty();
|
||||||
|
state.Cursor.TryGetValue("pendingMappings", out var pendingMappings).Should().BeTrue();
|
||||||
|
pendingMappings!.AsBsonArray.Should().BeEmpty();
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task Fetch_PersistsRawDocumentWithMetadata()
|
||||||
|
{
|
||||||
|
await using var provider = await BuildServiceProviderAsync();
|
||||||
|
SeedFeedResponses();
|
||||||
|
|
||||||
|
var connector = provider.GetRequiredService<CccsConnector>();
|
||||||
|
await connector.FetchAsync(provider, CancellationToken.None);
|
||||||
|
|
||||||
|
var documentStore = provider.GetRequiredService<IDocumentStore>();
|
||||||
|
var document = await documentStore.FindBySourceAndUriAsync(CccsConnectorPlugin.SourceName, "https://www.cyber.gc.ca/en/alerts-advisories/test-advisory", CancellationToken.None);
|
||||||
|
document.Should().NotBeNull();
|
||||||
|
document!.Status.Should().Be(DocumentStatuses.PendingParse);
|
||||||
|
document.Metadata.Should().ContainKey("cccs.language").WhoseValue.Should().Be("en");
|
||||||
|
document.Metadata.Should().ContainKey("cccs.serialNumber").WhoseValue.Should().Be("TEST-001");
|
||||||
|
document.ContentType.Should().Be("application/json");
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task<ServiceProvider> BuildServiceProviderAsync()
|
||||||
|
{
|
||||||
|
await _fixture.Client.DropDatabaseAsync(_fixture.Database.DatabaseNamespace.DatabaseName);
|
||||||
|
_handler.Clear();
|
||||||
|
|
||||||
|
var services = new ServiceCollection();
|
||||||
|
services.AddLogging(builder => builder.AddProvider(NullLoggerProvider.Instance));
|
||||||
|
services.AddSingleton(_handler);
|
||||||
|
|
||||||
|
services.AddMongoStorage(options =>
|
||||||
|
{
|
||||||
|
options.ConnectionString = _fixture.Runner.ConnectionString;
|
||||||
|
options.DatabaseName = _fixture.Database.DatabaseNamespace.DatabaseName;
|
||||||
|
options.CommandTimeout = TimeSpan.FromSeconds(5);
|
||||||
|
});
|
||||||
|
|
||||||
|
services.AddSourceCommon();
|
||||||
|
services.AddCccsConnector(options =>
|
||||||
|
{
|
||||||
|
options.Feeds.Clear();
|
||||||
|
options.Feeds.Add(new CccsFeedEndpoint("en", FeedUri));
|
||||||
|
options.RequestDelay = TimeSpan.Zero;
|
||||||
|
options.MaxEntriesPerFetch = 10;
|
||||||
|
options.MaxKnownEntries = 32;
|
||||||
|
});
|
||||||
|
|
||||||
|
services.Configure<HttpClientFactoryOptions>(CccsOptions.HttpClientName, builderOptions =>
|
||||||
|
{
|
||||||
|
builderOptions.HttpMessageHandlerBuilderActions.Add(builder =>
|
||||||
|
{
|
||||||
|
builder.PrimaryHandler = _handler;
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
var provider = services.BuildServiceProvider();
|
||||||
|
var bootstrapper = provider.GetRequiredService<MongoBootstrapper>();
|
||||||
|
await bootstrapper.InitializeAsync(CancellationToken.None);
|
||||||
|
return provider;
|
||||||
|
}
|
||||||
|
|
||||||
|
private void SeedFeedResponses()
|
||||||
|
{
|
||||||
|
AddJsonResponse(FeedUri, ReadFixture("cccs-feed-en.json"));
|
||||||
|
AddJsonResponse(TaxonomyUri, ReadFixture("cccs-taxonomy-en.json"));
|
||||||
|
}
|
||||||
|
|
||||||
|
private void AddJsonResponse(Uri uri, string json, string? etag = null)
|
||||||
|
{
|
||||||
|
_handler.AddResponse(uri, () =>
|
||||||
|
{
|
||||||
|
var response = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
|
||||||
|
{
|
||||||
|
Content = new StringContent(json, Encoding.UTF8, "application/json"),
|
||||||
|
};
|
||||||
|
if (!string.IsNullOrWhiteSpace(etag))
|
||||||
|
{
|
||||||
|
response.Headers.ETag = new EntityTagHeaderValue(etag);
|
||||||
|
}
|
||||||
|
|
||||||
|
return response;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
private static string ReadFixture(string fileName)
|
||||||
|
=> System.IO.File.ReadAllText(System.IO.Path.Combine(AppContext.BaseDirectory, "Fixtures", fileName));
|
||||||
|
|
||||||
|
public Task InitializeAsync() => Task.CompletedTask;
|
||||||
|
|
||||||
|
public Task DisposeAsync() => Task.CompletedTask;
|
||||||
|
}
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"ERROR": false,
|
||||||
|
"response": [
|
||||||
|
{
|
||||||
|
"nid": 1001,
|
||||||
|
"title": "Test Advisory Title",
|
||||||
|
"uuid": "uuid-test-001",
|
||||||
|
"banner": null,
|
||||||
|
"lang": "en",
|
||||||
|
"date_modified": "2025-08-11",
|
||||||
|
"date_modified_ts": "2025-08-11T12:00:00Z",
|
||||||
|
"date_created": "2025-08-10T15:30:00Z",
|
||||||
|
"summary": "Summary of advisory.",
|
||||||
|
"body": [
|
||||||
|
"<article><p><strong>Number: TEST-001<br/>Date: 14 April 2018</strong></p><h2>Affected Products</h2><ul><li>Vendor Widget 1.0</li><li>Vendor Widget 2.0</li></ul><p>See <a href=\"https://example.com/details?utm_source=rss&utm_medium=email\">Details Link</a>.</p><p>Internal link <a href=\"/en/contact-cyber-centre?utm_campaign=newsletter\">Contact</a>.</p><p>Mitigation for CVE-2020-1234 and CVE-2021-9999.</p></article>"
|
||||||
|
],
|
||||||
|
"url": "/en/alerts-advisories/test-advisory",
|
||||||
|
"alert_type": 397,
|
||||||
|
"serial_number": "TEST-001",
|
||||||
|
"subject": "Infrastructure",
|
||||||
|
"moderation_state": "published",
|
||||||
|
"external_url": "https://example.com/external/advisory"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,21 @@
|
|||||||
|
{
|
||||||
|
"sourceId": "TEST-002-FR",
|
||||||
|
"serialNumber": "TEST-002-FR",
|
||||||
|
"uuid": "uuid-test-002",
|
||||||
|
"language": "fr",
|
||||||
|
"title": "Avis de sécurité – Mise à jour urgente",
|
||||||
|
"summary": "Résumé de l'avis en français.",
|
||||||
|
"canonicalUrl": "https://www.cyber.gc.ca/fr/alertes-avis/test-avis",
|
||||||
|
"externalUrl": "https://exemple.ca/avis",
|
||||||
|
"bodyHtml": "<article><p><strong>Numéro : TEST-002-FR<br/>Date : 15 août 2025</strong></p><h2>Produits touchés</h2><div class=\"product-list\"><ul><li>Produit Exemple 3.1</li><li>Produit Exemple 3.2<ul><li>Variante 3.2.1</li></ul></li></ul></div><p>Voir <a href=\"https://exemple.ca/details?utm_campaign=mailing\">Lien de détails</a>.</p><p>Lien interne <a href=\"/fr/contact-centre-cyber\">Contactez-nous</a>.</p><p>Correctifs pour CVE-2024-1111.</p></article>",
|
||||||
|
"bodySegments": [
|
||||||
|
"<article><p><strong>Numéro : TEST-002-FR<br/>Date : 15 août 2025</strong></p><h2>Produits touchés</h2><div class=\"product-list\"><ul><li>Produit Exemple 3.1</li><li>Produit Exemple 3.2<ul><li>Variante 3.2.1</li></ul></li></ul></div><p>Voir <a href=\"https://exemple.ca/details?utm_campaign=mailing\">Lien de détails</a>.</p><p>Lien interne <a href=\"/fr/contact-centre-cyber\">Contactez-nous</a>.</p><p>Correctifs pour CVE-2024-1111.</p></article>"
|
||||||
|
],
|
||||||
|
"alertType": "Alerte",
|
||||||
|
"subject": "Infrastructure critique",
|
||||||
|
"banner": null,
|
||||||
|
"published": "2025-08-15T13:45:00Z",
|
||||||
|
"modified": "2025-08-16T09:15:00Z",
|
||||||
|
"rawCreated": "15 août 2025",
|
||||||
|
"rawModified": "2025-08-16T09:15:00Z"
|
||||||
|
}
|
||||||
@@ -0,0 +1,21 @@
|
|||||||
|
{
|
||||||
|
"sourceId": "TEST-001",
|
||||||
|
"serialNumber": "TEST-001",
|
||||||
|
"uuid": "uuid-test-001",
|
||||||
|
"language": "en",
|
||||||
|
"title": "Test Advisory Title",
|
||||||
|
"summary": "Summary of advisory.",
|
||||||
|
"canonicalUrl": "https://www.cyber.gc.ca/en/alerts-advisories/test-advisory",
|
||||||
|
"externalUrl": "https://example.com/external/advisory",
|
||||||
|
"bodyHtml": "<article><p><strong>Number: TEST-001<br/>Date: 14 April 2018</strong></p><h2>Affected Products</h2><ul><li>Vendor Widget 1.0</li><li>Vendor Widget 2.0</li></ul><p>See <a href=\"https://example.com/details?utm_source=rss&utm_medium=email\">Details Link</a>.</p><p>Internal link <a href=\"/en/contact-cyber-centre?utm_campaign=newsletter\">Contact</a>.</p><p>Mitigation for CVE-2020-1234 and CVE-2021-9999.</p></article>",
|
||||||
|
"bodySegments": [
|
||||||
|
"<article><p><strong>Number: TEST-001<br/>Date: 14 April 2018</strong></p><h2>Affected Products</h2><ul><li>Vendor Widget 1.0</li><li>Vendor Widget 2.0</li></ul><p>See <a href=\"https://example.com/details?utm_source=rss&utm_medium=email\">Details Link</a>.</p><p>Internal link <a href=\"/en/contact-cyber-centre?utm_campaign=newsletter\">Contact</a>.</p><p>Mitigation for CVE-2020-1234 and CVE-2021-9999.</p></article>"
|
||||||
|
],
|
||||||
|
"alertType": "Advisory",
|
||||||
|
"subject": "Infrastructure",
|
||||||
|
"banner": null,
|
||||||
|
"published": "2025-08-10T15:30:00Z",
|
||||||
|
"modified": "2025-08-11T12:00:00Z",
|
||||||
|
"rawCreated": "August 10, 2025",
|
||||||
|
"rawModified": "2025-08-11T12:00:00Z"
|
||||||
|
}
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"ERROR": false,
|
||||||
|
"response": [
|
||||||
|
{
|
||||||
|
"id": 396,
|
||||||
|
"title": "Advisory"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 397,
|
||||||
|
"title": "Alert"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,92 @@
|
|||||||
|
using System;
|
||||||
|
using System.IO;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Text.Json;
|
||||||
|
using FluentAssertions;
|
||||||
|
using StellaOps.Feedser.Source.Cccs.Internal;
|
||||||
|
using StellaOps.Feedser.Source.Common.Html;
|
||||||
|
using Xunit;
|
||||||
|
using Xunit.Abstractions;
|
||||||
|
|
||||||
|
namespace StellaOps.Feedser.Source.Cccs.Tests.Internal;
|
||||||
|
|
||||||
|
public sealed class CccsHtmlParserTests
|
||||||
|
{
|
||||||
|
private readonly ITestOutputHelper _output;
|
||||||
|
private static readonly HtmlContentSanitizer Sanitizer = new();
|
||||||
|
private static readonly CccsHtmlParser Parser = new(Sanitizer);
|
||||||
|
|
||||||
|
public CccsHtmlParserTests(ITestOutputHelper output)
|
||||||
|
{
|
||||||
|
_output = output ?? throw new ArgumentNullException(nameof(output));
|
||||||
|
}
|
||||||
|
|
||||||
|
public static IEnumerable<object[]> ParserCases()
|
||||||
|
{
|
||||||
|
yield return new object[]
|
||||||
|
{
|
||||||
|
"cccs-raw-advisory.json",
|
||||||
|
"TEST-001",
|
||||||
|
"en",
|
||||||
|
new[] { "Vendor Widget 1.0", "Vendor Widget 2.0" },
|
||||||
|
new[]
|
||||||
|
{
|
||||||
|
"https://example.com/details",
|
||||||
|
"https://www.cyber.gc.ca/en/contact-cyber-centre?lang=en"
|
||||||
|
},
|
||||||
|
new[] { "CVE-2020-1234", "CVE-2021-9999" }
|
||||||
|
};
|
||||||
|
|
||||||
|
yield return new object[]
|
||||||
|
{
|
||||||
|
"cccs-raw-advisory-fr.json",
|
||||||
|
"TEST-002-FR",
|
||||||
|
"fr",
|
||||||
|
new[] { "Produit Exemple 3.1", "Produit Exemple 3.2", "Variante 3.2.1" },
|
||||||
|
new[]
|
||||||
|
{
|
||||||
|
"https://exemple.ca/details",
|
||||||
|
"https://www.cyber.gc.ca/fr/contact-centre-cyber"
|
||||||
|
},
|
||||||
|
new[] { "CVE-2024-1111" }
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[MemberData(nameof(ParserCases))]
|
||||||
|
public void Parse_ExtractsExpectedFields(
|
||||||
|
string fixtureName,
|
||||||
|
string expectedSerial,
|
||||||
|
string expectedLanguage,
|
||||||
|
string[] expectedProducts,
|
||||||
|
string[] expectedReferenceUrls,
|
||||||
|
string[] expectedCves)
|
||||||
|
{
|
||||||
|
var raw = LoadFixture<CccsRawAdvisoryDocument>(fixtureName);
|
||||||
|
|
||||||
|
var dto = Parser.Parse(raw);
|
||||||
|
|
||||||
|
_output.WriteLine("Products: {0}", string.Join("|", dto.Products));
|
||||||
|
_output.WriteLine("References: {0}", string.Join("|", dto.References.Select(r => $"{r.Url} ({r.Label})")));
|
||||||
|
_output.WriteLine("CVEs: {0}", string.Join("|", dto.CveIds));
|
||||||
|
|
||||||
|
dto.SerialNumber.Should().Be(expectedSerial);
|
||||||
|
dto.Language.Should().Be(expectedLanguage);
|
||||||
|
dto.Products.Should().BeEquivalentTo(expectedProducts);
|
||||||
|
foreach (var url in expectedReferenceUrls)
|
||||||
|
{
|
||||||
|
dto.References.Should().Contain(reference => reference.Url == url);
|
||||||
|
}
|
||||||
|
|
||||||
|
dto.CveIds.Should().BeEquivalentTo(expectedCves);
|
||||||
|
dto.ContentHtml.Should().Contain("<ul>").And.Contain("<li>");
|
||||||
|
dto.ContentHtml.Should().Contain("<h2", because: "heading structure must survive sanitisation for UI rendering");
|
||||||
|
}
|
||||||
|
|
||||||
|
internal static T LoadFixture<T>(string fileName)
|
||||||
|
{
|
||||||
|
var path = Path.Combine(AppContext.BaseDirectory, "Fixtures", fileName);
|
||||||
|
var json = File.ReadAllText(path);
|
||||||
|
return JsonSerializer.Deserialize<T>(json, new JsonSerializerOptions(JsonSerializerDefaults.Web))!;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,43 @@
|
|||||||
|
using System;
|
||||||
|
using FluentAssertions;
|
||||||
|
using StellaOps.Feedser.Source.Cccs.Internal;
|
||||||
|
using StellaOps.Feedser.Source.Common;
|
||||||
|
using StellaOps.Feedser.Source.Common.Html;
|
||||||
|
using StellaOps.Feedser.Storage.Mongo.Documents;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace StellaOps.Feedser.Source.Cccs.Tests.Internal;
|
||||||
|
|
||||||
|
public sealed class CccsMapperTests
|
||||||
|
{
|
||||||
|
[Fact]
|
||||||
|
public void Map_CreatesCanonicalAdvisory()
|
||||||
|
{
|
||||||
|
var raw = CccsHtmlParserTests.LoadFixture<CccsRawAdvisoryDocument>("cccs-raw-advisory.json");
|
||||||
|
var dto = new CccsHtmlParser(new HtmlContentSanitizer()).Parse(raw);
|
||||||
|
var document = new DocumentRecord(
|
||||||
|
Guid.NewGuid(),
|
||||||
|
CccsConnectorPlugin.SourceName,
|
||||||
|
dto.CanonicalUrl,
|
||||||
|
DateTimeOffset.UtcNow,
|
||||||
|
"sha-test",
|
||||||
|
DocumentStatuses.PendingMap,
|
||||||
|
"application/json",
|
||||||
|
Headers: null,
|
||||||
|
Metadata: null,
|
||||||
|
Etag: null,
|
||||||
|
LastModified: dto.Modified,
|
||||||
|
GridFsId: null);
|
||||||
|
|
||||||
|
var recordedAt = DateTimeOffset.Parse("2025-08-12T00:00:00Z");
|
||||||
|
var advisory = CccsMapper.Map(dto, document, recordedAt);
|
||||||
|
|
||||||
|
advisory.AdvisoryKey.Should().Be("TEST-001");
|
||||||
|
advisory.Title.Should().Be(dto.Title);
|
||||||
|
advisory.Aliases.Should().Contain(new[] { "TEST-001", "CVE-2020-1234", "CVE-2021-9999" });
|
||||||
|
advisory.References.Should().Contain(reference => reference.Url == dto.CanonicalUrl && reference.Kind == "details");
|
||||||
|
advisory.References.Should().Contain(reference => reference.Url == "https://example.com/details");
|
||||||
|
advisory.AffectedPackages.Should().HaveCount(2);
|
||||||
|
advisory.Provenance.Should().ContainSingle(p => p.Source == CccsConnectorPlugin.SourceName && p.Kind == "advisory");
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,19 @@
|
|||||||
|
<Project Sdk="Microsoft.NET.Sdk">
|
||||||
|
<PropertyGroup>
|
||||||
|
<TargetFramework>net10.0</TargetFramework>
|
||||||
|
<ImplicitUsings>enable</ImplicitUsings>
|
||||||
|
<Nullable>enable</Nullable>
|
||||||
|
</PropertyGroup>
|
||||||
|
<ItemGroup>
|
||||||
|
<ProjectReference Include="../StellaOps.Feedser.Source.Common/StellaOps.Feedser.Source.Common.csproj" />
|
||||||
|
<ProjectReference Include="../StellaOps.Feedser.Source.Cccs/StellaOps.Feedser.Source.Cccs.csproj" />
|
||||||
|
</ItemGroup>
|
||||||
|
<ItemGroup>
|
||||||
|
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||||
|
</ItemGroup>
|
||||||
|
<ItemGroup>
|
||||||
|
<None Update="Fixtures\*.json">
|
||||||
|
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||||
|
</None>
|
||||||
|
</ItemGroup>
|
||||||
|
</Project>
|
||||||
src/StellaOps.Feedser.Source.Cccs/CccsConnector.cs (new file, 606 lines)
@@ -0,0 +1,606 @@
|
|||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Net.Http;
|
||||||
|
using System.Security.Cryptography;
|
||||||
|
using System.Text.Json;
|
||||||
|
using System.Text.Json.Serialization;
|
||||||
|
using System.Threading;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using System.Globalization;
|
||||||
|
using Microsoft.Extensions.Logging;
|
||||||
|
using Microsoft.Extensions.Options;
|
||||||
|
using MongoDB.Bson;
|
||||||
|
using StellaOps.Feedser.Source.Cccs.Configuration;
|
||||||
|
using StellaOps.Feedser.Source.Cccs.Internal;
|
||||||
|
using StellaOps.Feedser.Source.Common;
|
||||||
|
using StellaOps.Feedser.Source.Common.Fetch;
|
||||||
|
using StellaOps.Feedser.Storage.Mongo;
|
||||||
|
using StellaOps.Feedser.Storage.Mongo.Advisories;
|
||||||
|
using StellaOps.Feedser.Storage.Mongo.Documents;
|
||||||
|
using StellaOps.Feedser.Storage.Mongo.Dtos;
|
||||||
|
using StellaOps.Plugin;
|
||||||
|
|
||||||
|
namespace StellaOps.Feedser.Source.Cccs;
|
||||||
|
|
||||||
|
public sealed class CccsConnector : IFeedConnector
|
||||||
|
{
|
||||||
|
private static readonly JsonSerializerOptions RawSerializerOptions = new(JsonSerializerDefaults.Web)
|
||||||
|
{
|
||||||
|
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
|
||||||
|
};
|
||||||
|
|
||||||
|
private static readonly JsonSerializerOptions DtoSerializerOptions = new(JsonSerializerDefaults.Web)
|
||||||
|
{
|
||||||
|
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
|
||||||
|
};
|
||||||
|
|
||||||
|
private const string DtoSchemaVersion = "cccs.dto.v1";
|
||||||
|
|
||||||
|
private readonly CccsFeedClient _feedClient;
|
||||||
|
private readonly RawDocumentStorage _rawDocumentStorage;
|
||||||
|
private readonly IDocumentStore _documentStore;
|
||||||
|
private readonly IDtoStore _dtoStore;
|
||||||
|
private readonly IAdvisoryStore _advisoryStore;
|
||||||
|
private readonly ISourceStateRepository _stateRepository;
|
||||||
|
private readonly CccsHtmlParser _htmlParser;
|
||||||
|
private readonly CccsDiagnostics _diagnostics;
|
||||||
|
private readonly CccsOptions _options;
|
||||||
|
private readonly TimeProvider _timeProvider;
|
||||||
|
private readonly ILogger<CccsConnector> _logger;
|
||||||
|
|
||||||
|
public CccsConnector(
|
||||||
|
CccsFeedClient feedClient,
|
||||||
|
RawDocumentStorage rawDocumentStorage,
|
||||||
|
IDocumentStore documentStore,
|
||||||
|
IDtoStore dtoStore,
|
||||||
|
IAdvisoryStore advisoryStore,
|
||||||
|
ISourceStateRepository stateRepository,
|
||||||
|
CccsHtmlParser htmlParser,
|
||||||
|
CccsDiagnostics diagnostics,
|
||||||
|
IOptions<CccsOptions> options,
|
||||||
|
TimeProvider? timeProvider,
|
||||||
|
ILogger<CccsConnector> logger)
|
||||||
|
{
|
||||||
|
_feedClient = feedClient ?? throw new ArgumentNullException(nameof(feedClient));
|
||||||
|
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
|
||||||
|
_documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
|
||||||
|
_dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
|
||||||
|
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
|
||||||
|
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
|
||||||
|
_htmlParser = htmlParser ?? throw new ArgumentNullException(nameof(htmlParser));
|
||||||
|
_diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
|
||||||
|
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
|
||||||
|
_options.Validate();
|
||||||
|
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||||
|
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||||
|
}
|
||||||
|
|
||||||
|
public string SourceName => CccsConnectorPlugin.SourceName;
|
||||||
|
|
||||||
|
public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
ArgumentNullException.ThrowIfNull(services);
|
||||||
|
|
||||||
|
var now = _timeProvider.GetUtcNow();
|
||||||
|
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
|
||||||
|
var pendingDocuments = new HashSet<Guid>(cursor.PendingDocuments);
|
||||||
|
var pendingMappings = new HashSet<Guid>(cursor.PendingMappings);
|
||||||
|
var knownHashes = new Dictionary<string, string>(cursor.KnownEntryHashes, StringComparer.Ordinal);
|
||||||
|
var feedsProcessed = 0;
|
||||||
|
var totalItems = 0;
|
||||||
|
var added = 0;
|
||||||
|
var unchanged = 0;
|
||||||
|
|
||||||
|
try
|
||||||
|
{
|
||||||
|
foreach (var feed in _options.Feeds)
|
||||||
|
{
|
||||||
|
cancellationToken.ThrowIfCancellationRequested();
|
||||||
|
|
||||||
|
_diagnostics.FetchAttempt();
|
||||||
|
var result = await _feedClient.FetchAsync(feed, _options.RequestTimeout, cancellationToken).ConfigureAwait(false);
|
||||||
|
feedsProcessed++;
|
||||||
|
totalItems += result.Items.Count;
|
||||||
|
|
||||||
|
if (result.Items.Count == 0)
|
||||||
|
{
|
||||||
|
_diagnostics.FetchSuccess();
|
||||||
|
await DelayBetweenRequestsAsync(cancellationToken).ConfigureAwait(false);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
var items = result.Items
|
||||||
|
.Where(static item => !string.IsNullOrWhiteSpace(item.Title))
|
||||||
|
.OrderByDescending(item => ParseDate(item.DateModifiedTimestamp) ?? ParseDate(item.DateModified) ?? DateTimeOffset.MinValue)
|
||||||
|
.ThenByDescending(item => ParseDate(item.DateCreated) ?? DateTimeOffset.MinValue)
|
||||||
|
.ToList();
|
||||||
|
|
||||||
|
foreach (var item in items)
|
||||||
|
{
|
||||||
|
cancellationToken.ThrowIfCancellationRequested();
|
||||||
|
|
||||||
|
var documentUri = BuildDocumentUri(item, feed);
|
||||||
|
var rawDocument = CreateRawDocument(item, feed, result.AlertTypes);
|
||||||
|
var payload = JsonSerializer.SerializeToUtf8Bytes(rawDocument, RawSerializerOptions);
|
||||||
|
var sha = ComputeSha256(payload);
|
||||||
|
|
||||||
|
if (knownHashes.TryGetValue(documentUri, out var existingHash)
|
||||||
|
                    && string.Equals(existingHash, sha, StringComparison.Ordinal))
                {
                    unchanged++;
                    _diagnostics.FetchUnchanged();
                    continue;
                }

                var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, documentUri, cancellationToken).ConfigureAwait(false);
                if (existing is not null
                    && string.Equals(existing.Sha256, sha, StringComparison.OrdinalIgnoreCase)
                    && string.Equals(existing.Status, DocumentStatuses.Mapped, StringComparison.Ordinal))
                {
                    knownHashes[documentUri] = sha;
                    unchanged++;
                    _diagnostics.FetchUnchanged();
                    continue;
                }

                var gridFsId = await _rawDocumentStorage.UploadAsync(
                    SourceName,
                    documentUri,
                    payload,
                    "application/json",
                    expiresAt: null,
                    cancellationToken).ConfigureAwait(false);

                var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
                {
                    ["cccs.language"] = rawDocument.Language,
                    ["cccs.sourceId"] = rawDocument.SourceId,
                };

                if (!string.IsNullOrWhiteSpace(rawDocument.SerialNumber))
                {
                    metadata["cccs.serialNumber"] = rawDocument.SerialNumber!;
                }

                if (!string.IsNullOrWhiteSpace(rawDocument.AlertType))
                {
                    metadata["cccs.alertType"] = rawDocument.AlertType!;
                }

                var recordId = existing?.Id ?? Guid.NewGuid();
                var record = new DocumentRecord(
                    recordId,
                    SourceName,
                    documentUri,
                    now,
                    sha,
                    DocumentStatuses.PendingParse,
                    "application/json",
                    Headers: null,
                    Metadata: metadata,
                    Etag: null,
                    LastModified: rawDocument.Modified ?? rawDocument.Published ?? result.LastModifiedUtc,
                    GridFsId: gridFsId,
                    ExpiresAt: null);

                var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
                pendingDocuments.Add(upserted.Id);
                pendingMappings.Remove(upserted.Id);
                knownHashes[documentUri] = sha;
                added++;
                _diagnostics.FetchDocument();

                if (added >= _options.MaxEntriesPerFetch)
                {
                    break;
                }
            }

            _diagnostics.FetchSuccess();
            await DelayBetweenRequestsAsync(cancellationToken).ConfigureAwait(false);

            if (added >= _options.MaxEntriesPerFetch)
            {
                break;
            }
        }
    }
    catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException or JsonException or InvalidOperationException)
    {
        _diagnostics.FetchFailure();
        _logger.LogError(ex, "CCCS fetch failed");
        await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false);
        throw;
    }

    var trimmedHashes = TrimKnownHashes(knownHashes, _options.MaxKnownEntries);
    var updatedCursor = cursor
        .WithPendingDocuments(pendingDocuments)
        .WithPendingMappings(pendingMappings)
        .WithKnownEntryHashes(trimmedHashes)
        .WithLastFetch(now);

    await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
    _logger.LogInformation(
        "CCCS fetch completed feeds={Feeds} items={Items} newDocuments={Added} unchanged={Unchanged} pendingDocuments={PendingDocuments} pendingMappings={PendingMappings}",
        feedsProcessed,
        totalItems,
        added,
        unchanged,
        pendingDocuments.Count,
        pendingMappings.Count);
}

public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(services);
    var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
    if (cursor.PendingDocuments.Count == 0)
    {
        return;
    }

    var pendingDocuments = cursor.PendingDocuments.ToList();
    var pendingMappings = cursor.PendingMappings.ToList();
    var now = _timeProvider.GetUtcNow();
    var parsed = 0;
    var parseFailures = 0;

    foreach (var documentId in cursor.PendingDocuments)
    {
        cancellationToken.ThrowIfCancellationRequested();

        var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
        if (document is null)
        {
            pendingDocuments.Remove(documentId);
            pendingMappings.Remove(documentId);
            _diagnostics.ParseFailure();
            parseFailures++;
            continue;
        }

        if (!document.GridFsId.HasValue)
        {
            _diagnostics.ParseFailure();
            _logger.LogWarning("CCCS document {DocumentId} missing GridFS payload", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingDocuments.Remove(documentId);
            pendingMappings.Remove(documentId);
            parseFailures++;
            continue;
        }

        byte[] payload;
        try
        {
            payload = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            _diagnostics.ParseFailure();
            _logger.LogError(ex, "CCCS unable to download raw document {DocumentId}", documentId);
            throw;
        }

        CccsRawAdvisoryDocument? raw;
        try
        {
            raw = JsonSerializer.Deserialize<CccsRawAdvisoryDocument>(payload, RawSerializerOptions);
        }
        catch (Exception ex)
        {
            _diagnostics.ParseFailure();
            _logger.LogWarning(ex, "CCCS failed to deserialize raw document {DocumentId}", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingDocuments.Remove(documentId);
            pendingMappings.Remove(documentId);
            parseFailures++;
            continue;
        }

        if (raw is null)
        {
            _diagnostics.ParseFailure();
            _logger.LogWarning("CCCS raw document {DocumentId} produced null payload", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingDocuments.Remove(documentId);
            pendingMappings.Remove(documentId);
            parseFailures++;
            continue;
        }

        CccsAdvisoryDto dto;
        try
        {
            dto = _htmlParser.Parse(raw);
        }
        catch (Exception ex)
        {
            _diagnostics.ParseFailure();
            _logger.LogWarning(ex, "CCCS failed to parse advisory DTO for {DocumentId}", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingDocuments.Remove(documentId);
            pendingMappings.Remove(documentId);
            parseFailures++;
            continue;
        }

        var dtoJson = JsonSerializer.Serialize(dto, DtoSerializerOptions);
        var dtoBson = BsonDocument.Parse(dtoJson);
        var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, DtoSchemaVersion, dtoBson, now);
        await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
        await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);

        pendingDocuments.Remove(documentId);
        if (!pendingMappings.Contains(documentId))
        {
            pendingMappings.Add(documentId);
        }
        _diagnostics.ParseSuccess();
        parsed++;
    }

    var updatedCursor = cursor
        .WithPendingDocuments(pendingDocuments)
        .WithPendingMappings(pendingMappings);

    await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
    if (parsed > 0 || parseFailures > 0)
    {
        _logger.LogInformation(
            "CCCS parse completed parsed={Parsed} failures={Failures} pendingDocuments={PendingDocuments} pendingMappings={PendingMappings}",
            parsed,
            parseFailures,
            pendingDocuments.Count,
            pendingMappings.Count);
    }
}

public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(services);
    var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
    if (cursor.PendingMappings.Count == 0)
    {
        return;
    }

    var pendingMappings = cursor.PendingMappings.ToList();
    var mapped = 0;
    var mappingFailures = 0;

    foreach (var documentId in cursor.PendingMappings)
    {
        cancellationToken.ThrowIfCancellationRequested();

        var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
        if (document is null)
        {
            pendingMappings.Remove(documentId);
            _diagnostics.MapFailure();
            mappingFailures++;
            continue;
        }

        var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false);
        if (dtoRecord is null)
        {
            _diagnostics.MapFailure();
            _logger.LogWarning("CCCS document {DocumentId} missing DTO payload", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingMappings.Remove(documentId);
            mappingFailures++;
            continue;
        }

        CccsAdvisoryDto? dto;
        try
        {
            var json = dtoRecord.Payload.ToJson();
            dto = JsonSerializer.Deserialize<CccsAdvisoryDto>(json, DtoSerializerOptions);
        }
        catch (Exception ex)
        {
            _diagnostics.MapFailure();
            _logger.LogWarning(ex, "CCCS failed to deserialize DTO for document {DocumentId}", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingMappings.Remove(documentId);
            mappingFailures++;
            continue;
        }

        if (dto is null)
        {
            _diagnostics.MapFailure();
            _logger.LogWarning("CCCS DTO for document {DocumentId} evaluated to null", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingMappings.Remove(documentId);
            mappingFailures++;
            continue;
        }

        try
        {
            var advisory = CccsMapper.Map(dto, document, dtoRecord.ValidatedAt);
            await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
            pendingMappings.Remove(documentId);
            _diagnostics.MapSuccess();
            mapped++;
        }
        catch (Exception ex)
        {
            _diagnostics.MapFailure();
            _logger.LogError(ex, "CCCS mapping failed for document {DocumentId}", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingMappings.Remove(documentId);
            mappingFailures++;
        }
    }

    var updatedCursor = cursor.WithPendingMappings(pendingMappings);
    await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
    if (mapped > 0 || mappingFailures > 0)
    {
        _logger.LogInformation(
            "CCCS map completed mapped={Mapped} failures={Failures} pendingMappings={PendingMappings}",
            mapped,
            mappingFailures,
            pendingMappings.Count);
    }
}

private async Task<CccsCursor> GetCursorAsync(CancellationToken cancellationToken)
{
    var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
    return state is null ? CccsCursor.Empty : CccsCursor.FromBson(state.Cursor);
}

private Task UpdateCursorAsync(CccsCursor cursor, CancellationToken cancellationToken)
{
    var document = cursor.ToBsonDocument();
    var completedAt = cursor.LastFetchAt ?? _timeProvider.GetUtcNow();
    return _stateRepository.UpdateCursorAsync(SourceName, document, completedAt, cancellationToken);
}

private async Task DelayBetweenRequestsAsync(CancellationToken cancellationToken)
{
    if (_options.RequestDelay <= TimeSpan.Zero)
    {
        return;
    }

    try
    {
        await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false);
    }
    catch (TaskCanceledException)
    {
        // Ignore cancellation during delay; caller handles.
    }
}

private static string BuildDocumentUri(CccsFeedItem item, CccsFeedEndpoint feed)
{
    if (!string.IsNullOrWhiteSpace(item.Url))
    {
        if (Uri.TryCreate(item.Url, UriKind.Absolute, out var absolute))
        {
            return absolute.ToString();
        }

        var baseUri = new Uri("https://www.cyber.gc.ca", UriKind.Absolute);
        if (Uri.TryCreate(baseUri, item.Url, out var combined))
        {
            return combined.ToString();
        }
    }

    return $"https://www.cyber.gc.ca/api/cccs/threats/{feed.Language}/{item.Nid}";
}

private static CccsRawAdvisoryDocument CreateRawDocument(CccsFeedItem item, CccsFeedEndpoint feed, IReadOnlyDictionary<int, string> taxonomy)
{
    var language = string.IsNullOrWhiteSpace(item.Language) ? feed.Language : item.Language!.Trim();
    var identifier = !string.IsNullOrWhiteSpace(item.SerialNumber)
        ? item.SerialNumber!.Trim()
        : !string.IsNullOrWhiteSpace(item.Uuid)
            ? item.Uuid!.Trim()
            : $"nid-{item.Nid}";

    var canonicalUrl = BuildDocumentUri(item, feed);
    var bodySegments = item.Body ?? Array.Empty<string>();
    var bodyHtml = string.Join(Environment.NewLine, bodySegments);
    var published = ParseDate(item.DateCreated);
    var modified = ParseDate(item.DateModifiedTimestamp) ?? ParseDate(item.DateModified);
    var alertType = ResolveAlertType(item, taxonomy);

    return new CccsRawAdvisoryDocument
    {
        SourceId = identifier,
        SerialNumber = item.SerialNumber?.Trim(),
        Uuid = item.Uuid,
        Language = language.ToLowerInvariant(),
        Title = item.Title?.Trim() ?? identifier,
        Summary = item.Summary?.Trim(),
        CanonicalUrl = canonicalUrl,
        ExternalUrl = item.ExternalUrl,
        BodyHtml = bodyHtml,
        BodySegments = bodySegments,
        AlertType = alertType,
        Subject = item.Subject,
        Banner = item.Banner,
        Published = published,
        Modified = modified,
        RawDateCreated = item.DateCreated,
        RawDateModified = item.DateModifiedTimestamp ?? item.DateModified,
    };
}

private static string? ResolveAlertType(CccsFeedItem item, IReadOnlyDictionary<int, string> taxonomy)
{
    if (item.AlertType.ValueKind == JsonValueKind.Number)
    {
        var id = item.AlertType.GetInt32();
        return taxonomy.TryGetValue(id, out var label) ? label : id.ToString(CultureInfo.InvariantCulture);
    }

    if (item.AlertType.ValueKind == JsonValueKind.String)
    {
        return item.AlertType.GetString();
    }

    if (item.AlertType.ValueKind == JsonValueKind.Array)
    {
        foreach (var element in item.AlertType.EnumerateArray())
        {
            if (element.ValueKind == JsonValueKind.Number)
            {
                var id = element.GetInt32();
                if (taxonomy.TryGetValue(id, out var label))
                {
                    return label;
                }
            }
            else if (element.ValueKind == JsonValueKind.String)
            {
                var label = element.GetString();
                if (!string.IsNullOrWhiteSpace(label))
                {
                    return label;
                }
            }
        }
    }

    return null;
}

private static Dictionary<string, string> TrimKnownHashes(Dictionary<string, string> hashes, int maxEntries)
{
    if (hashes.Count <= maxEntries)
    {
        return hashes;
    }

    var overflow = hashes.Count - maxEntries;
    foreach (var key in hashes.Keys.Take(overflow).ToList())
    {
        hashes.Remove(key);
    }

    return hashes;
}

private static DateTimeOffset? ParseDate(string? value)
    => string.IsNullOrWhiteSpace(value)
        ? null
        : DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed)
            ? parsed
            : null;

private static string ComputeSha256(byte[] payload)
    => Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant();
}

21
src/StellaOps.Feedser.Source.Cccs/CccsConnectorPlugin.cs
Normal file
@@ -0,0 +1,21 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Plugin;

namespace StellaOps.Feedser.Source.Cccs;

public sealed class CccsConnectorPlugin : IConnectorPlugin
{
    public const string SourceName = "cccs";

    public string Name => SourceName;

    public bool IsAvailable(IServiceProvider services)
        => services.GetService<CccsConnector>() is not null;

    public IFeedConnector Create(IServiceProvider services)
    {
        ArgumentNullException.ThrowIfNull(services);
        return services.GetRequiredService<CccsConnector>();
    }
}
@@ -0,0 +1,50 @@
using System;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.DependencyInjection;
using StellaOps.Feedser.Core.Jobs;
using StellaOps.Feedser.Source.Cccs.Configuration;

namespace StellaOps.Feedser.Source.Cccs;

public sealed class CccsDependencyInjectionRoutine : IDependencyInjectionRoutine
{
    private const string ConfigurationSection = "feedser:sources:cccs";

    public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);

        services.AddCccsConnector(options =>
        {
            configuration.GetSection(ConfigurationSection).Bind(options);
            options.Validate();
        });

        services.AddTransient<CccsFetchJob>();

        services.PostConfigure<JobSchedulerOptions>(options =>
        {
            EnsureJob(options, CccsJobKinds.Fetch, typeof(CccsFetchJob));
        });

        return services;
    }

    private static void EnsureJob(JobSchedulerOptions options, string kind, Type jobType)
    {
        if (options.Definitions.ContainsKey(kind))
        {
            return;
        }

        options.Definitions[kind] = new JobDefinition(
            kind,
            jobType,
            options.DefaultTimeout,
            options.DefaultLeaseDuration,
            CronExpression: null,
            Enabled: true);
    }
}
@@ -0,0 +1,47 @@
using System;
using System.Linq;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using StellaOps.Feedser.Source.Cccs.Configuration;
using StellaOps.Feedser.Source.Cccs.Internal;
using StellaOps.Feedser.Source.Common.Http;
using StellaOps.Feedser.Source.Common.Html;

namespace StellaOps.Feedser.Source.Cccs;

public static class CccsServiceCollectionExtensions
{
    public static IServiceCollection AddCccsConnector(this IServiceCollection services, Action<CccsOptions> configure)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configure);

        services.AddOptions<CccsOptions>()
            .Configure(configure)
            .PostConfigure(static options => options.Validate());

        services.AddSourceHttpClient(CccsOptions.HttpClientName, static (sp, clientOptions) =>
        {
            var options = sp.GetRequiredService<IOptions<CccsOptions>>().Value;
            clientOptions.UserAgent = "StellaOps.Feedser.Cccs/1.0";
            clientOptions.Timeout = options.RequestTimeout;
            clientOptions.AllowedHosts.Clear();

            foreach (var feed in options.Feeds.Where(static feed => feed.Uri is not null))
            {
                clientOptions.AllowedHosts.Add(feed.Uri!.Host);
            }

            clientOptions.AllowedHosts.Add("www.cyber.gc.ca");
            clientOptions.AllowedHosts.Add("cyber.gc.ca");
        });

        services.TryAddSingleton<HtmlContentSanitizer>();
        services.TryAddSingleton<CccsDiagnostics>();
        services.TryAddSingleton<CccsHtmlParser>();
        services.TryAddSingleton<CccsFeedClient>();
        services.AddTransient<CccsConnector>();
        return services;
    }
}
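For orientation only (not part of this commit): a minimal host wiring sketch. It assumes the Feedser storage and parsing services that CccsConnector depends on are registered elsewhere, and that `configuration` is the host's IConfiguration; the section name `feedser:sources:cccs` comes from CccsDependencyInjectionRoutine above.

// Sketch: bind CccsOptions from configuration and resolve the connector.
var services = new ServiceCollection();
services.AddCccsConnector(options =>
{
    configuration.GetSection("feedser:sources:cccs").Bind(options);
    options.MaxEntriesPerFetch = 40; // hypothetical override of the default (80)
});
using var provider = services.BuildServiceProvider();
var connector = provider.GetRequiredService<CccsConnector>();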
@@ -1,29 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Plugin;

namespace StellaOps.Feedser.Source.Cccs;

public sealed class CccsConnectorPlugin : IConnectorPlugin
{
    public string Name => "cccs";

    public bool IsAvailable(IServiceProvider services) => true;

    public IFeedConnector Create(IServiceProvider services) => new StubConnector(Name);

    private sealed class StubConnector : IFeedConnector
    {
        public StubConnector(string sourceName) => SourceName = sourceName;

        public string SourceName { get; }

        public Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask;

        public Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask;

        public Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask;
    }
}

175
src/StellaOps.Feedser.Source.Cccs/Configuration/CccsOptions.cs
Normal file
@@ -0,0 +1,175 @@
using System;
using System.Collections.Generic;

namespace StellaOps.Feedser.Source.Cccs.Configuration;

public sealed class CccsOptions
{
    public const string HttpClientName = "feedser.source.cccs";

    private readonly List<CccsFeedEndpoint> _feeds = new();

    public CccsOptions()
    {
        _feeds.Add(new CccsFeedEndpoint("en", new Uri("https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=en&content_type=cccs_threat")));
        _feeds.Add(new CccsFeedEndpoint("fr", new Uri("https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=fr&content_type=cccs_threat")));
    }

    /// <summary>
    /// Feed endpoints to poll; configure per language or content category.
    /// </summary>
    public IList<CccsFeedEndpoint> Feeds => _feeds;

    /// <summary>
    /// Maximum number of entries to enqueue per fetch cycle.
    /// </summary>
    public int MaxEntriesPerFetch { get; set; } = 80;

    /// <summary>
    /// Maximum remembered entries (URI+hash) for deduplication.
    /// </summary>
    public int MaxKnownEntries { get; set; } = 512;

    /// <summary>
    /// Timeout applied to feed and taxonomy requests.
    /// </summary>
    public TimeSpan RequestTimeout { get; set; } = TimeSpan.FromSeconds(30);

    /// <summary>
    /// Delay between successive feed requests to respect upstream throttling.
    /// </summary>
    public TimeSpan RequestDelay { get; set; } = TimeSpan.FromMilliseconds(250);

    /// <summary>
    /// Backoff recorded in source state when fetch fails.
    /// </summary>
    public TimeSpan FailureBackoff { get; set; } = TimeSpan.FromMinutes(1);

    public void Validate()
    {
        if (_feeds.Count == 0)
        {
            throw new InvalidOperationException("At least one CCCS feed endpoint must be configured.");
        }

        var seenLanguages = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        foreach (var feed in _feeds)
        {
            feed.Validate();
            if (!seenLanguages.Add(feed.Language))
            {
                throw new InvalidOperationException($"Duplicate CCCS feed language configured: '{feed.Language}'. Each language should be unique to avoid duplicate ingestion.");
            }
        }

        if (MaxEntriesPerFetch <= 0)
        {
            throw new InvalidOperationException($"{nameof(MaxEntriesPerFetch)} must be greater than zero.");
        }

        if (MaxKnownEntries <= 0)
        {
            throw new InvalidOperationException($"{nameof(MaxKnownEntries)} must be greater than zero.");
        }

        if (RequestTimeout <= TimeSpan.Zero)
        {
            throw new InvalidOperationException($"{nameof(RequestTimeout)} must be positive.");
        }

        if (RequestDelay < TimeSpan.Zero)
        {
            throw new InvalidOperationException($"{nameof(RequestDelay)} cannot be negative.");
        }

        if (FailureBackoff <= TimeSpan.Zero)
        {
            throw new InvalidOperationException($"{nameof(FailureBackoff)} must be positive.");
        }
    }
}

public sealed class CccsFeedEndpoint
{
    public CccsFeedEndpoint()
    {
    }

    public CccsFeedEndpoint(string language, Uri uri)
    {
        Language = language;
        Uri = uri;
    }

    public string Language { get; set; } = "en";

    public Uri? Uri { get; set; }

    public void Validate()
    {
        if (string.IsNullOrWhiteSpace(Language))
        {
            throw new InvalidOperationException("CCCS feed language must be specified.");
        }

        if (Uri is null || !Uri.IsAbsoluteUri)
        {
            throw new InvalidOperationException($"CCCS feed endpoint URI must be an absolute URI (language='{Language}').");
        }
    }

    public Uri BuildTaxonomyUri()
    {
        if (Uri is null)
        {
            throw new InvalidOperationException("Feed endpoint URI must be configured before building taxonomy URI.");
        }

        var language = Uri.GetQueryParameterValueOrDefault("lang", Language);
        var builder = $"https://www.cyber.gc.ca/api/cccs/taxonomy/v1/get?lang={language}&vocabulary=cccs_alert_type";
        return new Uri(builder, UriKind.Absolute);
    }
}

internal static class CccsUriExtensions
{
    public static string GetQueryParameterValueOrDefault(this Uri uri, string key, string fallback)
    {
        if (uri is null)
        {
            return fallback;
        }

        var query = uri.Query;
        if (string.IsNullOrEmpty(query))
        {
            return fallback;
        }

        var trimmed = query.StartsWith("?", StringComparison.Ordinal) ? query[1..] : query;
        foreach (var pair in trimmed.Split(new[] { '&' }, StringSplitOptions.RemoveEmptyEntries))
        {
            var separatorIndex = pair.IndexOf('=');
            if (separatorIndex < 0)
            {
                continue;
            }

            var left = pair[..separatorIndex].Trim();
            if (!left.Equals(key, StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            var right = pair[(separatorIndex + 1)..].Trim();
            if (right.Length == 0)
            {
                continue;
            }

            return Uri.UnescapeDataString(right);
        }

        return fallback;
    }
}
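Illustrative only (not part of this commit): configuring the options in code rather than through the bound configuration section, using the defaults shown above as the baseline. The override values are assumptions, not recommendations.

// Sketch: replace the default English/French endpoints with a single English feed.
var options = new CccsOptions();
options.Feeds.Clear();
options.Feeds.Add(new CccsFeedEndpoint("en",
    new Uri("https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=en&content_type=cccs_threat")));
options.RequestDelay = TimeSpan.FromSeconds(1); // assumed slower polling than the 250 ms default
options.Validate();                             // throws if a feed or limit is misconfigured
var taxonomyUri = options.Feeds[0].BuildTaxonomyUri(); // lang taken from the feed's query string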
@@ -0,0 +1,54 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;

namespace StellaOps.Feedser.Source.Cccs.Internal;

internal sealed record CccsAdvisoryDto
{
    [JsonPropertyName("sourceId")]
    public string SourceId { get; init; } = string.Empty;

    [JsonPropertyName("serialNumber")]
    public string SerialNumber { get; init; } = string.Empty;

    [JsonPropertyName("language")]
    public string Language { get; init; } = "en";

    [JsonPropertyName("title")]
    public string Title { get; init; } = string.Empty;

    [JsonPropertyName("summary")]
    public string? Summary { get; init; }

    [JsonPropertyName("canonicalUrl")]
    public string CanonicalUrl { get; init; } = string.Empty;

    [JsonPropertyName("contentHtml")]
    public string ContentHtml { get; init; } = string.Empty;

    [JsonPropertyName("published")]
    public DateTimeOffset? Published { get; init; }

    [JsonPropertyName("modified")]
    public DateTimeOffset? Modified { get; init; }

    [JsonPropertyName("alertType")]
    public string? AlertType { get; init; }

    [JsonPropertyName("subject")]
    public string? Subject { get; init; }

    [JsonPropertyName("products")]
    public IReadOnlyList<string> Products { get; init; } = Array.Empty<string>();

    [JsonPropertyName("references")]
    public IReadOnlyList<CccsReferenceDto> References { get; init; } = Array.Empty<CccsReferenceDto>();

    [JsonPropertyName("cveIds")]
    public IReadOnlyList<string> CveIds { get; init; } = Array.Empty<string>();
}

internal sealed record CccsReferenceDto(
    [property: JsonPropertyName("url")] string Url,
    [property: JsonPropertyName("label")] string? Label);
145
src/StellaOps.Feedser.Source.Cccs/Internal/CccsCursor.cs
Normal file
@@ -0,0 +1,145 @@
using System;
using System.Collections.Generic;
using System.Linq;
using MongoDB.Bson;

namespace StellaOps.Feedser.Source.Cccs.Internal;

internal sealed record CccsCursor(
    IReadOnlyCollection<Guid> PendingDocuments,
    IReadOnlyCollection<Guid> PendingMappings,
    IReadOnlyDictionary<string, string> KnownEntryHashes,
    DateTimeOffset? LastFetchAt)
{
    private static readonly IReadOnlyCollection<Guid> EmptyGuidCollection = Array.Empty<Guid>();
    private static readonly IReadOnlyDictionary<string, string> EmptyHashes = new Dictionary<string, string>(StringComparer.Ordinal);

    public static CccsCursor Empty { get; } = new(EmptyGuidCollection, EmptyGuidCollection, EmptyHashes, null);

    public CccsCursor WithPendingDocuments(IEnumerable<Guid> documents)
    {
        var distinct = (documents ?? Enumerable.Empty<Guid>()).Distinct().ToArray();
        return this with { PendingDocuments = distinct };
    }

    public CccsCursor WithPendingMappings(IEnumerable<Guid> mappings)
    {
        var distinct = (mappings ?? Enumerable.Empty<Guid>()).Distinct().ToArray();
        return this with { PendingMappings = distinct };
    }

    public CccsCursor WithKnownEntryHashes(IReadOnlyDictionary<string, string> hashes)
    {
        var map = hashes is null || hashes.Count == 0
            ? EmptyHashes
            : new Dictionary<string, string>(hashes, StringComparer.Ordinal);
        return this with { KnownEntryHashes = map };
    }

    public CccsCursor WithLastFetch(DateTimeOffset? timestamp)
        => this with { LastFetchAt = timestamp };

    public BsonDocument ToBsonDocument()
    {
        var doc = new BsonDocument
        {
            ["pendingDocuments"] = new BsonArray(PendingDocuments.Select(id => id.ToString())),
            ["pendingMappings"] = new BsonArray(PendingMappings.Select(id => id.ToString())),
        };

        if (KnownEntryHashes.Count > 0)
        {
            var hashes = new BsonArray();
            foreach (var kvp in KnownEntryHashes)
            {
                hashes.Add(new BsonDocument
                {
                    ["uri"] = kvp.Key,
                    ["hash"] = kvp.Value,
                });
            }

            doc["knownEntryHashes"] = hashes;
        }

        if (LastFetchAt.HasValue)
        {
            doc["lastFetchAt"] = LastFetchAt.Value.UtcDateTime;
        }

        return doc;
    }

    public static CccsCursor FromBson(BsonDocument? document)
    {
        if (document is null || document.ElementCount == 0)
        {
            return Empty;
        }

        var pendingDocuments = ReadGuidArray(document, "pendingDocuments");
        var pendingMappings = ReadGuidArray(document, "pendingMappings");
        var hashes = ReadHashMap(document);
        var lastFetch = document.TryGetValue("lastFetchAt", out var value)
            ? ParseDateTime(value)
            : null;

        return new CccsCursor(pendingDocuments, pendingMappings, hashes, lastFetch);
    }

    private static IReadOnlyCollection<Guid> ReadGuidArray(BsonDocument document, string field)
    {
        if (!document.TryGetValue(field, out var value) || value is not BsonArray array)
        {
            return EmptyGuidCollection;
        }

        var items = new List<Guid>(array.Count);
        foreach (var element in array)
        {
            if (Guid.TryParse(element?.ToString(), out var guid))
            {
                items.Add(guid);
            }
        }

        return items;
    }

    private static IReadOnlyDictionary<string, string> ReadHashMap(BsonDocument document)
    {
        if (!document.TryGetValue("knownEntryHashes", out var value) || value is not BsonArray array || array.Count == 0)
        {
            return EmptyHashes;
        }

        var map = new Dictionary<string, string>(array.Count, StringComparer.Ordinal);
        foreach (var element in array)
        {
            if (element is not BsonDocument entry)
            {
                continue;
            }

            if (!entry.TryGetValue("uri", out var uriValue) || uriValue.IsBsonNull || string.IsNullOrWhiteSpace(uriValue.AsString))
            {
                continue;
            }

            var hash = entry.TryGetValue("hash", out var hashValue) && !hashValue.IsBsonNull
                ? hashValue.AsString
                : string.Empty;
            map[uriValue.AsString] = hash;
        }

        return map;
    }

    private static DateTimeOffset? ParseDateTime(BsonValue value)
        => value.BsonType switch
        {
            BsonType.DateTime => DateTime.SpecifyKind(value.ToUniversalTime(), DateTimeKind.Utc),
            BsonType.String when DateTimeOffset.TryParse(value.AsString, out var parsed) => parsed.ToUniversalTime(),
            _ => null,
        };
}
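Illustrative only (for example from a test inside the connector assembly, since the record is internal): the cursor round-trips through the BSON document persisted by the source state repository. The URI and hash values below are hypothetical.

// Sketch: build a cursor, persist it to BSON, and restore it.
var cursor = CccsCursor.Empty
    .WithPendingDocuments(new[] { Guid.NewGuid() })
    .WithKnownEntryHashes(new Dictionary<string, string>
    {
        ["https://www.cyber.gc.ca/api/cccs/threats/en/12345"] = "deadbeef", // hypothetical entry
    })
    .WithLastFetch(DateTimeOffset.UtcNow);

var bson = cursor.ToBsonDocument();
var restored = CccsCursor.FromBson(bson);
// restored.PendingDocuments, KnownEntryHashes, and LastFetchAt mirror the original cursor.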
@@ -0,0 +1,58 @@
using System.Diagnostics.Metrics;

namespace StellaOps.Feedser.Source.Cccs.Internal;

public sealed class CccsDiagnostics : IDisposable
{
    private const string MeterName = "StellaOps.Feedser.Source.Cccs";
    private const string MeterVersion = "1.0.0";

    private readonly Meter _meter;
    private readonly Counter<long> _fetchAttempts;
    private readonly Counter<long> _fetchSuccess;
    private readonly Counter<long> _fetchDocuments;
    private readonly Counter<long> _fetchUnchanged;
    private readonly Counter<long> _fetchFailures;
    private readonly Counter<long> _parseSuccess;
    private readonly Counter<long> _parseFailures;
    private readonly Counter<long> _parseQuarantine;
    private readonly Counter<long> _mapSuccess;
    private readonly Counter<long> _mapFailures;

    public CccsDiagnostics()
    {
        _meter = new Meter(MeterName, MeterVersion);
        _fetchAttempts = _meter.CreateCounter<long>("cccs.fetch.attempts", unit: "operations");
        _fetchSuccess = _meter.CreateCounter<long>("cccs.fetch.success", unit: "operations");
        _fetchDocuments = _meter.CreateCounter<long>("cccs.fetch.documents", unit: "documents");
        _fetchUnchanged = _meter.CreateCounter<long>("cccs.fetch.unchanged", unit: "documents");
        _fetchFailures = _meter.CreateCounter<long>("cccs.fetch.failures", unit: "operations");
        _parseSuccess = _meter.CreateCounter<long>("cccs.parse.success", unit: "documents");
        _parseFailures = _meter.CreateCounter<long>("cccs.parse.failures", unit: "documents");
        _parseQuarantine = _meter.CreateCounter<long>("cccs.parse.quarantine", unit: "documents");
        _mapSuccess = _meter.CreateCounter<long>("cccs.map.success", unit: "advisories");
        _mapFailures = _meter.CreateCounter<long>("cccs.map.failures", unit: "advisories");
    }

    public void FetchAttempt() => _fetchAttempts.Add(1);

    public void FetchSuccess() => _fetchSuccess.Add(1);

    public void FetchDocument() => _fetchDocuments.Add(1);

    public void FetchUnchanged() => _fetchUnchanged.Add(1);

    public void FetchFailure() => _fetchFailures.Add(1);

    public void ParseSuccess() => _parseSuccess.Add(1);

    public void ParseFailure() => _parseFailures.Add(1);

    public void ParseQuarantine() => _parseQuarantine.Add(1);

    public void MapSuccess() => _mapSuccess.Add(1);

    public void MapFailure() => _mapFailures.Add(1);

    public void Dispose() => _meter.Dispose();
}
Some files were not shown because too many files have changed in this diff.